1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25    Error messages and the low-level interface to malloc are also handled here.  */
26
27 #include "config.h"
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include <signal.h>
34
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
37 #endif
38
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
41 #endif
42
43 #include "line-map.h"
44 #include "input.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "tm_p.h"
48 #include "flags.h"
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "except.h"
56 #include "function.h"
57 #include "toplev.h"
58 #include "expr.h"
59 #include "basic-block.h"
60 #include "intl.h"
61 #include "ggc.h"
62 #include "graph.h"
63 #include "loop.h"
64 #include "regs.h"
65 #include "timevar.h"
66 #include "diagnostic.h"
67 #include "params.h"
68 #include "reload.h"
69 #include "dwarf2asm.h"
70 #include "integrate.h"
71 #include "real.h"
72 #include "debug.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "cfglayout.h"
76 #include "cfgloop.h"
77 #include "hosthooks.h"
78 #include "cgraph.h"
79 #include "opts.h"
80 #include "coverage.h"
81 #include "value-prof.h"
82 #include "alloc-pool.h"
83 #include "tree-pass.h"
84
85 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86 #include "dwarf2out.h"
87 #endif
88
89 #if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO)
90 #include "dbxout.h"
91 #endif
92
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
96
97 #ifdef XCOFF_DEBUGGING_INFO
98 #include "xcoffout.h" /* Needed for external data
99 declarations for e.g. AIX 4.x. */
100 #endif
101
102 #ifndef HAVE_conditional_execution
103 #define HAVE_conditional_execution 0
104 #endif
105
106 /* Format used to print the dump file index value.  */
107 #ifndef DUMPFILE_FORMAT
108 #define DUMPFILE_FORMAT ".%02d."
109 #endif
110
111 /* Describes a dump file. */
112
113 struct dump_file_info
114 {
115 /* The unique extension to apply, e.g. ".jump". */
116 const char *const extension;
117
118 /* The -d<c> character that enables this dump file. */
119 char const debug_switch;
120
121 /* True if there is a corresponding graph dump file. */
122 char const graph_dump_p;
123
124 /* True if the user selected this dump. */
125 char enabled;
126
127 /* True if the files have been initialized (i.e. truncated).  */
128 char initialized;
129 };
130
131 /* Enumerate the extant dump files. */
132
133 enum dump_file_index
134 {
135 DFI_cgraph,
136 DFI_rtl,
137 DFI_sibling,
138 DFI_eh,
139 DFI_jump,
140 DFI_null,
141 DFI_cse,
142 DFI_addressof,
143 DFI_gcse,
144 DFI_loop,
145 DFI_bypass,
146 DFI_cfg,
147 DFI_bp,
148 DFI_vpt,
149 DFI_ce1,
150 DFI_tracer,
151 DFI_loop2,
152 DFI_web,
153 DFI_cse2,
154 DFI_life,
155 DFI_combine,
156 DFI_ce2,
157 DFI_regmove,
158 DFI_sms,
159 DFI_sched,
160 DFI_lreg,
161 DFI_greg,
162 DFI_postreload,
163 DFI_gcse2,
164 DFI_flow2,
165 DFI_peephole2,
166 DFI_ce3,
167 DFI_rnreg,
168 DFI_bbro,
169 DFI_branch_target_load,
170 DFI_sched2,
171 DFI_stack,
172 DFI_vartrack,
173 DFI_mach,
174 DFI_dbr,
175 DFI_MAX
176 };
177
178 /* Describes all the dump files.  Should be kept in the order of the
179 passes and in sync with dump_file_index above.
180
181 Remaining -d letters:
182
183 " e q "
184 " K O Q WXY "
185 */
186
187 static struct dump_file_info dump_file_tbl[DFI_MAX] =
188 {
189 { "cgraph", 'U', 0, 0, 0 },
190 { "rtl", 'r', 0, 0, 0 },
191 { "sibling", 'i', 0, 0, 0 },
192 { "eh", 'h', 0, 0, 0 },
193 { "jump", 'j', 0, 0, 0 },
194 { "null", 'u', 0, 0, 0 },
195 { "cse", 's', 0, 0, 0 },
196 { "addressof", 'F', 0, 0, 0 },
197 { "gcse", 'G', 1, 0, 0 },
198 { "loop", 'L', 1, 0, 0 },
199 { "bypass", 'G', 1, 0, 0 }, /* Yes, duplicate enable switch. */
200 { "cfg", 'f', 1, 0, 0 },
201 { "bp", 'b', 1, 0, 0 },
202 { "vpt", 'V', 1, 0, 0 },
203 { "ce1", 'C', 1, 0, 0 },
204 { "tracer", 'T', 1, 0, 0 },
205 { "loop2", 'L', 1, 0, 0 },
206 { "web", 'Z', 0, 0, 0 },
207 { "cse2", 't', 1, 0, 0 },
208 { "life", 'f', 1, 0, 0 }, /* Yes, duplicate enable switch. */
209 { "combine", 'c', 1, 0, 0 },
210 { "ce2", 'C', 1, 0, 0 },
211 { "regmove", 'N', 1, 0, 0 },
212 { "sms", 'm', 0, 0, 0 },
213 { "sched", 'S', 1, 0, 0 },
214 { "lreg", 'l', 1, 0, 0 },
215 { "greg", 'g', 1, 0, 0 },
216 { "postreload", 'o', 1, 0, 0 },
217 { "gcse2", 'J', 0, 0, 0 },
218 { "flow2", 'w', 1, 0, 0 },
219 { "peephole2", 'z', 1, 0, 0 },
220 { "ce3", 'E', 1, 0, 0 },
221 { "rnreg", 'n', 1, 0, 0 },
222 { "bbro", 'B', 1, 0, 0 },
223 { "btl", 'd', 1, 0, 0 }, /* Yes, duplicate enable switch. */
224 { "sched2", 'R', 1, 0, 0 },
225 { "stack", 'k', 1, 0, 0 },
226 { "vartrack", 'V', 1, 0, 0 }, /* Yes, duplicate enable switch. */
227 { "mach", 'M', 1, 0, 0 },
228 { "dbr", 'd', 0, 0, 0 },
229 };
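/* For illustration only (the base name "foo.c" below is hypothetical):
   with DUMPFILE_FORMAT ".%02d." and the table above, enabling the "jump"
   dump (index DFI_jump == 4) makes open_dump_file below build its file
   name roughly like this:

     sprintf (seq, DUMPFILE_FORMAT, DFI_jump);      -> ".04."
     concat (dump_base_name, seq, "jump", NULL);    -> "foo.c.04.jump"

   Note that some entries deliberately share a -d letter, e.g. 'G'
   enables both the "gcse" and "bypass" dumps.  */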
230
231 /* Routine to open a dump file. Return true if the dump file is enabled. */
232
233 static int
234 open_dump_file (enum dump_file_index index, tree decl)
235 {
236 char *dump_name;
237 const char *open_arg;
238 char seq[16];
239
240 if (! dump_file_tbl[index].enabled)
241 return 0;
242
243 timevar_push (TV_DUMP);
244 if (dump_file != NULL)
245 fclose (dump_file);
246
247 sprintf (seq, DUMPFILE_FORMAT, index);
248
249 if (! dump_file_tbl[index].initialized)
250 {
251 /* If we've not initialized the files, do so now. */
252 if (graph_dump_format != no_graph
253 && dump_file_tbl[index].graph_dump_p)
254 {
255 dump_name = concat (seq, dump_file_tbl[index].extension, NULL);
256 clean_graph_dump_file (dump_base_name, dump_name);
257 free (dump_name);
258 }
259 dump_file_tbl[index].initialized = 1;
260 open_arg = "w";
261 }
262 else
263 open_arg = "a";
264
265 dump_name = concat (dump_base_name, seq,
266 dump_file_tbl[index].extension, NULL);
267
268 dump_file = fopen (dump_name, open_arg);
269 if (dump_file == NULL)
270 fatal_error ("can't open %s: %m", dump_name);
271
272 free (dump_name);
273
274 if (decl)
275 fprintf (dump_file, "\n;; Function %s%s\n\n",
276 lang_hooks.decl_printable_name (decl, 2),
277 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
278 ? " (hot)"
279 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
280 ? " (unlikely executed)"
281 : "");
282
283 timevar_pop (TV_DUMP);
284 return 1;
285 }
286
287 /* Routine to close a dump file. */
288
289 static void
290 close_dump_file (enum dump_file_index index,
291 void (*func) (FILE *, rtx),
292 rtx insns)
293 {
294 if (! dump_file)
295 return;
296
297 timevar_push (TV_DUMP);
298 if (insns
299 && graph_dump_format != no_graph
300 && dump_file_tbl[index].graph_dump_p)
301 {
302 char seq[16];
303 char *suffix;
304
305 sprintf (seq, DUMPFILE_FORMAT, index);
306 suffix = concat (seq, dump_file_tbl[index].extension, NULL);
307 print_rtl_graph_with_bb (dump_base_name, suffix, insns);
308 free (suffix);
309 }
310
311 if (func && insns)
312 func (dump_file, insns);
313
314 fflush (dump_file);
315 fclose (dump_file);
316
317 dump_file = NULL;
318 timevar_pop (TV_DUMP);
319 }
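/* A minimal sketch of how the rest_of_handle_* routines below use these
   two helpers (DFI_example is a placeholder, not a real table entry):

     open_dump_file (DFI_example, current_function_decl);
     ... run the pass, writing any details to dump_file ...
     close_dump_file (DFI_example, print_rtl_with_bb, get_insns ());

   open_dump_file does nothing when the dump is not enabled, and
   close_dump_file returns immediately when no dump file is open.  */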
320
321 /* This is called from various places for FUNCTION_DECL, VAR_DECL,
322 and TYPE_DECL nodes.
323
324 This does nothing for local (non-static) variables, unless the
325 variable is a register variable with an ASMSPEC. In that case, or
326 if the variable is not an automatic, it sets up the RTL and
327 outputs any assembler code (label definition, storage allocation
328 and initialization).
329
330 DECL is the declaration.  If ASMSPEC is nonnull, it specifies
331 the assembler symbol name to be used.  TOP_LEVEL is nonzero
332 if this declaration is not within a function.  */
333
334 void
335 rest_of_decl_compilation (tree decl,
336 const char *asmspec,
337 int top_level,
338 int at_end)
339 {
340 /* We deferred calling assemble_alias so that we could collect
341 other attributes such as visibility. Emit the alias now. */
342 {
343 tree alias;
344 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
345 if (alias)
346 {
347 alias = TREE_VALUE (TREE_VALUE (alias));
348 alias = get_identifier (TREE_STRING_POINTER (alias));
349 assemble_alias (decl, alias);
350 }
351 }
352
353 /* Forward declarations for nested functions are not "external",
354 but we need to treat them as if they were. */
355 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
356 || TREE_CODE (decl) == FUNCTION_DECL)
357 {
358 timevar_push (TV_VARCONST);
359
360 if (asmspec)
361 make_decl_rtl (decl, asmspec);
362
363 /* Don't output anything when a tentative file-scope definition
364 is seen.  But at the end of compilation, do output code for such definitions.
365
366 We do output all variables when unit-at-a-time is active and rely on the
367 callgraph code to defer them, except for forward declarations
368 (see gcc.c-torture/compile/920624-1.c).  */
369 if ((at_end
370 || !DECL_DEFER_OUTPUT (decl)
371 || (flag_unit_at_a_time && DECL_INITIAL (decl)))
372 && !DECL_EXTERNAL (decl))
373 {
374 if (flag_unit_at_a_time && !cgraph_global_info_ready
375 && TREE_CODE (decl) != FUNCTION_DECL && top_level)
376 cgraph_varpool_finalize_decl (decl);
377 else
378 assemble_variable (decl, top_level, at_end, 0);
379 }
380
381 #ifdef ASM_FINISH_DECLARE_OBJECT
382 if (decl == last_assemble_variable_decl)
383 {
384 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
385 top_level, at_end);
386 }
387 #endif
388
389 timevar_pop (TV_VARCONST);
390 }
391 else if (DECL_REGISTER (decl) && asmspec != 0)
392 {
393 if (decode_reg_name (asmspec) >= 0)
394 {
395 SET_DECL_RTL (decl, NULL_RTX);
396 make_decl_rtl (decl, asmspec);
397 }
398 else
399 {
400 error ("%Hinvalid register name `%s' for register variable",
401 &DECL_SOURCE_LOCATION (decl), asmspec);
402 DECL_REGISTER (decl) = 0;
403 if (!top_level)
404 expand_decl (decl);
405 }
406 }
407 else if (TREE_CODE (decl) == TYPE_DECL)
408 {
409 timevar_push (TV_SYMOUT);
410 debug_hooks->type_decl (decl, !top_level);
411 timevar_pop (TV_SYMOUT);
412 }
413 }
414
415 /* Called after finishing a record, union or enumeral type. */
416
417 void
418 rest_of_type_compilation (tree type, int toplev)
419 {
420 /* Avoid confusing the debug information machinery when there are
421 errors. */
422 if (errorcount != 0 || sorrycount != 0)
423 return;
424
425 timevar_push (TV_SYMOUT);
426 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
427 timevar_pop (TV_SYMOUT);
428 }
429
430 /* Turn the RTL into assembly. */
431 static void
432 rest_of_handle_final (void)
433 {
434 timevar_push (TV_FINAL);
435 {
436 rtx x;
437 const char *fnname;
438
439 /* Get the function's name, as described by its RTL. This may be
440 different from the DECL_NAME name used in the source file. */
441
442 x = DECL_RTL (current_function_decl);
443 if (!MEM_P (x))
444 abort ();
445 x = XEXP (x, 0);
446 if (GET_CODE (x) != SYMBOL_REF)
447 abort ();
448 fnname = XSTR (x, 0);
449
450 assemble_start_function (current_function_decl, fnname);
451 final_start_function (get_insns (), asm_out_file, optimize);
452 final (get_insns (), asm_out_file, optimize, 0);
453 final_end_function ();
454
455 #ifdef IA64_UNWIND_INFO
456 /* ??? The IA-64 ".handlerdata" directive must be issued before
457 the ".endp" directive that closes the procedure descriptor. */
458 output_function_exception_table ();
459 #endif
460
461 assemble_end_function (current_function_decl, fnname);
462
463 #ifndef IA64_UNWIND_INFO
464 /* Otherwise, it feels unclean to switch sections in the middle. */
465 output_function_exception_table ();
466 #endif
467
468 if (! quiet_flag)
469 fflush (asm_out_file);
470
471 /* Release all memory allocated by flow. */
472 free_basic_block_vars ();
473
474 /* Release all memory held by regsets now. */
475 regset_release_memory ();
476 }
477 timevar_pop (TV_FINAL);
478
479 ggc_collect ();
480 }
481
482 #ifdef DELAY_SLOTS
483 /* Run delay slot optimization. */
484 static void
485 rest_of_handle_delay_slots (void)
486 {
487 timevar_push (TV_DBR_SCHED);
488 open_dump_file (DFI_dbr, current_function_decl);
489
490 dbr_schedule (get_insns (), dump_file);
491
492 close_dump_file (DFI_dbr, print_rtl, get_insns ());
493 timevar_pop (TV_DBR_SCHED);
494
495 ggc_collect ();
496 }
497 #endif
498
499 #ifdef STACK_REGS
500 /* Convert register usage from flat register file usage to a stack
501 register file. */
502 static void
503 rest_of_handle_stack_regs (void)
504 {
505 #if defined (HAVE_ATTR_length)
506 /* If flow2 creates new instructions which need splitting,
507 and scheduling after reload is not done, they might not be
508 split until final, which does not allow splitting
509 if HAVE_ATTR_length is defined.  */
510 #ifdef INSN_SCHEDULING
511 if (optimize && !flag_schedule_insns_after_reload)
512 #else
513 if (optimize)
514 #endif
515 {
516 timevar_push (TV_SHORTEN_BRANCH);
517 split_all_insns (1);
518 timevar_pop (TV_SHORTEN_BRANCH);
519 }
520 #endif
521
522 timevar_push (TV_REG_STACK);
523 open_dump_file (DFI_stack, current_function_decl);
524
525 if (reg_to_stack (dump_file) && optimize)
526 {
527 if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
528 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
529 && (flag_reorder_blocks || flag_reorder_blocks_and_partition))
530 {
531 reorder_basic_blocks ();
532 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
533 }
534 }
535
536 close_dump_file (DFI_stack, print_rtl_with_bb, get_insns ());
537 timevar_pop (TV_REG_STACK);
538
539 ggc_collect ();
540 }
541 #endif
542
543 /* Track the variables, i.e. compute where each variable is stored at each position in the function.  */
544 static void
545 rest_of_handle_variable_tracking (void)
546 {
547 timevar_push (TV_VAR_TRACKING);
548 open_dump_file (DFI_vartrack, current_function_decl);
549
550 variable_tracking_main ();
551
552 close_dump_file (DFI_vartrack, print_rtl_with_bb, get_insns ());
553 timevar_pop (TV_VAR_TRACKING);
554 }
555
556 /* Machine independent reorg pass. */
557 static void
558 rest_of_handle_machine_reorg (void)
559 {
560 timevar_push (TV_MACH_DEP);
561 open_dump_file (DFI_mach, current_function_decl);
562
563 targetm.machine_dependent_reorg ();
564
565 close_dump_file (DFI_mach, print_rtl, get_insns ());
566 timevar_pop (TV_MACH_DEP);
567
568 ggc_collect ();
569 }
570
571
572 /* Run new register allocator. Return TRUE if we must exit
573 rest_of_compilation upon return. */
574 static bool
575 rest_of_handle_new_regalloc (void)
576 {
577 int failure;
578
579 delete_trivially_dead_insns (get_insns (), max_reg_num ());
580 reg_alloc ();
581
582 timevar_pop (TV_LOCAL_ALLOC);
583 if (dump_file_tbl[DFI_lreg].enabled)
584 {
585 timevar_push (TV_DUMP);
586
587 close_dump_file (DFI_lreg, NULL, NULL);
588 timevar_pop (TV_DUMP);
589 }
590
591 /* XXX clean up the whole mess to bring live info in shape again. */
592 timevar_push (TV_GLOBAL_ALLOC);
593 open_dump_file (DFI_greg, current_function_decl);
594
595 build_insn_chain (get_insns ());
596 failure = reload (get_insns (), 0);
597
598 timevar_pop (TV_GLOBAL_ALLOC);
599
600 if (dump_file_tbl[DFI_greg].enabled)
601 {
602 timevar_push (TV_DUMP);
603
604 dump_global_regs (dump_file);
605
606 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
607 timevar_pop (TV_DUMP);
608 }
609
610 if (failure)
611 return true;
612
613 reload_completed = 1;
614
615 return false;
616 }
617
618 /* Run old register allocator. Return TRUE if we must exit
619 rest_of_compilation upon return. */
620 static bool
621 rest_of_handle_old_regalloc (void)
622 {
623 int failure;
624 int rebuild_notes;
625
626 /* Allocate the reg_renumber array. */
627 allocate_reg_info (max_regno, FALSE, TRUE);
628
629 /* And the reg_equiv_memory_loc array. */
630 VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
631 reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);
632
633 allocate_initial_values (reg_equiv_memory_loc);
634
635 regclass (get_insns (), max_reg_num (), dump_file);
636 rebuild_notes = local_alloc ();
637
638 timevar_pop (TV_LOCAL_ALLOC);
639
640 /* Local allocation may have turned an indirect jump into a direct
641 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
642 instructions. */
643 if (rebuild_notes)
644 {
645 timevar_push (TV_JUMP);
646
647 rebuild_jump_labels (get_insns ());
648 purge_all_dead_edges (0);
649
650 timevar_pop (TV_JUMP);
651 }
652
653 if (dump_file_tbl[DFI_lreg].enabled)
654 {
655 timevar_push (TV_DUMP);
656
657 dump_flow_info (dump_file);
658 dump_local_alloc (dump_file);
659
660 close_dump_file (DFI_lreg, print_rtl_with_bb, get_insns ());
661 timevar_pop (TV_DUMP);
662 }
663
664 ggc_collect ();
665
666 timevar_push (TV_GLOBAL_ALLOC);
667 open_dump_file (DFI_greg, current_function_decl);
668
669 /* If optimizing, allocate remaining pseudo-regs. Do the reload
670 pass fixing up any insns that are invalid. */
671
672 if (optimize)
673 failure = global_alloc (dump_file);
674 else
675 {
676 build_insn_chain (get_insns ());
677 failure = reload (get_insns (), 0);
678 }
679
680 timevar_pop (TV_GLOBAL_ALLOC);
681
682 if (dump_file_tbl[DFI_greg].enabled)
683 {
684 timevar_push (TV_DUMP);
685
686 dump_global_regs (dump_file);
687
688 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
689 timevar_pop (TV_DUMP);
690 }
691
692 return failure;
693 }
694
695 /* Run the regrename and cprop passes. */
696 static void
697 rest_of_handle_regrename (void)
698 {
699 timevar_push (TV_RENAME_REGISTERS);
700 open_dump_file (DFI_rnreg, current_function_decl);
701
702 if (flag_rename_registers)
703 regrename_optimize ();
704 if (flag_cprop_registers)
705 copyprop_hardreg_forward ();
706
707 close_dump_file (DFI_rnreg, print_rtl_with_bb, get_insns ());
708 timevar_pop (TV_RENAME_REGISTERS);
709 }
710
711 /* Reorder basic blocks. */
712 static void
713 rest_of_handle_reorder_blocks (void)
714 {
715 bool changed;
716 open_dump_file (DFI_bbro, current_function_decl);
717
718 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
719 splitting possibly introduced more crossjumping opportunities. */
720 changed = cleanup_cfg (CLEANUP_EXPENSIVE
721 | (!HAVE_conditional_execution
722 ? CLEANUP_UPDATE_LIFE : 0));
723
724 if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
725 tracer ();
726 if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
727 reorder_basic_blocks ();
728 if (flag_reorder_blocks || flag_reorder_blocks_and_partition
729 || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
730 changed |= cleanup_cfg (CLEANUP_EXPENSIVE
731 | (!HAVE_conditional_execution
732 ? CLEANUP_UPDATE_LIFE : 0));
733
734 /* On conditional execution targets we cannot update the life info cheaply,
735 so we defer the updating until after both cleanups.  This may lose some
736 cases but should not be terribly bad.  */
737 if (changed && HAVE_conditional_execution)
738 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
739 PROP_DEATH_NOTES);
740 close_dump_file (DFI_bbro, print_rtl_with_bb, get_insns ());
741 }
742
743 #ifdef INSN_SCHEDULING
744 /* Run instruction scheduler. */
745 static void
746 rest_of_handle_sched (void)
747 {
748 timevar_push (TV_SMS);
749 if (optimize > 0 && flag_modulo_sched)
750 {
751
752 /* Perform swing modulo scheduling (SMS).  */
753 open_dump_file (DFI_sms, current_function_decl);
754
755 /* We want to be able to create new pseudos. */
756 no_new_pseudos = 0;
757 sms_schedule (dump_file);
758 close_dump_file (DFI_sms, print_rtl, get_insns ());
759
760
761 /* Update the life information, because we add pseudos. */
762 max_regno = max_reg_num ();
763 allocate_reg_info (max_regno, FALSE, FALSE);
764 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
765 (PROP_DEATH_NOTES
766 | PROP_KILL_DEAD_CODE
767 | PROP_SCAN_DEAD_CODE));
768 no_new_pseudos = 1;
769 }
770 timevar_pop (TV_SMS);
771 timevar_push (TV_SCHED);
772
773 /* Print function header into sched dump now
774 because doing the sched analysis makes some of the dump. */
775 if (optimize > 0 && flag_schedule_insns)
776 {
777 open_dump_file (DFI_sched, current_function_decl);
778
779 /* Do control and data sched analysis,
780 and write some of the results to dump file. */
781
782 schedule_insns (dump_file);
783
784 close_dump_file (DFI_sched, print_rtl_with_bb, get_insns ());
785 }
786 timevar_pop (TV_SCHED);
787
788 ggc_collect ();
789 }
790
791 /* Run second scheduling pass after reload. */
792 static void
793 rest_of_handle_sched2 (void)
794 {
795 timevar_push (TV_SCHED2);
796 open_dump_file (DFI_sched2, current_function_decl);
797
798 /* Do control and data sched analysis again,
799 and write some more of the results to dump file. */
800
801 split_all_insns (1);
802
803 if (flag_sched2_use_superblocks || flag_sched2_use_traces)
804 {
805 schedule_ebbs (dump_file);
806 /* No liveness updating code yet, but it should be easy to do.
807 reg-stack recomputes the liveness when needed for now. */
808 count_or_remove_death_notes (NULL, 1);
809 cleanup_cfg (CLEANUP_EXPENSIVE);
810 }
811 else
812 schedule_insns (dump_file);
813
814 close_dump_file (DFI_sched2, print_rtl_with_bb, get_insns ());
815 timevar_pop (TV_SCHED2);
816
817 ggc_collect ();
818 }
819 #endif
820
821 static void
822 rest_of_handle_gcse2 (void)
823 {
824 open_dump_file (DFI_gcse2, current_function_decl);
825
826 gcse_after_reload_main (get_insns (), dump_file);
827 rebuild_jump_labels (get_insns ());
828 delete_trivially_dead_insns (get_insns (), max_reg_num ());
829 close_dump_file (DFI_gcse2, print_rtl_with_bb, get_insns ());
830
831 ggc_collect ();
832
833 #ifdef ENABLE_CHECKING
834 verify_flow_info ();
835 #endif
836 }
837
838 /* Register allocation pre-pass, to reduce number of moves necessary
839 for two-address machines. */
840 static void
841 rest_of_handle_regmove (void)
842 {
843 timevar_push (TV_REGMOVE);
844 open_dump_file (DFI_regmove, current_function_decl);
845
846 regmove_optimize (get_insns (), max_reg_num (), dump_file);
847
848 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
849 close_dump_file (DFI_regmove, print_rtl_with_bb, get_insns ());
850 timevar_pop (TV_REGMOVE);
851
852 ggc_collect ();
853 }
854
855 /* Run tracer. */
856 static void
857 rest_of_handle_tracer (void)
858 {
859 open_dump_file (DFI_tracer, current_function_decl);
860 if (dump_file)
861 dump_flow_info (dump_file);
862 tracer ();
863 cleanup_cfg (CLEANUP_EXPENSIVE);
864 reg_scan (get_insns (), max_reg_num (), 0);
865 close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
866 }
867
868 /* If-conversion and CFG cleanup. */
869 static void
870 rest_of_handle_if_conversion (void)
871 {
872 open_dump_file (DFI_ce1, current_function_decl);
873 if (flag_if_conversion)
874 {
875 timevar_push (TV_IFCVT);
876 if (dump_file)
877 dump_flow_info (dump_file);
878 cleanup_cfg (CLEANUP_EXPENSIVE);
879 reg_scan (get_insns (), max_reg_num (), 0);
880 if_convert (0);
881 timevar_pop (TV_IFCVT);
882 }
883 timevar_push (TV_JUMP);
884 cleanup_cfg (CLEANUP_EXPENSIVE);
885 reg_scan (get_insns (), max_reg_num (), 0);
886 timevar_pop (TV_JUMP);
887 close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
888 }
889
890 /* Rerun if-conversion, as combine may have simplified things enough
891 to now meet sequence length restrictions. */
892 static void
893 rest_of_handle_if_after_combine (void)
894 {
895 timevar_push (TV_IFCVT);
896 open_dump_file (DFI_ce2, current_function_decl);
897
898 no_new_pseudos = 0;
899 if_convert (1);
900 no_new_pseudos = 1;
901
902 close_dump_file (DFI_ce2, print_rtl_with_bb, get_insns ());
903 timevar_pop (TV_IFCVT);
904 }
905
906 static void
907 rest_of_handle_web (void)
908 {
909 open_dump_file (DFI_web, current_function_decl);
910 timevar_push (TV_WEB);
911 web_main ();
912 delete_trivially_dead_insns (get_insns (), max_reg_num ());
913 cleanup_cfg (CLEANUP_EXPENSIVE);
914
915 timevar_pop (TV_WEB);
916 close_dump_file (DFI_web, print_rtl_with_bb, get_insns ());
917 reg_scan (get_insns (), max_reg_num (), 0);
918 }
919
920 /* Do branch profiling and static profile estimation passes. */
921 static void
922 rest_of_handle_branch_prob (void)
923 {
924 struct loops loops;
925 timevar_push (TV_BRANCH_PROB);
926 open_dump_file (DFI_bp, current_function_decl);
927
928 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
929 branch_prob ();
930
931 /* Discover and record the loop depth at the head of each basic
932 block. The loop infrastructure does the real job for us. */
933 flow_loops_find (&loops, LOOP_TREE);
934
935 if (dump_file)
936 flow_loops_dump (&loops, dump_file, NULL, 0);
937
938 /* Estimate using heuristics if no profiling info is available. */
939 if (flag_guess_branch_prob)
940 estimate_probability (&loops);
941
942 flow_loops_free (&loops);
943 free_dominance_info (CDI_DOMINATORS);
944 close_dump_file (DFI_bp, print_rtl_with_bb, get_insns ());
945 timevar_pop (TV_BRANCH_PROB);
946 }
947
948 /* Do optimizations based on expression value profiles. */
949 static void
950 rest_of_handle_value_profile_transformations (void)
951 {
952 open_dump_file (DFI_vpt, current_function_decl);
953 timevar_push (TV_VPT);
954
955 if (value_profile_transformations ())
956 cleanup_cfg (CLEANUP_EXPENSIVE);
957
958 timevar_pop (TV_VPT);
959 close_dump_file (DFI_vpt, print_rtl_with_bb, get_insns ());
960 }
961
962 /* Do control and data flow analysis; write some of the results to the
963 dump file. */
964 static void
965 rest_of_handle_cfg (void)
966 {
967 open_dump_file (DFI_cfg, current_function_decl);
968 if (dump_file)
969 dump_flow_info (dump_file);
970 if (optimize)
971 cleanup_cfg (CLEANUP_EXPENSIVE
972 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
973
974 /* It might make more sense to mark constant functions after dead code is
975 eliminated by life_analysis, but we need to do it early: -fprofile-arcs
976 may insert code that makes the function non-constant, yet we must still
977 consider it constant, otherwise -fbranch-probabilities will not read the data back.
978
979 life_analysis rarely eliminates modification of external memory.
980 */
981 if (optimize)
982 {
983 /* Alias analysis depends on this information and mark_constant_function
984 depends on alias analysis. */
985 reg_scan (get_insns (), max_reg_num (), 1);
986 mark_constant_function ();
987 }
988
989 close_dump_file (DFI_cfg, print_rtl_with_bb, get_insns ());
990 }
991
992 /* Purge addressofs. */
993 static void
994 rest_of_handle_addressof (void)
995 {
996 open_dump_file (DFI_addressof, current_function_decl);
997
998 purge_addressof (get_insns ());
999 if (optimize && purge_all_dead_edges (0))
1000 delete_unreachable_blocks ();
1001 reg_scan (get_insns (), max_reg_num (), 1);
1002
1003 close_dump_file (DFI_addressof, print_rtl, get_insns ());
1004 }
1005
1006 /* Perform jump bypassing and control flow optimizations. */
1007 static void
1008 rest_of_handle_jump_bypass (void)
1009 {
1010 timevar_push (TV_BYPASS);
1011 open_dump_file (DFI_bypass, current_function_decl);
1012
1013 cleanup_cfg (CLEANUP_EXPENSIVE);
1014 reg_scan (get_insns (), max_reg_num (), 1);
1015
1016 if (bypass_jumps (dump_file))
1017 {
1018 rebuild_jump_labels (get_insns ());
1019 cleanup_cfg (CLEANUP_EXPENSIVE);
1020 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1021 }
1022
1023 close_dump_file (DFI_bypass, print_rtl_with_bb, get_insns ());
1024 timevar_pop (TV_BYPASS);
1025
1026 ggc_collect ();
1027
1028 #ifdef ENABLE_CHECKING
1029 verify_flow_info ();
1030 #endif
1031 }
1032
1033 /* Try combining insns through substitution. */
1034 static void
1035 rest_of_handle_combine (void)
1036 {
1037 int rebuild_jump_labels_after_combine = 0;
1038
1039 timevar_push (TV_COMBINE);
1040 open_dump_file (DFI_combine, current_function_decl);
1041
1042 rebuild_jump_labels_after_combine
1043 = combine_instructions (get_insns (), max_reg_num ());
1044
1045 /* Combining insns may have turned an indirect jump into a
1046 direct jump.  If so, rebuild the JUMP_LABEL fields of jumping
1047 instructions.  */
1048 if (rebuild_jump_labels_after_combine)
1049 {
1050 timevar_push (TV_JUMP);
1051 rebuild_jump_labels (get_insns ());
1052 timevar_pop (TV_JUMP);
1053
1054 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
1055 }
1056
1057 close_dump_file (DFI_combine, print_rtl_with_bb, get_insns ());
1058 timevar_pop (TV_COMBINE);
1059
1060 ggc_collect ();
1061 }
1062
1063 /* Perform life analysis. */
1064 static void
1065 rest_of_handle_life (void)
1066 {
1067 open_dump_file (DFI_life, current_function_decl);
1068 regclass_init ();
1069
1070 #ifdef ENABLE_CHECKING
1071 verify_flow_info ();
1072 #endif
1073 life_analysis (dump_file, PROP_FINAL);
1074 if (optimize)
1075 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
1076 | CLEANUP_LOG_LINKS
1077 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1078 timevar_pop (TV_FLOW);
1079
1080 if (extra_warnings)
1081 {
1082 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
1083 setjmp_args_warning ();
1084 }
1085
1086 if (optimize)
1087 {
1088 if (!flag_new_regalloc && initialize_uninitialized_subregs ())
1089 {
1090 /* Insns were inserted, and possibly pseudos created, so
1091 things might look a bit different. */
1092 allocate_reg_life_data ();
1093 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1094 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
1095 }
1096 }
1097
1098 no_new_pseudos = 1;
1099
1100 close_dump_file (DFI_life, print_rtl_with_bb, get_insns ());
1101
1102 ggc_collect ();
1103 }
1104
1105 /* Perform common subexpression elimination. Nonzero value from
1106 `cse_main' means that jumps were simplified and some code may now
1107 be unreachable, so do jump optimization again. */
1108 static void
1109 rest_of_handle_cse (void)
1110 {
1111 int tem;
1112 open_dump_file (DFI_cse, current_function_decl);
1113 if (dump_file)
1114 dump_flow_info (dump_file);
1115 timevar_push (TV_CSE);
1116
1117 reg_scan (get_insns (), max_reg_num (), 1);
1118
1119 tem = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1120 if (tem)
1121 rebuild_jump_labels (get_insns ());
1122 if (purge_all_dead_edges (0))
1123 delete_unreachable_blocks ();
1124
1125 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1126
1127 /* If we are not running more CSE passes, then we are no longer
1128 expecting CSE to be run. But always rerun it in a cheap mode. */
1129 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1130
1131 if (tem || optimize > 1)
1132 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1133
1134 timevar_pop (TV_CSE);
1135 close_dump_file (DFI_cse, print_rtl_with_bb, get_insns ());
1136 }
1137
1138 /* Run second CSE pass after loop optimizations. */
1139 static void
1140 rest_of_handle_cse2 (void)
1141 {
1142 int tem;
1143 timevar_push (TV_CSE2);
1144 open_dump_file (DFI_cse2, current_function_decl);
1145 if (dump_file)
1146 dump_flow_info (dump_file);
1147 /* CFG is no longer maintained up-to-date. */
1148 tem = cse_main (get_insns (), max_reg_num (), 1, dump_file);
1149
1150 /* Run a pass to eliminate duplicated assignments to condition code
1151 registers. We have to run this after bypass_jumps, because it
1152 makes it harder for that pass to determine whether a jump can be
1153 bypassed safely. */
1154 cse_condition_code_reg ();
1155
1156 purge_all_dead_edges (0);
1157 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1158
1159 if (tem)
1160 {
1161 timevar_push (TV_JUMP);
1162 rebuild_jump_labels (get_insns ());
1163 cleanup_cfg (CLEANUP_EXPENSIVE);
1164 timevar_pop (TV_JUMP);
1165 }
1166 reg_scan (get_insns (), max_reg_num (), 0);
1167 close_dump_file (DFI_cse2, print_rtl_with_bb, get_insns ());
1168 ggc_collect ();
1169 timevar_pop (TV_CSE2);
1170 }
1171
1172 /* Perform global cse. */
1173 static void
1174 rest_of_handle_gcse (void)
1175 {
1176 int save_csb, save_cfj;
1177 int tem2 = 0, tem;
1178 timevar_push (TV_GCSE);
1179 open_dump_file (DFI_gcse, current_function_decl);
1180
1181 tem = gcse_main (get_insns (), dump_file);
1182 rebuild_jump_labels (get_insns ());
1183 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1184
1185 save_csb = flag_cse_skip_blocks;
1186 save_cfj = flag_cse_follow_jumps;
1187 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1188
1189 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1190 by gcse. */
1191 if (flag_expensive_optimizations)
1192 {
1193 timevar_push (TV_CSE);
1194 reg_scan (get_insns (), max_reg_num (), 1);
1195 tem2 = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1196 purge_all_dead_edges (0);
1197 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1198 timevar_pop (TV_CSE);
1199 cse_not_expected = !flag_rerun_cse_after_loop;
1200 }
1201
1202 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1203 things up. Then possibly re-run CSE again. */
1204 while (tem || tem2)
1205 {
1206 tem = tem2 = 0;
1207 timevar_push (TV_JUMP);
1208 rebuild_jump_labels (get_insns ());
1209 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1210 timevar_pop (TV_JUMP);
1211
1212 if (flag_expensive_optimizations)
1213 {
1214 timevar_push (TV_CSE);
1215 reg_scan (get_insns (), max_reg_num (), 1);
1216 tem2 = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1217 purge_all_dead_edges (0);
1218 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1219 timevar_pop (TV_CSE);
1220 }
1221 }
1222
1223 close_dump_file (DFI_gcse, print_rtl_with_bb, get_insns ());
1224 timevar_pop (TV_GCSE);
1225
1226 ggc_collect ();
1227 flag_cse_skip_blocks = save_csb;
1228 flag_cse_follow_jumps = save_cfj;
1229 #ifdef ENABLE_CHECKING
1230 verify_flow_info ();
1231 #endif
1232 }
1233
1234 /* Move constant computations out of loops. */
1235 static void
1236 rest_of_handle_loop_optimize (void)
1237 {
1238 int do_unroll, do_prefetch;
1239
1240 timevar_push (TV_LOOP);
1241 delete_dead_jumptables ();
1242 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1243 open_dump_file (DFI_loop, current_function_decl);
1244
1245 /* CFG is no longer maintained up-to-date. */
1246 free_bb_for_insn ();
1247
1248 if (flag_unroll_loops)
1249 do_unroll = LOOP_AUTO_UNROLL; /* Having two unrollers is useless. */
1250 else
1251 do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
1252 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1253
1254 if (flag_rerun_loop_opt)
1255 {
1256 cleanup_barriers ();
1257
1258 /* We only want to perform unrolling once. */
1259 loop_optimize (get_insns (), dump_file, do_unroll);
1260 do_unroll = 0;
1261
1262 /* The first call to loop_optimize makes some instructions
1263 trivially dead. We delete those instructions now in the
1264 hope that doing so will make the heuristics in loop work
1265 better and possibly speed up compilation. */
1266 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1267
1268 /* The regscan pass is currently necessary as the alias
1269 analysis code depends on this information. */
1270 reg_scan (get_insns (), max_reg_num (), 1);
1271 }
1272 cleanup_barriers ();
1273 loop_optimize (get_insns (), dump_file, do_unroll | do_prefetch);
1274
1275 /* Loop can create trivially dead instructions. */
1276 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1277 close_dump_file (DFI_loop, print_rtl, get_insns ());
1278 timevar_pop (TV_LOOP);
1279 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1280
1281 ggc_collect ();
1282 }
1283
1284 /* Perform loop optimizations. It might be better to do them a bit
1285 sooner, but we want the profile feedback to work more
1286 efficiently. */
1287 static void
1288 rest_of_handle_loop2 (void)
1289 {
1290 struct loops *loops;
1291 basic_block bb;
1292
1293 if (!flag_move_loop_invariants
1294 && !flag_unswitch_loops
1295 && !flag_peel_loops
1296 && !flag_unroll_loops
1297 && !flag_branch_on_count_reg)
1298 return;
1299
1300 timevar_push (TV_LOOP);
1301 open_dump_file (DFI_loop2, current_function_decl);
1302 if (dump_file)
1303 dump_flow_info (dump_file);
1304
1305 /* Initialize structures for layout changes. */
1306 cfg_layout_initialize ();
1307
1308 loops = loop_optimizer_init (dump_file);
1309
1310 if (loops)
1311 {
1312 /* The optimizations: */
1313 if (flag_move_loop_invariants)
1314 move_loop_invariants (loops);
1315
1316 if (flag_unswitch_loops)
1317 unswitch_loops (loops);
1318
1319 if (flag_peel_loops || flag_unroll_loops)
1320 unroll_and_peel_loops (loops,
1321 (flag_peel_loops ? UAP_PEEL : 0) |
1322 (flag_unroll_loops ? UAP_UNROLL : 0) |
1323 (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));
1324
1325 #ifdef HAVE_doloop_end
1326 if (flag_branch_on_count_reg && HAVE_doloop_end)
1327 doloop_optimize_loops (loops);
1328 #endif /* HAVE_doloop_end */
1329
1330 loop_optimizer_finalize (loops, dump_file);
1331 }
1332
1333 /* Finalize layout changes. */
1334 FOR_EACH_BB (bb)
1335 if (bb->next_bb != EXIT_BLOCK_PTR)
1336 bb->rbi->next = bb->next_bb;
1337 cfg_layout_finalize ();
1338
1339 cleanup_cfg (CLEANUP_EXPENSIVE);
1340 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1341 reg_scan (get_insns (), max_reg_num (), 0);
1342 if (dump_file)
1343 dump_flow_info (dump_file);
1344 close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
1345 timevar_pop (TV_LOOP);
1346 ggc_collect ();
1347 }
1348
1349 /* This is called from finish_function (within langhooks.parse_file)
1350 after each top-level definition is parsed.
1351 It is supposed to compile that function or variable
1352 and output the assembler code for it.
1353 After we return, the tree storage is freed. */
1354
1355 void
1356 rest_of_compilation (void)
1357 {
1358 /* There's no need to defer outputting this function any more; we
1359 know we want to output it. */
1360 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1361
1366 /* Register rtl specific functions for cfg. */
1367 rtl_register_cfg_hooks ();
1368
1369 /* Now that we're out of the frontend, we shouldn't have any more
1370 CONCATs anywhere. */
1371 generating_concat_p = 0;
1372
1373 /* When processing delayed functions, prepare_function_start() won't
1374 have been run to re-initialize it. */
1375 cse_not_expected = ! optimize;
1376
1377 finalize_block_changes ();
1378
1379 /* Dump the rtl code if we are dumping rtl. */
1380 if (open_dump_file (DFI_rtl, current_function_decl))
1381 close_dump_file (DFI_rtl, print_rtl, get_insns ());
1382
1383 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1384 sorts of eh initialization. Delay this until after the
1385 initial rtl dump so that we can see the original nesting. */
1386 convert_from_eh_region_ranges ();
1387
1388 /* If we're emitting a nested function, make sure its parent gets
1389 emitted as well. Doing otherwise confuses debug info. */
1390 {
1391 tree parent;
1392 for (parent = DECL_CONTEXT (current_function_decl);
1393 parent != NULL_TREE;
1394 parent = get_containing_scope (parent))
1395 if (TREE_CODE (parent) == FUNCTION_DECL)
1396 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1397 }
1398
1399 /* We are now committed to emitting code for this function.  Do any
1400 preparation, such as emitting abstract debug info for the inline
1401 function, before it gets mangled by optimization.  */
1402 if (cgraph_function_possibly_inlined_p (current_function_decl))
1403 (*debug_hooks->outlining_inline_function) (current_function_decl);
1404
1405 /* Remove any notes we don't need. That will make iterating
1406 over the instruction sequence faster, and allow the garbage
1407 collector to reclaim the memory used by the notes. */
1408 remove_unnecessary_notes ();
1409
1410 ggc_collect ();
1411
1412 /* Initialize some variables used by the optimizers. */
1413 init_function_for_compilation ();
1414
1415 TREE_ASM_WRITTEN (current_function_decl) = 1;
1416
1417 /* Now that integrate will no longer see our rtl, we need not
1418 distinguish between the return value of this function and the
1419 return value of called functions. Also, we can remove all SETs
1420 of subregs of hard registers; they are only here because of
1421 integrate. Also, we can now initialize pseudos intended to
1422 carry magic hard reg data throughout the function. */
1423 rtx_equal_function_value_matters = 0;
1424 purge_hard_subreg_sets (get_insns ());
1425
1426 /* Early return if there were errors. We can run afoul of our
1427 consistency checks, and there's not really much point in fixing them. */
1428 if (rtl_dump_and_exit || flag_syntax_only || errorcount || sorrycount)
1429 goto exit_rest_of_compilation;
1430
1431 timevar_push (TV_JUMP);
1432 open_dump_file (DFI_sibling, current_function_decl);
1433
1434 /* ??? We may get called either via tree_rest_of_compilation when the CFG
1435 is already built or directly (for instance from coverage code).
1436 The direct callers should be updated.  */
1437 if (!basic_block_info)
1438 {
1439 init_flow ();
1440 rebuild_jump_labels (get_insns ());
1441 find_exception_handler_labels ();
1442 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1443 }
1444 delete_unreachable_blocks ();
1445 #ifdef ENABLE_CHECKING
1446 verify_flow_info ();
1447 #endif
1448
1449 /* Turn NOTE_INSN_PREDICTIONs into branch predictions. */
1450 if (flag_guess_branch_prob)
1451 {
1452 timevar_push (TV_BRANCH_PROB);
1453 note_prediction_to_br_prob ();
1454 timevar_pop (TV_BRANCH_PROB);
1455 }
1456
1457 timevar_pop (TV_JUMP);
1458
1459 if (cfun->tail_call_emit)
1460 fixup_tail_calls ();
1461
1462 insn_locators_initialize ();
1463 /* Complete generation of exception handling code. */
1464 if (doing_eh (0))
1465 {
1466 timevar_push (TV_JUMP);
1467 open_dump_file (DFI_eh, current_function_decl);
1468
1469 finish_eh_generation ();
1470
1471 close_dump_file (DFI_eh, print_rtl, get_insns ());
1472 timevar_pop (TV_JUMP);
1473 }
1474
1475 /* Delay emitting hard_reg_initial_value sets until after EH landing pad
1476 generation, which might create new sets. */
1477 emit_initial_value_sets ();
1478
1479 #ifdef FINALIZE_PIC
1480 /* If we are doing position-independent code generation, now
1481 is the time to output special prologues and epilogues.
1482 We do not want to do this earlier, because it just clutters
1483 up inline functions with meaningless insns. */
1484 if (flag_pic)
1485 FINALIZE_PIC;
1486 #endif
1487
1488 /* Copy any shared structure that should not be shared. */
1489 unshare_all_rtl ();
1490
1491 #ifdef SETJMP_VIA_SAVE_AREA
1492 /* This must be performed before virtual register instantiation.
1493 Please be aware that everything in the compiler that can look
1494 at the RTL up to this point must understand that REG_SAVE_AREA
1495 is just like a use of the REG contained inside. */
1496 if (current_function_calls_alloca)
1497 optimize_save_area_alloca (get_insns ());
1498 #endif
1499
1500 /* Instantiate all virtual registers. */
1501 instantiate_virtual_regs ();
1502
1503 open_dump_file (DFI_jump, current_function_decl);
1504
1505 /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
1506 are initialized and to compute whether control can drop off the end
1507 of the function. */
1508
1509 timevar_push (TV_JUMP);
1510 /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
1511 before jump optimization switches branch directions. */
1512 if (flag_guess_branch_prob)
1513 expected_value_to_br_prob ();
1514
1515 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1516 reg_scan (get_insns (), max_reg_num (), 0);
1517 if (dump_file)
1518 dump_flow_info (dump_file);
1519 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
1520 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1521
1522 create_loop_notes ();
1523
1524 purge_line_number_notes (get_insns ());
1525
1526 close_dump_file (DFI_jump, print_rtl, get_insns ());
1527
1528 if (optimize)
1529 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1530
1531 /* Jump optimization, and the removal of NULL pointer checks, may
1532 have reduced the number of instructions substantially. CSE, and
1533 future passes, allocate arrays whose dimensions involve the
1534 maximum instruction UID, so if we can reduce the maximum UID
1535 we'll save big on memory. */
1536 renumber_insns (dump_file);
1537 timevar_pop (TV_JUMP);
1538
1539 close_dump_file (DFI_jump, print_rtl_with_bb, get_insns ());
1540
1541 ggc_collect ();
1542
1543 if (optimize > 0)
1544 rest_of_handle_cse ();
1545
1546 rest_of_handle_addressof ();
1547
1548 ggc_collect ();
1549
1550 if (optimize > 0)
1551 {
1552 if (flag_gcse)
1553 rest_of_handle_gcse ();
1554
1555 if (flag_loop_optimize)
1556 rest_of_handle_loop_optimize ();
1557
1558 if (flag_gcse)
1559 rest_of_handle_jump_bypass ();
1560 }
1561
1562 timevar_push (TV_FLOW);
1563
1564 rest_of_handle_cfg ();
1565
1566 if (!flag_tree_based_profiling
1567 && (optimize > 0 || profile_arc_flag
1568 || flag_test_coverage || flag_branch_probabilities))
1569 {
1570 rtl_register_profile_hooks ();
1571 rtl_register_value_prof_hooks ();
1572 rest_of_handle_branch_prob ();
1573
1574 if (flag_branch_probabilities
1575 && flag_profile_values
1576 && flag_value_profile_transformations)
1577 rest_of_handle_value_profile_transformations ();
1578
1579 /* Remove the death notes created for vpt. */
1580 if (flag_profile_values)
1581 count_or_remove_death_notes (NULL, 1);
1582 }
1583
1584 if (optimize > 0)
1585 rest_of_handle_if_conversion ();
1586
1587 if (flag_tracer)
1588 rest_of_handle_tracer ();
1589
1590 if (optimize > 0
1591 && flag_loop_optimize2)
1592 rest_of_handle_loop2 ();
1593
1594 if (flag_web)
1595 rest_of_handle_web ();
1596
1597 if (flag_rerun_cse_after_loop)
1598 rest_of_handle_cse2 ();
1599
1600 cse_not_expected = 1;
1601
1602 rest_of_handle_life ();
1603
1604 if (optimize > 0)
1605 rest_of_handle_combine ();
1606
1607 if (flag_if_conversion)
1608 rest_of_handle_if_after_combine ();
1609
1610 /* The optimization to partition hot/cold basic blocks into separate
1611 sections of the .o file does not work well with exception handling.
1612 Don't call it if there are exceptions. */
1613
1614 if (flag_reorder_blocks_and_partition && !flag_exceptions)
1615 {
1616 no_new_pseudos = 0;
1617 partition_hot_cold_basic_blocks ();
1618 allocate_reg_life_data ();
1619 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1620 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
1621 no_new_pseudos = 1;
1622 }
1623
1624 if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
1625 rest_of_handle_regmove ();
1626
1627 /* Do unconditional splitting before register allocation to allow machine
1628 description to add extra information not needed previously. */
1629 split_all_insns (1);
1630
1631 #ifdef OPTIMIZE_MODE_SWITCHING
1632 timevar_push (TV_MODE_SWITCH);
1633
1634 no_new_pseudos = 0;
1635 optimize_mode_switching (NULL);
1636 no_new_pseudos = 1;
1637
1638 timevar_pop (TV_MODE_SWITCH);
1639 #endif
1640
1641 /* Any of the several passes since flow1 will have munged register
1642 lifetime data a bit. We need it to be up to date for scheduling
1643 (see handling of reg_known_equiv in init_alias_analysis). */
1644 recompute_reg_usage (get_insns (), !optimize_size);
1645
1646 #ifdef INSN_SCHEDULING
1647 rest_of_handle_sched ();
1648 #endif
1649
1650 /* Determine if the current function is a leaf before running reload
1651 since this can impact optimizations done by the prologue and
1652 epilogue thus changing register elimination offsets. */
1653 current_function_is_leaf = leaf_function_p ();
1654
1655 timevar_push (TV_LOCAL_ALLOC);
1656 open_dump_file (DFI_lreg, current_function_decl);
1657
1658 if (flag_new_regalloc)
1659 {
1660 if (rest_of_handle_new_regalloc ())
1661 goto exit_rest_of_compilation;
1662 }
1663 else
1664 {
1665 if (rest_of_handle_old_regalloc ())
1666 goto exit_rest_of_compilation;
1667 }
1668
1669 ggc_collect ();
1670
1671 open_dump_file (DFI_postreload, current_function_decl);
1672
1673 /* Do a very simple CSE pass over just the hard registers. */
1674 if (optimize > 0)
1675 {
1676 timevar_push (TV_RELOAD_CSE_REGS);
1677 reload_cse_regs (get_insns ());
1678 /* reload_cse_regs can eliminate potentially-trapping MEMs.
1679 Remove any EH edges associated with them. */
1680 if (flag_non_call_exceptions)
1681 purge_all_dead_edges (0);
1682 timevar_pop (TV_RELOAD_CSE_REGS);
1683 }
1684
1685 close_dump_file (DFI_postreload, print_rtl_with_bb, get_insns ());
1686
1687 if (optimize > 0 && flag_gcse_after_reload)
1688 rest_of_handle_gcse2 ();
1689
1690 /* Re-create the death notes which were deleted during reload. */
1691 timevar_push (TV_FLOW2);
1692 open_dump_file (DFI_flow2, current_function_decl);
1693
1694 #ifdef ENABLE_CHECKING
1695 verify_flow_info ();
1696 #endif
1697
1698 /* If optimizing, then go ahead and split the insns now.  */
1699 #ifndef STACK_REGS
1700 if (optimize > 0)
1701 #endif
1702 split_all_insns (0);
1703
1704 if (flag_branch_target_load_optimize)
1705 {
1706 open_dump_file (DFI_branch_target_load, current_function_decl);
1707
1708 branch_target_load_optimize (/*after_prologue_epilogue_gen=*/false);
1709
1710 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1711
1712 ggc_collect ();
1713 }
1714
1715 if (! targetm.late_rtl_prologue_epilogue)
1716 {
1717 if (optimize)
1718 cleanup_cfg (CLEANUP_EXPENSIVE);
1719
1720 /* On some machines, the prologue and epilogue code, or parts thereof,
1721 can be represented as RTL. Doing so lets us schedule insns between
1722 it and the rest of the code and also allows delayed branch
1723 scheduling to operate in the epilogue. */
1724 thread_prologue_and_epilogue_insns (get_insns ());
1725 epilogue_completed = 1;
1726 }
1727
1728 if (optimize)
1729 {
1730 life_analysis (dump_file, PROP_POSTRELOAD);
1731 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
1732 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1733
1734 /* This is kind of a heuristic. We need to run combine_stack_adjustments
1735 even for machines with possibly nonzero RETURN_POPS_ARGS
1736 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
1737 push instructions will have popping returns. */
1738 #ifndef PUSH_ROUNDING
1739 if (!ACCUMULATE_OUTGOING_ARGS)
1740 #endif
1741 combine_stack_adjustments ();
1742
1743 ggc_collect ();
1744 }
1745
1746 flow2_completed = 1;
1747
1748 close_dump_file (DFI_flow2, print_rtl_with_bb, get_insns ());
1749 timevar_pop (TV_FLOW2);
1750
1751 #ifdef HAVE_peephole2
1752 if (optimize > 0 && flag_peephole2)
1753 {
1754 timevar_push (TV_PEEPHOLE2);
1755 open_dump_file (DFI_peephole2, current_function_decl);
1756
1757 peephole2_optimize (dump_file);
1758
1759 close_dump_file (DFI_peephole2, print_rtl_with_bb, get_insns ());
1760 timevar_pop (TV_PEEPHOLE2);
1761 }
1762 #endif
1763
1764 open_dump_file (DFI_ce3, current_function_decl);
1765 if (optimize)
1766 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
1767 splitting possibly introduced more crossjumping opportunities. */
1768 cleanup_cfg (CLEANUP_EXPENSIVE
1769 | CLEANUP_UPDATE_LIFE
1770 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1771 if (flag_if_conversion2)
1772 {
1773 timevar_push (TV_IFCVT2);
1774
1775 if_convert (1);
1776
1777 timevar_pop (TV_IFCVT2);
1778 }
1779 close_dump_file (DFI_ce3, print_rtl_with_bb, get_insns ());
1780
1781 if (optimize > 0)
1782 {
1783 if (flag_rename_registers || flag_cprop_registers)
1784 rest_of_handle_regrename ();
1785
1786 rest_of_handle_reorder_blocks ();
1787 }
1788
1789 if (flag_branch_target_load_optimize2)
1790 {
1791 /* Leave this as a warning for now so that it is possible to experiment
1792 with running this pass twice.  In 3.6, we should either make this
1793 an error, or use separate dump files.  */
1794 if (flag_branch_target_load_optimize)
1795 warning ("branch target register load optimization is not intended "
1796 "to be run twice");
1797
1798 open_dump_file (DFI_branch_target_load, current_function_decl);
1799
1800 branch_target_load_optimize (/*after_prologue_epilogue_gen=*/true);
1801
1802 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1803
1804 ggc_collect ();
1805 }
1806
1807 #ifdef LEAF_REGISTERS
1808 current_function_uses_only_leaf_regs
1809 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
1810 #endif
1811
1812 if (targetm.late_rtl_prologue_epilogue)
1813 {
1814 /* On some machines, the prologue and epilogue code, or parts thereof,
1815 can be represented as RTL. Doing so lets us schedule insns between
1816 it and the rest of the code and also allows delayed branch
1817 scheduling to operate in the epilogue. */
1818 thread_prologue_and_epilogue_insns (get_insns ());
1819 epilogue_completed = 1;
1820 if (optimize)
1821 life_analysis (dump_file, PROP_POSTRELOAD);
1822 }
1823
1824 #ifdef INSN_SCHEDULING
1825 if (optimize > 0 && flag_schedule_insns_after_reload)
1826 rest_of_handle_sched2 ();
1827 #endif
1828
1829 #ifdef STACK_REGS
1830 rest_of_handle_stack_regs ();
1831 #endif
1832
1833 compute_alignments ();
1834
1835 if (flag_var_tracking)
1836 rest_of_handle_variable_tracking ();
1837
1838 /* CFG is no longer maintained up-to-date. */
1839 free_bb_for_insn ();
1840
1841 if (targetm.machine_dependent_reorg != 0)
1842 rest_of_handle_machine_reorg ();
1843
1844 purge_line_number_notes (get_insns ());
1845 cleanup_barriers ();
1846
1847 #ifdef DELAY_SLOTS
1848 if (optimize > 0 && flag_delayed_branch)
1849 rest_of_handle_delay_slots ();
1850 #endif
1851
1852 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
1853 timevar_push (TV_SHORTEN_BRANCH);
1854 split_all_insns_noflow ();
1855 timevar_pop (TV_SHORTEN_BRANCH);
1856 #endif
1857
1858 convert_to_eh_region_ranges ();
1859
1860 /* Shorten branches. */
1861 timevar_push (TV_SHORTEN_BRANCH);
1862 shorten_branches (get_insns ());
1863 timevar_pop (TV_SHORTEN_BRANCH);
1864
1865 set_nothrow_function_flags ();
1866 if (current_function_nothrow)
1867 /* Now we know that this can't throw; set the flag for the benefit
1868 of other functions later in this translation unit. */
1869 TREE_NOTHROW (current_function_decl) = 1;
1870
1871 rest_of_handle_final ();
1872
1873 /* Write DBX symbols if requested. */
1874
1875 /* Note that for those inline functions where we don't initially
1876 know for certain that we will be generating an out-of-line copy,
1877 the first invocation of this routine (rest_of_compilation) will
1878 skip over this code by doing a `goto exit_rest_of_compilation;'.
1879 Later on, wrapup_global_declarations will (indirectly) call
1880 rest_of_compilation again for those inline functions that need
1881 to have out-of-line copies generated. During that call, we
1882 *will* be routed past here. */
1883
1884 timevar_push (TV_SYMOUT);
1885 (*debug_hooks->function_decl) (current_function_decl);
1886 timevar_pop (TV_SYMOUT);
1887
1888 exit_rest_of_compilation:
1889
1890 coverage_end_function ();
1891
1892 /* In case the function was not output,
1893 don't leave any temporary anonymous types
1894 queued up for sdb output. */
1895 #ifdef SDB_DEBUGGING_INFO
1896 if (write_symbols == SDB_DEBUG)
1897 sdbout_types (NULL_TREE);
1898 #endif
1899
1900 reload_completed = 0;
1901 epilogue_completed = 0;
1902 flow2_completed = 0;
1903 no_new_pseudos = 0;
1904
1905 timevar_push (TV_FINAL);
1906
1907 /* Clear out the insn_length contents now that they are no
1908 longer valid. */
1909 init_insn_lengths ();
1910
1911 /* Show no temporary slots allocated. */
1912 init_temp_slots ();
1913
1914 free_basic_block_vars ();
1915 free_bb_for_insn ();
1916
1917 timevar_pop (TV_FINAL);
1918
1919 if (targetm.binds_local_p (current_function_decl))
1920 {
1921 int pref = cfun->preferred_stack_boundary;
1922 if (cfun->recursive_call_emit
1923 && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
1924 pref = cfun->stack_alignment_needed;
1925 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
1926 = pref;
1927 }
1928
1929 /* Make sure volatile mem refs aren't considered valid operands for
1930 arithmetic insns. We must call this here if this is a nested inline
1931 function, since the above code leaves us in the init_recog state
1932 (from final.c), and the function context push/pop code does not
1933 save/restore volatile_ok.
1934
1935 ??? Maybe it isn't necessary for expand_start_function to call this
1936 anymore if we do it here? */
1937
1938 init_recog_no_volatile ();
1939
1940 /* We're done with this function. Free up memory if we can. */
1941 free_after_parsing (cfun);
1942 }
1943
1944 void
1945 init_optimization_passes (void)
1946 {
1947 open_dump_file (DFI_cgraph, NULL);
1948 cgraph_dump_file = dump_file;
1949 dump_file = NULL;
1950 }
1951
1952 void
1953 finish_optimization_passes (void)
1954 {
1955 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1956 {
1957 timevar_push (TV_DUMP);
1958 open_dump_file (DFI_bp, NULL);
1959
1960 end_branch_prob ();
1961
1962 close_dump_file (DFI_bp, NULL, NULL_RTX);
1963 timevar_pop (TV_DUMP);
1964 }
1965
1966 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
1967 {
1968 timevar_push (TV_DUMP);
1969 dump_combine_total_stats (dump_file);
1970 close_dump_file (DFI_combine, NULL, NULL_RTX);
1971 timevar_pop (TV_DUMP);
1972 }
1973
1974 dump_file = cgraph_dump_file;
1975 cgraph_dump_file = NULL;
1976 close_dump_file (DFI_cgraph, NULL, NULL_RTX);
1977
1978 /* Do whatever is necessary to finish printing the graphs. */
1979 if (graph_dump_format != no_graph)
1980 {
1981 int i;
1982
1983 for (i = 0; i < (int) DFI_MAX; ++i)
1984 if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p)
1985 {
1986 char seq[16];
1987 char *suffix;
1988
1989 sprintf (seq, DUMPFILE_FORMAT, i);
1990 suffix = concat (seq, dump_file_tbl[i].extension, NULL);
1991 finish_graph_dump_file (dump_base_name, suffix);
1992 free (suffix);
1993 }
1994 }
1995
1996 }
1997
1998 bool
1999 enable_rtl_dump_file (int letter)
2000 {
2001 bool matched = false;
2002 int i;
2003
2004 if (letter == 'a')
2005 {
2006 for (i = 0; i < (int) DFI_MAX; ++i)
2007 dump_file_tbl[i].enabled = 1;
2008 matched = true;
2009 }
2010 else
2011 {
2012 for (i = 0; i < (int) DFI_MAX; ++i)
2013 if (letter == dump_file_tbl[i].debug_switch)
2014 {
2015 dump_file_tbl[i].enabled = 1;
2016 matched = true;
2017 }
2018 }
2019
2020 return matched;
2021 }
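/* Purely illustrative examples of the mapping implemented above:
   enable_rtl_dump_file ('j') enables the "jump" dump,
   enable_rtl_dump_file ('G') enables both "gcse" and "bypass" (they
   share the switch letter), and 'a' enables every dump in the table.
   The letters come from the -d<letters> command-line option.  */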
2022
2023 struct tree_opt_pass pass_rest_of_compilation =
2024 {
2025 "rest of compilation", /* name */
2026 NULL, /* gate */
2027 rest_of_compilation, /* execute */
2028 NULL, /* sub */
2029 NULL, /* next */
2030 0, /* static_pass_number */
2031 TV_REST_OF_COMPILATION, /* tv_id */
2032 PROP_rtl, /* properties_required */
2033 0, /* properties_provided */
2034 PROP_rtl, /* properties_destroyed */
2035 0, /* todo_flags_start */
2036 TODO_ggc_collect /* todo_flags_finish */
2037 };
2038
2039