gcc/passes.c
1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and low-level interface to malloc also handled here. */
26
27 #include "config.h"
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include <signal.h>
34
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
37 #endif
38
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
41 #endif
42
43 #include "line-map.h"
44 #include "input.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "tm_p.h"
48 #include "flags.h"
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "except.h"
56 #include "function.h"
57 #include "toplev.h"
58 #include "expr.h"
59 #include "basic-block.h"
60 #include "intl.h"
61 #include "ggc.h"
62 #include "graph.h"
63 #include "regs.h"
64 #include "timevar.h"
65 #include "diagnostic.h"
66 #include "params.h"
67 #include "reload.h"
68 #include "dwarf2asm.h"
69 #include "integrate.h"
70 #include "real.h"
71 #include "debug.h"
72 #include "target.h"
73 #include "langhooks.h"
74 #include "cfglayout.h"
75 #include "cfgloop.h"
76 #include "hosthooks.h"
77 #include "cgraph.h"
78 #include "opts.h"
79 #include "coverage.h"
80 #include "value-prof.h"
81 #include "alloc-pool.h"
82 #include "tree-pass.h"
83 #include "tree-dump.h"
84
85 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86 #include "dwarf2out.h"
87 #endif
88
89 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
90 #include "dbxout.h"
91 #endif
92
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
96
97 #ifdef XCOFF_DEBUGGING_INFO
98 #include "xcoffout.h" /* Needed for external data
99 declarations for e.g. AIX 4.x. */
100 #endif
101
102 #ifndef HAVE_conditional_execution
103 #define HAVE_conditional_execution 0
104 #endif
105
106 /* Format to use to print dumpfile index value */
107 #ifndef DUMPFILE_FORMAT
108 #define DUMPFILE_FORMAT ".%02d."
109 #endif
110
111 static int initializing_dump = 0;
112
113 /* Routine to open a dump file. Return true if the dump file is enabled. */
114
115 static int
116 open_dump_file (enum tree_dump_index index, tree decl)
117 {
118 if (! dump_enabled_p (index))
119 return 0;
120
121 timevar_push (TV_DUMP);
122
123 if (dump_file != NULL || dump_file_name != NULL)
124 abort ();
125
126 dump_file_name = get_dump_file_name (index);
127 initializing_dump = !dump_initialized_p (index);
128 dump_file = dump_begin (index, NULL);
129
130 if (dump_file == NULL)
131 fatal_error ("can't open %s: %m", dump_file_name);
132
133 if (decl)
134 fprintf (dump_file, "\n;; Function %s%s\n\n",
135 lang_hooks.decl_printable_name (decl, 2),
136 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
137 ? " (hot)"
138 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
139 ? " (unlikely executed)"
140 : "");
141
142 timevar_pop (TV_DUMP);
143 return 1;
144 }
145
146 /* Routine to close a dump file. */
147
148 static void
149 close_dump_file (enum tree_dump_index index,
150 void (*func) (FILE *, rtx),
151 rtx insns)
152 {
153 if (! dump_file)
154 return;
155
156 timevar_push (TV_DUMP);
157 if (insns
158 && graph_dump_format != no_graph)
159 {
160 /* If we've not initialized the files, do so now. */
161 if (initializing_dump)
162 clean_graph_dump_file (dump_file_name);
163
164 print_rtl_graph_with_bb (dump_file_name, insns);
165 }
166
167 if (func && insns)
168 func (dump_file, insns);
169
170 dump_end (index, dump_file);
171 free ((char *) dump_file_name);
172
173 dump_file = NULL;
174 dump_file_name = NULL;
175 timevar_pop (TV_DUMP);
176 }
177
178 /* This is called from various places for FUNCTION_DECL, VAR_DECL,
179 and TYPE_DECL nodes.
180
181 This does nothing for local (non-static) variables, unless the
182 variable is a register variable with DECL_ASSEMBLER_NAME set. In
183 that case, or if the variable is not an automatic, it sets up the
184 RTL and outputs any assembler code (label definition, storage
185 allocation and initialization).
186
187 DECL is the declaration. TOP_LEVEL is nonzero
188 if this declaration is not within a function. */
189
190 void
191 rest_of_decl_compilation (tree decl,
192 int top_level,
193 int at_end)
194 {
195 /* We deferred calling assemble_alias so that we could collect
196 other attributes such as visibility. Emit the alias now. */
197 {
198 tree alias;
199 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
200 if (alias)
201 {
202 alias = TREE_VALUE (TREE_VALUE (alias));
203 alias = get_identifier (TREE_STRING_POINTER (alias));
204 assemble_alias (decl, alias);
205 }
206 }
207
208 /* Can't defer this, because it needs to happen before any
209 later function definitions are processed. */
210 if (DECL_REGISTER (decl) && DECL_ASSEMBLER_NAME_SET_P (decl))
211 make_decl_rtl (decl);
212
213 /* Forward declarations for nested functions are not "external",
214 but we need to treat them as if they were. */
215 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
216 || TREE_CODE (decl) == FUNCTION_DECL)
217 {
218 timevar_push (TV_VARCONST);
219
 220 /* Don't output anything when a tentative file-scope definition
 221 is seen. But at the end of compilation, do output code for it.
 222 
 223 When unit-at-a-time is active we output all variables and rely on the
 224 callgraph code to defer them, except for forward declarations
 225 (see gcc.c-torture/compile/920624-1.c). */
226 if ((at_end
227 || !DECL_DEFER_OUTPUT (decl)
228 || (flag_unit_at_a_time && DECL_INITIAL (decl)))
229 && !DECL_EXTERNAL (decl))
230 {
231 if (flag_unit_at_a_time && !cgraph_global_info_ready
232 && TREE_CODE (decl) != FUNCTION_DECL && top_level
233 /* If we defer processing of decls that have had their
234 DECL_RTL set above (say, in make_decl_rtl),
235 check_global_declarations() will clear it before
236 assemble_variable has a chance to act on it. This
237 would remove all traces of the register name in a
238 global register variable, for example. */
239 && !DECL_RTL_SET_P (decl))
240 cgraph_varpool_finalize_decl (decl);
241 else
242 assemble_variable (decl, top_level, at_end, 0);
243 }
244
245 #ifdef ASM_FINISH_DECLARE_OBJECT
246 if (decl == last_assemble_variable_decl)
247 {
248 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
249 top_level, at_end);
250 }
251 #endif
252
253 timevar_pop (TV_VARCONST);
254 }
255 else if (TREE_CODE (decl) == TYPE_DECL)
256 {
257 timevar_push (TV_SYMOUT);
258 debug_hooks->type_decl (decl, !top_level);
259 timevar_pop (TV_SYMOUT);
260 }
261 }
262
263 /* Called after finishing a record, union or enumeral type. */
264
265 void
266 rest_of_type_compilation (tree type, int toplev)
267 {
268 /* Avoid confusing the debug information machinery when there are
269 errors. */
270 if (errorcount != 0 || sorrycount != 0)
271 return;
272
273 timevar_push (TV_SYMOUT);
274 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
275 timevar_pop (TV_SYMOUT);
276 }
277
278 /* Turn the RTL into assembly. */
279 static void
280 rest_of_handle_final (void)
281 {
282 timevar_push (TV_FINAL);
283 {
284 rtx x;
285 const char *fnname;
286
287 /* Get the function's name, as described by its RTL. This may be
288 different from the DECL_NAME name used in the source file. */
289
290 x = DECL_RTL (current_function_decl);
291 if (!MEM_P (x))
292 abort ();
293 x = XEXP (x, 0);
294 if (GET_CODE (x) != SYMBOL_REF)
295 abort ();
296 fnname = XSTR (x, 0);
297
298 assemble_start_function (current_function_decl, fnname);
299 final_start_function (get_insns (), asm_out_file, optimize);
300 final (get_insns (), asm_out_file, optimize, 0);
301 final_end_function ();
302
303 #ifdef TARGET_UNWIND_INFO
304 /* ??? The IA-64 ".handlerdata" directive must be issued before
305 the ".endp" directive that closes the procedure descriptor. */
306 output_function_exception_table ();
307 #endif
308
309 assemble_end_function (current_function_decl, fnname);
310
311 #ifndef TARGET_UNWIND_INFO
312 /* Otherwise, it feels unclean to switch sections in the middle. */
313 output_function_exception_table ();
314 #endif
315
316 user_defined_section_attribute = false;
317
318 if (! quiet_flag)
319 fflush (asm_out_file);
320
321 /* Release all memory allocated by flow. */
322 free_basic_block_vars ();
323
324 /* Release all memory held by regsets now. */
325 regset_release_memory ();
326 }
327
328 /* Write DBX symbols if requested. */
329
330 /* Note that for those inline functions where we don't initially
331 know for certain that we will be generating an out-of-line copy,
332 the first invocation of this routine (rest_of_compilation) will
333 skip over this code by doing a `goto exit_rest_of_compilation;'.
334 Later on, wrapup_global_declarations will (indirectly) call
335 rest_of_compilation again for those inline functions that need
336 to have out-of-line copies generated. During that call, we
337 *will* be routed past here. */
338
339 timevar_push (TV_SYMOUT);
340 (*debug_hooks->function_decl) (current_function_decl);
341 timevar_pop (TV_SYMOUT);
342
343 ggc_collect ();
344 timevar_pop (TV_FINAL);
345 }
346
347 #ifdef DELAY_SLOTS
348 /* Run delay slot optimization. */
349 static void
350 rest_of_handle_delay_slots (void)
351 {
352 timevar_push (TV_DBR_SCHED);
353 open_dump_file (DFI_dbr, current_function_decl);
354
355 dbr_schedule (get_insns (), dump_file);
356
357 close_dump_file (DFI_dbr, print_rtl, get_insns ());
358
359 ggc_collect ();
360
361 timevar_pop (TV_DBR_SCHED);
362 }
363 #endif
364
365 #ifdef STACK_REGS
366 /* Convert register usage from flat register file usage to a stack
367 register file. */
368 static void
369 rest_of_handle_stack_regs (void)
370 {
371 #if defined (HAVE_ATTR_length)
 372 /* If flow2 creates new instructions that need splitting,
 373 and scheduling after reload is not done, they might not be
 374 split until final, which does not allow splitting
 375 when HAVE_ATTR_length is defined. */
376 #ifdef INSN_SCHEDULING
377 if (optimize && !flag_schedule_insns_after_reload)
378 #else
379 if (optimize)
380 #endif
381 {
382 timevar_push (TV_SHORTEN_BRANCH);
383 split_all_insns (1);
384 timevar_pop (TV_SHORTEN_BRANCH);
385 }
386 #endif
387
388 timevar_push (TV_REG_STACK);
389 open_dump_file (DFI_stack, current_function_decl);
390
391 if (reg_to_stack (dump_file) && optimize)
392 {
393 if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
394 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
395 && (flag_reorder_blocks || flag_reorder_blocks_and_partition))
396 {
397 reorder_basic_blocks (0);
398 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
399 }
400 }
401
402 close_dump_file (DFI_stack, print_rtl_with_bb, get_insns ());
403
404 ggc_collect ();
405 timevar_pop (TV_REG_STACK);
406 }
407 #endif
408
 409 /* Track the variables, i.e. compute where each variable is stored at each position in the function. */
410 static void
411 rest_of_handle_variable_tracking (void)
412 {
413 timevar_push (TV_VAR_TRACKING);
414 open_dump_file (DFI_vartrack, current_function_decl);
415
416 variable_tracking_main ();
417
418 close_dump_file (DFI_vartrack, print_rtl_with_bb, get_insns ());
419 timevar_pop (TV_VAR_TRACKING);
420 }
421
422 /* Machine dependent reorg pass. */
423 static void
424 rest_of_handle_machine_reorg (void)
425 {
426 timevar_push (TV_MACH_DEP);
427 open_dump_file (DFI_mach, current_function_decl);
428
429 targetm.machine_dependent_reorg ();
430
431 close_dump_file (DFI_mach, print_rtl, get_insns ());
432
433 ggc_collect ();
434 timevar_pop (TV_MACH_DEP);
435 }
436
437
438 /* Run new register allocator. Return TRUE if we must exit
439 rest_of_compilation upon return. */
440 static bool
441 rest_of_handle_new_regalloc (void)
442 {
443 int failure;
444
445 timevar_push (TV_LOCAL_ALLOC);
446 open_dump_file (DFI_lreg, current_function_decl);
447
448 delete_trivially_dead_insns (get_insns (), max_reg_num ());
449 reg_alloc ();
450
451 timevar_pop (TV_LOCAL_ALLOC);
452 close_dump_file (DFI_lreg, NULL, NULL);
453
454 /* XXX clean up the whole mess to bring live info in shape again. */
455 timevar_push (TV_GLOBAL_ALLOC);
456 open_dump_file (DFI_greg, current_function_decl);
457
458 build_insn_chain (get_insns ());
459 failure = reload (get_insns (), 0);
460
461 timevar_pop (TV_GLOBAL_ALLOC);
462
463 ggc_collect ();
464
465 if (dump_enabled_p (DFI_greg))
466 {
467 timevar_push (TV_DUMP);
468 dump_global_regs (dump_file);
469 timevar_pop (TV_DUMP);
470 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
471 }
472
473 if (failure)
474 return true;
475
476 reload_completed = 1;
477
478 return false;
479 }
480
481 /* Run old register allocator. Return TRUE if we must exit
482 rest_of_compilation upon return. */
483 static bool
484 rest_of_handle_old_regalloc (void)
485 {
486 int failure;
487 int rebuild_notes;
488
489 timevar_push (TV_LOCAL_ALLOC);
490 open_dump_file (DFI_lreg, current_function_decl);
491
492 /* Allocate the reg_renumber array. */
493 allocate_reg_info (max_regno, FALSE, TRUE);
494
495 /* And the reg_equiv_memory_loc array. */
496 VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
497 reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);
498
499 allocate_initial_values (reg_equiv_memory_loc);
500
501 regclass (get_insns (), max_reg_num (), dump_file);
502 rebuild_notes = local_alloc ();
503
504 timevar_pop (TV_LOCAL_ALLOC);
505
506 /* Local allocation may have turned an indirect jump into a direct
507 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
508 instructions. */
509 if (rebuild_notes)
510 {
511 timevar_push (TV_JUMP);
512
513 rebuild_jump_labels (get_insns ());
514 purge_all_dead_edges (0);
515
516 timevar_pop (TV_JUMP);
517 }
518
519 if (dump_enabled_p (DFI_lreg))
520 {
521 timevar_push (TV_DUMP);
522 dump_flow_info (dump_file);
523 dump_local_alloc (dump_file);
524 timevar_pop (TV_DUMP);
525 }
526
527 close_dump_file (DFI_lreg, print_rtl_with_bb, get_insns ());
528
529 ggc_collect ();
530
531 timevar_push (TV_GLOBAL_ALLOC);
532 open_dump_file (DFI_greg, current_function_decl);
533
534 /* If optimizing, allocate remaining pseudo-regs. Do the reload
535 pass fixing up any insns that are invalid. */
536
537 if (optimize)
538 failure = global_alloc (dump_file);
539 else
540 {
541 build_insn_chain (get_insns ());
542 failure = reload (get_insns (), 0);
543 }
544
545 if (dump_enabled_p (DFI_greg))
546 {
547 timevar_push (TV_DUMP);
548 dump_global_regs (dump_file);
549 timevar_pop (TV_DUMP);
550
551 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
552 }
553
554 ggc_collect ();
555
556 timevar_pop (TV_GLOBAL_ALLOC);
557
558 return failure;
559 }
560
561 /* Run the regrename and cprop passes. */
562 static void
563 rest_of_handle_regrename (void)
564 {
565 timevar_push (TV_RENAME_REGISTERS);
566 open_dump_file (DFI_rnreg, current_function_decl);
567
568 if (flag_rename_registers)
569 regrename_optimize ();
570 if (flag_cprop_registers)
571 copyprop_hardreg_forward ();
572
573 close_dump_file (DFI_rnreg, print_rtl_with_bb, get_insns ());
574 timevar_pop (TV_RENAME_REGISTERS);
575 }
576
577 /* Reorder basic blocks. */
578 static void
579 rest_of_handle_reorder_blocks (void)
580 {
581 bool changed;
582 unsigned int liveness_flags;
583
584 open_dump_file (DFI_bbro, current_function_decl);
585
 586 /* Last attempt to optimize the CFG, as scheduling, peepholing and insn
 587 splitting may have introduced more crossjumping opportunities. */
588 liveness_flags = (!HAVE_conditional_execution ? CLEANUP_UPDATE_LIFE : 0);
589 changed = cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
590
591 if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
592 tracer (liveness_flags);
593 if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
594 reorder_basic_blocks (liveness_flags);
595 if (flag_reorder_blocks || flag_reorder_blocks_and_partition
596 || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
597 changed |= cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
598
 599 /* On conditional execution targets we cannot update the life cheaply, so
 600 we defer the updating to after both cleanups. This may lose some cases
 601 but should not be terribly bad. */
602 if (changed && HAVE_conditional_execution)
603 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
604 PROP_DEATH_NOTES);
605 close_dump_file (DFI_bbro, print_rtl_with_bb, get_insns ());
606 }
607
608 /* Partition hot and cold basic blocks. */
609 static void
610 rest_of_handle_partition_blocks (void)
611 {
612 no_new_pseudos = 0;
613 partition_hot_cold_basic_blocks ();
614 allocate_reg_life_data ();
615 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
616 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
617 no_new_pseudos = 1;
618 }
619
620 #ifdef INSN_SCHEDULING
621 /* Run instruction scheduler. */
 622 /* Perform SMS (swing modulo scheduling). */
623 static void
624 rest_of_handle_sms (void)
625 {
626 timevar_push (TV_SMS);
627 open_dump_file (DFI_sms, current_function_decl);
628
629 /* We want to be able to create new pseudos. */
630 no_new_pseudos = 0;
631 sms_schedule (dump_file);
632 close_dump_file (DFI_sms, print_rtl, get_insns ());
633
634
635 /* Update the life information, because we add pseudos. */
636 max_regno = max_reg_num ();
637 allocate_reg_info (max_regno, FALSE, FALSE);
638 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
639 (PROP_DEATH_NOTES
640 | PROP_KILL_DEAD_CODE
641 | PROP_SCAN_DEAD_CODE));
642 no_new_pseudos = 1;
643
644 ggc_collect ();
645 timevar_pop (TV_SMS);
646 }
647
648 /* Run instruction scheduler. */
649 static void
650 rest_of_handle_sched (void)
651 {
652 timevar_push (TV_SCHED);
653
 654 /* Print the function header into the sched dump now,
 655 because the sched analysis itself produces part of the dump. */
656 open_dump_file (DFI_sched, current_function_decl);
657
658 /* Do control and data sched analysis,
659 and write some of the results to dump file. */
660
661 schedule_insns (dump_file);
662
663 close_dump_file (DFI_sched, print_rtl_with_bb, get_insns ());
664
665 ggc_collect ();
666 timevar_pop (TV_SCHED);
667 }
668
669 /* Run second scheduling pass after reload. */
670 static void
671 rest_of_handle_sched2 (void)
672 {
673 timevar_push (TV_SCHED2);
674 open_dump_file (DFI_sched2, current_function_decl);
675
676 /* Do control and data sched analysis again,
677 and write some more of the results to dump file. */
678
679 split_all_insns (1);
680
681 if (flag_sched2_use_superblocks || flag_sched2_use_traces)
682 {
683 schedule_ebbs (dump_file);
684 /* No liveness updating code yet, but it should be easy to do.
685 reg-stack recomputes the liveness when needed for now. */
686 count_or_remove_death_notes (NULL, 1);
687 cleanup_cfg (CLEANUP_EXPENSIVE);
688 }
689 else
690 schedule_insns (dump_file);
691
692 close_dump_file (DFI_sched2, print_rtl_with_bb, get_insns ());
693
694 ggc_collect ();
695
696 timevar_pop (TV_SCHED2);
697 }
698 #endif
699
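/* Perform a simplified global CSE pass after reload
   (gcse_after_reload_main) and delete any trivially dead insns it exposes.  */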
700 static void
701 rest_of_handle_gcse2 (void)
702 {
703 timevar_push (TV_GCSE_AFTER_RELOAD);
704 open_dump_file (DFI_gcse2, current_function_decl);
705
706 gcse_after_reload_main (get_insns ());
707 rebuild_jump_labels (get_insns ());
708 delete_trivially_dead_insns (get_insns (), max_reg_num ());
709 close_dump_file (DFI_gcse2, print_rtl_with_bb, get_insns ());
710
711 ggc_collect ();
712
713 #ifdef ENABLE_CHECKING
714 verify_flow_info ();
715 #endif
716
717 timevar_pop (TV_GCSE_AFTER_RELOAD);
718 }
719
720 /* Register allocation pre-pass, to reduce number of moves necessary
721 for two-address machines. */
722 static void
723 rest_of_handle_regmove (void)
724 {
725 timevar_push (TV_REGMOVE);
726 open_dump_file (DFI_regmove, current_function_decl);
727
728 regmove_optimize (get_insns (), max_reg_num (), dump_file);
729
730 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
731 close_dump_file (DFI_regmove, print_rtl_with_bb, get_insns ());
732
733 ggc_collect ();
734 timevar_pop (TV_REGMOVE);
735 }
736
737 /* Run tracer. */
738 static void
739 rest_of_handle_tracer (void)
740 {
741 open_dump_file (DFI_tracer, current_function_decl);
742 if (dump_file)
743 dump_flow_info (dump_file);
744 tracer (0);
745 cleanup_cfg (CLEANUP_EXPENSIVE);
746 reg_scan (get_insns (), max_reg_num (), 0);
747 close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
748 }
749
750 /* If-conversion and CFG cleanup. */
751 static void
752 rest_of_handle_if_conversion (void)
753 {
754 timevar_push (TV_IFCVT);
755 open_dump_file (DFI_ce1, current_function_decl);
756
757 if (flag_if_conversion)
758 {
759 if (dump_file)
760 dump_flow_info (dump_file);
761 cleanup_cfg (CLEANUP_EXPENSIVE);
762 reg_scan (get_insns (), max_reg_num (), 0);
763 if_convert (0);
764 }
765
766 timevar_push (TV_JUMP);
767 cleanup_cfg (CLEANUP_EXPENSIVE);
768 reg_scan (get_insns (), max_reg_num (), 0);
769 timevar_pop (TV_JUMP);
770
771 close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
772 timevar_pop (TV_IFCVT);
773 }
774
775 /* Rerun if-conversion, as combine may have simplified things enough
776 to now meet sequence length restrictions. */
777 static void
778 rest_of_handle_if_after_combine (void)
779 {
780 timevar_push (TV_IFCVT);
781 open_dump_file (DFI_ce2, current_function_decl);
782
783 no_new_pseudos = 0;
784 if_convert (1);
785 no_new_pseudos = 1;
786
787 close_dump_file (DFI_ce2, print_rtl_with_bb, get_insns ());
788 timevar_pop (TV_IFCVT);
789 }
790
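/* Rerun if-conversion after reload, preceded by one last CFG cleanup to
   pick up crossjumping opportunities introduced by the later passes.  */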
791 static void
792 rest_of_handle_if_after_reload (void)
793 {
794 timevar_push (TV_IFCVT2);
795 open_dump_file (DFI_ce3, current_function_decl);
796
 797 /* Last attempt to optimize the CFG, as scheduling, peepholing and insn
 798 splitting may have introduced more crossjumping opportunities. */
799 cleanup_cfg (CLEANUP_EXPENSIVE
800 | CLEANUP_UPDATE_LIFE
801 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
802 if (flag_if_conversion2)
803 if_convert (1);
804 close_dump_file (DFI_ce3, print_rtl_with_bb, get_insns ());
805 timevar_pop (TV_IFCVT2);
806 }
807
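/* Run the web pass: web_main splits independent uses of each pseudo
   register into separate pseudos, so that later optimizations and
   register allocation can treat them independently.  */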
808 static void
809 rest_of_handle_web (void)
810 {
811 open_dump_file (DFI_web, current_function_decl);
812 timevar_push (TV_WEB);
813 web_main ();
814 delete_trivially_dead_insns (get_insns (), max_reg_num ());
815 cleanup_cfg (CLEANUP_EXPENSIVE);
816
817 timevar_pop (TV_WEB);
818 close_dump_file (DFI_web, print_rtl_with_bb, get_insns ());
819 reg_scan (get_insns (), max_reg_num (), 0);
820 }
821
822 /* Do branch profiling and static profile estimation passes. */
823 static void
824 rest_of_handle_branch_prob (void)
825 {
826 struct loops loops;
827
828 timevar_push (TV_BRANCH_PROB);
829 open_dump_file (DFI_bp, current_function_decl);
830
831 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
832 branch_prob ();
833
834 /* Discover and record the loop depth at the head of each basic
835 block. The loop infrastructure does the real job for us. */
836 flow_loops_find (&loops, LOOP_TREE);
837
838 if (dump_file)
839 flow_loops_dump (&loops, dump_file, NULL, 0);
840
841 /* Estimate using heuristics if no profiling info is available. */
842 if (flag_guess_branch_prob)
843 estimate_probability (&loops);
844
845 flow_loops_free (&loops);
846 free_dominance_info (CDI_DOMINATORS);
847 close_dump_file (DFI_bp, print_rtl_with_bb, get_insns ());
848 timevar_pop (TV_BRANCH_PROB);
849 }
850
851 /* Do optimizations based on expression value profiles. */
852 static void
853 rest_of_handle_value_profile_transformations (void)
854 {
855 open_dump_file (DFI_vpt, current_function_decl);
856 timevar_push (TV_VPT);
857
858 if (value_profile_transformations ())
859 cleanup_cfg (CLEANUP_EXPENSIVE);
860
861 timevar_pop (TV_VPT);
862 close_dump_file (DFI_vpt, print_rtl_with_bb, get_insns ());
863 }
864
865 /* Do control and data flow analysis; write some of the results to the
866 dump file. */
867 static void
868 rest_of_handle_cfg (void)
869 {
870 open_dump_file (DFI_cfg, current_function_decl);
871 if (dump_file)
872 dump_flow_info (dump_file);
873 if (optimize)
874 cleanup_cfg (CLEANUP_EXPENSIVE
875 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
876
 877 /* It may make more sense to mark constant functions after dead code is
 878 eliminated by life_analysis, but we need to do it early: -fprofile-arcs
 879 may insert code that makes the function non-constant, yet we must still
 880 consider it constant, otherwise -fbranch-probabilities will not read the data back.
 881 
 882 life_analysis rarely eliminates modification of external memory.
 883 
 884 FIXME: with tree-based profiling we are in the trap described above
 885 again. It seems easiest to disable the optimization for the time being
 886 until the problem is either solved by moving the transformation
 887 to the IPA level (we need the CFG for this) or the very early optimization
 888 passes are made to ignore the const/pure flags so the code does not change. */
889 if (optimize
890 && (!flag_tree_based_profiling
891 || (!profile_arc_flag && !flag_branch_probabilities)))
892 {
893 /* Alias analysis depends on this information and mark_constant_function
894 depends on alias analysis. */
895 reg_scan (get_insns (), max_reg_num (), 1);
896 mark_constant_function ();
897 }
898
899 close_dump_file (DFI_cfg, print_rtl_with_bb, get_insns ());
900 }
901
902 /* Perform jump bypassing and control flow optimizations. */
903 static void
904 rest_of_handle_jump_bypass (void)
905 {
906 timevar_push (TV_BYPASS);
907 open_dump_file (DFI_bypass, current_function_decl);
908
909 cleanup_cfg (CLEANUP_EXPENSIVE);
910 reg_scan (get_insns (), max_reg_num (), 1);
911
912 if (bypass_jumps (dump_file))
913 {
914 rebuild_jump_labels (get_insns ());
915 cleanup_cfg (CLEANUP_EXPENSIVE);
916 delete_trivially_dead_insns (get_insns (), max_reg_num ());
917 }
918
919 close_dump_file (DFI_bypass, print_rtl_with_bb, get_insns ());
920 timevar_pop (TV_BYPASS);
921
922 ggc_collect ();
923
924 #ifdef ENABLE_CHECKING
925 verify_flow_info ();
926 #endif
927 }
928
929 /* Try combining insns through substitution. */
930 static void
931 rest_of_handle_combine (void)
932 {
933 int rebuild_jump_labels_after_combine = 0;
934
935 timevar_push (TV_COMBINE);
936 open_dump_file (DFI_combine, current_function_decl);
937
938 rebuild_jump_labels_after_combine
939 = combine_instructions (get_insns (), max_reg_num ());
940
941 /* Combining insns may have turned an indirect jump into a
942 direct jump. Rebuild the JUMP_LABEL fields of jumping
943 instructions. */
944 if (rebuild_jump_labels_after_combine)
945 {
946 timevar_push (TV_JUMP);
947 rebuild_jump_labels (get_insns ());
948 timevar_pop (TV_JUMP);
949
950 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
951 }
952
953 close_dump_file (DFI_combine, print_rtl_with_bb, get_insns ());
954 timevar_pop (TV_COMBINE);
955
956 ggc_collect ();
957 }
958
959 /* Perform life analysis. */
960 static void
961 rest_of_handle_life (void)
962 {
963 open_dump_file (DFI_life, current_function_decl);
964 regclass_init ();
965
966 #ifdef ENABLE_CHECKING
967 verify_flow_info ();
968 #endif
969 life_analysis (dump_file, PROP_FINAL);
970 if (optimize)
971 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
972 | CLEANUP_LOG_LINKS
973 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
974
975 if (extra_warnings)
976 {
977 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
978 setjmp_args_warning ();
979 }
980
981 if (optimize)
982 {
983 if (!flag_new_regalloc && initialize_uninitialized_subregs ())
984 {
985 /* Insns were inserted, and possibly pseudos created, so
986 things might look a bit different. */
987 allocate_reg_life_data ();
988 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
989 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
990 }
991 }
992
993 no_new_pseudos = 1;
994
995 close_dump_file (DFI_life, print_rtl_with_bb, get_insns ());
996
997 ggc_collect ();
998 }
999
1000 /* Perform common subexpression elimination. Nonzero value from
1001 `cse_main' means that jumps were simplified and some code may now
1002 be unreachable, so do jump optimization again. */
1003 static void
1004 rest_of_handle_cse (void)
1005 {
1006 int tem;
1007
1008 open_dump_file (DFI_cse, current_function_decl);
1009 if (dump_file)
1010 dump_flow_info (dump_file);
1011 timevar_push (TV_CSE);
1012
1013 reg_scan (get_insns (), max_reg_num (), 1);
1014
1015 tem = cse_main (get_insns (), max_reg_num (), dump_file);
1016 if (tem)
1017 rebuild_jump_labels (get_insns ());
1018 if (purge_all_dead_edges (0))
1019 delete_unreachable_blocks ();
1020
1021 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1022
1023 /* If we are not running more CSE passes, then we are no longer
1024 expecting CSE to be run. But always rerun it in a cheap mode. */
1025 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1026
1027 if (tem || optimize > 1)
1028 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1029
1030 timevar_pop (TV_CSE);
1031 close_dump_file (DFI_cse, print_rtl_with_bb, get_insns ());
1032
1033 ggc_collect ();
1034 }
1035
1036 /* Run second CSE pass after loop optimizations. */
1037 static void
1038 rest_of_handle_cse2 (void)
1039 {
1040 int tem;
1041
1042 timevar_push (TV_CSE2);
1043 open_dump_file (DFI_cse2, current_function_decl);
1044 if (dump_file)
1045 dump_flow_info (dump_file);
1046 /* CFG is no longer maintained up-to-date. */
1047 tem = cse_main (get_insns (), max_reg_num (), dump_file);
1048
1049 /* Run a pass to eliminate duplicated assignments to condition code
1050 registers. We have to run this after bypass_jumps, because it
1051 makes it harder for that pass to determine whether a jump can be
1052 bypassed safely. */
1053 cse_condition_code_reg ();
1054
1055 purge_all_dead_edges (0);
1056 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1057
1058 if (tem)
1059 {
1060 timevar_push (TV_JUMP);
1061 rebuild_jump_labels (get_insns ());
1062 cleanup_cfg (CLEANUP_EXPENSIVE);
1063 timevar_pop (TV_JUMP);
1064 }
1065 reg_scan (get_insns (), max_reg_num (), 0);
1066 close_dump_file (DFI_cse2, print_rtl_with_bb, get_insns ());
1067 timevar_pop (TV_CSE2);
1068
1069 ggc_collect ();
1070 }
1071
1072 /* Perform global cse. */
1073 static void
1074 rest_of_handle_gcse (void)
1075 {
1076 int save_csb, save_cfj;
1077 int tem2 = 0, tem;
1078
1079 timevar_push (TV_GCSE);
1080 open_dump_file (DFI_gcse, current_function_decl);
1081
1082 tem = gcse_main (get_insns (), dump_file);
1083 rebuild_jump_labels (get_insns ());
1084 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1085
1086 save_csb = flag_cse_skip_blocks;
1087 save_cfj = flag_cse_follow_jumps;
1088 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1089
1090 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1091 by gcse. */
1092 if (flag_expensive_optimizations)
1093 {
1094 timevar_push (TV_CSE);
1095 reg_scan (get_insns (), max_reg_num (), 1);
1096 tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
1097 purge_all_dead_edges (0);
1098 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1099 timevar_pop (TV_CSE);
1100 cse_not_expected = !flag_rerun_cse_after_loop;
1101 }
1102
1103 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1104 things up. Then possibly re-run CSE again. */
1105 while (tem || tem2)
1106 {
1107 tem = tem2 = 0;
1108 timevar_push (TV_JUMP);
1109 rebuild_jump_labels (get_insns ());
1110 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1111 timevar_pop (TV_JUMP);
1112
1113 if (flag_expensive_optimizations)
1114 {
1115 timevar_push (TV_CSE);
1116 reg_scan (get_insns (), max_reg_num (), 1);
1117 tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
1118 purge_all_dead_edges (0);
1119 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1120 timevar_pop (TV_CSE);
1121 }
1122 }
1123
1124 close_dump_file (DFI_gcse, print_rtl_with_bb, get_insns ());
1125 timevar_pop (TV_GCSE);
1126
1127 ggc_collect ();
1128 flag_cse_skip_blocks = save_csb;
1129 flag_cse_follow_jumps = save_cfj;
1130 #ifdef ENABLE_CHECKING
1131 verify_flow_info ();
1132 #endif
1133 }
1134
1135 /* Move constant computations out of loops. */
1136 static void
1137 rest_of_handle_loop_optimize (void)
1138 {
1139 int do_prefetch;
1140
1141 timevar_push (TV_LOOP);
1142 delete_dead_jumptables ();
1143 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1144 open_dump_file (DFI_loop, current_function_decl);
1145
1146 /* CFG is no longer maintained up-to-date. */
1147 free_bb_for_insn ();
1148 profile_status = PROFILE_ABSENT;
1149
1150 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1151
1152 if (flag_rerun_loop_opt)
1153 {
1154 cleanup_barriers ();
1155
1156 /* We only want to perform unrolling once. */
1157 loop_optimize (get_insns (), dump_file, 0);
1158
1159 /* The first call to loop_optimize makes some instructions
1160 trivially dead. We delete those instructions now in the
1161 hope that doing so will make the heuristics in loop work
1162 better and possibly speed up compilation. */
1163 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1164
1165 /* The regscan pass is currently necessary as the alias
1166 analysis code depends on this information. */
1167 reg_scan (get_insns (), max_reg_num (), 1);
1168 }
1169 cleanup_barriers ();
1170 loop_optimize (get_insns (), dump_file, do_prefetch);
1171
1172 /* Loop can create trivially dead instructions. */
1173 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1174 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1175 close_dump_file (DFI_loop, print_rtl, get_insns ());
1176 timevar_pop (TV_LOOP);
1177
1178 ggc_collect ();
1179 }
1180
1181 /* Perform loop optimizations. It might be better to do them a bit
1182 sooner, but we want the profile feedback to work more
1183 efficiently. */
1184 static void
1185 rest_of_handle_loop2 (void)
1186 {
1187 struct loops *loops;
1188 basic_block bb;
1189
1190 if (!flag_move_loop_invariants
1191 && !flag_unswitch_loops
1192 && !flag_peel_loops
1193 && !flag_unroll_loops
1194 && !flag_branch_on_count_reg)
1195 return;
1196
1197 timevar_push (TV_LOOP);
1198 open_dump_file (DFI_loop2, current_function_decl);
1199 if (dump_file)
1200 dump_flow_info (dump_file);
1201
1202 /* Initialize structures for layout changes. */
1203 cfg_layout_initialize (0);
1204
1205 loops = loop_optimizer_init (dump_file);
1206
1207 if (loops)
1208 {
1209 /* The optimizations: */
1210 if (flag_move_loop_invariants)
1211 move_loop_invariants (loops);
1212
1213 if (flag_unswitch_loops)
1214 unswitch_loops (loops);
1215
1216 if (flag_peel_loops || flag_unroll_loops)
1217 unroll_and_peel_loops (loops,
1218 (flag_peel_loops ? UAP_PEEL : 0) |
1219 (flag_unroll_loops ? UAP_UNROLL : 0) |
1220 (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));
1221
1222 #ifdef HAVE_doloop_end
1223 if (flag_branch_on_count_reg && HAVE_doloop_end)
1224 doloop_optimize_loops (loops);
1225 #endif /* HAVE_doloop_end */
1226
1227 loop_optimizer_finalize (loops, dump_file);
1228 }
1229
1230 free_dominance_info (CDI_DOMINATORS);
1231
1232 /* Finalize layout changes. */
1233 FOR_EACH_BB (bb)
1234 if (bb->next_bb != EXIT_BLOCK_PTR)
1235 bb->rbi->next = bb->next_bb;
1236 cfg_layout_finalize ();
1237
1238 cleanup_cfg (CLEANUP_EXPENSIVE);
1239 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1240 reg_scan (get_insns (), max_reg_num (), 0);
1241 if (dump_file)
1242 dump_flow_info (dump_file);
1243 close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
1244 timevar_pop (TV_LOOP);
1245 ggc_collect ();
1246 }
1247
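/* Run the branch target register load optimizer, warning if the pass
   has been requested both before and after prologue/epilogue threading.  */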
1248 static void
1249 rest_of_handle_branch_target_load_optimize (void)
1250 {
1251 static int warned = 0;
1252
1253 /* Leave this a warning for now so that it is possible to experiment
1254 with running this pass twice. In 3.6, we should either make this
1255 an error, or use separate dump files. */
1256 if (flag_branch_target_load_optimize
1257 && flag_branch_target_load_optimize2
1258 && !warned)
1259 {
1260 warning ("branch target register load optimization is not intended "
1261 "to be run twice");
1262
1263 warned = 1;
1264 }
1265
1266 open_dump_file (DFI_branch_target_load, current_function_decl);
1267 branch_target_load_optimize (epilogue_completed);
1268 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1269 ggc_collect ();
1270 }
1271
1272 #ifdef OPTIMIZE_MODE_SWITCHING
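/* Run mode switching optimization: insert the mode-setting insns the
   target requires (see optimize_mode_switching).  */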
1273 static void
1274 rest_of_handle_mode_switching (void)
1275 {
1276 timevar_push (TV_MODE_SWITCH);
1277
1278 no_new_pseudos = 0;
1279 optimize_mode_switching (NULL);
1280 no_new_pseudos = 1;
1281
1282 timevar_pop (TV_MODE_SWITCH);
1283 }
1284 #endif
1285
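/* Build the CFG if it is not available yet, delete unreachable blocks
   and fix up any tail calls that were emitted.  */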
1286 static void
1287 rest_of_handle_jump (void)
1288 {
1289 ggc_collect ();
1290
1291 timevar_push (TV_JUMP);
1292 open_dump_file (DFI_sibling, current_function_decl);
1293
1294 /* ??? We may get called either via tree_rest_of_compilation when the CFG
1295 is already built or directly (for instance from coverage code).
1296 The direct callers shall be updated. */
1297 if (!basic_block_info)
1298 {
1299 init_flow ();
1300 rebuild_jump_labels (get_insns ());
1301 find_exception_handler_labels ();
1302 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1303 }
1304
1305 /* ??? We may get called either via tree_rest_of_compilation when the CFG
1306 is already built or directly (for instance from coverage code).
1307 The direct callers shall be updated. */
1308 if (!basic_block_info)
1309 {
1310 init_flow ();
1311 rebuild_jump_labels (get_insns ());
1312 find_exception_handler_labels ();
1313 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1314 }
1315 delete_unreachable_blocks ();
1316 #ifdef ENABLE_CHECKING
1317 verify_flow_info ();
1318 #endif
1319
1320 if (cfun->tail_call_emit)
1321 fixup_tail_calls ();
1322
1323 close_dump_file (DFI_sibling, print_rtl, get_insns ());
1324 timevar_pop (TV_JUMP);
1325 }
1326
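/* Initialize insn locators and complete the generation of exception
   handling code.  */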
1327 static void
1328 rest_of_handle_eh (void)
1329 {
1330 insn_locators_initialize ();
1331 /* Complete generation of exception handling code. */
1332 if (doing_eh (0))
1333 {
1334 timevar_push (TV_JUMP);
1335 open_dump_file (DFI_eh, current_function_decl);
1336
1337 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
1338
1339 finish_eh_generation ();
1340
1341 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
1342
1343 close_dump_file (DFI_eh, print_rtl, get_insns ());
1344 timevar_pop (TV_JUMP);
1345 }
1346 }
1347
1348
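/* Thread prologue and epilogue insns into the function body so that
   later passes can schedule insns around and into them.  */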
1349 static void
1350 rest_of_handle_prologue_epilogue (void)
1351 {
1352 if (optimize && !flow2_completed)
1353 cleanup_cfg (CLEANUP_EXPENSIVE);
1354
1355 /* On some machines, the prologue and epilogue code, or parts thereof,
1356 can be represented as RTL. Doing so lets us schedule insns between
1357 it and the rest of the code and also allows delayed branch
1358 scheduling to operate in the epilogue. */
1359 thread_prologue_and_epilogue_insns (get_insns ());
1360 epilogue_completed = 1;
1361
1362 if (optimize && flow2_completed)
1363 life_analysis (dump_file, PROP_POSTRELOAD);
1364 }
1365
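/* Recompute life information after reload and combine adjacent stack
   adjustments where the target allows it.  */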
1366 static void
1367 rest_of_handle_stack_adjustments (void)
1368 {
1369 life_analysis (dump_file, PROP_POSTRELOAD);
1370 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
1371 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1372
1373 /* This is kind of a heuristic. We need to run combine_stack_adjustments
1374 even for machines with possibly nonzero RETURN_POPS_ARGS
1375 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
1376 push instructions will have popping returns. */
1377 #ifndef PUSH_ROUNDING
1378 if (!ACCUMULATE_OUTGOING_ARGS)
1379 #endif
1380 combine_stack_adjustments ();
1381 }
1382
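/* The "flow2" pass: split remaining insns and run the late branch target
   load, prologue/epilogue and stack adjustment handling as needed.  */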
1383 static void
1384 rest_of_handle_flow2 (void)
1385 {
1386 timevar_push (TV_FLOW2);
1387 open_dump_file (DFI_flow2, current_function_decl);
1388
1389 /* Re-create the death notes which were deleted during reload. */
1390 #ifdef ENABLE_CHECKING
1391 verify_flow_info ();
1392 #endif
1393
1394 /* If optimizing, then go ahead and split insns now. */
1395 #ifndef STACK_REGS
1396 if (optimize > 0)
1397 #endif
1398 split_all_insns (0);
1399
1400 if (flag_branch_target_load_optimize)
1401 rest_of_handle_branch_target_load_optimize ();
1402
1403 if (!targetm.late_rtl_prologue_epilogue)
1404 rest_of_handle_prologue_epilogue ();
1405
1406 if (optimize)
1407 rest_of_handle_stack_adjustments ();
1408
1409 flow2_completed = 1;
1410
1411 close_dump_file (DFI_flow2, print_rtl_with_bb, get_insns ());
1412 timevar_pop (TV_FLOW2);
1413
1414 ggc_collect ();
1415 }
1416
1417
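/* Run jump optimization: turn expected-value notes into branch
   probabilities, clean up the CFG, create loop notes and renumber the
   insns to keep UIDs small.  */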
1418 static void
1419 rest_of_handle_jump2 (void)
1420 {
1421 open_dump_file (DFI_jump, current_function_decl);
1422
1423 /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
1424 are initialized and to compute whether control can drop off the end
1425 of the function. */
1426
1427 timevar_push (TV_JUMP);
1428 /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
1429 before jump optimization switches branch directions. */
1430 if (flag_guess_branch_prob)
1431 expected_value_to_br_prob ();
1432
1433 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1434 reg_scan (get_insns (), max_reg_num (), 0);
1435 if (dump_file)
1436 dump_flow_info (dump_file);
1437 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
1438 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1439
1440 create_loop_notes ();
1441
1442 purge_line_number_notes (get_insns ());
1443
1444 if (optimize)
1445 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1446
1447 /* Jump optimization, and the removal of NULL pointer checks, may
1448 have reduced the number of instructions substantially. CSE, and
1449 future passes, allocate arrays whose dimensions involve the
1450 maximum instruction UID, so if we can reduce the maximum UID
1451 we'll save big on memory. */
1452 renumber_insns (dump_file);
1453
1454 close_dump_file (DFI_jump, print_rtl_with_bb, get_insns ());
1455 timevar_pop (TV_JUMP);
1456
1457 ggc_collect ();
1458 }
1459
1460 #ifdef HAVE_peephole2
1461 static void
1462 rest_of_handle_peephole2 (void)
1463 {
1464 timevar_push (TV_PEEPHOLE2);
1465 open_dump_file (DFI_peephole2, current_function_decl);
1466
1467 peephole2_optimize (dump_file);
1468
1469 close_dump_file (DFI_peephole2, print_rtl_with_bb, get_insns ());
1470 timevar_pop (TV_PEEPHOLE2);
1471 }
1472 #endif
1473
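/* Run a simple post-reload CSE pass over the hard registers
   (reload_cse_regs) and purge any EH edges that become dead.  */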
1474 static void
1475 rest_of_handle_postreload (void)
1476 {
1477 timevar_push (TV_RELOAD_CSE_REGS);
1478 open_dump_file (DFI_postreload, current_function_decl);
1479
1480 /* Do a very simple CSE pass over just the hard registers. */
1481 reload_cse_regs (get_insns ());
1482 /* reload_cse_regs can eliminate potentially-trapping MEMs.
1483 Remove any EH edges associated with them. */
1484 if (flag_non_call_exceptions)
1485 purge_all_dead_edges (0);
1486
1487 close_dump_file (DFI_postreload, print_rtl_with_bb, get_insns ());
1488 timevar_pop (TV_RELOAD_CSE_REGS);
1489 }
1490
1491 static void
1492 rest_of_handle_shorten_branches (void)
1493 {
1494 /* Shorten branches. */
1495 timevar_push (TV_SHORTEN_BRANCH);
1496 shorten_branches (get_insns ());
1497 timevar_pop (TV_SHORTEN_BRANCH);
1498 }
1499
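/* Reset per-function state and release the memory used while compiling
   the current function.  */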
1500 static void
1501 rest_of_clean_state (void)
1502 {
1503 rtx insn, next;
1504 coverage_end_function ();
1505
 1506 /* It is very important to decompose the RTL instruction chain here:
 1507 debug information keeps pointing into CODE_LABEL insns inside the function
 1508 body. If these remain pointing to the other insns, we end up preserving the
 1509 whole RTL chain and the attached detailed debug info in memory. */
1510 for (insn = get_insns (); insn; insn = next)
1511 {
1512 next = NEXT_INSN (insn);
1513 NEXT_INSN (insn) = NULL;
1514 PREV_INSN (insn) = NULL;
1515 }
1516
1517 /* In case the function was not output,
1518 don't leave any temporary anonymous types
1519 queued up for sdb output. */
1520 #ifdef SDB_DEBUGGING_INFO
1521 if (write_symbols == SDB_DEBUG)
1522 sdbout_types (NULL_TREE);
1523 #endif
1524
1525 reload_completed = 0;
1526 epilogue_completed = 0;
1527 flow2_completed = 0;
1528 no_new_pseudos = 0;
1529
1530 timevar_push (TV_FINAL);
1531
1532 /* Clear out the insn_length contents now that they are no
1533 longer valid. */
1534 init_insn_lengths ();
1535
1536 /* Show no temporary slots allocated. */
1537 init_temp_slots ();
1538
1539 free_basic_block_vars ();
1540 free_bb_for_insn ();
1541
1542 timevar_pop (TV_FINAL);
1543
1544 if (targetm.binds_local_p (current_function_decl))
1545 {
1546 int pref = cfun->preferred_stack_boundary;
1547 if (cfun->recursive_call_emit
1548 && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
1549 pref = cfun->stack_alignment_needed;
1550 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
1551 = pref;
1552 }
1553
1554 /* Make sure volatile mem refs aren't considered valid operands for
1555 arithmetic insns. We must call this here if this is a nested inline
1556 function, since the above code leaves us in the init_recog state
1557 (from final.c), and the function context push/pop code does not
1558 save/restore volatile_ok.
1559
1560 ??? Maybe it isn't necessary for expand_start_function to call this
1561 anymore if we do it here? */
1562
1563 init_recog_no_volatile ();
1564
1565 /* We're done with this function. Free up memory if we can. */
1566 free_after_parsing (cfun);
1567 free_after_compilation (cfun);
1568 }
1569 \f
1570
1571 /* This function is called from the pass manager in tree-optimize.c
1572 after all tree passes have finished for a single function, and we
1573 have expanded the function body from trees to RTL.
1574 Once we are here, we have decided that we're supposed to output
1575 that function, i.e. that we should write assembler code for it.
1576
1577 We run a series of low-level passes here on the function's RTL
1578 representation. Each pass is called via a rest_of_* function. */
1579
1580 void
1581 rest_of_compilation (void)
1582 {
1583 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1584 sorts of eh initialization. */
1585 convert_from_eh_region_ranges ();
1586
1587 /* If we're emitting a nested function, make sure its parent gets
1588 emitted as well. Doing otherwise confuses debug info. */
1589 {
1590 tree parent;
1591 for (parent = DECL_CONTEXT (current_function_decl);
1592 parent != NULL_TREE;
1593 parent = get_containing_scope (parent))
1594 if (TREE_CODE (parent) == FUNCTION_DECL)
1595 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1596 }
1597
 1598 /* We are now committed to emitting code for this function. Do any
 1599 preparation, such as emitting abstract debug info for the inline function
 1600 before it gets mangled by optimization. */
1601 if (cgraph_function_possibly_inlined_p (current_function_decl))
1602 (*debug_hooks->outlining_inline_function) (current_function_decl);
1603
1604 /* Remove any notes we don't need. That will make iterating
1605 over the instruction sequence faster, and allow the garbage
1606 collector to reclaim the memory used by the notes. */
1607 remove_unnecessary_notes ();
1608
1609 /* Initialize some variables used by the optimizers. */
1610 init_function_for_compilation ();
1611
1612 TREE_ASM_WRITTEN (current_function_decl) = 1;
1613
1614 /* Early return if there were errors. We can run afoul of our
1615 consistency checks, and there's not really much point in fixing them. */
1616 if (rtl_dump_and_exit || flag_syntax_only || errorcount || sorrycount)
1617 goto exit_rest_of_compilation;
1618
1619 rest_of_handle_jump ();
1620
1621 rest_of_handle_eh ();
1622
1623 /* Delay emitting hard_reg_initial_value sets until after EH landing pad
1624 generation, which might create new sets. */
1625 emit_initial_value_sets ();
1626
1627 #ifdef FINALIZE_PIC
1628 /* If we are doing position-independent code generation, now
1629 is the time to output special prologues and epilogues.
1630 We do not want to do this earlier, because it just clutters
1631 up inline functions with meaningless insns. */
1632 if (flag_pic)
1633 FINALIZE_PIC;
1634 #endif
1635
1636 /* Copy any shared structure that should not be shared. */
1637 unshare_all_rtl ();
1638
1639 #ifdef SETJMP_VIA_SAVE_AREA
1640 /* This must be performed before virtual register instantiation.
1641 Please be aware that everything in the compiler that can look
1642 at the RTL up to this point must understand that REG_SAVE_AREA
1643 is just like a use of the REG contained inside. */
1644 if (current_function_calls_alloca)
1645 optimize_save_area_alloca ();
1646 #endif
1647
1648 /* Instantiate all virtual registers. */
1649 instantiate_virtual_regs ();
1650
1651 rest_of_handle_jump2 ();
1652
1653 if (optimize > 0)
1654 rest_of_handle_cse ();
1655
1656 if (optimize > 0)
1657 {
1658 if (flag_gcse)
1659 rest_of_handle_gcse ();
1660
1661 if (flag_loop_optimize)
1662 rest_of_handle_loop_optimize ();
1663
1664 if (flag_gcse)
1665 rest_of_handle_jump_bypass ();
1666 }
1667
1668 timevar_push (TV_FLOW);
1669 rest_of_handle_cfg ();
1670
1671 if (!flag_tree_based_profiling
1672 && (optimize > 0 || profile_arc_flag
1673 || flag_test_coverage || flag_branch_probabilities))
1674 {
1675 rtl_register_profile_hooks ();
1676 rtl_register_value_prof_hooks ();
1677 rest_of_handle_branch_prob ();
1678
1679 if (flag_branch_probabilities
1680 && flag_profile_values
1681 && (flag_value_profile_transformations
1682 || flag_speculative_prefetching))
1683 rest_of_handle_value_profile_transformations ();
1684
1685 /* Remove the death notes created for vpt. */
1686 if (flag_profile_values)
1687 count_or_remove_death_notes (NULL, 1);
1688 }
1689
1690 if (optimize > 0)
1691 rest_of_handle_if_conversion ();
1692
1693 if (optimize > 0 && flag_tracer)
1694 rest_of_handle_tracer ();
1695
1696 if (optimize > 0
1697 && flag_loop_optimize2)
1698 rest_of_handle_loop2 ();
1699
1700 if (optimize > 0 && flag_web)
1701 rest_of_handle_web ();
1702
1703 if (optimize > 0 && flag_rerun_cse_after_loop)
1704 rest_of_handle_cse2 ();
1705
1706 cse_not_expected = 1;
1707
1708 rest_of_handle_life ();
1709 timevar_pop (TV_FLOW);
1710
1711 if (optimize > 0)
1712 rest_of_handle_combine ();
1713
1714 if (optimize > 0 && flag_if_conversion)
1715 rest_of_handle_if_after_combine ();
1716
1717 /* The optimization to partition hot/cold basic blocks into separate
1718 sections of the .o file does not work well with linkonce or with
1719 user defined section attributes. Don't call it if either case
1720 arises. */
1721
1722 if (flag_reorder_blocks_and_partition
1723 && !DECL_ONE_ONLY (current_function_decl)
1724 && !user_defined_section_attribute)
1725 rest_of_handle_partition_blocks ();
1726
1727 if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
1728 rest_of_handle_regmove ();
1729
 1730 /* Do unconditional splitting before register allocation to allow the machine
 1731 description to add extra information not needed previously. */
1732 split_all_insns (1);
1733
1734 #ifdef OPTIMIZE_MODE_SWITCHING
1735 rest_of_handle_mode_switching ();
1736 #endif
1737
1738 /* Any of the several passes since flow1 will have munged register
1739 lifetime data a bit. We need it to be up to date for scheduling
1740 (see handling of reg_known_equiv in init_alias_analysis). */
1741 recompute_reg_usage (get_insns (), !optimize_size);
1742
1743 #ifdef INSN_SCHEDULING
1744 if (optimize > 0 && flag_modulo_sched)
1745 rest_of_handle_sms ();
1746
1747 if (flag_schedule_insns)
1748 rest_of_handle_sched ();
1749 #endif
1750
 1751 /* Determine if the current function is a leaf before running reload,
 1752 since this can impact optimizations done by the prologue and
 1753 epilogue, thus changing register elimination offsets. */
1754 current_function_is_leaf = leaf_function_p ();
1755
1756 if (flag_new_regalloc)
1757 {
1758 if (rest_of_handle_new_regalloc ())
1759 goto exit_rest_of_compilation;
1760 }
1761 else
1762 {
1763 if (rest_of_handle_old_regalloc ())
1764 goto exit_rest_of_compilation;
1765 }
1766
1767 if (optimize > 0)
1768 rest_of_handle_postreload ();
1769
1770 if (optimize > 0 && flag_gcse_after_reload)
1771 rest_of_handle_gcse2 ();
1772
1773 rest_of_handle_flow2 ();
1774
1775 #ifdef HAVE_peephole2
1776 if (optimize > 0 && flag_peephole2)
1777 rest_of_handle_peephole2 ();
1778 #endif
1779
1780 if (optimize > 0)
1781 rest_of_handle_if_after_reload ();
1782
1783 if (optimize > 0)
1784 {
1785 if (flag_rename_registers || flag_cprop_registers)
1786 rest_of_handle_regrename ();
1787
1788 rest_of_handle_reorder_blocks ();
1789 }
1790
1791 if (flag_branch_target_load_optimize2)
1792 rest_of_handle_branch_target_load_optimize ();
1793
1794 #ifdef LEAF_REGISTERS
1795 current_function_uses_only_leaf_regs
1796 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
1797 #endif
1798
1799 if (targetm.late_rtl_prologue_epilogue)
1800 rest_of_handle_prologue_epilogue ();
1801
1802 #ifdef INSN_SCHEDULING
1803 if (optimize > 0 && flag_schedule_insns_after_reload)
1804 rest_of_handle_sched2 ();
1805 #endif
1806
1807 #ifdef STACK_REGS
1808 rest_of_handle_stack_regs ();
1809 #endif
1810
1811 compute_alignments ();
1812
1813 if (flag_var_tracking)
1814 rest_of_handle_variable_tracking ();
1815
1816 /* CFG is no longer maintained up-to-date. */
1817 free_bb_for_insn ();
1818
1819 if (targetm.machine_dependent_reorg != 0)
1820 rest_of_handle_machine_reorg ();
1821
1822 purge_line_number_notes (get_insns ());
1823 cleanup_barriers ();
1824
1825 #ifdef DELAY_SLOTS
1826 if (flag_delayed_branch)
1827 rest_of_handle_delay_slots ();
1828 #endif
1829
1830 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
1831 timevar_push (TV_SHORTEN_BRANCH);
1832 split_all_insns_noflow ();
1833 timevar_pop (TV_SHORTEN_BRANCH);
1834 #endif
1835
1836 convert_to_eh_region_ranges ();
1837
1838 rest_of_handle_shorten_branches ();
1839
1840 set_nothrow_function_flags ();
1841
1842 rest_of_handle_final ();
1843
1844 exit_rest_of_compilation:
1845
1846 rest_of_clean_state ();
1847 }
1848
1849 void
1850 finish_optimization_passes (void)
1851 {
1852 enum tree_dump_index i;
1853 struct dump_file_info *dfi;
1854 char *name;
1855
1856 timevar_push (TV_DUMP);
1857 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1858 {
1859 open_dump_file (DFI_bp, NULL);
1860 end_branch_prob ();
1861 close_dump_file (DFI_bp, NULL, NULL_RTX);
1862 }
1863
1864 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
1865 {
1866 dump_combine_total_stats (dump_file);
1867 close_dump_file (DFI_combine, NULL, NULL_RTX);
1868 }
1869
1870 /* Do whatever is necessary to finish printing the graphs. */
1871 if (graph_dump_format != no_graph)
1872 for (i = DFI_MIN; (dfi = get_dump_file_info (i)) != NULL; ++i)
1873 if (dump_initialized_p (i)
1874 && (dfi->flags & TDF_RTL) != 0
1875 && (name = get_dump_file_name (i)) != NULL)
1876 {
1877 finish_graph_dump_file (name);
1878 free (name);
1879 }
1880
1881 timevar_pop (TV_DUMP);
1882 }
1883
1884 struct tree_opt_pass pass_rest_of_compilation =
1885 {
1886 NULL, /* name */
1887 NULL, /* gate */
1888 rest_of_compilation, /* execute */
1889 NULL, /* sub */
1890 NULL, /* next */
1891 0, /* static_pass_number */
1892 TV_REST_OF_COMPILATION, /* tv_id */
1893 PROP_rtl, /* properties_required */
1894 0, /* properties_provided */
1895 PROP_rtl, /* properties_destroyed */
1896 0, /* todo_flags_start */
1897 TODO_ggc_collect, /* todo_flags_finish */
1898 0 /* letter */
1899 };
1900
1901