/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;
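
/* For intuition, an illustrative sketch (not code from this file): given

     switch (i) { case 1: case 2: x = 5; break; case 3: return; }

   the map would associate the edge into the "x = 5" block with the
   chain of case labels 1 -> 2, and the edge into the "return" block
   with the single-element chain 3.  Redirecting either edge can then
   update exactly the affected CASE_LABEL_EXPRs without rescanning the
   whole case vector.  */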

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store the last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for an ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi))
            continue;
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;
          if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
              != annot_expr_ivdep_kind)
            continue;
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
          loop->safelen = INT_MAX;
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
          != annot_expr_ivdep_kind)
        continue;
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
                  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
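
/* For reference, an illustrative sketch only: source such as

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches this point with an IFN_ANNOTATE internal call just before the
   loop condition.  The code above strips that call and records the lack
   of loop-carried dependences by setting loop->safelen to INT_MAX.  */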

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
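
/* E.g. (a sketch): "goto *fn_table[i];" is a computed goto because its
   destination is a pointer value rather than a LABEL_DECL, whereas a
   plain "goto done;" is not.  */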

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt))
            {
              gsi_next_nondebug (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
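
/* Sketch of the effect: blocks originally ending in

     goto *p1;          goto *p2;

   are rewritten as

     gotovar = p1;      gotovar = p2;
     goto <L>;          goto <L>;
     ...
     <L>: goto *gotovar;

   so only the single factored block carries abnormal edges to every
   potential destination, instead of every computed goto doing so.  */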

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
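
/* For intuition (a sketch): given the statement sequence

     x = a + b;  if (x > 0) goto L1; else goto L2;  L1: y = 1;  ...

   the GIMPLE_COND ends one block (stmt_ends_bb_p) and the label L1
   starts the next one (stmt_starts_bb_p), so the sequence is split
   into blocks at exactly those two points.  */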


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
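
/* Example (a sketch): for a one-line construct such as

     if (p) x = 1; else x = 2;

   the condition block and both arm blocks share the same source line.
   The arms receive distinct nonzero discriminators, so a sample-based
   profiler can attribute counts to each arm separately even though
   they map to the same line.  */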

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
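
/* Illustrative sketch of the two transformations above: a case vector

     case 1: case 2: case 3: goto A;  case 5: goto DEFAULT_DEST;

   is compressed to

     case 1 ... 3: goto A;

   because 1, 2 and 3 jump to the same place with consecutive values,
   and case 5 is dropped for sharing the default's destination.  */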

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1927
1928
1929 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1930 predicate VAL, return the edge that will be taken out of the block.
1931 If VAL does not match a unique edge, NULL is returned. */
1932
1933 edge
1934 find_taken_edge (basic_block bb, tree val)
1935 {
1936 gimple stmt;
1937
1938 stmt = last_stmt (bb);
1939
1940 gcc_assert (stmt);
1941 gcc_assert (is_ctrl_stmt (stmt));
1942
1943 if (val == NULL)
1944 return NULL;
1945
1946 if (!is_gimple_min_invariant (val))
1947 return NULL;
1948
1949 if (gimple_code (stmt) == GIMPLE_COND)
1950 return find_taken_edge_cond_expr (bb, val);
1951
1952 if (gimple_code (stmt) == GIMPLE_SWITCH)
1953 return find_taken_edge_switch_expr (bb, val);
1954
1955 if (computed_goto_p (stmt))
1956 {
1957 /* Only optimize if the argument is a label, if the argument is
1958 not a label then we can not construct a proper CFG.
1959
1960 It may be the case that we only need to allow the LABEL_REF to
1961 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1962 appear inside a LABEL_EXPR just to be safe. */
1963 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1964 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1965 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1966 return NULL;
1967 }
1968
1969 gcc_unreachable ();
1970 }
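/* Illustrative sketch (not part of this file's logic): the kind of GNU C
   source that produces the computed-goto case handled above.  The label
   address &&even is an ADDR_EXPR wrapping a LABEL_DECL, so once the goto
   operand folds to such a constant the taken edge is known.  */
#if 0
int
dispatch (int i)
{
  static void *targets[] = { &&even, &&odd };	/* GNU C labels as values.  */
  goto *targets[i & 1];				/* Computed goto.  */
 even:
  return 0;
 odd:
  return 1;
}
#endif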
1971
1972 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1973 statement, determine which of the outgoing edges will be taken out of the
1974 block. Return NULL if any edge may be taken. */
1975
1976 static edge
1977 find_taken_edge_computed_goto (basic_block bb, tree val)
1978 {
1979 basic_block dest;
1980 edge e = NULL;
1981
1982 dest = label_to_block (val);
1983 if (dest)
1984 {
1985 e = find_edge (bb, dest);
1986 gcc_assert (e != NULL);
1987 }
1988
1989 return e;
1990 }
1991
1992 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1993 statement, determine which of the two edges will be taken out of the
1994 block. Return NULL if either edge may be taken. */
1995
1996 static edge
1997 find_taken_edge_cond_expr (basic_block bb, tree val)
1998 {
1999 edge true_edge, false_edge;
2000
2001 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2002
2003 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2004 return (integer_zerop (val) ? false_edge : true_edge);
2005 }
2006
2007 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2008 statement, determine which edge will be taken out of the block. Return
2009 NULL if any edge may be taken. */
2010
2011 static edge
2012 find_taken_edge_switch_expr (basic_block bb, tree val)
2013 {
2014 basic_block dest_bb;
2015 edge e;
2016 gimple switch_stmt;
2017 tree taken_case;
2018
2019 switch_stmt = last_stmt (bb);
2020 taken_case = find_case_label_for_value (switch_stmt, val);
2021 dest_bb = label_to_block (CASE_LABEL (taken_case));
2022
2023 e = find_edge (bb, dest_bb);
2024 gcc_assert (e);
2025 return e;
2026 }
2027
2028
2029 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2030 We can make optimal use here of the fact that the case labels are
2031 sorted: We can do a binary search for a case matching VAL. */
2032
2033 static tree
2034 find_case_label_for_value (gimple switch_stmt, tree val)
2035 {
2036 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2037 tree default_case = gimple_switch_default_label (switch_stmt);
2038
2039 for (low = 0, high = n; high - low > 1; )
2040 {
2041 size_t i = (high + low) / 2;
2042 tree t = gimple_switch_label (switch_stmt, i);
2043 int cmp;
2044
2045 /* Cache the result of comparing CASE_LOW and val. */
2046 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2047
2048 if (cmp > 0)
2049 high = i;
2050 else
2051 low = i;
2052
2053 if (CASE_HIGH (t) == NULL)
2054 {
2055 /* A single-valued case label. */
2056 if (cmp == 0)
2057 return t;
2058 }
2059 else
2060 {
2061 /* A case range. We can only handle integer ranges. */
2062 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2063 return t;
2064 }
2065 }
2066
2067 return default_case;
2068 }
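/* A minimal standalone sketch of the same binary search, assuming a plain
   sorted array of non-overlapping ranges instead of a GIMPLE_SWITCH label
   vector; the struct and function names here are hypothetical.  */
#if 0
struct case_range { int low, high; };	/* high == low for single values.  */

static int
find_case_index (const struct case_range *r, size_t n, int val)
{
  size_t lo = 0, hi = n;	/* Candidate ranges live in [lo, hi).  */
  while (hi > lo)
    {
      size_t i = lo + (hi - lo) / 2;
      if (val < r[i].low)
	hi = i;			/* Match, if any, is left of I.  */
      else if (val > r[i].high)
	lo = i + 1;		/* Match, if any, is right of I.  */
      else
	return (int) i;		/* r[i].low <= val <= r[i].high.  */
    }
  return -1;			/* No match: caller uses the default label.  */
}
#endif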
2069
2070
2071 /* Dump a basic block on stderr. */
2072
2073 void
2074 gimple_debug_bb (basic_block bb)
2075 {
2076 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2077 }
2078
2079
2080 /* Dump basic block with index N on stderr. */
2081
2082 basic_block
2083 gimple_debug_bb_n (int n)
2084 {
2085 gimple_debug_bb (BASIC_BLOCK (n));
2086 return BASIC_BLOCK (n);
2087 }
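/* These DEBUG_FUNCTION entry points are mainly meant to be called by hand
   from the debugger, e.g. (gdb) call gimple_debug_bb_n (3) -- an
   illustrative invocation, not something the compiler itself does.  */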
2088
2089
2090 /* Dump the CFG on stderr.
2091
2092 FLAGS are the same as those used by the tree dumping functions
2093 (see TDF_* in dumpfile.h). */
2094
2095 void
2096 gimple_debug_cfg (int flags)
2097 {
2098 gimple_dump_cfg (stderr, flags);
2099 }
2100
2101
2102 /* Dump the program showing basic block boundaries on the given FILE.
2103
2104 FLAGS are the same as those used by the tree dumping functions
2105 (see TDF_* in dumpfile.h). */
2106
2107 void
2108 gimple_dump_cfg (FILE *file, int flags)
2109 {
2110 if (flags & TDF_DETAILS)
2111 {
2112 dump_function_header (file, current_function_decl, flags);
2113 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2114 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2115 last_basic_block);
2116
2117 brief_dump_cfg (file, flags | TDF_COMMENT);
2118 fprintf (file, "\n");
2119 }
2120
2121 if (flags & TDF_STATS)
2122 dump_cfg_stats (file);
2123
2124 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2125 }
2126
2127
2128 /* Dump CFG statistics on FILE. */
2129
2130 void
2131 dump_cfg_stats (FILE *file)
2132 {
2133 static long max_num_merged_labels = 0;
2134 unsigned long size, total = 0;
2135 long num_edges;
2136 basic_block bb;
2137 const char * const fmt_str = "%-30s%-13s%12s\n";
2138 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2139 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2140 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2141 const char *funcname = current_function_name ();
2142
2143 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2144
2145 fprintf (file, "---------------------------------------------------------\n");
2146 fprintf (file, fmt_str, "", " Number of ", "Memory");
2147 fprintf (file, fmt_str, "", " instances ", "used ");
2148 fprintf (file, "---------------------------------------------------------\n");
2149
2150 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2151 total += size;
2152 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2153 SCALE (size), LABEL (size));
2154
2155 num_edges = 0;
2156 FOR_EACH_BB (bb)
2157 num_edges += EDGE_COUNT (bb->succs);
2158 size = num_edges * sizeof (struct edge_def);
2159 total += size;
2160 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2161
2162 fprintf (file, "---------------------------------------------------------\n");
2163 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2164 LABEL (total));
2165 fprintf (file, "---------------------------------------------------------\n");
2166 fprintf (file, "\n");
2167
2168 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2169 max_num_merged_labels = cfg_stats.num_merged_labels;
2170
2171 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2172 cfg_stats.num_merged_labels, max_num_merged_labels);
2173
2174 fprintf (file, "\n");
2175 }
2176
2177
2178 /* Dump CFG statistics on stderr. Keep extern so that it's always
2179 linked in the final executable. */
2180
2181 DEBUG_FUNCTION void
2182 debug_cfg_stats (void)
2183 {
2184 dump_cfg_stats (stderr);
2185 }
2186
2187 /*---------------------------------------------------------------------------
2188 Miscellaneous helpers
2189 ---------------------------------------------------------------------------*/
2190
2191 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2192 flow. Transfers of control flow associated with EH are excluded. */
2193
2194 static bool
2195 call_can_make_abnormal_goto (gimple t)
2196 {
2197 /* If the function has no non-local labels and does not call setjmp,
2198 then a call cannot make an abnormal transfer of control.
2199 if (!cfun->has_nonlocal_label
2200 && !cfun->calls_setjmp)
2201 return false;
2202
2203 /* Likewise if the call has no side effects. */
2204 if (!gimple_has_side_effects (t))
2205 return false;
2206
2207 /* Likewise if the called function is leaf. */
2208 if (gimple_call_flags (t) & ECF_LEAF)
2209 return false;
2210
2211 return true;
2212 }
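/* Illustrative sketch of the setjmp case tested above: the call to g ()
   below has side effects, is not leaf, and may return control to the
   setjmp site abnormally.  The names f, g and env are hypothetical.  */
#if 0
#include <setjmp.h>

static jmp_buf env;

static void
g (void)
{
  longjmp (env, 1);		/* Abnormal transfer back to the setjmp.  */
}

int
f (void)
{
  if (setjmp (env))		/* Sets cfun->calls_setjmp.  */
    return 1;
  g ();				/* This call can make an abnormal goto.  */
  return 0;
}
#endif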
2213
2214
2215 /* Return true if T can make an abnormal transfer of control flow.
2216 Transfers of control flow associated with EH are excluded. */
2217
2218 bool
2219 stmt_can_make_abnormal_goto (gimple t)
2220 {
2221 if (computed_goto_p (t))
2222 return true;
2223 if (is_gimple_call (t))
2224 return call_can_make_abnormal_goto (t);
2225 return false;
2226 }
2227
2228
2229 /* Return true if T represents a stmt that always transfers control. */
2230
2231 bool
2232 is_ctrl_stmt (gimple t)
2233 {
2234 switch (gimple_code (t))
2235 {
2236 case GIMPLE_COND:
2237 case GIMPLE_SWITCH:
2238 case GIMPLE_GOTO:
2239 case GIMPLE_RETURN:
2240 case GIMPLE_RESX:
2241 return true;
2242 default:
2243 return false;
2244 }
2245 }
2246
2247
2248 /* Return true if T is a statement that may alter the flow of control
2249 (e.g., a call to a non-returning function). */
2250
2251 bool
2252 is_ctrl_altering_stmt (gimple t)
2253 {
2254 gcc_assert (t);
2255
2256 switch (gimple_code (t))
2257 {
2258 case GIMPLE_CALL:
2259 {
2260 int flags = gimple_call_flags (t);
2261
2262 /* A call alters control flow if it can make an abnormal goto. */
2263 if (call_can_make_abnormal_goto (t))
2264 return true;
2265
2266 /* A call also alters control flow if it does not return. */
2267 if (flags & ECF_NORETURN)
2268 return true;
2269
2270 /* TM ending statements have backedges out of the transaction.
2271 Return true so we split the basic block containing them.
2272 Note that the TM_BUILTIN test is merely an optimization. */
2273 if ((flags & ECF_TM_BUILTIN)
2274 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2275 return true;
2276
2277 /* A BUILT_IN_RETURN call is the same as a return statement. */
2278 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2279 return true;
2280 }
2281 break;
2282
2283 case GIMPLE_EH_DISPATCH:
2284 /* EH_DISPATCH branches to the individual catch handlers at
2285 this level of a try or allowed-exceptions region. It can
2286 fallthru to the next statement as well. */
2287 return true;
2288
2289 case GIMPLE_ASM:
2290 if (gimple_asm_nlabels (t) > 0)
2291 return true;
2292 break;
2293
2294 CASE_GIMPLE_OMP:
2295 /* OpenMP directives alter control flow. */
2296 return true;
2297
2298 case GIMPLE_TRANSACTION:
2299 /* A transaction start alters control flow. */
2300 return true;
2301
2302 default:
2303 break;
2304 }
2305
2306 /* If a statement can throw, it alters control flow. */
2307 return stmt_can_throw_internal (t);
2308 }
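/* Illustrative sketch: a noreturn call alters control flow and therefore
   ends its basic block even though it is not a control statement itself.
   The names fatal and f are hypothetical.  */
#if 0
extern void fatal (void) __attribute__ ((noreturn));

int
f (int x)
{
  if (x < 0)
    fatal ();	/* ECF_NORETURN: the block ends here, with no fallthru.  */
  return x;
}
#endif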
2309
2310
2311 /* Return true if T is a simple local goto. */
2312
2313 bool
2314 simple_goto_p (gimple t)
2315 {
2316 return (gimple_code (t) == GIMPLE_GOTO
2317 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2318 }
2319
2320
2321 /* Return true if STMT should start a new basic block. PREV_STMT is
2322 the statement preceding STMT. It is used when STMT is a label or a
2323 case label. Labels should only start a new basic block if their
2324 previous statement wasn't a label. Otherwise, a sequence of labels
2325 would generate unnecessary basic blocks that only contain a single
2326 label. */
2327
2328 static inline bool
2329 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2330 {
2331 if (stmt == NULL)
2332 return false;
2333
2334 /* Labels start a new basic block only if the preceding statement
2335 wasn't a label of the same type. This prevents the creation of
2336 consecutive blocks that have nothing but a single label. */
2337 if (gimple_code (stmt) == GIMPLE_LABEL)
2338 {
2339 /* Nonlocal and computed GOTO targets always start a new block. */
2340 if (DECL_NONLOCAL (gimple_label_label (stmt))
2341 || FORCED_LABEL (gimple_label_label (stmt)))
2342 return true;
2343
2344 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2345 {
2346 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2347 return true;
2348
2349 cfg_stats.num_merged_labels++;
2350 return false;
2351 }
2352 else
2353 return true;
2354 }
2355 else if (gimple_code (stmt) == GIMPLE_CALL
2356 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2357 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2358 start a new block. */
2359 return true;
2360
2361 return false;
2362 }
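/* Illustrative sketch: in the hypothetical function below the two ordinary
   adjacent labels end up in the same basic block (bumping
   cfg_stats.num_merged_labels), whereas a nonlocal or forced label would
   always start its own block.  */
#if 0
int
f (int x)
{
  if (x)
    goto first;
  goto second;
 first:			/* Starts a new basic block.  */
 second:		/* Merged into the same block as "first".  */
  return x + 1;
}
#endif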
2363
2364
2365 /* Return true if T should end a basic block. */
2366
2367 bool
2368 stmt_ends_bb_p (gimple t)
2369 {
2370 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2371 }
2372
2373 /* Remove block annotations and other data structures. */
2374
2375 void
2376 delete_tree_cfg_annotations (void)
2377 {
2378 vec_free (label_to_block_map);
2379 }
2380
2381
2382 /* Return the first statement in basic block BB. */
2383
2384 gimple
2385 first_stmt (basic_block bb)
2386 {
2387 gimple_stmt_iterator i = gsi_start_bb (bb);
2388 gimple stmt = NULL;
2389
2390 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2391 {
2392 gsi_next (&i);
2393 stmt = NULL;
2394 }
2395 return stmt;
2396 }
2397
2398 /* Return the first non-label statement in basic block BB. */
2399
2400 static gimple
2401 first_non_label_stmt (basic_block bb)
2402 {
2403 gimple_stmt_iterator i = gsi_start_bb (bb);
2404 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2405 gsi_next (&i);
2406 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2407 }
2408
2409 /* Return the last statement in basic block BB. */
2410
2411 gimple
2412 last_stmt (basic_block bb)
2413 {
2414 gimple_stmt_iterator i = gsi_last_bb (bb);
2415 gimple stmt = NULL;
2416
2417 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2418 {
2419 gsi_prev (&i);
2420 stmt = NULL;
2421 }
2422 return stmt;
2423 }
2424
2425 /* Return the last statement of an otherwise empty block. Return NULL
2426 if the block is totally empty, or if it contains more than one
2427 statement. */
2428
2429 gimple
2430 last_and_only_stmt (basic_block bb)
2431 {
2432 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2433 gimple last, prev;
2434
2435 if (gsi_end_p (i))
2436 return NULL;
2437
2438 last = gsi_stmt (i);
2439 gsi_prev_nondebug (&i);
2440 if (gsi_end_p (i))
2441 return last;
2442
2443 /* Empty statements should no longer appear in the instruction stream.
2444 Everything that might have appeared before should be deleted by
2445 remove_useless_stmts, and the optimizers should just gsi_remove
2446 instead of smashing with build_empty_stmt.
2447
2448 Thus the only thing that should appear here in a block containing
2449 one executable statement is a label. */
2450 prev = gsi_stmt (i);
2451 if (gimple_code (prev) == GIMPLE_LABEL)
2452 return last;
2453 else
2454 return NULL;
2455 }
2456
2457 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2458
2459 static void
2460 reinstall_phi_args (edge new_edge, edge old_edge)
2461 {
2462 edge_var_map_vector *v;
2463 edge_var_map *vm;
2464 int i;
2465 gimple_stmt_iterator phis;
2466
2467 v = redirect_edge_var_map_vector (old_edge);
2468 if (!v)
2469 return;
2470
2471 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2472 v->iterate (i, &vm) && !gsi_end_p (phis);
2473 i++, gsi_next (&phis))
2474 {
2475 gimple phi = gsi_stmt (phis);
2476 tree result = redirect_edge_var_map_result (vm);
2477 tree arg = redirect_edge_var_map_def (vm);
2478
2479 gcc_assert (result == gimple_phi_result (phi));
2480
2481 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2482 }
2483
2484 redirect_edge_var_map_clear (old_edge);
2485 }
2486
2487 /* Returns the basic block after which the new basic block created
2488 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2489 near its "logical" location. This is of most help to humans looking
2490 at debugging dumps. */
2491
2492 static basic_block
2493 split_edge_bb_loc (edge edge_in)
2494 {
2495 basic_block dest = edge_in->dest;
2496 basic_block dest_prev = dest->prev_bb;
2497
2498 if (dest_prev)
2499 {
2500 edge e = find_edge (dest_prev, dest);
2501 if (e && !(e->flags & EDGE_COMPLEX))
2502 return edge_in->src;
2503 }
2504 return dest_prev;
2505 }
2506
2507 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2508 Abort on abnormal edges. */
2509
2510 static basic_block
2511 gimple_split_edge (edge edge_in)
2512 {
2513 basic_block new_bb, after_bb, dest;
2514 edge new_edge, e;
2515
2516 /* Abnormal edges cannot be split. */
2517 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2518
2519 dest = edge_in->dest;
2520
2521 after_bb = split_edge_bb_loc (edge_in);
2522
2523 new_bb = create_empty_bb (after_bb);
2524 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2525 new_bb->count = edge_in->count;
2526 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2527 new_edge->probability = REG_BR_PROB_BASE;
2528 new_edge->count = edge_in->count;
2529
2530 e = redirect_edge_and_branch (edge_in, new_bb);
2531 gcc_assert (e == edge_in);
2532 reinstall_phi_args (new_edge, e);
2533
2534 return new_bb;
2535 }
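/* Pictorially (illustrative), splitting the edge A->B:

       A --> B    becomes    A --> N --> B

   where the new block N takes over the split edge's count and frequency,
   N->B is a fallthru edge with probability REG_BR_PROB_BASE, and the PHI
   arguments queued on the redirected edge are reinstalled on N->B.  */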
2536
2537
2538 /* Verify properties of the address expression T with base object BASE. */
2539
2540 static tree
2541 verify_address (tree t, tree base)
2542 {
2543 bool old_constant;
2544 bool old_side_effects;
2545 bool new_constant;
2546 bool new_side_effects;
2547
2548 old_constant = TREE_CONSTANT (t);
2549 old_side_effects = TREE_SIDE_EFFECTS (t);
2550
2551 recompute_tree_invariant_for_addr_expr (t);
2552 new_side_effects = TREE_SIDE_EFFECTS (t);
2553 new_constant = TREE_CONSTANT (t);
2554
2555 if (old_constant != new_constant)
2556 {
2557 error ("constant not recomputed when ADDR_EXPR changed");
2558 return t;
2559 }
2560 if (old_side_effects != new_side_effects)
2561 {
2562 error ("side effects not recomputed when ADDR_EXPR changed");
2563 return t;
2564 }
2565
2566 if (!(TREE_CODE (base) == VAR_DECL
2567 || TREE_CODE (base) == PARM_DECL
2568 || TREE_CODE (base) == RESULT_DECL))
2569 return NULL_TREE;
2570
2571 if (DECL_GIMPLE_REG_P (base))
2572 {
2573 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2574 return base;
2575 }
2576
2577 return NULL_TREE;
2578 }
2579
2580 /* Callback for walk_tree, check that all elements with address taken
2581 are properly noticed as such. DATA is unused by this walk, hence
2582 ATTRIBUTE_UNUSED in the signature below. */
2583
2584 static tree
2585 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2586 {
2587 tree t = *tp, x;
2588
2589 if (TYPE_P (t))
2590 *walk_subtrees = 0;
2591
2592 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2593 #define CHECK_OP(N, MSG) \
2594 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2595 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2596
2597 switch (TREE_CODE (t))
2598 {
2599 case SSA_NAME:
2600 if (SSA_NAME_IN_FREE_LIST (t))
2601 {
2602 error ("SSA name in freelist but still referenced");
2603 return *tp;
2604 }
2605 break;
2606
2607 case INDIRECT_REF:
2608 error ("INDIRECT_REF in gimple IL");
2609 return t;
2610
2611 case MEM_REF:
2612 x = TREE_OPERAND (t, 0);
2613 if (!POINTER_TYPE_P (TREE_TYPE (x))
2614 || !is_gimple_mem_ref_addr (x))
2615 {
2616 error ("invalid first operand of MEM_REF");
2617 return x;
2618 }
2619 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2620 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2621 {
2622 error ("invalid offset operand of MEM_REF");
2623 return TREE_OPERAND (t, 1);
2624 }
2625 if (TREE_CODE (x) == ADDR_EXPR
2626 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2627 return x;
2628 *walk_subtrees = 0;
2629 break;
2630
2631 case ASSERT_EXPR:
2632 x = fold (ASSERT_EXPR_COND (t));
2633 if (x == boolean_false_node)
2634 {
2635 error ("ASSERT_EXPR with an always-false condition");
2636 return *tp;
2637 }
2638 break;
2639
2640 case MODIFY_EXPR:
2641 error ("MODIFY_EXPR not expected while having tuples");
2642 return *tp;
2643
2644 case ADDR_EXPR:
2645 {
2646 tree tem;
2647
2648 gcc_assert (is_gimple_address (t));
2649
2650 /* Skip any references (they will be checked when we recurse down the
2651 tree) and ensure that any variable used as a prefix is marked
2652 addressable. */
2653 for (x = TREE_OPERAND (t, 0);
2654 handled_component_p (x);
2655 x = TREE_OPERAND (x, 0))
2656 ;
2657
2658 if ((tem = verify_address (t, x)))
2659 return tem;
2660
2661 if (!(TREE_CODE (x) == VAR_DECL
2662 || TREE_CODE (x) == PARM_DECL
2663 || TREE_CODE (x) == RESULT_DECL))
2664 return NULL;
2665
2666 if (!TREE_ADDRESSABLE (x))
2667 {
2668 error ("address taken, but ADDRESSABLE bit not set");
2669 return x;
2670 }
2671
2672 break;
2673 }
2674
2675 case COND_EXPR:
2676 x = COND_EXPR_COND (t);
2677 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2678 {
2679 error ("non-integral used in condition");
2680 return x;
2681 }
2682 if (!is_gimple_condexpr (x))
2683 {
2684 error ("invalid conditional operand");
2685 return x;
2686 }
2687 break;
2688
2689 case NON_LVALUE_EXPR:
2690 case TRUTH_NOT_EXPR:
2691 gcc_unreachable ();
2692
2693 CASE_CONVERT:
2694 case FIX_TRUNC_EXPR:
2695 case FLOAT_EXPR:
2696 case NEGATE_EXPR:
2697 case ABS_EXPR:
2698 case BIT_NOT_EXPR:
2699 CHECK_OP (0, "invalid operand to unary operator");
2700 break;
2701
2702 case REALPART_EXPR:
2703 case IMAGPART_EXPR:
2704 case BIT_FIELD_REF:
2705 if (!is_gimple_reg_type (TREE_TYPE (t)))
2706 {
2707 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2708 return t;
2709 }
2710
2711 if (TREE_CODE (t) == BIT_FIELD_REF)
2712 {
2713 tree t0 = TREE_OPERAND (t, 0);
2714 tree t1 = TREE_OPERAND (t, 1);
2715 tree t2 = TREE_OPERAND (t, 2);
2716 if (!tree_fits_uhwi_p (t1)
2717 || !tree_fits_uhwi_p (t2))
2718 {
2719 error ("invalid position or size operand to BIT_FIELD_REF");
2720 return t;
2721 }
2722 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2723 && (TYPE_PRECISION (TREE_TYPE (t))
2724 != tree_to_uhwi (t1)))
2725 {
2726 error ("integral result type precision does not match "
2727 "field size of BIT_FIELD_REF");
2728 return t;
2729 }
2730 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2731 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2732 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2733 != tree_to_uhwi (t1)))
2734 {
2735 error ("mode precision of non-integral result does not "
2736 "match field size of BIT_FIELD_REF");
2737 return t;
2738 }
2739 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2740 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2741 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2742 {
2743 error ("position plus size exceeds size of referenced object in "
2744 "BIT_FIELD_REF");
2745 return t;
2746 }
2747 }
2748 t = TREE_OPERAND (t, 0);
2749
2750 /* Fall-through. */
2751 case COMPONENT_REF:
2752 case ARRAY_REF:
2753 case ARRAY_RANGE_REF:
2754 case VIEW_CONVERT_EXPR:
2755 /* We have a nest of references. Verify that each of the operands
2756 that determine where to reference is either a constant or a variable,
2757 verify that the base is valid, and then show we've already checked
2758 the subtrees. */
2759 while (handled_component_p (t))
2760 {
2761 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2762 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2763 else if (TREE_CODE (t) == ARRAY_REF
2764 || TREE_CODE (t) == ARRAY_RANGE_REF)
2765 {
2766 CHECK_OP (1, "invalid array index");
2767 if (TREE_OPERAND (t, 2))
2768 CHECK_OP (2, "invalid array lower bound");
2769 if (TREE_OPERAND (t, 3))
2770 CHECK_OP (3, "invalid array stride");
2771 }
2772 else if (TREE_CODE (t) == BIT_FIELD_REF
2773 || TREE_CODE (t) == REALPART_EXPR
2774 || TREE_CODE (t) == IMAGPART_EXPR)
2775 {
2776 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2777 "REALPART_EXPR");
2778 return t;
2779 }
2780
2781 t = TREE_OPERAND (t, 0);
2782 }
2783
2784 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2785 {
2786 error ("invalid reference prefix");
2787 return t;
2788 }
2789 *walk_subtrees = 0;
2790 break;
2791 case PLUS_EXPR:
2792 case MINUS_EXPR:
2793 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done
2794 using POINTER_PLUS_EXPR. */
2795 if (POINTER_TYPE_P (TREE_TYPE (t)))
2796 {
2797 error ("invalid operand to plus/minus, type is a pointer");
2798 return t;
2799 }
2800 CHECK_OP (0, "invalid operand to binary operator");
2801 CHECK_OP (1, "invalid operand to binary operator");
2802 break;
2803
2804 case POINTER_PLUS_EXPR:
2805 /* Check to make sure the first operand is a pointer or reference type. */
2806 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2807 {
2808 error ("invalid operand to pointer plus, first operand is not a pointer");
2809 return t;
2810 }
2811 /* Check to make sure the second operand is a ptrofftype. */
2812 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2813 {
2814 error ("invalid operand to pointer plus, second operand is not an "
2815 "integer type of appropriate width");
2816 return t;
2817 }
2818 /* FALLTHROUGH */
2819 case LT_EXPR:
2820 case LE_EXPR:
2821 case GT_EXPR:
2822 case GE_EXPR:
2823 case EQ_EXPR:
2824 case NE_EXPR:
2825 case UNORDERED_EXPR:
2826 case ORDERED_EXPR:
2827 case UNLT_EXPR:
2828 case UNLE_EXPR:
2829 case UNGT_EXPR:
2830 case UNGE_EXPR:
2831 case UNEQ_EXPR:
2832 case LTGT_EXPR:
2833 case MULT_EXPR:
2834 case TRUNC_DIV_EXPR:
2835 case CEIL_DIV_EXPR:
2836 case FLOOR_DIV_EXPR:
2837 case ROUND_DIV_EXPR:
2838 case TRUNC_MOD_EXPR:
2839 case CEIL_MOD_EXPR:
2840 case FLOOR_MOD_EXPR:
2841 case ROUND_MOD_EXPR:
2842 case RDIV_EXPR:
2843 case EXACT_DIV_EXPR:
2844 case MIN_EXPR:
2845 case MAX_EXPR:
2846 case LSHIFT_EXPR:
2847 case RSHIFT_EXPR:
2848 case LROTATE_EXPR:
2849 case RROTATE_EXPR:
2850 case BIT_IOR_EXPR:
2851 case BIT_XOR_EXPR:
2852 case BIT_AND_EXPR:
2853 CHECK_OP (0, "invalid operand to binary operator");
2854 CHECK_OP (1, "invalid operand to binary operator");
2855 break;
2856
2857 case CONSTRUCTOR:
2858 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2859 *walk_subtrees = 0;
2860 break;
2861
2862 case CASE_LABEL_EXPR:
2863 if (CASE_CHAIN (t))
2864 {
2865 error ("invalid CASE_CHAIN");
2866 return t;
2867 }
2868 break;
2869
2870 default:
2871 break;
2872 }
2873 return NULL;
2874
2875 #undef CHECK_OP
2876 }
2877
2878
2879 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2880 Returns true if there is an error, otherwise false. */
2881
2882 static bool
2883 verify_types_in_gimple_min_lval (tree expr)
2884 {
2885 tree op;
2886
2887 if (is_gimple_id (expr))
2888 return false;
2889
2890 if (TREE_CODE (expr) != TARGET_MEM_REF
2891 && TREE_CODE (expr) != MEM_REF)
2892 {
2893 error ("invalid expression for min lvalue");
2894 return true;
2895 }
2896
2897 /* TARGET_MEM_REFs are strange beasts. */
2898 if (TREE_CODE (expr) == TARGET_MEM_REF)
2899 return false;
2900
2901 op = TREE_OPERAND (expr, 0);
2902 if (!is_gimple_val (op))
2903 {
2904 error ("invalid operand in indirect reference");
2905 debug_generic_stmt (op);
2906 return true;
2907 }
2908 /* Memory references now generally can involve a value conversion. */
2909
2910 return false;
2911 }
2912
2913 /* Verify if EXPR is a valid GIMPLE reference expression. If
2914 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2915 if there is an error, otherwise false. */
2916
2917 static bool
2918 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2919 {
2920 while (handled_component_p (expr))
2921 {
2922 tree op = TREE_OPERAND (expr, 0);
2923
2924 if (TREE_CODE (expr) == ARRAY_REF
2925 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2926 {
2927 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2928 || (TREE_OPERAND (expr, 2)
2929 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2930 || (TREE_OPERAND (expr, 3)
2931 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2932 {
2933 error ("invalid operands to array reference");
2934 debug_generic_stmt (expr);
2935 return true;
2936 }
2937 }
2938
2939 /* Verify if the reference array element types are compatible. */
2940 if (TREE_CODE (expr) == ARRAY_REF
2941 && !useless_type_conversion_p (TREE_TYPE (expr),
2942 TREE_TYPE (TREE_TYPE (op))))
2943 {
2944 error ("type mismatch in array reference");
2945 debug_generic_stmt (TREE_TYPE (expr));
2946 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2947 return true;
2948 }
2949 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2950 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2951 TREE_TYPE (TREE_TYPE (op))))
2952 {
2953 error ("type mismatch in array range reference");
2954 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2955 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2956 return true;
2957 }
2958
2959 if ((TREE_CODE (expr) == REALPART_EXPR
2960 || TREE_CODE (expr) == IMAGPART_EXPR)
2961 && !useless_type_conversion_p (TREE_TYPE (expr),
2962 TREE_TYPE (TREE_TYPE (op))))
2963 {
2964 error ("type mismatch in real/imagpart reference");
2965 debug_generic_stmt (TREE_TYPE (expr));
2966 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2967 return true;
2968 }
2969
2970 if (TREE_CODE (expr) == COMPONENT_REF
2971 && !useless_type_conversion_p (TREE_TYPE (expr),
2972 TREE_TYPE (TREE_OPERAND (expr, 1))))
2973 {
2974 error ("type mismatch in component reference");
2975 debug_generic_stmt (TREE_TYPE (expr));
2976 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2977 return true;
2978 }
2979
2980 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2981 {
2982 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2983 that their operand is not an SSA name or an invariant when
2984 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2985 bug). Otherwise there is nothing to verify; gross mismatches at
2986 most invoke undefined behavior. */
2987 if (require_lvalue
2988 && (TREE_CODE (op) == SSA_NAME
2989 || is_gimple_min_invariant (op)))
2990 {
2991 error ("conversion of an SSA_NAME on the left hand side");
2992 debug_generic_stmt (expr);
2993 return true;
2994 }
2995 else if (TREE_CODE (op) == SSA_NAME
2996 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2997 {
2998 error ("conversion of register to a different size");
2999 debug_generic_stmt (expr);
3000 return true;
3001 }
3002 else if (!handled_component_p (op))
3003 return false;
3004 }
3005
3006 expr = op;
3007 }
3008
3009 if (TREE_CODE (expr) == MEM_REF)
3010 {
3011 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3012 {
3013 error ("invalid address operand in MEM_REF");
3014 debug_generic_stmt (expr);
3015 return true;
3016 }
3017 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3018 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3019 {
3020 error ("invalid offset operand in MEM_REF");
3021 debug_generic_stmt (expr);
3022 return true;
3023 }
3024 }
3025 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3026 {
3027 if (!TMR_BASE (expr)
3028 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3029 {
3030 error ("invalid address operand in TARGET_MEM_REF");
3031 return true;
3032 }
3033 if (!TMR_OFFSET (expr)
3034 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3035 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3036 {
3037 error ("invalid offset operand in TARGET_MEM_REF");
3038 debug_generic_stmt (expr);
3039 return true;
3040 }
3041 }
3042
3043 return ((require_lvalue || !is_gimple_min_invariant (expr))
3044 && verify_types_in_gimple_min_lval (expr));
3045 }
3046
3047 /* Returns true if the TYPE_POINTER_TO (SRC_OBJ) list of pointer-to types
3048 contains one trivially convertible to DEST, or if the list is empty. */
3049
3050 static bool
3051 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3052 {
3053 tree src;
3054
3055 if (!TYPE_POINTER_TO (src_obj))
3056 return true;
3057
3058 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3059 if (useless_type_conversion_p (dest, src))
3060 return true;
3061
3062 return false;
3063 }
3064
3065 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3066 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3067
3068 static bool
3069 valid_fixed_convert_types_p (tree type1, tree type2)
3070 {
3071 return (FIXED_POINT_TYPE_P (type1)
3072 && (INTEGRAL_TYPE_P (type2)
3073 || SCALAR_FLOAT_TYPE_P (type2)
3074 || FIXED_POINT_TYPE_P (type2)));
3075 }
3076
3077 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3078 is a problem, otherwise false. */
3079
3080 static bool
3081 verify_gimple_call (gimple stmt)
3082 {
3083 tree fn = gimple_call_fn (stmt);
3084 tree fntype, fndecl;
3085 unsigned i;
3086
3087 if (gimple_call_internal_p (stmt))
3088 {
3089 if (fn)
3090 {
3091 error ("gimple call has two targets");
3092 debug_generic_stmt (fn);
3093 return true;
3094 }
3095 }
3096 else
3097 {
3098 if (!fn)
3099 {
3100 error ("gimple call has no target");
3101 return true;
3102 }
3103 }
3104
3105 if (fn && !is_gimple_call_addr (fn))
3106 {
3107 error ("invalid function in gimple call");
3108 debug_generic_stmt (fn);
3109 return true;
3110 }
3111
3112 if (fn
3113 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3114 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3115 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3116 {
3117 error ("non-function in gimple call");
3118 return true;
3119 }
3120
3121 fndecl = gimple_call_fndecl (stmt);
3122 if (fndecl
3123 && TREE_CODE (fndecl) == FUNCTION_DECL
3124 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3125 && !DECL_PURE_P (fndecl)
3126 && !TREE_READONLY (fndecl))
3127 {
3128 error ("invalid pure const state for function");
3129 return true;
3130 }
3131
3132 if (gimple_call_lhs (stmt)
3133 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3134 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3135 {
3136 error ("invalid LHS in gimple call");
3137 return true;
3138 }
3139
3140 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3141 {
3142 error ("LHS in noreturn call");
3143 return true;
3144 }
3145
3146 fntype = gimple_call_fntype (stmt);
3147 if (fntype
3148 && gimple_call_lhs (stmt)
3149 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3150 TREE_TYPE (fntype))
3151 /* ??? At least C++ misses conversions at assignments from
3152 void * call results.
3153 ??? Java is completely off. Especially with functions
3154 returning java.lang.Object.
3155 For now simply allow arbitrary pointer type conversions. */
3156 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3157 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3158 {
3159 error ("invalid conversion in gimple call");
3160 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3161 debug_generic_stmt (TREE_TYPE (fntype));
3162 return true;
3163 }
3164
3165 if (gimple_call_chain (stmt)
3166 && !is_gimple_val (gimple_call_chain (stmt)))
3167 {
3168 error ("invalid static chain in gimple call");
3169 debug_generic_stmt (gimple_call_chain (stmt));
3170 return true;
3171 }
3172
3173 /* If there is a static chain argument, this should not be an indirect
3174 call, and the decl should have DECL_STATIC_CHAIN set. */
3175 if (gimple_call_chain (stmt))
3176 {
3177 if (!gimple_call_fndecl (stmt))
3178 {
3179 error ("static chain in indirect gimple call");
3180 return true;
3181 }
3182 fn = TREE_OPERAND (fn, 0);
3183
3184 if (!DECL_STATIC_CHAIN (fn))
3185 {
3186 error ("static chain with function that doesn%'t use one");
3187 return true;
3188 }
3189 }
3190
3191 /* ??? The C frontend passes unpromoted arguments in case it
3192 didn't see a function declaration before the call. So for now
3193 leave the call arguments mostly unverified. Once we gimplify
3194 unit-at-a-time we have a chance to fix this. */
3195
3196 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3197 {
3198 tree arg = gimple_call_arg (stmt, i);
3199 if ((is_gimple_reg_type (TREE_TYPE (arg))
3200 && !is_gimple_val (arg))
3201 || (!is_gimple_reg_type (TREE_TYPE (arg))
3202 && !is_gimple_lvalue (arg)))
3203 {
3204 error ("invalid argument to gimple call");
3205 debug_generic_expr (arg);
3206 return true;
3207 }
3208 }
3209
3210 return false;
3211 }
3212
3213 /* Verifies the gimple comparison with the result type TYPE and
3214 the operands OP0 and OP1. */
3215
3216 static bool
3217 verify_gimple_comparison (tree type, tree op0, tree op1)
3218 {
3219 tree op0_type = TREE_TYPE (op0);
3220 tree op1_type = TREE_TYPE (op1);
3221
3222 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3223 {
3224 error ("invalid operands in gimple comparison");
3225 return true;
3226 }
3227
3228 /* For comparisons we do not have the operation's type as the
3229 effective type the comparison is carried out in. Instead
3230 we require that either the first operand is trivially
3231 convertible into the second, or the other way around.
3232 Because we special-case pointers to void we allow
3233 comparisons of pointers with the same mode as well. */
3234 if (!useless_type_conversion_p (op0_type, op1_type)
3235 && !useless_type_conversion_p (op1_type, op0_type)
3236 && (!POINTER_TYPE_P (op0_type)
3237 || !POINTER_TYPE_P (op1_type)
3238 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3239 {
3240 error ("mismatching comparison operand types");
3241 debug_generic_expr (op0_type);
3242 debug_generic_expr (op1_type);
3243 return true;
3244 }
3245
3246 /* The resulting type of a comparison may be an effective boolean type. */
3247 if (INTEGRAL_TYPE_P (type)
3248 && (TREE_CODE (type) == BOOLEAN_TYPE
3249 || TYPE_PRECISION (type) == 1))
3250 {
3251 if (TREE_CODE (op0_type) == VECTOR_TYPE
3252 || TREE_CODE (op1_type) == VECTOR_TYPE)
3253 {
3254 error ("vector comparison returning a boolean");
3255 debug_generic_expr (op0_type);
3256 debug_generic_expr (op1_type);
3257 return true;
3258 }
3259 }
3260 /* Or an integer vector type with the same size and element count
3261 as the comparison operand types. */
3262 else if (TREE_CODE (type) == VECTOR_TYPE
3263 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3264 {
3265 if (TREE_CODE (op0_type) != VECTOR_TYPE
3266 || TREE_CODE (op1_type) != VECTOR_TYPE)
3267 {
3268 error ("non-vector operands in vector comparison");
3269 debug_generic_expr (op0_type);
3270 debug_generic_expr (op1_type);
3271 return true;
3272 }
3273
3274 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3275 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3276 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3277 /* The result of a vector comparison is of signed
3278 integral type. */
3279 || TYPE_UNSIGNED (TREE_TYPE (type)))
3280 {
3281 error ("invalid vector comparison resulting type");
3282 debug_generic_expr (type);
3283 return true;
3284 }
3285 }
3286 else
3287 {
3288 error ("bogus comparison result type");
3289 debug_generic_expr (type);
3290 return true;
3291 }
3292
3293 return false;
3294 }
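/* For instance (illustrative): comparing two V4SI vectors must produce a
   signed integer vector result with the same number of subparts and
   element size; producing a single boolean from vector operands is
   rejected above.  */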
3295
3296 /* Verify a gimple assignment statement STMT with a unary rhs.
3297 Returns true if anything is wrong. */
3298
3299 static bool
3300 verify_gimple_assign_unary (gimple stmt)
3301 {
3302 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3303 tree lhs = gimple_assign_lhs (stmt);
3304 tree lhs_type = TREE_TYPE (lhs);
3305 tree rhs1 = gimple_assign_rhs1 (stmt);
3306 tree rhs1_type = TREE_TYPE (rhs1);
3307
3308 if (!is_gimple_reg (lhs))
3309 {
3310 error ("non-register as LHS of unary operation");
3311 return true;
3312 }
3313
3314 if (!is_gimple_val (rhs1))
3315 {
3316 error ("invalid operand in unary operation");
3317 return true;
3318 }
3319
3320 /* First handle conversions. */
3321 switch (rhs_code)
3322 {
3323 CASE_CONVERT:
3324 {
3325 /* Allow conversions from pointer type to integral type only if
3326 there is no sign or zero extension involved.
3327 For targets where the precision of ptrofftype doesn't match that
3328 of pointers we need to allow arbitrary conversions to ptrofftype. */
3329 if ((POINTER_TYPE_P (lhs_type)
3330 && INTEGRAL_TYPE_P (rhs1_type))
3331 || (POINTER_TYPE_P (rhs1_type)
3332 && INTEGRAL_TYPE_P (lhs_type)
3333 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3334 || ptrofftype_p (sizetype))))
3335 return false;
3336
3337 /* Allow conversion from integral to offset type and vice versa. */
3338 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3339 && INTEGRAL_TYPE_P (rhs1_type))
3340 || (INTEGRAL_TYPE_P (lhs_type)
3341 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3342 return false;
3343
3344 /* Otherwise assert we are converting between types of the
3345 same kind. */
3346 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3347 {
3348 error ("invalid types in nop conversion");
3349 debug_generic_expr (lhs_type);
3350 debug_generic_expr (rhs1_type);
3351 return true;
3352 }
3353
3354 return false;
3355 }
3356
3357 case ADDR_SPACE_CONVERT_EXPR:
3358 {
3359 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3360 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3361 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3362 {
3363 error ("invalid types in address space conversion");
3364 debug_generic_expr (lhs_type);
3365 debug_generic_expr (rhs1_type);
3366 return true;
3367 }
3368
3369 return false;
3370 }
3371
3372 case FIXED_CONVERT_EXPR:
3373 {
3374 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3375 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3376 {
3377 error ("invalid types in fixed-point conversion");
3378 debug_generic_expr (lhs_type);
3379 debug_generic_expr (rhs1_type);
3380 return true;
3381 }
3382
3383 return false;
3384 }
3385
3386 case FLOAT_EXPR:
3387 {
3388 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3389 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3390 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3391 {
3392 error ("invalid types in conversion to floating point");
3393 debug_generic_expr (lhs_type);
3394 debug_generic_expr (rhs1_type);
3395 return true;
3396 }
3397
3398 return false;
3399 }
3400
3401 case FIX_TRUNC_EXPR:
3402 {
3403 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3404 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3405 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3406 {
3407 error ("invalid types in conversion to integer");
3408 debug_generic_expr (lhs_type);
3409 debug_generic_expr (rhs1_type);
3410 return true;
3411 }
3412
3413 return false;
3414 }
3415
3416 case VEC_UNPACK_HI_EXPR:
3417 case VEC_UNPACK_LO_EXPR:
3418 case REDUC_MAX_EXPR:
3419 case REDUC_MIN_EXPR:
3420 case REDUC_PLUS_EXPR:
3421 case VEC_UNPACK_FLOAT_HI_EXPR:
3422 case VEC_UNPACK_FLOAT_LO_EXPR:
3423 /* FIXME. */
3424 return false;
3425
3426 case NEGATE_EXPR:
3427 case ABS_EXPR:
3428 case BIT_NOT_EXPR:
3429 case PAREN_EXPR:
3430 case NON_LVALUE_EXPR:
3431 case CONJ_EXPR:
3432 break;
3433
3434 default:
3435 gcc_unreachable ();
3436 }
3437
3438 /* For the remaining codes assert there is no conversion involved. */
3439 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3440 {
3441 error ("non-trivial conversion in unary operation");
3442 debug_generic_expr (lhs_type);
3443 debug_generic_expr (rhs1_type);
3444 return true;
3445 }
3446
3447 return false;
3448 }
3449
3450 /* Verify a gimple assignment statement STMT with a binary rhs.
3451 Returns true if anything is wrong. */
3452
3453 static bool
3454 verify_gimple_assign_binary (gimple stmt)
3455 {
3456 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3457 tree lhs = gimple_assign_lhs (stmt);
3458 tree lhs_type = TREE_TYPE (lhs);
3459 tree rhs1 = gimple_assign_rhs1 (stmt);
3460 tree rhs1_type = TREE_TYPE (rhs1);
3461 tree rhs2 = gimple_assign_rhs2 (stmt);
3462 tree rhs2_type = TREE_TYPE (rhs2);
3463
3464 if (!is_gimple_reg (lhs))
3465 {
3466 error ("non-register as LHS of binary operation");
3467 return true;
3468 }
3469
3470 if (!is_gimple_val (rhs1)
3471 || !is_gimple_val (rhs2))
3472 {
3473 error ("invalid operands in binary operation");
3474 return true;
3475 }
3476
3477 /* First handle operations that involve different types. */
3478 switch (rhs_code)
3479 {
3480 case COMPLEX_EXPR:
3481 {
3482 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3483 || !(INTEGRAL_TYPE_P (rhs1_type)
3484 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3485 || !(INTEGRAL_TYPE_P (rhs2_type)
3486 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3487 {
3488 error ("type mismatch in complex expression");
3489 debug_generic_expr (lhs_type);
3490 debug_generic_expr (rhs1_type);
3491 debug_generic_expr (rhs2_type);
3492 return true;
3493 }
3494
3495 return false;
3496 }
3497
3498 case LSHIFT_EXPR:
3499 case RSHIFT_EXPR:
3500 case LROTATE_EXPR:
3501 case RROTATE_EXPR:
3502 {
3503 /* Shifts and rotates are ok on integral types, fixed point
3504 types and integer vector types. */
3505 if ((!INTEGRAL_TYPE_P (rhs1_type)
3506 && !FIXED_POINT_TYPE_P (rhs1_type)
3507 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3508 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3509 || (!INTEGRAL_TYPE_P (rhs2_type)
3510 /* Vector shifts of vectors are also ok. */
3511 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3512 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3513 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3514 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3515 || !useless_type_conversion_p (lhs_type, rhs1_type))
3516 {
3517 error ("type mismatch in shift expression");
3518 debug_generic_expr (lhs_type);
3519 debug_generic_expr (rhs1_type);
3520 debug_generic_expr (rhs2_type);
3521 return true;
3522 }
3523
3524 return false;
3525 }
3526
3527 case VEC_LSHIFT_EXPR:
3528 case VEC_RSHIFT_EXPR:
3529 {
3530 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3531 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3532 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3533 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3534 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3535 || (!INTEGRAL_TYPE_P (rhs2_type)
3536 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3537 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3538 || !useless_type_conversion_p (lhs_type, rhs1_type))
3539 {
3540 error ("type mismatch in vector shift expression");
3541 debug_generic_expr (lhs_type);
3542 debug_generic_expr (rhs1_type);
3543 debug_generic_expr (rhs2_type);
3544 return true;
3545 }
3546 /* For shifting a vector of non-integral components we
3547 only allow shifting by a constant multiple of the element size. */
3548 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3549 && (TREE_CODE (rhs2) != INTEGER_CST
3550 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3551 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3552 {
3553 error ("non-element sized vector shift of floating point vector");
3554 return true;
3555 }
3556
3557 return false;
3558 }
3559
3560 case WIDEN_LSHIFT_EXPR:
3561 {
3562 if (!INTEGRAL_TYPE_P (lhs_type)
3563 || !INTEGRAL_TYPE_P (rhs1_type)
3564 || TREE_CODE (rhs2) != INTEGER_CST
3565 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3566 {
3567 error ("type mismatch in widening vector shift expression");
3568 debug_generic_expr (lhs_type);
3569 debug_generic_expr (rhs1_type);
3570 debug_generic_expr (rhs2_type);
3571 return true;
3572 }
3573
3574 return false;
3575 }
3576
3577 case VEC_WIDEN_LSHIFT_HI_EXPR:
3578 case VEC_WIDEN_LSHIFT_LO_EXPR:
3579 {
3580 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3581 || TREE_CODE (lhs_type) != VECTOR_TYPE
3582 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3583 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3584 || TREE_CODE (rhs2) != INTEGER_CST
3585 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3586 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3587 {
3588 error ("type mismatch in widening vector shift expression");
3589 debug_generic_expr (lhs_type);
3590 debug_generic_expr (rhs1_type);
3591 debug_generic_expr (rhs2_type);
3592 return true;
3593 }
3594
3595 return false;
3596 }
3597
3598 case PLUS_EXPR:
3599 case MINUS_EXPR:
3600 {
3601 tree lhs_etype = lhs_type;
3602 tree rhs1_etype = rhs1_type;
3603 tree rhs2_etype = rhs2_type;
3604 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3605 {
3606 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3607 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3608 {
3609 error ("invalid non-vector operands to vector valued plus");
3610 return true;
3611 }
3612 lhs_etype = TREE_TYPE (lhs_type);
3613 rhs1_etype = TREE_TYPE (rhs1_type);
3614 rhs2_etype = TREE_TYPE (rhs2_type);
3615 }
3616 if (POINTER_TYPE_P (lhs_etype)
3617 || POINTER_TYPE_P (rhs1_etype)
3618 || POINTER_TYPE_P (rhs2_etype))
3619 {
3620 error ("invalid (pointer) operands to plus/minus");
3621 return true;
3622 }
3623
3624 /* Continue with generic binary expression handling. */
3625 break;
3626 }
3627
3628 case POINTER_PLUS_EXPR:
3629 {
3630 if (!POINTER_TYPE_P (rhs1_type)
3631 || !useless_type_conversion_p (lhs_type, rhs1_type)
3632 || !ptrofftype_p (rhs2_type))
3633 {
3634 error ("type mismatch in pointer plus expression");
3635 debug_generic_stmt (lhs_type);
3636 debug_generic_stmt (rhs1_type);
3637 debug_generic_stmt (rhs2_type);
3638 return true;
3639 }
3640
3641 return false;
3642 }
3643
3644 case TRUTH_ANDIF_EXPR:
3645 case TRUTH_ORIF_EXPR:
3646 case TRUTH_AND_EXPR:
3647 case TRUTH_OR_EXPR:
3648 case TRUTH_XOR_EXPR:
3649
3650 gcc_unreachable ();
3651
3652 case LT_EXPR:
3653 case LE_EXPR:
3654 case GT_EXPR:
3655 case GE_EXPR:
3656 case EQ_EXPR:
3657 case NE_EXPR:
3658 case UNORDERED_EXPR:
3659 case ORDERED_EXPR:
3660 case UNLT_EXPR:
3661 case UNLE_EXPR:
3662 case UNGT_EXPR:
3663 case UNGE_EXPR:
3664 case UNEQ_EXPR:
3665 case LTGT_EXPR:
3666 /* Comparisons are also binary, but the result type is not
3667 connected to the operand types. */
3668 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3669
3670 case WIDEN_MULT_EXPR:
3671 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3672 return true;
3673 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3674 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3675
3676 case WIDEN_SUM_EXPR:
3677 case VEC_WIDEN_MULT_HI_EXPR:
3678 case VEC_WIDEN_MULT_LO_EXPR:
3679 case VEC_WIDEN_MULT_EVEN_EXPR:
3680 case VEC_WIDEN_MULT_ODD_EXPR:
3681 case VEC_PACK_TRUNC_EXPR:
3682 case VEC_PACK_SAT_EXPR:
3683 case VEC_PACK_FIX_TRUNC_EXPR:
3684 /* FIXME. */
3685 return false;
3686
3687 case MULT_EXPR:
3688 case MULT_HIGHPART_EXPR:
3689 case TRUNC_DIV_EXPR:
3690 case CEIL_DIV_EXPR:
3691 case FLOOR_DIV_EXPR:
3692 case ROUND_DIV_EXPR:
3693 case TRUNC_MOD_EXPR:
3694 case CEIL_MOD_EXPR:
3695 case FLOOR_MOD_EXPR:
3696 case ROUND_MOD_EXPR:
3697 case RDIV_EXPR:
3698 case EXACT_DIV_EXPR:
3699 case MIN_EXPR:
3700 case MAX_EXPR:
3701 case BIT_IOR_EXPR:
3702 case BIT_XOR_EXPR:
3703 case BIT_AND_EXPR:
3704 /* Continue with generic binary expression handling. */
3705 break;
3706
3707 default:
3708 gcc_unreachable ();
3709 }
3710
3711 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3712 || !useless_type_conversion_p (lhs_type, rhs2_type))
3713 {
3714 error ("type mismatch in binary expression");
3715 debug_generic_stmt (lhs_type);
3716 debug_generic_stmt (rhs1_type);
3717 debug_generic_stmt (rhs2_type);
3718 return true;
3719 }
3720
3721 return false;
3722 }
3723
3724 /* Verify a gimple assignment statement STMT with a ternary rhs.
3725 Returns true if anything is wrong. */
3726
3727 static bool
3728 verify_gimple_assign_ternary (gimple stmt)
3729 {
3730 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3731 tree lhs = gimple_assign_lhs (stmt);
3732 tree lhs_type = TREE_TYPE (lhs);
3733 tree rhs1 = gimple_assign_rhs1 (stmt);
3734 tree rhs1_type = TREE_TYPE (rhs1);
3735 tree rhs2 = gimple_assign_rhs2 (stmt);
3736 tree rhs2_type = TREE_TYPE (rhs2);
3737 tree rhs3 = gimple_assign_rhs3 (stmt);
3738 tree rhs3_type = TREE_TYPE (rhs3);
3739
3740 if (!is_gimple_reg (lhs))
3741 {
3742 error ("non-register as LHS of ternary operation");
3743 return true;
3744 }
3745
3746 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3747 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3748 || !is_gimple_val (rhs2)
3749 || !is_gimple_val (rhs3))
3750 {
3751 error ("invalid operands in ternary operation");
3752 return true;
3753 }
3754
3755 /* First handle operations that involve different types. */
3756 switch (rhs_code)
3757 {
3758 case WIDEN_MULT_PLUS_EXPR:
3759 case WIDEN_MULT_MINUS_EXPR:
3760 if ((!INTEGRAL_TYPE_P (rhs1_type)
3761 && !FIXED_POINT_TYPE_P (rhs1_type))
3762 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3763 || !useless_type_conversion_p (lhs_type, rhs3_type)
3764 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3765 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3766 {
3767 error ("type mismatch in widening multiply-accumulate expression");
3768 debug_generic_expr (lhs_type);
3769 debug_generic_expr (rhs1_type);
3770 debug_generic_expr (rhs2_type);
3771 debug_generic_expr (rhs3_type);
3772 return true;
3773 }
3774 break;
3775
3776 case FMA_EXPR:
3777 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3778 || !useless_type_conversion_p (lhs_type, rhs2_type)
3779 || !useless_type_conversion_p (lhs_type, rhs3_type))
3780 {
3781 error ("type mismatch in fused multiply-add expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 debug_generic_expr (rhs2_type);
3785 debug_generic_expr (rhs3_type);
3786 return true;
3787 }
3788 break;
3789
3790 case COND_EXPR:
3791 case VEC_COND_EXPR:
3792 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3793 || !useless_type_conversion_p (lhs_type, rhs3_type))
3794 {
3795 error ("type mismatch in conditional expression");
3796 debug_generic_expr (lhs_type);
3797 debug_generic_expr (rhs2_type);
3798 debug_generic_expr (rhs3_type);
3799 return true;
3800 }
3801 break;
3802
3803 case VEC_PERM_EXPR:
3804 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3805 || !useless_type_conversion_p (lhs_type, rhs2_type))
3806 {
3807 error ("type mismatch in vector permute expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 debug_generic_expr (rhs2_type);
3811 debug_generic_expr (rhs3_type);
3812 return true;
3813 }
3814
3815 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3816 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3817 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3818 {
3819 error ("vector types expected in vector permute expression");
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 debug_generic_expr (rhs2_type);
3823 debug_generic_expr (rhs3_type);
3824 return true;
3825 }
3826
3827 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3828 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3829 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3830 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3831 != TYPE_VECTOR_SUBPARTS (lhs_type))
3832 {
3833 error ("vectors with different element number found "
3834 "in vector permute expression");
3835 debug_generic_expr (lhs_type);
3836 debug_generic_expr (rhs1_type);
3837 debug_generic_expr (rhs2_type);
3838 debug_generic_expr (rhs3_type);
3839 return true;
3840 }
3841
3842 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3843 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3844 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3845 {
3846 error ("invalid mask type in vector permute expression");
3847 debug_generic_expr (lhs_type);
3848 debug_generic_expr (rhs1_type);
3849 debug_generic_expr (rhs2_type);
3850 debug_generic_expr (rhs3_type);
3851 return true;
3852 }
3853
3854 return false;
3855
3856 case DOT_PROD_EXPR:
3857 case REALIGN_LOAD_EXPR:
3858 /* FIXME. */
3859 return false;
3860
3861 default:
3862 gcc_unreachable ();
3863 }
3864 return false;
3865 }
3866
3867 /* Verify a gimple assignment statement STMT with a single rhs.
3868 Returns true if anything is wrong. */
3869
3870 static bool
3871 verify_gimple_assign_single (gimple stmt)
3872 {
3873 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3874 tree lhs = gimple_assign_lhs (stmt);
3875 tree lhs_type = TREE_TYPE (lhs);
3876 tree rhs1 = gimple_assign_rhs1 (stmt);
3877 tree rhs1_type = TREE_TYPE (rhs1);
3878 bool res = false;
3879
3880 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3881 {
3882 error ("non-trivial conversion at assignment");
3883 debug_generic_expr (lhs_type);
3884 debug_generic_expr (rhs1_type);
3885 return true;
3886 }
3887
3888 if (gimple_clobber_p (stmt)
3889 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3890 {
3891 error ("non-decl/MEM_REF LHS in clobber statement");
3892 debug_generic_expr (lhs);
3893 return true;
3894 }
3895
3896 if (handled_component_p (lhs))
3897 res |= verify_types_in_gimple_reference (lhs, true);
3898
3899 /* Special codes we cannot handle via their class. */
3900 switch (rhs_code)
3901 {
3902 case ADDR_EXPR:
3903 {
3904 tree op = TREE_OPERAND (rhs1, 0);
3905 if (!is_gimple_addressable (op))
3906 {
3907 error ("invalid operand in unary expression");
3908 return true;
3909 }
3910
3911 /* Technically there is no longer a need for matching types, but
3912 gimple hygiene asks for this check. In LTO we can end up
3913 combining incompatible units and thus end up with addresses
3914 of globals that change their type to a common one. */
3915 if (!in_lto_p
3916 && !types_compatible_p (TREE_TYPE (op),
3917 TREE_TYPE (TREE_TYPE (rhs1)))
3918 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3919 TREE_TYPE (op)))
3920 {
3921 error ("type mismatch in address expression");
3922 debug_generic_stmt (TREE_TYPE (rhs1));
3923 debug_generic_stmt (TREE_TYPE (op));
3924 return true;
3925 }
3926
3927 return verify_types_in_gimple_reference (op, true);
3928 }
3929
3930 /* tcc_reference */
3931 case INDIRECT_REF:
3932 error ("INDIRECT_REF in gimple IL");
3933 return true;
3934
3935 case COMPONENT_REF:
3936 case BIT_FIELD_REF:
3937 case ARRAY_REF:
3938 case ARRAY_RANGE_REF:
3939 case VIEW_CONVERT_EXPR:
3940 case REALPART_EXPR:
3941 case IMAGPART_EXPR:
3942 case TARGET_MEM_REF:
3943 case MEM_REF:
3944 if (!is_gimple_reg (lhs)
3945 && is_gimple_reg_type (TREE_TYPE (lhs)))
3946 {
3947 error ("invalid rhs for gimple memory store");
3948 debug_generic_stmt (lhs);
3949 debug_generic_stmt (rhs1);
3950 return true;
3951 }
3952 return res || verify_types_in_gimple_reference (rhs1, false);
3953
3954 /* tcc_constant */
3955 case SSA_NAME:
3956 case INTEGER_CST:
3957 case REAL_CST:
3958 case FIXED_CST:
3959 case COMPLEX_CST:
3960 case VECTOR_CST:
3961 case STRING_CST:
3962 return res;
3963
3964 /* tcc_declaration */
3965 case CONST_DECL:
3966 return res;
3967 case VAR_DECL:
3968 case PARM_DECL:
3969 if (!is_gimple_reg (lhs)
3970 && !is_gimple_reg (rhs1)
3971 && is_gimple_reg_type (TREE_TYPE (lhs)))
3972 {
3973 error ("invalid rhs for gimple memory store");
3974 debug_generic_stmt (lhs);
3975 debug_generic_stmt (rhs1);
3976 return true;
3977 }
3978 return res;
3979
3980 case CONSTRUCTOR:
3981 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3982 {
3983 unsigned int i;
3984 tree elt_i, elt_v, elt_t = NULL_TREE;
3985
3986 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3987 return res;
3988 /* For vector CONSTRUCTORs we require that either it is an empty
3989 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3990 (in which case the element count must exactly cover the whole
3991 outer vector and the index must be NULL on all elements), or it
3992 is a CONSTRUCTOR of scalar elements, where as an exception we
3993 allow a smaller number of elements (assuming zero filling) and
3994 indexes that are either all NULL or consecutive from zero (such
3995 CONSTRUCTORs can appear in the IL from FEs). */
3996 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3997 {
3998 if (elt_t == NULL_TREE)
3999 {
4000 elt_t = TREE_TYPE (elt_v);
4001 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4002 {
4004 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4005 TREE_TYPE (elt_t)))
4006 {
4007 error ("incorrect type of vector CONSTRUCTOR"
4008 " elements");
4009 debug_generic_stmt (rhs1);
4010 return true;
4011 }
4012 else if (CONSTRUCTOR_NELTS (rhs1)
4013 * TYPE_VECTOR_SUBPARTS (elt_t)
4014 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4015 {
4016 error ("incorrect number of vector CONSTRUCTOR"
4017 " elements");
4018 debug_generic_stmt (rhs1);
4019 return true;
4020 }
4021 }
4022 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4023 elt_t))
4024 {
4025 error ("incorrect type of vector CONSTRUCTOR elements");
4026 debug_generic_stmt (rhs1);
4027 return true;
4028 }
4029 else if (CONSTRUCTOR_NELTS (rhs1)
4030 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4031 {
4032 error ("incorrect number of vector CONSTRUCTOR elements");
4033 debug_generic_stmt (rhs1);
4034 return true;
4035 }
4036 }
4037 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4038 {
4039 error ("incorrect type of vector CONSTRUCTOR elements");
4040 debug_generic_stmt (rhs1);
4041 return true;
4042 }
4043 if (elt_i != NULL_TREE
4044 && (TREE_CODE (elt_t) == VECTOR_TYPE
4045 || TREE_CODE (elt_i) != INTEGER_CST
4046 || compare_tree_int (elt_i, i) != 0))
4047 {
4048 error ("vector CONSTRUCTOR with non-NULL element index");
4049 debug_generic_stmt (rhs1);
4050 return true;
4051 }
4052 }
4053 }
4054 return res;
4055 case OBJ_TYPE_REF:
4056 case ASSERT_EXPR:
4057 case WITH_SIZE_EXPR:
4058 /* FIXME. */
4059 return res;
4060
4061 default:;
4062 }
4063
4064 return res;
4065 }
4066
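/* A minimal illustrative sketch, not used by the verifier itself: it
   restates the vector CONSTRUCTOR shape rules checked above as a
   standalone predicate.  The function name is hypothetical; only macros
   already used in this file are assumed.  */

static bool ATTRIBUTE_UNUSED
example_vector_ctor_shape_ok_p (tree ctor)
{
  tree type = TREE_TYPE (ctor);
  unsigned int i;
  tree elt_i, elt_v;

  /* Empty CONSTRUCTORs mean zero fill and are always fine.  */
  if (TREE_CODE (type) != VECTOR_TYPE || CONSTRUCTOR_NELTS (ctor) == 0)
    return true;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, elt_i, elt_v)
    {
      tree elt_t = TREE_TYPE (elt_v);
      if (TREE_CODE (elt_t) == VECTOR_TYPE)
	{
	  /* Sub-vector elements must tile the outer vector exactly
	     and carry no explicit indexes.  */
	  if (elt_i != NULL_TREE
	      || (CONSTRUCTOR_NELTS (ctor) * TYPE_VECTOR_SUBPARTS (elt_t)
		  != TYPE_VECTOR_SUBPARTS (type)))
	    return false;
	}
      /* Scalar elements may be fewer than the subparts (zero fill),
	 but never more.  */
      else if (CONSTRUCTOR_NELTS (ctor) > TYPE_VECTOR_SUBPARTS (type))
	return false;
    }
  return true;
}
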
4067 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4068 is a problem, otherwise false. */
4069
4070 static bool
4071 verify_gimple_assign (gimple stmt)
4072 {
4073 switch (gimple_assign_rhs_class (stmt))
4074 {
4075 case GIMPLE_SINGLE_RHS:
4076 return verify_gimple_assign_single (stmt);
4077
4078 case GIMPLE_UNARY_RHS:
4079 return verify_gimple_assign_unary (stmt);
4080
4081 case GIMPLE_BINARY_RHS:
4082 return verify_gimple_assign_binary (stmt);
4083
4084 case GIMPLE_TERNARY_RHS:
4085 return verify_gimple_assign_ternary (stmt);
4086
4087 default:
4088 gcc_unreachable ();
4089 }
4090 }
4091
4092 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4093 is a problem, otherwise false. */
4094
4095 static bool
4096 verify_gimple_return (gimple stmt)
4097 {
4098 tree op = gimple_return_retval (stmt);
4099 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4100
4101 /* We cannot test for present return values as we do not fix up missing
4102 return values from the original source. */
4103 if (op == NULL)
4104 return false;
4105
4106 if (!is_gimple_val (op)
4107 && TREE_CODE (op) != RESULT_DECL)
4108 {
4109 error ("invalid operand in return statement");
4110 debug_generic_stmt (op);
4111 return true;
4112 }
4113
4114 if ((TREE_CODE (op) == RESULT_DECL
4115 && DECL_BY_REFERENCE (op))
4116 || (TREE_CODE (op) == SSA_NAME
4117 && SSA_NAME_VAR (op)
4118 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4119 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4120 op = TREE_TYPE (op);
4121
4122 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4123 {
4124 error ("invalid conversion in return statement");
4125 debug_generic_stmt (restype);
4126 debug_generic_stmt (TREE_TYPE (op));
4127 return true;
4128 }
4129
4130 return false;
4131 }
4132
4133
4134 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4135 is a problem, otherwise false. */
4136
4137 static bool
4138 verify_gimple_goto (gimple stmt)
4139 {
4140 tree dest = gimple_goto_dest (stmt);
4141
4142 /* ??? We have two canonical forms of direct goto destinations, a
4143 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4144 if (TREE_CODE (dest) != LABEL_DECL
4145 && (!is_gimple_val (dest)
4146 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4147 {
4148 error ("goto destination is neither a label nor a pointer");
4149 return true;
4150 }
4151
4152 return false;
4153 }
4154
4155 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4156 is a problem, otherwise false. */
4157
4158 static bool
4159 verify_gimple_switch (gimple stmt)
4160 {
4161 unsigned int i, n;
4162 tree elt, prev_upper_bound = NULL_TREE;
4163 tree index_type, elt_type = NULL_TREE;
4164
4165 if (!is_gimple_val (gimple_switch_index (stmt)))
4166 {
4167 error ("invalid operand to switch statement");
4168 debug_generic_stmt (gimple_switch_index (stmt));
4169 return true;
4170 }
4171
4172 index_type = TREE_TYPE (gimple_switch_index (stmt));
4173 if (! INTEGRAL_TYPE_P (index_type))
4174 {
4175 error ("non-integral type switch statement");
4176 debug_generic_expr (index_type);
4177 return true;
4178 }
4179
4180 elt = gimple_switch_label (stmt, 0);
4181 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4182 {
4183 error ("invalid default case label in switch statement");
4184 debug_generic_expr (elt);
4185 return true;
4186 }
4187
4188 n = gimple_switch_num_labels (stmt);
4189 for (i = 1; i < n; i++)
4190 {
4191 elt = gimple_switch_label (stmt, i);
4192
4193 if (! CASE_LOW (elt))
4194 {
4195 error ("invalid case label in switch statement");
4196 debug_generic_expr (elt);
4197 return true;
4198 }
4199 if (CASE_HIGH (elt)
4200 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4201 {
4202 error ("invalid case range in switch statement");
4203 debug_generic_expr (elt);
4204 return true;
4205 }
4206
4207 if (elt_type)
4208 {
4209 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4210 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4211 {
4212 error ("type mismatch for case label in switch statement");
4213 debug_generic_expr (elt);
4214 return true;
4215 }
4216 }
4217 else
4218 {
4219 elt_type = TREE_TYPE (CASE_LOW (elt));
4220 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4221 {
4222 error ("type precision mismatch in switch statement");
4223 return true;
4224 }
4225 }
4226
4227 if (prev_upper_bound)
4228 {
4229 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4230 {
4231 error ("case labels not sorted in switch statement");
4232 return true;
4233 }
4234 }
4235
4236 prev_upper_bound = CASE_HIGH (elt);
4237 if (! prev_upper_bound)
4238 prev_upper_bound = CASE_LOW (elt);
4239 }
4240
4241 return false;
4242 }
4243
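/* A hedged sketch (hypothetical helper, not part of the verifier): the
   invariant checked above is that label 0 of a GIMPLE_SWITCH is the
   default case, recognizable by NULL CASE_LOW and CASE_HIGH.  */

static bool ATTRIBUTE_UNUSED
example_switch_default_first_p (gimple stmt)
{
  tree elt = gimple_switch_label (stmt, 0);
  return CASE_LOW (elt) == NULL_TREE && CASE_HIGH (elt) == NULL_TREE;
}
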
4244 /* Verify a gimple debug statement STMT.
4245 Returns true if anything is wrong. */
4246
4247 static bool
4248 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4249 {
4250 /* There isn't much that could be wrong in a gimple debug stmt. A
4251 gimple debug bind stmt, for example, maps a tree (usually a
4252 VAR_DECL or a PARM_DECL, but possibly some scalarized component or
4253 member of an aggregate type) to another tree that can be an
4254 arbitrary expression. These stmts expand into debug insns, and are
4255 converted to debug notes by var-tracking.c. */
4256 return false;
4257 }
4258
4259 /* Verify a gimple label statement STMT.
4260 Returns true if anything is wrong. */
4261
4262 static bool
4263 verify_gimple_label (gimple stmt)
4264 {
4265 tree decl = gimple_label_label (stmt);
4266 int uid;
4267 bool err = false;
4268
4269 if (TREE_CODE (decl) != LABEL_DECL)
4270 return true;
4271 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4272 && DECL_CONTEXT (decl) != current_function_decl)
4273 {
4274 error ("label's context is not the current function decl");
4275 err |= true;
4276 }
4277
4278 uid = LABEL_DECL_UID (decl);
4279 if (cfun->cfg
4280 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4281 {
4282 error ("incorrect entry in label_to_block_map");
4283 err |= true;
4284 }
4285
4286 uid = EH_LANDING_PAD_NR (decl);
4287 if (uid)
4288 {
4289 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4290 if (decl != lp->post_landing_pad)
4291 {
4292 error ("incorrect setting of landing pad number");
4293 err |= true;
4294 }
4295 }
4296
4297 return err;
4298 }
4299
4300 /* Verify the GIMPLE statement STMT. Returns true if there is an
4301 error, otherwise false. */
4302
4303 static bool
4304 verify_gimple_stmt (gimple stmt)
4305 {
4306 switch (gimple_code (stmt))
4307 {
4308 case GIMPLE_ASSIGN:
4309 return verify_gimple_assign (stmt);
4310
4311 case GIMPLE_LABEL:
4312 return verify_gimple_label (stmt);
4313
4314 case GIMPLE_CALL:
4315 return verify_gimple_call (stmt);
4316
4317 case GIMPLE_COND:
4318 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4319 {
4320 error ("invalid comparison code in gimple cond");
4321 return true;
4322 }
4323 if (!(!gimple_cond_true_label (stmt)
4324 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4325 || !(!gimple_cond_false_label (stmt)
4326 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4327 {
4328 error ("invalid labels in gimple cond");
4329 return true;
4330 }
4331
4332 return verify_gimple_comparison (boolean_type_node,
4333 gimple_cond_lhs (stmt),
4334 gimple_cond_rhs (stmt));
4335
4336 case GIMPLE_GOTO:
4337 return verify_gimple_goto (stmt);
4338
4339 case GIMPLE_SWITCH:
4340 return verify_gimple_switch (stmt);
4341
4342 case GIMPLE_RETURN:
4343 return verify_gimple_return (stmt);
4344
4345 case GIMPLE_ASM:
4346 return false;
4347
4348 case GIMPLE_TRANSACTION:
4349 return verify_gimple_transaction (stmt);
4350
4351 /* Tuples that do not have tree operands. */
4352 case GIMPLE_NOP:
4353 case GIMPLE_PREDICT:
4354 case GIMPLE_RESX:
4355 case GIMPLE_EH_DISPATCH:
4356 case GIMPLE_EH_MUST_NOT_THROW:
4357 return false;
4358
4359 CASE_GIMPLE_OMP:
4360 /* OpenMP directives are validated by the FE and never operated
4361 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4362 non-gimple expressions when the main index variable has had
4363 its address taken. This does not affect the loop itself
4364 because the header of a GIMPLE_OMP_FOR is merely used to determine
4365 how to set up the parallel iteration. */
4366 return false;
4367
4368 case GIMPLE_DEBUG:
4369 return verify_gimple_debug (stmt);
4370
4371 default:
4372 gcc_unreachable ();
4373 }
4374 }
4375
4376 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4377 and false otherwise. */
4378
4379 static bool
4380 verify_gimple_phi (gimple phi)
4381 {
4382 bool err = false;
4383 unsigned i;
4384 tree phi_result = gimple_phi_result (phi);
4385 bool virtual_p;
4386
4387 if (!phi_result)
4388 {
4389 error ("invalid PHI result");
4390 return true;
4391 }
4392
4393 virtual_p = virtual_operand_p (phi_result);
4394 if (TREE_CODE (phi_result) != SSA_NAME
4395 || (virtual_p
4396 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4397 {
4398 error ("invalid PHI result");
4399 err = true;
4400 }
4401
4402 for (i = 0; i < gimple_phi_num_args (phi); i++)
4403 {
4404 tree t = gimple_phi_arg_def (phi, i);
4405
4406 if (!t)
4407 {
4408 error ("missing PHI def");
4409 err |= true;
4410 continue;
4411 }
4412 /* Addressable variables do have SSA_NAMEs but they
4413 are not considered gimple values. */
4414 else if ((TREE_CODE (t) == SSA_NAME
4415 && virtual_p != virtual_operand_p (t))
4416 || (virtual_p
4417 && (TREE_CODE (t) != SSA_NAME
4418 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4419 || (!virtual_p
4420 && !is_gimple_val (t)))
4421 {
4422 error ("invalid PHI argument");
4423 debug_generic_expr (t);
4424 err |= true;
4425 }
4426 #ifdef ENABLE_TYPES_CHECKING
4427 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4428 {
4429 error ("incompatible types in PHI argument %u", i);
4430 debug_generic_stmt (TREE_TYPE (phi_result));
4431 debug_generic_stmt (TREE_TYPE (t));
4432 err |= true;
4433 }
4434 #endif
4435 }
4436
4437 return err;
4438 }
4439
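/* A minimal sketch of the virtual-PHI invariant verified above: the
   result (and, via the argument loop, every argument) of a virtual PHI
   must be an SSA name of the function's single virtual operand.  The
   helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_virtual_phi_p (gimple phi)
{
  tree result = gimple_phi_result (phi);
  return (result
	  && TREE_CODE (result) == SSA_NAME
	  && virtual_operand_p (result)
	  && SSA_NAME_VAR (result) == gimple_vop (cfun));
}
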
4440 /* Verify the GIMPLE statements inside the sequence STMTS. */
4441
4442 static bool
4443 verify_gimple_in_seq_2 (gimple_seq stmts)
4444 {
4445 gimple_stmt_iterator ittr;
4446 bool err = false;
4447
4448 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4449 {
4450 gimple stmt = gsi_stmt (ittr);
4451
4452 switch (gimple_code (stmt))
4453 {
4454 case GIMPLE_BIND:
4455 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4456 break;
4457
4458 case GIMPLE_TRY:
4459 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4460 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4461 break;
4462
4463 case GIMPLE_EH_FILTER:
4464 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4465 break;
4466
4467 case GIMPLE_EH_ELSE:
4468 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4469 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4470 break;
4471
4472 case GIMPLE_CATCH:
4473 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4474 break;
4475
4476 case GIMPLE_TRANSACTION:
4477 err |= verify_gimple_transaction (stmt);
4478 break;
4479
4480 default:
4481 {
4482 bool err2 = verify_gimple_stmt (stmt);
4483 if (err2)
4484 debug_gimple_stmt (stmt);
4485 err |= err2;
4486 }
4487 }
4488 }
4489
4490 return err;
4491 }
4492
4493 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4494 is a problem, otherwise false. */
4495
4496 static bool
4497 verify_gimple_transaction (gimple stmt)
4498 {
4499 tree lab = gimple_transaction_label (stmt);
4500 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4501 return true;
4502 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4503 }
4504
4505
4506 /* Verify the GIMPLE statements inside the statement list STMTS. */
4507
4508 DEBUG_FUNCTION void
4509 verify_gimple_in_seq (gimple_seq stmts)
4510 {
4511 timevar_push (TV_TREE_STMT_VERIFY);
4512 if (verify_gimple_in_seq_2 (stmts))
4513 internal_error ("verify_gimple failed");
4514 timevar_pop (TV_TREE_STMT_VERIFY);
4515 }
4516
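/* Both verify_gimple_in_seq above and verify_gimple_in_cfg below are
   DEBUG_FUNCTIONs, so they are convenient to call by hand from a
   debugger, e.g.
     (gdb) call verify_gimple_in_seq (gimple_body (cfun->decl))
   (a sketch; gimple_body is assumed to be available from gimple.h).  */
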
4517 /* Return true when T can be shared. */
4518
4519 static bool
4520 tree_node_can_be_shared (tree t)
4521 {
4522 if (IS_TYPE_OR_DECL_P (t)
4523 || is_gimple_min_invariant (t)
4524 || TREE_CODE (t) == SSA_NAME
4525 || t == error_mark_node
4526 || TREE_CODE (t) == IDENTIFIER_NODE)
4527 return true;
4528
4529 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4530 return true;
4531
4535 return false;
4536 }
4537
4538 /* Called via walk_tree. Verify tree sharing. */
4539
4540 static tree
4541 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4542 {
4543 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4544
4545 if (tree_node_can_be_shared (*tp))
4546 {
4547 *walk_subtrees = false;
4548 return NULL;
4549 }
4550
4551 if (pointer_set_insert (visited, *tp))
4552 return *tp;
4553
4554 return NULL;
4555 }
4556
4557 /* Called via walk_gimple_stmt. Verify tree sharing. */
4558
4559 static tree
4560 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4561 {
4562 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4563 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4564 }
4565
4566 static bool eh_error_found;
4567 static int
4568 verify_eh_throw_stmt_node (void **slot, void *data)
4569 {
4570 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4571 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4572
4573 if (!pointer_set_contains (visited, node->stmt))
4574 {
4575 error ("dead STMT in EH table");
4576 debug_gimple_stmt (node->stmt);
4577 eh_error_found = true;
4578 }
4579 return 1;
4580 }
4581
4582 /* Verify that the block of location LOC is in BLOCKS. */
4583
4584 static bool
4585 verify_location (pointer_set_t *blocks, location_t loc)
4586 {
4587 tree block = LOCATION_BLOCK (loc);
4588 if (block != NULL_TREE
4589 && !pointer_set_contains (blocks, block))
4590 {
4591 error ("location references block not in block tree");
4592 return true;
4593 }
4594 if (block != NULL_TREE)
4595 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4596 return false;
4597 }
4598
4599 /* Called via walk_tree. Verify that expressions have no blocks. */
4600
4601 static tree
4602 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4603 {
4604 if (!EXPR_P (*tp))
4605 {
4606 *walk_subtrees = false;
4607 return NULL;
4608 }
4609
4610 location_t loc = EXPR_LOCATION (*tp);
4611 if (LOCATION_BLOCK (loc) != NULL)
4612 return *tp;
4613
4614 return NULL;
4615 }
4616
4617 /* Called via walk_tree. Verify locations of expressions. */
4618
4619 static tree
4620 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4621 {
4622 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4623
4624 if (TREE_CODE (*tp) == VAR_DECL
4625 && DECL_HAS_DEBUG_EXPR_P (*tp))
4626 {
4627 tree t = DECL_DEBUG_EXPR (*tp);
4628 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4629 if (addr)
4630 return addr;
4631 }
4632 if ((TREE_CODE (*tp) == VAR_DECL
4633 || TREE_CODE (*tp) == PARM_DECL
4634 || TREE_CODE (*tp) == RESULT_DECL)
4635 && DECL_HAS_VALUE_EXPR_P (*tp))
4636 {
4637 tree t = DECL_VALUE_EXPR (*tp);
4638 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4639 if (addr)
4640 return addr;
4641 }
4642
4643 if (!EXPR_P (*tp))
4644 {
4645 *walk_subtrees = false;
4646 return NULL;
4647 }
4648
4649 location_t loc = EXPR_LOCATION (*tp);
4650 if (verify_location (blocks, loc))
4651 return *tp;
4652
4653 return NULL;
4654 }
4655
4656 /* Called via walk_gimple_op. Verify locations of expressions. */
4657
4658 static tree
4659 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4660 {
4661 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4662 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4663 }
4664
4665 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4666
4667 static void
4668 collect_subblocks (pointer_set_t *blocks, tree block)
4669 {
4670 tree t;
4671 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4672 {
4673 pointer_set_insert (blocks, t);
4674 collect_subblocks (blocks, t);
4675 }
4676 }
4677
4678 /* Verify the GIMPLE statements in the CFG of FN. */
4679
4680 DEBUG_FUNCTION void
4681 verify_gimple_in_cfg (struct function *fn)
4682 {
4683 basic_block bb;
4684 bool err = false;
4685 struct pointer_set_t *visited, *visited_stmts, *blocks;
4686
4687 timevar_push (TV_TREE_STMT_VERIFY);
4688 visited = pointer_set_create ();
4689 visited_stmts = pointer_set_create ();
4690
4691 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4692 blocks = pointer_set_create ();
4693 if (DECL_INITIAL (fn->decl))
4694 {
4695 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4696 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4697 }
4698
4699 FOR_EACH_BB_FN (bb, fn)
4700 {
4701 gimple_stmt_iterator gsi;
4702
4703 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4704 {
4705 gimple phi = gsi_stmt (gsi);
4706 bool err2 = false;
4707 unsigned i;
4708
4709 pointer_set_insert (visited_stmts, phi);
4710
4711 if (gimple_bb (phi) != bb)
4712 {
4713 error ("gimple_bb (phi) is set to a wrong basic block");
4714 err2 = true;
4715 }
4716
4717 err2 |= verify_gimple_phi (phi);
4718
4719 /* Only PHI arguments have locations. */
4720 if (gimple_location (phi) != UNKNOWN_LOCATION)
4721 {
4722 error ("PHI node with location");
4723 err2 = true;
4724 }
4725
4726 for (i = 0; i < gimple_phi_num_args (phi); i++)
4727 {
4728 tree arg = gimple_phi_arg_def (phi, i);
4729 tree addr = walk_tree (&arg, verify_node_sharing_1,
4730 visited, NULL);
4731 if (addr)
4732 {
4733 error ("incorrect sharing of tree nodes");
4734 debug_generic_expr (addr);
4735 err2 |= true;
4736 }
4737 location_t loc = gimple_phi_arg_location (phi, i);
4738 if (virtual_operand_p (gimple_phi_result (phi))
4739 && loc != UNKNOWN_LOCATION)
4740 {
4741 error ("virtual PHI with argument locations");
4742 err2 = true;
4743 }
4744 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4745 if (addr)
4746 {
4747 debug_generic_expr (addr);
4748 err2 = true;
4749 }
4750 err2 |= verify_location (blocks, loc);
4751 }
4752
4753 if (err2)
4754 debug_gimple_stmt (phi);
4755 err |= err2;
4756 }
4757
4758 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4759 {
4760 gimple stmt = gsi_stmt (gsi);
4761 bool err2 = false;
4762 struct walk_stmt_info wi;
4763 tree addr;
4764 int lp_nr;
4765
4766 pointer_set_insert (visited_stmts, stmt);
4767
4768 if (gimple_bb (stmt) != bb)
4769 {
4770 error ("gimple_bb (stmt) is set to a wrong basic block");
4771 err2 = true;
4772 }
4773
4774 err2 |= verify_gimple_stmt (stmt);
4775 err2 |= verify_location (blocks, gimple_location (stmt));
4776
4777 memset (&wi, 0, sizeof (wi));
4778 wi.info = (void *) visited;
4779 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4780 if (addr)
4781 {
4782 error ("incorrect sharing of tree nodes");
4783 debug_generic_expr (addr);
4784 err2 |= true;
4785 }
4786
4787 memset (&wi, 0, sizeof (wi));
4788 wi.info = (void *) blocks;
4789 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4790 if (addr)
4791 {
4792 debug_generic_expr (addr);
4793 err2 |= true;
4794 }
4795
4796 /* ??? Instead of not checking these stmts at all the walker
4797 should know its context via wi. */
4798 if (!is_gimple_debug (stmt)
4799 && !is_gimple_omp (stmt))
4800 {
4801 memset (&wi, 0, sizeof (wi));
4802 addr = walk_gimple_op (stmt, verify_expr, &wi);
4803 if (addr)
4804 {
4805 debug_generic_expr (addr);
4806 inform (gimple_location (stmt), "in statement");
4807 err2 |= true;
4808 }
4809 }
4810
4811 /* If the statement is marked as part of an EH region, then it is
4812 expected that the statement could throw. Verify that when we
4813 have optimizations that simplify statements such that we prove
4814 that they cannot throw, that we update other data structures
4815 to match. */
4816 lp_nr = lookup_stmt_eh_lp (stmt);
4817 if (lp_nr != 0)
4818 {
4819 if (!stmt_could_throw_p (stmt))
4820 {
4821 error ("statement marked for throw, but doesn%'t");
4822 err2 |= true;
4823 }
4824 else if (lp_nr > 0
4825 && !gsi_one_before_end_p (gsi)
4826 && stmt_can_throw_internal (stmt))
4827 {
4828 error ("statement marked for throw in middle of block");
4829 err2 |= true;
4830 }
4831 }
4832
4833 if (err2)
4834 debug_gimple_stmt (stmt);
4835 err |= err2;
4836 }
4837 }
4838
4839 eh_error_found = false;
4840 if (get_eh_throw_stmt_table (cfun))
4841 htab_traverse (get_eh_throw_stmt_table (cfun),
4842 verify_eh_throw_stmt_node,
4843 visited_stmts);
4844
4845 if (err || eh_error_found)
4846 internal_error ("verify_gimple failed");
4847
4848 pointer_set_destroy (visited);
4849 pointer_set_destroy (visited_stmts);
4850 pointer_set_destroy (blocks);
4851 verify_histograms ();
4852 timevar_pop (TV_TREE_STMT_VERIFY);
4853 }
4854
4855
4856 /* Verifies that the flow information is OK. */
4857
4858 static int
4859 gimple_verify_flow_info (void)
4860 {
4861 int err = 0;
4862 basic_block bb;
4863 gimple_stmt_iterator gsi;
4864 gimple stmt;
4865 edge e;
4866 edge_iterator ei;
4867
4868 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4869 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4870 {
4871 error ("ENTRY_BLOCK has IL associated with it");
4872 err = 1;
4873 }
4874
4875 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4876 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4877 {
4878 error ("EXIT_BLOCK has IL associated with it");
4879 err = 1;
4880 }
4881
4882 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4883 if (e->flags & EDGE_FALLTHRU)
4884 {
4885 error ("fallthru to exit from bb %d", e->src->index);
4886 err = 1;
4887 }
4888
4889 FOR_EACH_BB (bb)
4890 {
4891 bool found_ctrl_stmt = false;
4892
4893 stmt = NULL;
4894
4895 /* Skip labels on the start of basic block. */
4896 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4897 {
4898 tree label;
4899 gimple prev_stmt = stmt;
4900
4901 stmt = gsi_stmt (gsi);
4902
4903 if (gimple_code (stmt) != GIMPLE_LABEL)
4904 break;
4905
4906 label = gimple_label_label (stmt);
4907 if (prev_stmt && DECL_NONLOCAL (label))
4908 {
4909 error ("nonlocal label ");
4910 print_generic_expr (stderr, label, 0);
4911 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4912 bb->index);
4913 err = 1;
4914 }
4915
4916 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4917 {
4918 error ("EH landing pad label ");
4919 print_generic_expr (stderr, label, 0);
4920 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4921 bb->index);
4922 err = 1;
4923 }
4924
4925 if (label_to_block (label) != bb)
4926 {
4927 error ("label ");
4928 print_generic_expr (stderr, label, 0);
4929 fprintf (stderr, " to block does not match in bb %d",
4930 bb->index);
4931 err = 1;
4932 }
4933
4934 if (decl_function_context (label) != current_function_decl)
4935 {
4936 error ("label ");
4937 print_generic_expr (stderr, label, 0);
4938 fprintf (stderr, " has incorrect context in bb %d",
4939 bb->index);
4940 err = 1;
4941 }
4942 }
4943
4944 /* Verify that body of basic block BB is free of control flow. */
4945 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4946 {
4947 gimple stmt = gsi_stmt (gsi);
4948
4949 if (found_ctrl_stmt)
4950 {
4951 error ("control flow in the middle of basic block %d",
4952 bb->index);
4953 err = 1;
4954 }
4955
4956 if (stmt_ends_bb_p (stmt))
4957 found_ctrl_stmt = true;
4958
4959 if (gimple_code (stmt) == GIMPLE_LABEL)
4960 {
4961 error ("label ");
4962 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4963 fprintf (stderr, " in the middle of basic block %d", bb->index);
4964 err = 1;
4965 }
4966 }
4967
4968 gsi = gsi_last_bb (bb);
4969 if (gsi_end_p (gsi))
4970 continue;
4971
4972 stmt = gsi_stmt (gsi);
4973
4974 if (gimple_code (stmt) == GIMPLE_LABEL)
4975 continue;
4976
4977 err |= verify_eh_edges (stmt);
4978
4979 if (is_ctrl_stmt (stmt))
4980 {
4981 FOR_EACH_EDGE (e, ei, bb->succs)
4982 if (e->flags & EDGE_FALLTHRU)
4983 {
4984 error ("fallthru edge after a control statement in bb %d",
4985 bb->index);
4986 err = 1;
4987 }
4988 }
4989
4990 if (gimple_code (stmt) != GIMPLE_COND)
4991 {
4992 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4993 after anything other than a GIMPLE_COND statement. */
4994 FOR_EACH_EDGE (e, ei, bb->succs)
4995 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4996 {
4997 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4998 bb->index);
4999 err = 1;
5000 }
5001 }
5002
5003 switch (gimple_code (stmt))
5004 {
5005 case GIMPLE_COND:
5006 {
5007 edge true_edge;
5008 edge false_edge;
5009
5010 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5011
5012 if (!true_edge
5013 || !false_edge
5014 || !(true_edge->flags & EDGE_TRUE_VALUE)
5015 || !(false_edge->flags & EDGE_FALSE_VALUE)
5016 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5017 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5018 || EDGE_COUNT (bb->succs) >= 3)
5019 {
5020 error ("wrong outgoing edge flags at end of bb %d",
5021 bb->index);
5022 err = 1;
5023 }
5024 }
5025 break;
5026
5027 case GIMPLE_GOTO:
5028 if (simple_goto_p (stmt))
5029 {
5030 error ("explicit goto at end of bb %d", bb->index);
5031 err = 1;
5032 }
5033 else
5034 {
5035 /* FIXME. We should double check that the labels in the
5036 destination blocks have their address taken. */
5037 FOR_EACH_EDGE (e, ei, bb->succs)
5038 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5039 | EDGE_FALSE_VALUE))
5040 || !(e->flags & EDGE_ABNORMAL))
5041 {
5042 error ("wrong outgoing edge flags at end of bb %d",
5043 bb->index);
5044 err = 1;
5045 }
5046 }
5047 break;
5048
5049 case GIMPLE_CALL:
5050 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5051 break;
5052 /* ... fallthru ... */
5053 case GIMPLE_RETURN:
5054 if (!single_succ_p (bb)
5055 || (single_succ_edge (bb)->flags
5056 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5057 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5058 {
5059 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5060 err = 1;
5061 }
5062 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5063 {
5064 error ("return edge does not point to exit in bb %d",
5065 bb->index);
5066 err = 1;
5067 }
5068 break;
5069
5070 case GIMPLE_SWITCH:
5071 {
5072 tree prev;
5073 edge e;
5074 size_t i, n;
5075
5076 n = gimple_switch_num_labels (stmt);
5077
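	    /* AUX protocol used below: (void *)1 marks a block as the
	       target of some case label; (void *)2 marks it once an
	       outgoing edge to it has been seen; the marks are cleared
	       again at the end.  */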
5078 /* Mark all the destination basic blocks. */
5079 for (i = 0; i < n; ++i)
5080 {
5081 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5082 basic_block label_bb = label_to_block (lab);
5083 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5084 label_bb->aux = (void *)1;
5085 }
5086
5087 /* Verify that the case labels are sorted. */
5088 prev = gimple_switch_label (stmt, 0);
5089 for (i = 1; i < n; ++i)
5090 {
5091 tree c = gimple_switch_label (stmt, i);
5092 if (!CASE_LOW (c))
5093 {
5094 error ("found default case not at the start of "
5095 "case vector");
5096 err = 1;
5097 continue;
5098 }
5099 if (CASE_LOW (prev)
5100 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5101 {
5102 error ("case labels not sorted: ");
5103 print_generic_expr (stderr, prev, 0);
5104 fprintf (stderr," is greater than ");
5105 print_generic_expr (stderr, c, 0);
5106 fprintf (stderr," but comes before it.\n");
5107 err = 1;
5108 }
5109 prev = c;
5110 }
5111 /* VRP will remove the default case if it can prove it will
5112 never be executed. So do not verify there always exists
5113 a default case here. */
5114
5115 FOR_EACH_EDGE (e, ei, bb->succs)
5116 {
5117 if (!e->dest->aux)
5118 {
5119 error ("extra outgoing edge %d->%d",
5120 bb->index, e->dest->index);
5121 err = 1;
5122 }
5123
5124 e->dest->aux = (void *)2;
5125 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5126 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5127 {
5128 error ("wrong outgoing edge flags at end of bb %d",
5129 bb->index);
5130 err = 1;
5131 }
5132 }
5133
5134 /* Check that we have all of them. */
5135 for (i = 0; i < n; ++i)
5136 {
5137 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5138 basic_block label_bb = label_to_block (lab);
5139
5140 if (label_bb->aux != (void *)2)
5141 {
5142 error ("missing edge %i->%i", bb->index, label_bb->index);
5143 err = 1;
5144 }
5145 }
5146
5147 FOR_EACH_EDGE (e, ei, bb->succs)
5148 e->dest->aux = (void *)0;
5149 }
5150 break;
5151
5152 case GIMPLE_EH_DISPATCH:
5153 err |= verify_eh_dispatch_edge (stmt);
5154 break;
5155
5156 default:
5157 break;
5158 }
5159 }
5160
5161 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5162 verify_dominators (CDI_DOMINATORS);
5163
5164 return err;
5165 }
5166
5167
5168 /* Updates phi nodes after creating a forwarder block joined
5169 by edge FALLTHRU. */
5170
5171 static void
5172 gimple_make_forwarder_block (edge fallthru)
5173 {
5174 edge e;
5175 edge_iterator ei;
5176 basic_block dummy, bb;
5177 tree var;
5178 gimple_stmt_iterator gsi;
5179
5180 dummy = fallthru->src;
5181 bb = fallthru->dest;
5182
5183 if (single_pred_p (bb))
5184 return;
5185
5186 /* If we redirected a branch we must create new PHI nodes at the
5187 start of BB. */
5188 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5189 {
5190 gimple phi, new_phi;
5191
5192 phi = gsi_stmt (gsi);
5193 var = gimple_phi_result (phi);
5194 new_phi = create_phi_node (var, bb);
5195 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5196 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5197 UNKNOWN_LOCATION);
5198 }
5199
5200 /* Add the arguments we have stored on edges. */
5201 FOR_EACH_EDGE (e, ei, bb->preds)
5202 {
5203 if (e == fallthru)
5204 continue;
5205
5206 flush_pending_stmts (e);
5207 }
5208 }
5209
5210
5211 /* Return a non-special label in the head of basic block BB.
5212 Create one if it doesn't exist. */
5213
5214 tree
5215 gimple_block_label (basic_block bb)
5216 {
5217 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5218 bool first = true;
5219 tree label;
5220 gimple stmt;
5221
5222 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5223 {
5224 stmt = gsi_stmt (i);
5225 if (gimple_code (stmt) != GIMPLE_LABEL)
5226 break;
5227 label = gimple_label_label (stmt);
5228 if (!DECL_NONLOCAL (label))
5229 {
5230 if (!first)
5231 gsi_move_before (&i, &s);
5232 return label;
5233 }
5234 }
5235
5236 label = create_artificial_label (UNKNOWN_LOCATION);
5237 stmt = gimple_build_label (label);
5238 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5239 return label;
5240 }
5241
5242
5243 /* Attempt to perform edge redirection by replacing a possibly complex
5244 jump instruction by a goto or by removing the jump completely.
5245 This can apply only if all edges now point to the same block. The
5246 parameters and return values are equivalent to
5247 redirect_edge_and_branch. */
5248
5249 static edge
5250 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5251 {
5252 basic_block src = e->src;
5253 gimple_stmt_iterator i;
5254 gimple stmt;
5255
5256 /* We can replace or remove a complex jump only when we have exactly
5257 two edges. */
5258 if (EDGE_COUNT (src->succs) != 2
5259 /* Verify that all targets will be TARGET. Specifically, the
5260 edge that is not E must also go to TARGET. */
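      /* EDGE_SUCC (src, 0) == e evaluates to 0 or 1, so the index
	 selects whichever successor edge is not E.  */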
5261 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5262 return NULL;
5263
5264 i = gsi_last_bb (src);
5265 if (gsi_end_p (i))
5266 return NULL;
5267
5268 stmt = gsi_stmt (i);
5269
5270 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5271 {
5272 gsi_remove (&i, true);
5273 e = ssa_redirect_edge (e, target);
5274 e->flags = EDGE_FALLTHRU;
5275 return e;
5276 }
5277
5278 return NULL;
5279 }
5280
5281
5282 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5283 edge representing the redirected branch. */
5284
5285 static edge
5286 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5287 {
5288 basic_block bb = e->src;
5289 gimple_stmt_iterator gsi;
5290 edge ret;
5291 gimple stmt;
5292
5293 if (e->flags & EDGE_ABNORMAL)
5294 return NULL;
5295
5296 if (e->dest == dest)
5297 return NULL;
5298
5299 if (e->flags & EDGE_EH)
5300 return redirect_eh_edge (e, dest);
5301
5302 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5303 {
5304 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5305 if (ret)
5306 return ret;
5307 }
5308
5309 gsi = gsi_last_bb (bb);
5310 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5311
5312 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5313 {
5314 case GIMPLE_COND:
5315 /* For COND_EXPR, we only need to redirect the edge. */
5316 break;
5317
5318 case GIMPLE_GOTO:
5319 /* No non-abnormal edges should lead from a non-simple goto, and
5320 simple ones should be represented implicitly. */
5321 gcc_unreachable ();
5322
5323 case GIMPLE_SWITCH:
5324 {
5325 tree label = gimple_block_label (dest);
5326 tree cases = get_cases_for_edge (e, stmt);
5327
5328 /* If we have a list of cases associated with E, then use it
5329 as it's a lot faster than walking the entire case vector. */
5330 if (cases)
5331 {
5332 edge e2 = find_edge (e->src, dest);
5333 tree last, first;
5334
5335 first = cases;
5336 while (cases)
5337 {
5338 last = cases;
5339 CASE_LABEL (cases) = label;
5340 cases = CASE_CHAIN (cases);
5341 }
5342
5343 /* If there was already an edge in the CFG, then we need
5344 to move all the cases associated with E to E2. */
5345 if (e2)
5346 {
5347 tree cases2 = get_cases_for_edge (e2, stmt);
5348
5349 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5350 CASE_CHAIN (cases2) = first;
5351 }
5352 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5353 }
5354 else
5355 {
5356 size_t i, n = gimple_switch_num_labels (stmt);
5357
5358 for (i = 0; i < n; i++)
5359 {
5360 tree elt = gimple_switch_label (stmt, i);
5361 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5362 CASE_LABEL (elt) = label;
5363 }
5364 }
5365 }
5366 break;
5367
5368 case GIMPLE_ASM:
5369 {
5370 int i, n = gimple_asm_nlabels (stmt);
5371 tree label = NULL;
5372
5373 for (i = 0; i < n; ++i)
5374 {
5375 tree cons = gimple_asm_label_op (stmt, i);
5376 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5377 {
5378 if (!label)
5379 label = gimple_block_label (dest);
5380 TREE_VALUE (cons) = label;
5381 }
5382 }
5383
5384 /* If we didn't find any label matching the former edge in the
5385 asm labels, we must be redirecting the fallthrough
5386 edge. */
5387 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5388 }
5389 break;
5390
5391 case GIMPLE_RETURN:
5392 gsi_remove (&gsi, true);
5393 e->flags |= EDGE_FALLTHRU;
5394 break;
5395
5396 case GIMPLE_OMP_RETURN:
5397 case GIMPLE_OMP_CONTINUE:
5398 case GIMPLE_OMP_SECTIONS_SWITCH:
5399 case GIMPLE_OMP_FOR:
5400 /* The edges from OMP constructs can be simply redirected. */
5401 break;
5402
5403 case GIMPLE_EH_DISPATCH:
5404 if (!(e->flags & EDGE_FALLTHRU))
5405 redirect_eh_dispatch_edge (stmt, e, dest);
5406 break;
5407
5408 case GIMPLE_TRANSACTION:
5409 /* The ABORT edge has a stored label associated with it, otherwise
5410 the edges are simply redirectable. */
5411 if (e->flags == 0)
5412 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5413 break;
5414
5415 default:
5416 /* Otherwise it must be a fallthru edge, and we don't need to
5417 do anything besides redirecting it. */
5418 gcc_assert (e->flags & EDGE_FALLTHRU);
5419 break;
5420 }
5421
5422 /* Update/insert PHI nodes as necessary. */
5423
5424 /* Now update the edges in the CFG. */
5425 e = ssa_redirect_edge (e, dest);
5426
5427 return e;
5428 }
5429
5430 /* Returns true if it is possible to remove edge E by redirecting
5431 it to the destination of the other edge from E->src. */
5432
5433 static bool
5434 gimple_can_remove_branch_p (const_edge e)
5435 {
5436 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5437 return false;
5438
5439 return true;
5440 }
5441
5442 /* Simple wrapper, as we can always redirect fallthru edges. */
5443
5444 static basic_block
5445 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5446 {
5447 e = gimple_redirect_edge_and_branch (e, dest);
5448 gcc_assert (e);
5449
5450 return NULL;
5451 }
5452
5453
5454 /* Splits basic block BB after statement STMT (but at least after the
5455 labels). If STMT is NULL, BB is split just after the labels. */
5456
5457 static basic_block
5458 gimple_split_block (basic_block bb, void *stmt)
5459 {
5460 gimple_stmt_iterator gsi;
5461 gimple_stmt_iterator gsi_tgt;
5462 gimple act;
5463 gimple_seq list;
5464 basic_block new_bb;
5465 edge e;
5466 edge_iterator ei;
5467
5468 new_bb = create_empty_bb (bb);
5469
5470 /* Redirect the outgoing edges. */
5471 new_bb->succs = bb->succs;
5472 bb->succs = NULL;
5473 FOR_EACH_EDGE (e, ei, new_bb->succs)
5474 e->src = new_bb;
5475
5476 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5477 stmt = NULL;
5478
5479 /* Move everything from GSI to the new basic block. */
5480 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5481 {
5482 act = gsi_stmt (gsi);
5483 if (gimple_code (act) == GIMPLE_LABEL)
5484 continue;
5485
5486 if (!stmt)
5487 break;
5488
5489 if (stmt == act)
5490 {
5491 gsi_next (&gsi);
5492 break;
5493 }
5494 }
5495
5496 if (gsi_end_p (gsi))
5497 return new_bb;
5498
5499 /* Split the statement list - avoid re-creating new containers as this
5500 brings ugly quadratic memory consumption in the inliner.
5501 (We are still quadratic since we need to update stmt BB pointers,
5502 sadly.) */
5503 gsi_split_seq_before (&gsi, &list);
5504 set_bb_seq (new_bb, list);
5505 for (gsi_tgt = gsi_start (list);
5506 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5507 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5508
5509 return new_bb;
5510 }
5511
5512
5513 /* Moves basic block BB after block AFTER. */
5514
5515 static bool
5516 gimple_move_block_after (basic_block bb, basic_block after)
5517 {
5518 if (bb->prev_bb == after)
5519 return true;
5520
5521 unlink_block (bb);
5522 link_block (bb, after);
5523
5524 return true;
5525 }
5526
5527
5528 /* Return TRUE if block BB has no executable statements, otherwise return
5529 FALSE. */
5530
5531 static bool
5532 gimple_empty_block_p (basic_block bb)
5533 {
5534 /* BB must have no executable statements. */
5535 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5536 if (phi_nodes (bb))
5537 return false;
5538 if (gsi_end_p (gsi))
5539 return true;
5540 if (is_gimple_debug (gsi_stmt (gsi)))
5541 gsi_next_nondebug (&gsi);
5542 return gsi_end_p (gsi);
5543 }
5544
5545
5546 /* Split a basic block if it ends with a conditional branch and if the
5547 other part of the block is not empty. */
5548
5549 static basic_block
5550 gimple_split_block_before_cond_jump (basic_block bb)
5551 {
5552 gimple last, split_point;
5553 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5554 if (gsi_end_p (gsi))
5555 return NULL;
5556 last = gsi_stmt (gsi);
5557 if (gimple_code (last) != GIMPLE_COND
5558 && gimple_code (last) != GIMPLE_SWITCH)
5559 return NULL;
5560 gsi_prev_nondebug (&gsi);
5561 split_point = gsi_stmt (gsi);
5562 return split_block (bb, split_point)->dest;
5563 }
5564
5565
5566 /* Return true if basic block BB can be duplicated. */
5567
5568 static bool
5569 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5570 {
5571 return true;
5572 }
5573
5574 /* Create a duplicate of the basic block BB. NOTE: This does not
5575 preserve SSA form. */
5576
5577 static basic_block
5578 gimple_duplicate_bb (basic_block bb)
5579 {
5580 basic_block new_bb;
5581 gimple_stmt_iterator gsi, gsi_tgt;
5582 gimple_seq phis = phi_nodes (bb);
5583 gimple phi, stmt, copy;
5584
5585 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5586
5587 /* Copy the PHI nodes. We ignore PHI node arguments here because
5588 the incoming edges have not been set up yet. */
5589 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5590 {
5591 phi = gsi_stmt (gsi);
5592 copy = create_phi_node (NULL_TREE, new_bb);
5593 create_new_def_for (gimple_phi_result (phi), copy,
5594 gimple_phi_result_ptr (copy));
5595 gimple_set_uid (copy, gimple_uid (phi));
5596 }
5597
5598 gsi_tgt = gsi_start_bb (new_bb);
5599 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5600 {
5601 def_operand_p def_p;
5602 ssa_op_iter op_iter;
5603 tree lhs;
5604
5605 stmt = gsi_stmt (gsi);
5606 if (gimple_code (stmt) == GIMPLE_LABEL)
5607 continue;
5608
5609 /* Don't duplicate label debug stmts. */
5610 if (gimple_debug_bind_p (stmt)
5611 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5612 == LABEL_DECL)
5613 continue;
5614
5615 /* Create a new copy of STMT and duplicate STMT's virtual
5616 operands. */
5617 copy = gimple_copy (stmt);
5618 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5619
5620 maybe_duplicate_eh_stmt (copy, stmt);
5621 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5622
5623 /* When copying around a stmt writing into a local non-user
5624 aggregate, make sure it won't share stack slot with other
5625 vars. */
5626 lhs = gimple_get_lhs (stmt);
5627 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5628 {
5629 tree base = get_base_address (lhs);
5630 if (base
5631 && (TREE_CODE (base) == VAR_DECL
5632 || TREE_CODE (base) == RESULT_DECL)
5633 && DECL_IGNORED_P (base)
5634 && !TREE_STATIC (base)
5635 && !DECL_EXTERNAL (base)
5636 && (TREE_CODE (base) != VAR_DECL
5637 || !DECL_HAS_VALUE_EXPR_P (base)))
5638 DECL_NONSHAREABLE (base) = 1;
5639 }
5640
5641 /* Create new names for all the definitions created by COPY and
5642 add replacement mappings for each new name. */
5643 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5644 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5645 }
5646
5647 return new_bb;
5648 }
5649
5650 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5651
5652 static void
5653 add_phi_args_after_copy_edge (edge e_copy)
5654 {
5655 basic_block bb, bb_copy = e_copy->src, dest;
5656 edge e;
5657 edge_iterator ei;
5658 gimple phi, phi_copy;
5659 tree def;
5660 gimple_stmt_iterator psi, psi_copy;
5661
5662 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5663 return;
5664
5665 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5666
5667 if (e_copy->dest->flags & BB_DUPLICATED)
5668 dest = get_bb_original (e_copy->dest);
5669 else
5670 dest = e_copy->dest;
5671
5672 e = find_edge (bb, dest);
5673 if (!e)
5674 {
5675 /* During loop unrolling the target of the latch edge is copied.
5676 In this case we are not looking for the edge to DEST, but for
5677 the edge to the duplicated block whose original was DEST. */
5678 FOR_EACH_EDGE (e, ei, bb->succs)
5679 {
5680 if ((e->dest->flags & BB_DUPLICATED)
5681 && get_bb_original (e->dest) == dest)
5682 break;
5683 }
5684
5685 gcc_assert (e != NULL);
5686 }
5687
5688 for (psi = gsi_start_phis (e->dest),
5689 psi_copy = gsi_start_phis (e_copy->dest);
5690 !gsi_end_p (psi);
5691 gsi_next (&psi), gsi_next (&psi_copy))
5692 {
5693 phi = gsi_stmt (psi);
5694 phi_copy = gsi_stmt (psi_copy);
5695 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5696 add_phi_arg (phi_copy, def, e_copy,
5697 gimple_phi_arg_location_from_edge (phi, e));
5698 }
5699 }
5700
5701
5702 /* Basic block BB_COPY was created by code duplication. Add phi node
5703 arguments for edges going out of BB_COPY. The blocks that were
5704 duplicated have BB_DUPLICATED set. */
5705
5706 void
5707 add_phi_args_after_copy_bb (basic_block bb_copy)
5708 {
5709 edge e_copy;
5710 edge_iterator ei;
5711
5712 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5713 {
5714 add_phi_args_after_copy_edge (e_copy);
5715 }
5716 }
5717
5718 /* Blocks in REGION_COPY array of length N_REGION were created by
5719 duplication of basic blocks. Add phi node arguments for edges
5720 going from these blocks. If E_COPY is not NULL, also add
5721 phi node arguments for its destination. */
5722
5723 void
5724 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5725 edge e_copy)
5726 {
5727 unsigned i;
5728
5729 for (i = 0; i < n_region; i++)
5730 region_copy[i]->flags |= BB_DUPLICATED;
5731
5732 for (i = 0; i < n_region; i++)
5733 add_phi_args_after_copy_bb (region_copy[i]);
5734 if (e_copy)
5735 add_phi_args_after_copy_edge (e_copy);
5736
5737 for (i = 0; i < n_region; i++)
5738 region_copy[i]->flags &= ~BB_DUPLICATED;
5739 }
5740
5741 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5742 important exit edge EXIT. By important we mean that no SSA name defined
5743 inside the region is live over the other exit edges of the region. All entry
5744 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5745 to the duplicate of the region. Dominance and loop information is
5746 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5747 UPDATE_DOMINANCE is false then we assume that the caller will update the
5748 dominance information after calling this function. The new basic
5749 blocks are stored to REGION_COPY in the same order as they had in REGION,
5750 provided that REGION_COPY is not NULL.
5751 The function returns false if it is unable to copy the region,
5752 true otherwise. */
5753
5754 bool
5755 gimple_duplicate_sese_region (edge entry, edge exit,
5756 basic_block *region, unsigned n_region,
5757 basic_block *region_copy,
5758 bool update_dominance)
5759 {
5760 unsigned i;
5761 bool free_region_copy = false, copying_header = false;
5762 struct loop *loop = entry->dest->loop_father;
5763 edge exit_copy;
5764 vec<basic_block> doms;
5765 edge redirected;
5766 int total_freq = 0, entry_freq = 0;
5767 gcov_type total_count = 0, entry_count = 0;
5768
5769 if (!can_copy_bbs_p (region, n_region))
5770 return false;
5771
5772 /* Some sanity checking. Note that we do not check for all possible
5773 misuses of the function, i.e. if you ask to copy something weird,
5774 it will work, but the state of the structures probably will not be
5775 correct. */
5776 for (i = 0; i < n_region; i++)
5777 {
5778 /* We do not handle subloops, i.e. all the blocks must belong to the
5779 same loop. */
5780 if (region[i]->loop_father != loop)
5781 return false;
5782
5783 if (region[i] != entry->dest
5784 && region[i] == loop->header)
5785 return false;
5786 }
5787
5788 set_loop_copy (loop, loop);
5789
5790 /* In case the function is used for loop header copying (which is the primary
5791 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5792 if (loop->header == entry->dest)
5793 {
5794 copying_header = true;
5795 set_loop_copy (loop, loop_outer (loop));
5796
5797 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5798 return false;
5799
5800 for (i = 0; i < n_region; i++)
5801 if (region[i] != exit->src
5802 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5803 return false;
5804 }
5805
5806 if (!region_copy)
5807 {
5808 region_copy = XNEWVEC (basic_block, n_region);
5809 free_region_copy = true;
5810 }
5811
5812 initialize_original_copy_tables ();
5813
5814 /* Record blocks outside the region that are dominated by something
5815 inside. */
5816 if (update_dominance)
5817 {
5818 doms.create (0);
5819 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5820 }
5821
5822 if (entry->dest->count)
5823 {
5824 total_count = entry->dest->count;
5825 entry_count = entry->count;
5826 /* Fix up corner cases, to avoid division by zero or creation of negative
5827 frequencies. */
5828 if (entry_count > total_count)
5829 entry_count = total_count;
5830 }
5831 else
5832 {
5833 total_freq = entry->dest->frequency;
5834 entry_freq = EDGE_FREQUENCY (entry);
5835 /* Fix up corner cases, to avoid division by zero or creation of negative
5836 frequencies. */
5837 if (total_freq == 0)
5838 total_freq = 1;
5839 else if (entry_freq > total_freq)
5840 entry_freq = total_freq;
5841 }
5842
5843 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5844 split_edge_bb_loc (entry), update_dominance);
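  /* Worked example of the scaling below: if ENTRY->dest executed 1000
     times (total_count) and 250 of those executions entered through
     ENTRY (entry_count), the original region is scaled by 750/1000 and
     the copy by 250/1000, preserving the overall profile.  */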
5845 if (total_count)
5846 {
5847 scale_bbs_frequencies_gcov_type (region, n_region,
5848 total_count - entry_count,
5849 total_count);
5850 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5851 total_count);
5852 }
5853 else
5854 {
5855 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5856 total_freq);
5857 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5858 }
5859
5860 if (copying_header)
5861 {
5862 loop->header = exit->dest;
5863 loop->latch = exit->src;
5864 }
5865
5866 /* Redirect the entry and add the phi node arguments. */
5867 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5868 gcc_assert (redirected != NULL);
5869 flush_pending_stmts (entry);
5870
5871 /* Concerning updating of dominators: We must recount dominators
5872 for entry block and its copy. Anything that is outside of the
5873 region, but was dominated by something inside needs recounting as
5874 well. */
5875 if (update_dominance)
5876 {
5877 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5878 doms.safe_push (get_bb_original (entry->dest));
5879 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5880 doms.release ();
5881 }
5882
5883 /* Add the other PHI node arguments. */
5884 add_phi_args_after_copy (region_copy, n_region, NULL);
5885
5886 if (free_region_copy)
5887 free (region_copy);
5888
5889 free_original_copy_tables ();
5890 return true;
5891 }
5892
5893 /* Checks if BB is part of the region defined by N_REGION BBS. */
5894 static bool
5895 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5896 {
5897 unsigned int n;
5898
5899 for (n = 0; n < n_region; n++)
5900 {
5901 if (bb == bbs[n])
5902 return true;
5903 }
5904 return false;
5905 }
5906
5907 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5908 are stored to REGION_COPY in the same order in which they appear
5909 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5910 the region, EXIT an exit from it. The condition guarding EXIT
5911 is moved to ENTRY. Returns true if duplication succeeds, false
5912 otherwise.
5913
5914 For example,
5915
5916 some_code;
5917 if (cond)
5918 A;
5919 else
5920 B;
5921
5922 is transformed to
5923
5924 if (cond)
5925 {
5926 some_code;
5927 A;
5928 }
5929 else
5930 {
5931 some_code;
5932 B;
5933 }
5934 */
5935
5936 bool
5937 gimple_duplicate_sese_tail (edge entry, edge exit,
5938 basic_block *region, unsigned n_region,
5939 basic_block *region_copy)
5940 {
5941 unsigned i;
5942 bool free_region_copy = false;
5943 struct loop *loop = exit->dest->loop_father;
5944 struct loop *orig_loop = entry->dest->loop_father;
5945 basic_block switch_bb, entry_bb, nentry_bb;
5946 vec<basic_block> doms;
5947 int total_freq = 0, exit_freq = 0;
5948 gcov_type total_count = 0, exit_count = 0;
5949 edge exits[2], nexits[2], e;
5950 gimple_stmt_iterator gsi;
5951 gimple cond_stmt;
5952 edge sorig, snew;
5953 basic_block exit_bb;
5954 gimple_stmt_iterator psi;
5955 gimple phi;
5956 tree def;
5957 struct loop *target, *aloop, *cloop;
5958
5959 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5960 exits[0] = exit;
5961 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5962
5963 if (!can_copy_bbs_p (region, n_region))
5964 return false;
5965
5966 initialize_original_copy_tables ();
5967 set_loop_copy (orig_loop, loop);
5968
5969 target = loop;
5970 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5971 {
5972 if (bb_part_of_region_p (aloop->header, region, n_region))
5973 {
5974 cloop = duplicate_loop (aloop, target);
5975 duplicate_subloops (aloop, cloop);
5976 }
5977 }
5978
5979 if (!region_copy)
5980 {
5981 region_copy = XNEWVEC (basic_block, n_region);
5982 free_region_copy = true;
5983 }
5984
5985 gcc_assert (!need_ssa_update_p (cfun));
5986
5987 /* Record blocks outside the region that are dominated by something
5988 inside. */
5989 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5990
5991 if (exit->src->count)
5992 {
5993 total_count = exit->src->count;
5994 exit_count = exit->count;
5995 /* Fix up corner cases, to avoid division by zero or creation of negative
5996 frequencies. */
5997 if (exit_count > total_count)
5998 exit_count = total_count;
5999 }
6000 else
6001 {
6002 total_freq = exit->src->frequency;
6003 exit_freq = EDGE_FREQUENCY (exit);
6004 /* Fix up corner cases, to avoid division by zero or creation of negative
6005 frequencies. */
6006 if (total_freq == 0)
6007 total_freq = 1;
6008 if (exit_freq > total_freq)
6009 exit_freq = total_freq;
6010 }
6011
6012 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6013 split_edge_bb_loc (exit), true);
6014 if (total_count)
6015 {
6016 scale_bbs_frequencies_gcov_type (region, n_region,
6017 total_count - exit_count,
6018 total_count);
6019 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6020 total_count);
6021 }
6022 else
6023 {
6024 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6025 total_freq);
6026 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6027 }
6028
6029 /* Create the switch block, and put the exit condition to it. */
6030 entry_bb = entry->dest;
6031 nentry_bb = get_bb_copy (entry_bb);
6032 if (!last_stmt (entry->src)
6033 || !stmt_ends_bb_p (last_stmt (entry->src)))
6034 switch_bb = entry->src;
6035 else
6036 switch_bb = split_edge (entry);
6037 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6038
6039 gsi = gsi_last_bb (switch_bb);
6040 cond_stmt = last_stmt (exit->src);
6041 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6042 cond_stmt = gimple_copy (cond_stmt);
6043
6044 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6045
6046 sorig = single_succ_edge (switch_bb);
6047 sorig->flags = exits[1]->flags;
6048 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6049
6050 /* Register the new edge from SWITCH_BB in loop exit lists. */
6051 rescan_loop_exit (snew, true, false);
6052
6053 /* Add the PHI node arguments. */
6054 add_phi_args_after_copy (region_copy, n_region, snew);
6055
6056 /* Get rid of now superfluous conditions and associated edges (and phi node
6057 arguments). */
6058 exit_bb = exit->dest;
6059
6060 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6061 PENDING_STMT (e) = NULL;
6062
6063 /* The latch of ORIG_LOOP was copied, and so was the backedge
6064 to the original header. We redirect this backedge to EXIT_BB. */
6065 for (i = 0; i < n_region; i++)
6066 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6067 {
6068 gcc_assert (single_succ_edge (region_copy[i]));
6069 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6070 PENDING_STMT (e) = NULL;
6071 for (psi = gsi_start_phis (exit_bb);
6072 !gsi_end_p (psi);
6073 gsi_next (&psi))
6074 {
6075 phi = gsi_stmt (psi);
6076 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6077 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6078 }
6079 }
6080 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6081 PENDING_STMT (e) = NULL;
6082
6083 /* Anything that is outside of the region, but was dominated by something
6084 inside needs to update dominance info. */
6085 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6086 doms.release ();
6087 /* Update the SSA web. */
6088 update_ssa (TODO_update_ssa);
6089
6090 if (free_region_copy)
6091 free (region_copy);
6092
6093 free_original_copy_tables ();
6094 return true;
6095 }
6096
6097 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6098 adding blocks when the dominator traversal reaches EXIT. This
6099 function silently assumes that ENTRY strictly dominates EXIT. */
6100
6101 void
6102 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6103 vec<basic_block> *bbs_p)
6104 {
6105 basic_block son;
6106
6107 for (son = first_dom_son (CDI_DOMINATORS, entry);
6108 son;
6109 son = next_dom_son (CDI_DOMINATORS, son))
6110 {
6111 bbs_p->safe_push (son);
6112 if (son != exit)
6113 gather_blocks_in_sese_region (son, exit, bbs_p);
6114 }
6115 }
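/* Illustrative sketch only (not part of GCC): a typical caller collects
   the region's blocks into a local vec, pushing ENTRY itself first
   because the dominator walk never pushes ENTRY, exactly as
   move_sese_region_to_fn does further down:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ... use bbs ...
     bbs.release ();

   EXIT itself is pushed when reached but not recursed into, so the vec
   holds ENTRY, EXIT and everything dominated in between.  */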
6116
6117 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6118 The duplicates are recorded in VARS_MAP. */
6119
6120 static void
6121 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6122 tree to_context)
6123 {
6124 tree t = *tp, new_t;
6125 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6126 void **loc;
6127
6128 if (DECL_CONTEXT (t) == to_context)
6129 return;
6130
6131 loc = pointer_map_contains (vars_map, t);
6132
6133 if (!loc)
6134 {
6135 loc = pointer_map_insert (vars_map, t);
6136
6137 if (SSA_VAR_P (t))
6138 {
6139 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6140 add_local_decl (f, new_t);
6141 }
6142 else
6143 {
6144 gcc_assert (TREE_CODE (t) == CONST_DECL);
6145 new_t = copy_node (t);
6146 }
6147 DECL_CONTEXT (new_t) = to_context;
6148
6149 *loc = new_t;
6150 }
6151 else
6152 new_t = (tree) *loc;
6153
6154 *tp = new_t;
6155 }
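/* Illustrative sketch only (not part of GCC); SOME_LOCAL, MAP and
   NEW_FN_DECL are hypothetical names.  The map guarantees that repeated
   replacements of the same decl yield the same duplicate:

     tree a = some_local, b = some_local;
     replace_by_duplicate_decl (&a, map, new_fn_decl);
     replace_by_duplicate_decl (&b, map, new_fn_decl);
     gcc_assert (a == b);

   The first call copies the decl and caches it in the map; the second
   is a pure map lookup.  */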
6156
6157
6158 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6159 VARS_MAP maps old ssa names and var_decls to the new ones. */
6160
6161 static tree
6162 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6163 tree to_context)
6164 {
6165 void **loc;
6166 tree new_name;
6167
6168 gcc_assert (!virtual_operand_p (name));
6169
6170 loc = pointer_map_contains (vars_map, name);
6171
6172 if (!loc)
6173 {
6174 tree decl = SSA_NAME_VAR (name);
6175 if (decl)
6176 {
6177 replace_by_duplicate_decl (&decl, vars_map, to_context);
6178 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6179 decl, SSA_NAME_DEF_STMT (name));
6180 if (SSA_NAME_IS_DEFAULT_DEF (name))
6181 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6182 decl, new_name);
6183 }
6184 else
6185 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6186 name, SSA_NAME_DEF_STMT (name));
6187
6188 loc = pointer_map_insert (vars_map, name);
6189 *loc = new_name;
6190 }
6191 else
6192 new_name = (tree) *loc;
6193
6194 return new_name;
6195 }
6196
6197 struct move_stmt_d
6198 {
6199 tree orig_block;
6200 tree new_block;
6201 tree from_context;
6202 tree to_context;
6203 struct pointer_map_t *vars_map;
6204 htab_t new_label_map;
6205 struct pointer_map_t *eh_map;
6206 bool remap_decls_p;
6207 };
6208
6209 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6210    contained in *TP to NEW_BLOCK if it was previously ORIG_BLOCK, and
6211    change the DECL_CONTEXT of every local variable referenced in *TP.  */
6212
6213 static tree
6214 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6215 {
6216 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6217 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6218 tree t = *tp;
6219
6220 if (EXPR_P (t))
6221 {
6222 tree block = TREE_BLOCK (t);
6223 if (block == p->orig_block
6224 || (p->orig_block == NULL_TREE
6225 && block != NULL_TREE))
6226 TREE_SET_BLOCK (t, p->new_block);
6227 #ifdef ENABLE_CHECKING
6228 else if (block != NULL_TREE)
6229 {
6230 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6231 block = BLOCK_SUPERCONTEXT (block);
6232 gcc_assert (block == p->orig_block);
6233 }
6234 #endif
6235 }
6236 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6237 {
6238 if (TREE_CODE (t) == SSA_NAME)
6239 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6240 else if (TREE_CODE (t) == LABEL_DECL)
6241 {
6242 if (p->new_label_map)
6243 {
6244 struct tree_map in, *out;
6245 in.base.from = t;
6246 out = (struct tree_map *)
6247 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6248 if (out)
6249 *tp = t = out->to;
6250 }
6251
6252 DECL_CONTEXT (t) = p->to_context;
6253 }
6254 else if (p->remap_decls_p)
6255 {
6256 /* Replace T with its duplicate. T should no longer appear in the
6257 parent function, so this looks wasteful; however, it may appear
6258 in referenced_vars, and more importantly, as virtual operands of
6259 statements, and in alias lists of other variables. It would be
6260 quite difficult to expunge it from all those places. ??? It might
6261 suffice to do this for addressable variables. */
6262 if ((TREE_CODE (t) == VAR_DECL
6263 && !is_global_var (t))
6264 || TREE_CODE (t) == CONST_DECL)
6265 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6266 }
6267 *walk_subtrees = 0;
6268 }
6269 else if (TYPE_P (t))
6270 *walk_subtrees = 0;
6271
6272 return NULL_TREE;
6273 }
6274
6275 /* Helper for move_stmt_r. Given an EH region number for the source
6276    function, map that to the duplicate EH region number in the dest.  */
6277
6278 static int
6279 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6280 {
6281 eh_region old_r, new_r;
6282 void **slot;
6283
6284 old_r = get_eh_region_from_number (old_nr);
6285 slot = pointer_map_contains (p->eh_map, old_r);
6286 new_r = (eh_region) *slot;
6287
6288 return new_r->index;
6289 }
6290
6291 /* Similar, but operate on INTEGER_CSTs. */
6292
6293 static tree
6294 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6295 {
6296 int old_nr, new_nr;
6297
6298 old_nr = tree_to_shwi (old_t_nr);
6299 new_nr = move_stmt_eh_region_nr (old_nr, p);
6300
6301 return build_int_cst (integer_type_node, new_nr);
6302 }
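/* For illustration only: if the source function's EH region 2 was
   duplicated as region 5 in the destination, a statement such as

     tmp_1 = __builtin_eh_pointer (2);

   is rewritten by the GIMPLE_CALL case below as

     tmp_1 = __builtin_eh_pointer (5);

   so landing-pad references stay consistent in the new function.  */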
6303
6304 /* Like move_stmt_op, but for gimple statements.
6305
6306 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6307 contained in the current statement in *GSI_P and change the
6308 DECL_CONTEXT of every local variable referenced in the current
6309 statement. */
6310
6311 static tree
6312 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6313 struct walk_stmt_info *wi)
6314 {
6315 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6316 gimple stmt = gsi_stmt (*gsi_p);
6317 tree block = gimple_block (stmt);
6318
6319 if (block == p->orig_block
6320 || (p->orig_block == NULL_TREE
6321 && block != NULL_TREE))
6322 gimple_set_block (stmt, p->new_block);
6323
6324 switch (gimple_code (stmt))
6325 {
6326 case GIMPLE_CALL:
6327 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6328 {
6329 tree r, fndecl = gimple_call_fndecl (stmt);
6330 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6331 switch (DECL_FUNCTION_CODE (fndecl))
6332 {
6333 case BUILT_IN_EH_COPY_VALUES:
6334 r = gimple_call_arg (stmt, 1);
6335 r = move_stmt_eh_region_tree_nr (r, p);
6336 gimple_call_set_arg (stmt, 1, r);
6337 /* FALLTHRU */
6338
6339 case BUILT_IN_EH_POINTER:
6340 case BUILT_IN_EH_FILTER:
6341 r = gimple_call_arg (stmt, 0);
6342 r = move_stmt_eh_region_tree_nr (r, p);
6343 gimple_call_set_arg (stmt, 0, r);
6344 break;
6345
6346 default:
6347 break;
6348 }
6349 }
6350 break;
6351
6352 case GIMPLE_RESX:
6353 {
6354 int r = gimple_resx_region (stmt);
6355 r = move_stmt_eh_region_nr (r, p);
6356 gimple_resx_set_region (stmt, r);
6357 }
6358 break;
6359
6360 case GIMPLE_EH_DISPATCH:
6361 {
6362 int r = gimple_eh_dispatch_region (stmt);
6363 r = move_stmt_eh_region_nr (r, p);
6364 gimple_eh_dispatch_set_region (stmt, r);
6365 }
6366 break;
6367
6368 case GIMPLE_OMP_RETURN:
6369 case GIMPLE_OMP_CONTINUE:
6370 break;
6371 default:
6372 if (is_gimple_omp (stmt))
6373 {
6374 /* Do not remap variables inside OMP directives. Variables
6375 referenced in clauses and directive header belong to the
6376 parent function and should not be moved into the child
6377 function. */
6378 bool save_remap_decls_p = p->remap_decls_p;
6379 p->remap_decls_p = false;
6380 *handled_ops_p = true;
6381
6382 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6383 move_stmt_op, wi);
6384
6385 p->remap_decls_p = save_remap_decls_p;
6386 }
6387 break;
6388 }
6389
6390 return NULL_TREE;
6391 }
6392
6393 /* Move basic block BB from function CFUN to function DEST_FN. The
6394 block is moved out of the original linked list and placed after
6395 block AFTER in the new list. Also, the block is removed from the
6396 original array of blocks and placed in DEST_FN's array of blocks.
6397    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6398 updated to reflect the moved edges.
6399
6400    The local variables are remapped to new instances; the mapping is
6401    recorded in D->VARS_MAP.  */
6402
6403 static void
6404 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6405 basic_block after, bool update_edge_count_p,
6406 struct move_stmt_d *d)
6407 {
6408 struct control_flow_graph *cfg;
6409 edge_iterator ei;
6410 edge e;
6411 gimple_stmt_iterator si;
6412 unsigned old_len, new_len;
6413
6414 /* Remove BB from dominance structures. */
6415 delete_from_dominance_info (CDI_DOMINATORS, bb);
6416
6417 /* Move BB from its current loop to the copy in the new function. */
6418 if (current_loops)
6419 {
6420 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6421 if (new_loop)
6422 bb->loop_father = new_loop;
6423 }
6424
6425 /* Link BB to the new linked list. */
6426 move_block_after (bb, after);
6427
6428 /* Update the edge count in the corresponding flowgraphs. */
6429 if (update_edge_count_p)
6430 FOR_EACH_EDGE (e, ei, bb->succs)
6431 {
6432 cfun->cfg->x_n_edges--;
6433 dest_cfun->cfg->x_n_edges++;
6434 }
6435
6436 /* Remove BB from the original basic block array. */
6437 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6438 cfun->cfg->x_n_basic_blocks--;
6439
6440 /* Grow DEST_CFUN's basic block array if needed. */
6441 cfg = dest_cfun->cfg;
6442 cfg->x_n_basic_blocks++;
6443 if (bb->index >= cfg->x_last_basic_block)
6444 cfg->x_last_basic_block = bb->index + 1;
6445
6446 old_len = vec_safe_length (cfg->x_basic_block_info);
6447 if ((unsigned) cfg->x_last_basic_block >= old_len)
6448 {
6449 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6450 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6451 }
6452
6453 (*cfg->x_basic_block_info)[bb->index] = bb;
6454
6455 /* Remap the variables in phi nodes. */
6456 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6457 {
6458 gimple phi = gsi_stmt (si);
6459 use_operand_p use;
6460 tree op = PHI_RESULT (phi);
6461 ssa_op_iter oi;
6462 unsigned i;
6463
6464 if (virtual_operand_p (op))
6465 {
6466 /* Remove the phi nodes for virtual operands (alias analysis will be
6467 run for the new function, anyway). */
6468 remove_phi_node (&si, true);
6469 continue;
6470 }
6471
6472 SET_PHI_RESULT (phi,
6473 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6474 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6475 {
6476 op = USE_FROM_PTR (use);
6477 if (TREE_CODE (op) == SSA_NAME)
6478 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6479 }
6480
6481 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6482 {
6483 location_t locus = gimple_phi_arg_location (phi, i);
6484 tree block = LOCATION_BLOCK (locus);
6485
6486 if (locus == UNKNOWN_LOCATION)
6487 continue;
6488 if (d->orig_block == NULL_TREE || block == d->orig_block)
6489 {
6490 if (d->new_block == NULL_TREE)
6491 locus = LOCATION_LOCUS (locus);
6492 else
6493 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6494 gimple_phi_arg_set_location (phi, i, locus);
6495 }
6496 }
6497
6498 gsi_next (&si);
6499 }
6500
6501 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6502 {
6503 gimple stmt = gsi_stmt (si);
6504 struct walk_stmt_info wi;
6505
6506 memset (&wi, 0, sizeof (wi));
6507 wi.info = d;
6508 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6509
6510 if (gimple_code (stmt) == GIMPLE_LABEL)
6511 {
6512 tree label = gimple_label_label (stmt);
6513 int uid = LABEL_DECL_UID (label);
6514
6515 gcc_assert (uid > -1);
6516
6517 old_len = vec_safe_length (cfg->x_label_to_block_map);
6518 if (old_len <= (unsigned) uid)
6519 {
6520 new_len = 3 * uid / 2 + 1;
6521 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6522 }
6523
6524 (*cfg->x_label_to_block_map)[uid] = bb;
6525 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6526
6527 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6528
6529 if (uid >= dest_cfun->cfg->last_label_uid)
6530 dest_cfun->cfg->last_label_uid = uid + 1;
6531 }
6532
6533 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6534 remove_stmt_from_eh_lp_fn (cfun, stmt);
6535
6536 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6537 gimple_remove_stmt_histograms (cfun, stmt);
6538
6539 /* We cannot leave any operands allocated from the operand caches of
6540 the current function. */
6541 free_stmt_operands (cfun, stmt);
6542 push_cfun (dest_cfun);
6543 update_stmt (stmt);
6544 pop_cfun ();
6545 }
6546
6547 FOR_EACH_EDGE (e, ei, bb->succs)
6548 if (e->goto_locus != UNKNOWN_LOCATION)
6549 {
6550 tree block = LOCATION_BLOCK (e->goto_locus);
6551 if (d->orig_block == NULL_TREE
6552 || block == d->orig_block)
6553 e->goto_locus = d->new_block ?
6554 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6555 LOCATION_LOCUS (e->goto_locus);
6556 }
6557 }
6558
6559 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6560 the outermost EH region. Use REGION as the incoming base EH region. */
6561
6562 static eh_region
6563 find_outermost_region_in_block (struct function *src_cfun,
6564 basic_block bb, eh_region region)
6565 {
6566 gimple_stmt_iterator si;
6567
6568 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6569 {
6570 gimple stmt = gsi_stmt (si);
6571 eh_region stmt_region;
6572 int lp_nr;
6573
6574 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6575 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6576 if (stmt_region)
6577 {
6578 if (region == NULL)
6579 region = stmt_region;
6580 else if (stmt_region != region)
6581 {
6582 region = eh_region_outermost (src_cfun, stmt_region, region);
6583 gcc_assert (region != NULL);
6584 }
6585 }
6586 }
6587
6588 return region;
6589 }
6590
6591 static tree
6592 new_label_mapper (tree decl, void *data)
6593 {
6594 htab_t hash = (htab_t) data;
6595 struct tree_map *m;
6596 void **slot;
6597
6598 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6599
6600 m = XNEW (struct tree_map);
6601 m->hash = DECL_UID (decl);
6602 m->base.from = decl;
6603 m->to = create_artificial_label (UNKNOWN_LOCATION);
6604 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6605 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6606 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6607
6608 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6609 gcc_assert (*slot == NULL);
6610
6611 *slot = m;
6612
6613 return m->to;
6614 }
6615
6616 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6617 subblocks. */
6618
6619 static void
6620 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6621 tree to_context)
6622 {
6623 tree *tp, t;
6624
6625 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6626 {
6627 t = *tp;
6628 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6629 continue;
6630 replace_by_duplicate_decl (&t, vars_map, to_context);
6631 if (t != *tp)
6632 {
6633 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6634 {
6635 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6636 DECL_HAS_VALUE_EXPR_P (t) = 1;
6637 }
6638 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6639 *tp = t;
6640 }
6641 }
6642
6643 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6644 replace_block_vars_by_duplicates (block, vars_map, to_context);
6645 }
6646
6647 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6648 from FN1 to FN2. */
6649
6650 static void
6651 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6652 struct loop *loop)
6653 {
6654 /* Discard it from the old loop array. */
6655 (*get_loops (fn1))[loop->num] = NULL;
6656
6657 /* Place it in the new loop array, assigning it a new number. */
6658 loop->num = number_of_loops (fn2);
6659 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6660
6661 /* Recurse to children. */
6662 for (loop = loop->inner; loop; loop = loop->next)
6663 fixup_loop_arrays_after_move (fn1, fn2, loop);
6664 }
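/* For illustration only: if FN1's loop array is { root, L1, L2 } and L2
   (with no subloops) is moved to FN2 whose array currently holds only
   its own root, then after fixup_loop_arrays_after_move:

     fn1 array: { root, L1, NULL }
     fn2 array: { root', L2 }        and L2->num changes from 2 to 1.

   The vacated slot in FN1 is cleared rather than compacted.  */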
6665
6666 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6667 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6668 single basic block in the original CFG and the new basic block is
6669 returned. DEST_CFUN must not have a CFG yet.
6670
6671 Note that the region need not be a pure SESE region. Blocks inside
6672 the region may contain calls to abort/exit. The only restriction
6673 is that ENTRY_BB should be the only entry point and it must
6674 dominate EXIT_BB.
6675
6676    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6677    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6678    to the new function.
6679
6680 All local variables referenced in the region are assumed to be in
6681 the corresponding BLOCK_VARS and unexpanded variable lists
6682 associated with DEST_CFUN. */
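/* For illustration, a hypothetical region of four blocks before and
   after the move (P = predecessor, S = successor; bracketed blocks form
   the SESE region):

       P                          P
       |                          |
     [ENTRY_BB]                   BB  (new, empty)
      /      \                    |
    [B1]    [B2]        =>        S
      \      /
     [EXIT_BB]             In DEST_CFUN, the four moved blocks are wired
       |                   between its own ENTRY and EXIT block pair.
       S
   */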
6683
6684 basic_block
6685 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6686 basic_block exit_bb, tree orig_block)
6687 {
6688 vec<basic_block> bbs, dom_bbs;
6689 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6690 basic_block after, bb, *entry_pred, *exit_succ, abb;
6691 struct function *saved_cfun = cfun;
6692 int *entry_flag, *exit_flag;
6693 unsigned *entry_prob, *exit_prob;
6694 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6695 edge e;
6696 edge_iterator ei;
6697 htab_t new_label_map;
6698 struct pointer_map_t *vars_map, *eh_map;
6699 struct loop *loop = entry_bb->loop_father;
6700 struct loop *loop0 = get_loop (saved_cfun, 0);
6701 struct move_stmt_d d;
6702
6703 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6704 region. */
6705 gcc_assert (entry_bb != exit_bb
6706 && (!exit_bb
6707 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6708
6709   /* Collect all the blocks in the region.  Manually add ENTRY_BB
6710      because it won't be added by gather_blocks_in_sese_region.  */
6711 bbs.create (0);
6712 bbs.safe_push (entry_bb);
6713 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6714
6715 /* The blocks that used to be dominated by something in BBS will now be
6716 dominated by the new block. */
6717 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6718 bbs.address (),
6719 bbs.length ());
6720
6721 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6722 the predecessor edges to ENTRY_BB and the successor edges to
6723 EXIT_BB so that we can re-attach them to the new basic block that
6724 will replace the region. */
6725 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6726 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6727 entry_flag = XNEWVEC (int, num_entry_edges);
6728 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6729 i = 0;
6730 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6731 {
6732 entry_prob[i] = e->probability;
6733 entry_flag[i] = e->flags;
6734 entry_pred[i++] = e->src;
6735 remove_edge (e);
6736 }
6737
6738 if (exit_bb)
6739 {
6740 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6741 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6742 exit_flag = XNEWVEC (int, num_exit_edges);
6743 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6744 i = 0;
6745 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6746 {
6747 exit_prob[i] = e->probability;
6748 exit_flag[i] = e->flags;
6749 exit_succ[i++] = e->dest;
6750 remove_edge (e);
6751 }
6752 }
6753 else
6754 {
6755 num_exit_edges = 0;
6756 exit_succ = NULL;
6757 exit_flag = NULL;
6758 exit_prob = NULL;
6759 }
6760
6761 /* Switch context to the child function to initialize DEST_FN's CFG. */
6762 gcc_assert (dest_cfun->cfg == NULL);
6763 push_cfun (dest_cfun);
6764
6765 init_empty_tree_cfg ();
6766
6767 /* Initialize EH information for the new function. */
6768 eh_map = NULL;
6769 new_label_map = NULL;
6770 if (saved_cfun->eh)
6771 {
6772 eh_region region = NULL;
6773
6774 FOR_EACH_VEC_ELT (bbs, i, bb)
6775 region = find_outermost_region_in_block (saved_cfun, bb, region);
6776
6777 init_eh_for_function ();
6778 if (region != NULL)
6779 {
6780 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6781 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6782 new_label_mapper, new_label_map);
6783 }
6784 }
6785
6786 /* Initialize an empty loop tree. */
6787 struct loops *loops = ggc_alloc_cleared_loops ();
6788 init_loops_structure (dest_cfun, loops, 1);
6789 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6790 set_loops_for_fn (dest_cfun, loops);
6791
6792 /* Move the outlined loop tree part. */
6793 num_nodes = bbs.length ();
6794 FOR_EACH_VEC_ELT (bbs, i, bb)
6795 {
6796 if (bb->loop_father->header == bb)
6797 {
6798 struct loop *this_loop = bb->loop_father;
6799 struct loop *outer = loop_outer (this_loop);
6800 if (outer == loop
6801 /* If the SESE region contains some bbs ending with
6802 a noreturn call, those are considered to belong
6803 to the outermost loop in saved_cfun, rather than
6804 the entry_bb's loop_father. */
6805 || outer == loop0)
6806 {
6807 if (outer != loop)
6808 num_nodes -= this_loop->num_nodes;
6809 flow_loop_tree_node_remove (bb->loop_father);
6810 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6811 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6812 }
6813 }
6814 else if (bb->loop_father == loop0 && loop0 != loop)
6815 num_nodes--;
6816
6817 /* Remove loop exits from the outlined region. */
6818 if (loops_for_fn (saved_cfun)->exits)
6819 FOR_EACH_EDGE (e, ei, bb->succs)
6820 {
6821 void **slot = htab_find_slot_with_hash
6822 (loops_for_fn (saved_cfun)->exits, e,
6823 htab_hash_pointer (e), NO_INSERT);
6824 if (slot)
6825 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6826 }
6827 }
6828
6829
6830 /* Adjust the number of blocks in the tree root of the outlined part. */
6831 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6832
6833   /* Set up a mapping to be used by move_block_to_fn.  */
6834 loop->aux = current_loops->tree_root;
6835 loop0->aux = current_loops->tree_root;
6836
6837 pop_cfun ();
6838
6839 /* Move blocks from BBS into DEST_CFUN. */
6840 gcc_assert (bbs.length () >= 2);
6841 after = dest_cfun->cfg->x_entry_block_ptr;
6842 vars_map = pointer_map_create ();
6843
6844 memset (&d, 0, sizeof (d));
6845 d.orig_block = orig_block;
6846 d.new_block = DECL_INITIAL (dest_cfun->decl);
6847 d.from_context = cfun->decl;
6848 d.to_context = dest_cfun->decl;
6849 d.vars_map = vars_map;
6850 d.new_label_map = new_label_map;
6851 d.eh_map = eh_map;
6852 d.remap_decls_p = true;
6853
6854 FOR_EACH_VEC_ELT (bbs, i, bb)
6855 {
6856       /* No need to update edge counts on the last block.  They have
6857 	 already been updated earlier when we detached the region from
6858 	 the original CFG.  */
6859 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6860 after = bb;
6861 }
6862
6863 loop->aux = NULL;
6864 loop0->aux = NULL;
6865 /* Loop sizes are no longer correct, fix them up. */
6866 loop->num_nodes -= num_nodes;
6867 for (struct loop *outer = loop_outer (loop);
6868 outer; outer = loop_outer (outer))
6869 outer->num_nodes -= num_nodes;
6870 loop0->num_nodes -= bbs.length () - num_nodes;
6871
6872 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6873 {
6874 struct loop *aloop;
6875 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6876 if (aloop != NULL)
6877 {
6878 if (aloop->simduid)
6879 {
6880 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6881 d.to_context);
6882 dest_cfun->has_simduid_loops = true;
6883 }
6884 if (aloop->force_vect)
6885 dest_cfun->has_force_vect_loops = true;
6886 }
6887 }
6888
6889 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6890 if (orig_block)
6891 {
6892 tree block;
6893 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6894 == NULL_TREE);
6895 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6896 = BLOCK_SUBBLOCKS (orig_block);
6897 for (block = BLOCK_SUBBLOCKS (orig_block);
6898 block; block = BLOCK_CHAIN (block))
6899 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6900 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6901 }
6902
6903 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6904 vars_map, dest_cfun->decl);
6905
6906 if (new_label_map)
6907 htab_delete (new_label_map);
6908 if (eh_map)
6909 pointer_map_destroy (eh_map);
6910 pointer_map_destroy (vars_map);
6911
6912 /* Rewire the entry and exit blocks. The successor to the entry
6913 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6914 the child function. Similarly, the predecessor of DEST_FN's
6915 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6916 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6917      various CFG manipulation functions get to the right CFG.
6918
6919 FIXME, this is silly. The CFG ought to become a parameter to
6920 these helpers. */
6921 push_cfun (dest_cfun);
6922 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
6923 if (exit_bb)
6924 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
6925 pop_cfun ();
6926
6927   /* Back in the original function, the SESE region has disappeared;
6928      create a new basic block in its place.  */
6929 bb = create_empty_bb (entry_pred[0]);
6930 if (current_loops)
6931 add_bb_to_loop (bb, loop);
6932 for (i = 0; i < num_entry_edges; i++)
6933 {
6934 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6935 e->probability = entry_prob[i];
6936 }
6937
6938 for (i = 0; i < num_exit_edges; i++)
6939 {
6940 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6941 e->probability = exit_prob[i];
6942 }
6943
6944 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6945 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6946 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6947 dom_bbs.release ();
6948
6949 if (exit_bb)
6950 {
6951 free (exit_prob);
6952 free (exit_flag);
6953 free (exit_succ);
6954 }
6955 free (entry_prob);
6956 free (entry_flag);
6957 free (entry_pred);
6958 bbs.release ();
6959
6960 return bb;
6961 }
6962
6963
6964 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6965    dumpfile.h).  */
6966
6967 void
6968 dump_function_to_file (tree fndecl, FILE *file, int flags)
6969 {
6970 tree arg, var, old_current_fndecl = current_function_decl;
6971 struct function *dsf;
6972 bool ignore_topmost_bind = false, any_var = false;
6973 basic_block bb;
6974 tree chain;
6975 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6976 && decl_is_tm_clone (fndecl));
6977 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6978
6979 current_function_decl = fndecl;
6980 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6981
6982 arg = DECL_ARGUMENTS (fndecl);
6983 while (arg)
6984 {
6985 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6986 fprintf (file, " ");
6987 print_generic_expr (file, arg, dump_flags);
6988 if (flags & TDF_VERBOSE)
6989 print_node (file, "", arg, 4);
6990 if (DECL_CHAIN (arg))
6991 fprintf (file, ", ");
6992 arg = DECL_CHAIN (arg);
6993 }
6994 fprintf (file, ")\n");
6995
6996 if (flags & TDF_VERBOSE)
6997 print_node (file, "", fndecl, 2);
6998
6999 dsf = DECL_STRUCT_FUNCTION (fndecl);
7000 if (dsf && (flags & TDF_EH))
7001 dump_eh_tree (file, dsf);
7002
7003 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7004 {
7005 dump_node (fndecl, TDF_SLIM | flags, file);
7006 current_function_decl = old_current_fndecl;
7007 return;
7008 }
7009
7010 /* When GIMPLE is lowered, the variables are no longer available in
7011 BIND_EXPRs, so display them separately. */
7012 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7013 {
7014 unsigned ix;
7015 ignore_topmost_bind = true;
7016
7017 fprintf (file, "{\n");
7018 if (!vec_safe_is_empty (fun->local_decls))
7019 FOR_EACH_LOCAL_DECL (fun, ix, var)
7020 {
7021 print_generic_decl (file, var, flags);
7022 if (flags & TDF_VERBOSE)
7023 print_node (file, "", var, 4);
7024 fprintf (file, "\n");
7025
7026 any_var = true;
7027 }
7028 if (gimple_in_ssa_p (cfun))
7029 for (ix = 1; ix < num_ssa_names; ++ix)
7030 {
7031 tree name = ssa_name (ix);
7032 if (name && !SSA_NAME_VAR (name))
7033 {
7034 fprintf (file, " ");
7035 print_generic_expr (file, TREE_TYPE (name), flags);
7036 fprintf (file, " ");
7037 print_generic_expr (file, name, flags);
7038 fprintf (file, ";\n");
7039
7040 any_var = true;
7041 }
7042 }
7043 }
7044
7045 if (fun && fun->decl == fndecl
7046 && fun->cfg
7047 && basic_block_info_for_fn (fun))
7048 {
7049 /* If the CFG has been built, emit a CFG-based dump. */
7050 if (!ignore_topmost_bind)
7051 fprintf (file, "{\n");
7052
7053 if (any_var && n_basic_blocks_for_fn (fun))
7054 fprintf (file, "\n");
7055
7056 FOR_EACH_BB_FN (bb, fun)
7057 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7058
7059 fprintf (file, "}\n");
7060 }
7061 else if (DECL_SAVED_TREE (fndecl) == NULL)
7062 {
7063 /* The function is now in GIMPLE form but the CFG has not been
7064 built yet. Emit the single sequence of GIMPLE statements
7065 that make up its body. */
7066 gimple_seq body = gimple_body (fndecl);
7067
7068 if (gimple_seq_first_stmt (body)
7069 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7070 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7071 print_gimple_seq (file, body, 0, flags);
7072 else
7073 {
7074 if (!ignore_topmost_bind)
7075 fprintf (file, "{\n");
7076
7077 if (any_var)
7078 fprintf (file, "\n");
7079
7080 print_gimple_seq (file, body, 2, flags);
7081 fprintf (file, "}\n");
7082 }
7083 }
7084 else
7085 {
7086 int indent;
7087
7088 /* Make a tree based dump. */
7089 chain = DECL_SAVED_TREE (fndecl);
7090 if (chain && TREE_CODE (chain) == BIND_EXPR)
7091 {
7092 if (ignore_topmost_bind)
7093 {
7094 chain = BIND_EXPR_BODY (chain);
7095 indent = 2;
7096 }
7097 else
7098 indent = 0;
7099 }
7100 else
7101 {
7102 if (!ignore_topmost_bind)
7103 fprintf (file, "{\n");
7104 indent = 2;
7105 }
7106
7107 if (any_var)
7108 fprintf (file, "\n");
7109
7110 print_generic_stmt_indented (file, chain, flags, indent);
7111 if (ignore_topmost_bind)
7112 fprintf (file, "}\n");
7113 }
7114
7115 if (flags & TDF_ENUMERATE_LOCALS)
7116 dump_enumerated_decls (file, flags);
7117 fprintf (file, "\n\n");
7118
7119 current_function_decl = old_current_fndecl;
7120 }
7121
7122 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7123
7124 DEBUG_FUNCTION void
7125 debug_function (tree fn, int flags)
7126 {
7127 dump_function_to_file (fn, stderr, flags);
7128 }
7129
7130
7131 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7132
7133 static void
7134 print_pred_bbs (FILE *file, basic_block bb)
7135 {
7136 edge e;
7137 edge_iterator ei;
7138
7139 FOR_EACH_EDGE (e, ei, bb->preds)
7140 fprintf (file, "bb_%d ", e->src->index);
7141 }
7142
7143
7144 /* Print on FILE the indexes for the successors of basic_block BB. */
7145
7146 static void
7147 print_succ_bbs (FILE *file, basic_block bb)
7148 {
7149 edge e;
7150 edge_iterator ei;
7151
7152 FOR_EACH_EDGE (e, ei, bb->succs)
7153 fprintf (file, "bb_%d ", e->dest->index);
7154 }
7155
7156 /* Print basic block BB to FILE, with detail controlled by the VERBOSITY level.  */
7157
7158 void
7159 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7160 {
7161 char *s_indent = (char *) alloca ((size_t) indent + 1);
7162 memset ((void *) s_indent, ' ', (size_t) indent);
7163 s_indent[indent] = '\0';
7164
7165 /* Print basic_block's header. */
7166 if (verbosity >= 2)
7167 {
7168 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7169 print_pred_bbs (file, bb);
7170 fprintf (file, "}, succs = {");
7171 print_succ_bbs (file, bb);
7172 fprintf (file, "})\n");
7173 }
7174
7175 /* Print basic_block's body. */
7176 if (verbosity >= 3)
7177 {
7178 fprintf (file, "%s {\n", s_indent);
7179 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7180 fprintf (file, "%s }\n", s_indent);
7181 }
7182 }
7183
7184 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7185
7186 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
7187    the VERBOSITY level, this outputs the contents of the loop or just
7188    its structure.  */
7189
7190 static void
7191 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7192 {
7193 char *s_indent;
7194 basic_block bb;
7195
7196 if (loop == NULL)
7197 return;
7198
7199 s_indent = (char *) alloca ((size_t) indent + 1);
7200 memset ((void *) s_indent, ' ', (size_t) indent);
7201 s_indent[indent] = '\0';
7202
7203 /* Print loop's header. */
7204 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7205 if (loop->header)
7206 fprintf (file, "header = %d", loop->header->index);
7207 else
7208 {
7209 fprintf (file, "deleted)\n");
7210 return;
7211 }
7212 if (loop->latch)
7213 fprintf (file, ", latch = %d", loop->latch->index);
7214 else
7215 fprintf (file, ", multiple latches");
7216 fprintf (file, ", niter = ");
7217 print_generic_expr (file, loop->nb_iterations, 0);
7218
7219 if (loop->any_upper_bound)
7220 {
7221 fprintf (file, ", upper_bound = ");
7222 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7223 }
7224
7225 if (loop->any_estimate)
7226 {
7227 fprintf (file, ", estimate = ");
7228 dump_double_int (file, loop->nb_iterations_estimate, true);
7229 }
7230 fprintf (file, ")\n");
7231
7232 /* Print loop's body. */
7233 if (verbosity >= 1)
7234 {
7235 fprintf (file, "%s{\n", s_indent);
7236 FOR_EACH_BB (bb)
7237 if (bb->loop_father == loop)
7238 print_loops_bb (file, bb, indent, verbosity);
7239
7240 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7241 fprintf (file, "%s}\n", s_indent);
7242 }
7243 }
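/* For illustration only, the header line printed above for loop number
   1 with header block 3, latch block 5 and a known upper bound might
   read:

     loop_1 (header = 3, latch = 5, niter = , upper_bound = 99)

   At VERBOSITY >= 1 this is followed by the loop body between braces.  */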
7244
7245 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7246    spaces.  Depending on the VERBOSITY level, this outputs the
7247    contents of each loop or just its structure.  */
7248
7249 static void
7250 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7251 int verbosity)
7252 {
7253 if (loop == NULL)
7254 return;
7255
7256 print_loop (file, loop, indent, verbosity);
7257 print_loop_and_siblings (file, loop->next, indent, verbosity);
7258 }
7259
7260 /* Follow a CFG edge from the entry point of the program, and on entry
7261 of a loop, pretty print the loop structure on FILE. */
7262
7263 void
7264 print_loops (FILE *file, int verbosity)
7265 {
7266 basic_block bb;
7267
7268 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7269 if (bb && bb->loop_father)
7270 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7271 }
7272
7273 /* Dump a loop. */
7274
7275 DEBUG_FUNCTION void
7276 debug (struct loop &ref)
7277 {
7278 print_loop (stderr, &ref, 0, /*verbosity*/0);
7279 }
7280
7281 DEBUG_FUNCTION void
7282 debug (struct loop *ptr)
7283 {
7284 if (ptr)
7285 debug (*ptr);
7286 else
7287 fprintf (stderr, "<nil>\n");
7288 }
7289
7290 /* Dump a loop verbosely. */
7291
7292 DEBUG_FUNCTION void
7293 debug_verbose (struct loop &ref)
7294 {
7295 print_loop (stderr, &ref, 0, /*verbosity*/3);
7296 }
7297
7298 DEBUG_FUNCTION void
7299 debug_verbose (struct loop *ptr)
7300 {
7301 if (ptr)
7302     debug_verbose (*ptr);
7303 else
7304 fprintf (stderr, "<nil>\n");
7305 }
7306
7307
7308 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7309
7310 DEBUG_FUNCTION void
7311 debug_loops (int verbosity)
7312 {
7313 print_loops (stderr, verbosity);
7314 }
7315
7316 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7317
7318 DEBUG_FUNCTION void
7319 debug_loop (struct loop *loop, int verbosity)
7320 {
7321 print_loop (stderr, loop, 0, verbosity);
7322 }
7323
7324 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7325 level. */
7326
7327 DEBUG_FUNCTION void
7328 debug_loop_num (unsigned num, int verbosity)
7329 {
7330 debug_loop (get_loop (cfun, num), verbosity);
7331 }
7332
7333 /* Return true if BB ends with a call, possibly followed by some
7334    instructions that must stay with the call.  Return false
7335    otherwise.  */
7336
7337 static bool
7338 gimple_block_ends_with_call_p (basic_block bb)
7339 {
7340 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7341 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7342 }
7343
7344
7345 /* Return true if BB ends with a conditional branch.  Return false
7346    otherwise.  */
7347
7348 static bool
7349 gimple_block_ends_with_condjump_p (const_basic_block bb)
7350 {
7351 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7352 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7353 }
7354
7355
7356 /* Return true if we need to add fake edge to exit at statement T.
7357 Helper function for gimple_flow_call_edges_add. */
7358
7359 static bool
7360 need_fake_edge_p (gimple t)
7361 {
7362 tree fndecl = NULL_TREE;
7363 int call_flags = 0;
7364
7365 /* NORETURN and LONGJMP calls already have an edge to exit.
7366 CONST and PURE calls do not need one.
7367 We don't currently check for CONST and PURE here, although
7368 it would be a good idea, because those attributes are
7369 figured out from the RTL in mark_constant_function, and
7370 the counter incrementation code from -fprofile-arcs
7371 leads to different results from -fbranch-probabilities. */
7372 if (is_gimple_call (t))
7373 {
7374 fndecl = gimple_call_fndecl (t);
7375 call_flags = gimple_call_flags (t);
7376 }
7377
7378 if (is_gimple_call (t)
7379 && fndecl
7380 && DECL_BUILT_IN (fndecl)
7381 && (call_flags & ECF_NOTHROW)
7382 && !(call_flags & ECF_RETURNS_TWICE)
7383 /* fork() doesn't really return twice, but the effect of
7384 wrapping it in __gcov_fork() which calls __gcov_flush()
7385 and clears the counters before forking has the same
7386 effect as returning twice. Force a fake edge. */
7387 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7388 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7389 return false;
7390
7391 if (is_gimple_call (t))
7392 {
7393 edge_iterator ei;
7394 edge e;
7395 basic_block bb;
7396
7397 if (!(call_flags & ECF_NORETURN))
7398 return true;
7399
7400 bb = gimple_bb (t);
7401 FOR_EACH_EDGE (e, ei, bb->succs)
7402 if ((e->flags & EDGE_FAKE) == 0)
7403 return true;
7404 }
7405
7406 if (gimple_code (t) == GIMPLE_ASM
7407 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7408 return true;
7409
7410 return false;
7411 }
7412
7413
7414 /* Add fake edges to the function exit for any non-constant and
7415    non-noreturn calls (or noreturn calls with EH/abnormal edges) and
7416    for volatile inline assembly, in the bitmap of blocks specified by
7417    BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of blocks
7418 that were split.
7419
7420 The goal is to expose cases in which entering a basic block does
7421 not imply that all subsequent instructions must be executed. */
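/* For instance (illustrative only), given

     void f (void)
     {
       g ();
       counter++;
     }

   g () may never return (it could call exit ()), so entering the block
   does not guarantee that counter++ executes; the fake edge from the
   call's block to EXIT makes this explicit for the spanning-tree
   computation done by -fprofile-arcs.  */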
7422
7423 static int
7424 gimple_flow_call_edges_add (sbitmap blocks)
7425 {
7426 int i;
7427 int blocks_split = 0;
7428 int last_bb = last_basic_block;
7429 bool check_last_block = false;
7430
7431 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7432 return 0;
7433
7434 if (! blocks)
7435 check_last_block = true;
7436 else
7437 check_last_block = bitmap_bit_p (blocks,
7438 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7439
7440 /* In the last basic block, before epilogue generation, there will be
7441 a fallthru edge to EXIT. Special care is required if the last insn
7442 of the last basic block is a call because make_edge folds duplicate
7443 edges, which would result in the fallthru edge also being marked
7444 fake, which would result in the fallthru edge being removed by
7445 remove_fake_edges, which would result in an invalid CFG.
7446
7447 Moreover, we can't elide the outgoing fake edge, since the block
7448 profiler needs to take this into account in order to solve the minimal
7449 spanning tree in the case that the call doesn't return.
7450
7451 Handle this by adding a dummy instruction in a new last basic block. */
7452 if (check_last_block)
7453 {
7454 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7455 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7456 gimple t = NULL;
7457
7458 if (!gsi_end_p (gsi))
7459 t = gsi_stmt (gsi);
7460
7461 if (t && need_fake_edge_p (t))
7462 {
7463 edge e;
7464
7465 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7466 if (e)
7467 {
7468 gsi_insert_on_edge (e, gimple_build_nop ());
7469 gsi_commit_edge_inserts ();
7470 }
7471 }
7472 }
7473
7474   /* Now add fake edges to the function exit for any non-constant
7475      calls, since there is no way to determine whether they will
7476      return or not.  */
7477 for (i = 0; i < last_bb; i++)
7478 {
7479 basic_block bb = BASIC_BLOCK (i);
7480 gimple_stmt_iterator gsi;
7481 gimple stmt, last_stmt;
7482
7483 if (!bb)
7484 continue;
7485
7486 if (blocks && !bitmap_bit_p (blocks, i))
7487 continue;
7488
7489 gsi = gsi_last_nondebug_bb (bb);
7490 if (!gsi_end_p (gsi))
7491 {
7492 last_stmt = gsi_stmt (gsi);
7493 do
7494 {
7495 stmt = gsi_stmt (gsi);
7496 if (need_fake_edge_p (stmt))
7497 {
7498 edge e;
7499
7500 /* The handling above of the final block before the
7501 epilogue should be enough to verify that there is
7502 no edge to the exit block in CFG already.
7503 Calling make_edge in such case would cause us to
7504 mark that edge as fake and remove it later. */
7505 #ifdef ENABLE_CHECKING
7506 if (stmt == last_stmt)
7507 {
7508 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7509 gcc_assert (e == NULL);
7510 }
7511 #endif
7512
7513 /* Note that the following may create a new basic block
7514 and renumber the existing basic blocks. */
7515 if (stmt != last_stmt)
7516 {
7517 e = split_block (bb, stmt);
7518 if (e)
7519 blocks_split++;
7520 }
7521 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7522 }
7523 gsi_prev (&gsi);
7524 }
7525 while (!gsi_end_p (gsi));
7526 }
7527 }
7528
7529 if (blocks_split)
7530 verify_flow_info ();
7531
7532 return blocks_split;
7533 }
7534
7535 /* Removes edge E and all the blocks dominated by it, and updates dominance
7536 information. The IL in E->src needs to be updated separately.
7537    If dominance info is not available, only the edge E is removed.  */
7538
7539 void
7540 remove_edge_and_dominated_blocks (edge e)
7541 {
7542 vec<basic_block> bbs_to_remove = vNULL;
7543 vec<basic_block> bbs_to_fix_dom = vNULL;
7544 bitmap df, df_idom;
7545 edge f;
7546 edge_iterator ei;
7547 bool none_removed = false;
7548 unsigned i;
7549 basic_block bb, dbb;
7550 bitmap_iterator bi;
7551
7552 if (!dom_info_available_p (CDI_DOMINATORS))
7553 {
7554 remove_edge (e);
7555 return;
7556 }
7557
7558 /* No updating is needed for edges to exit. */
7559 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7560 {
7561 if (cfgcleanup_altered_bbs)
7562 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7563 remove_edge (e);
7564 return;
7565 }
7566
7567 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7568 that is not dominated by E->dest, then this set is empty. Otherwise,
7569 all the basic blocks dominated by E->dest are removed.
7570
7571 Also, to DF_IDOM we store the immediate dominators of the blocks in
7572 the dominance frontier of E (i.e., of the successors of the
7573 removed blocks, if there are any, and of E->dest otherwise). */
7574 FOR_EACH_EDGE (f, ei, e->dest->preds)
7575 {
7576 if (f == e)
7577 continue;
7578
7579 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7580 {
7581 none_removed = true;
7582 break;
7583 }
7584 }
7585
7586 df = BITMAP_ALLOC (NULL);
7587 df_idom = BITMAP_ALLOC (NULL);
7588
7589 if (none_removed)
7590 bitmap_set_bit (df_idom,
7591 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7592 else
7593 {
7594 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7595 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7596 {
7597 FOR_EACH_EDGE (f, ei, bb->succs)
7598 {
7599 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7600 bitmap_set_bit (df, f->dest->index);
7601 }
7602 }
7603 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7604 bitmap_clear_bit (df, bb->index);
7605
7606 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7607 {
7608 bb = BASIC_BLOCK (i);
7609 bitmap_set_bit (df_idom,
7610 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7611 }
7612 }
7613
7614 if (cfgcleanup_altered_bbs)
7615 {
7616 /* Record the set of the altered basic blocks. */
7617 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7618 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7619 }
7620
7621 /* Remove E and the cancelled blocks. */
7622 if (none_removed)
7623 remove_edge (e);
7624 else
7625 {
7626 /* Walk backwards so as to get a chance to substitute all
7627 released DEFs into debug stmts. See
7628 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7629 details. */
7630 for (i = bbs_to_remove.length (); i-- > 0; )
7631 delete_basic_block (bbs_to_remove[i]);
7632 }
7633
7634 /* Update the dominance information. The immediate dominator may change only
7635 for blocks whose immediate dominator belongs to DF_IDOM:
7636
7637 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7638      removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7639 Z dominates X after the removal. Before removal, there exists a path P
7640 from Y to X that avoids Z. Let F be the last edge on P that is
7641 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7642 dominates W, and because of P, Z does not dominate W), and W belongs to
7643 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7644 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7645 {
7646 bb = BASIC_BLOCK (i);
7647 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7648 dbb;
7649 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7650 bbs_to_fix_dom.safe_push (dbb);
7651 }
7652
7653 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7654
7655 BITMAP_FREE (df);
7656 BITMAP_FREE (df_idom);
7657 bbs_to_remove.release ();
7658 bbs_to_fix_dom.release ();
7659 }
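/* For illustration only: removing the edge A->B in

         A
        / \
       B   C
        \ /
         D

   leaves B without predecessors, so B (dominated solely through the
   removed edge) is deleted.  D survives, but its immediate dominator
   changes from A to C.  D lies in the dominance frontier of the removed
   block, its old idom A lands in DF_IDOM, and therefore A's remaining
   dominator-tree children, D among them, are queued above for
   iterate_fix_dominators.  */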
7660
7661 /* Purge dead EH edges from basic block BB. */
7662
7663 bool
7664 gimple_purge_dead_eh_edges (basic_block bb)
7665 {
7666 bool changed = false;
7667 edge e;
7668 edge_iterator ei;
7669 gimple stmt = last_stmt (bb);
7670
7671 if (stmt && stmt_can_throw_internal (stmt))
7672 return false;
7673
7674 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7675 {
7676 if (e->flags & EDGE_EH)
7677 {
7678 remove_edge_and_dominated_blocks (e);
7679 changed = true;
7680 }
7681 else
7682 ei_next (&ei);
7683 }
7684
7685 return changed;
7686 }
7687
7688 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
7689
7690 bool
7691 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7692 {
7693 bool changed = false;
7694 unsigned i;
7695 bitmap_iterator bi;
7696
7697 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7698 {
7699 basic_block bb = BASIC_BLOCK (i);
7700
7701 /* Earlier gimple_purge_dead_eh_edges could have removed
7702 this basic block already. */
7703 gcc_assert (bb || changed);
7704 if (bb != NULL)
7705 changed |= gimple_purge_dead_eh_edges (bb);
7706 }
7707
7708 return changed;
7709 }
7710
7711 /* Purge dead abnormal call edges from basic block BB. */
7712
7713 bool
7714 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7715 {
7716 bool changed = false;
7717 edge e;
7718 edge_iterator ei;
7719 gimple stmt = last_stmt (bb);
7720
7721 if (!cfun->has_nonlocal_label
7722 && !cfun->calls_setjmp)
7723 return false;
7724
7725 if (stmt && stmt_can_make_abnormal_goto (stmt))
7726 return false;
7727
7728 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7729 {
7730 if (e->flags & EDGE_ABNORMAL)
7731 {
7732 if (e->flags & EDGE_FALLTHRU)
7733 e->flags &= ~EDGE_ABNORMAL;
7734 else
7735 remove_edge_and_dominated_blocks (e);
7736 changed = true;
7737 }
7738 else
7739 ei_next (&ei);
7740 }
7741
7742 return changed;
7743 }
7744
7745 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
7746
7747 bool
7748 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7749 {
7750 bool changed = false;
7751 unsigned i;
7752 bitmap_iterator bi;
7753
7754 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7755 {
7756 basic_block bb = BASIC_BLOCK (i);
7757
7758 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7759 this basic block already. */
7760 gcc_assert (bb || changed);
7761 if (bb != NULL)
7762 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7763 }
7764
7765 return changed;
7766 }
7767
7768 /* This function is called whenever a new edge is created or
7769 redirected. */
7770
7771 static void
7772 gimple_execute_on_growing_pred (edge e)
7773 {
7774 basic_block bb = e->dest;
7775
7776 if (!gimple_seq_empty_p (phi_nodes (bb)))
7777 reserve_phi_args_for_new_edge (bb);
7778 }
7779
7780 /* This function is called immediately before edge E is removed from
7781 the edge vector E->dest->preds. */
7782
7783 static void
7784 gimple_execute_on_shrinking_pred (edge e)
7785 {
7786 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7787 remove_phi_args (e);
7788 }
7789
7790 /*---------------------------------------------------------------------------
7791 Helper functions for Loop versioning
7792 ---------------------------------------------------------------------------*/
7793
7794 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
7795    of 'first'.  Both of them are dominated by 'new_head' basic block.  When
7796    'new_head' was created by splitting 'second's incoming edge, it received
7797    phi arguments on that edge from split_edge ().  Later, an additional edge
7798    'e' was created to connect 'new_head' and 'first'.  This routine now adds
7799    to edge 'e' the phi args that the 'new_head' to 'second' edge received as
7800    part of the edge splitting.  */
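/* For illustration only:

               e (new)
     new_head ----------> first
         \
          \  (split edge)
           +------------> second   (copy of 'first')

   NEW_HEAD was produced by split_edge () on SECOND's incoming edge, so
   SECOND's phis already carry arguments for the NEW_HEAD->SECOND edge;
   the loop below copies those arguments onto E for FIRST's phis.  */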
7801
7802 static void
7803 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7804 basic_block new_head, edge e)
7805 {
7806 gimple phi1, phi2;
7807 gimple_stmt_iterator psi1, psi2;
7808 tree def;
7809 edge e2 = find_edge (new_head, second);
7810
7811 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7812 edge, we should always have an edge from NEW_HEAD to SECOND. */
7813 gcc_assert (e2 != NULL);
7814
7815 /* Browse all 'second' basic block phi nodes and add phi args to
7816 edge 'e' for 'first' head. PHI args are always in correct order. */
7817
7818 for (psi2 = gsi_start_phis (second),
7819 psi1 = gsi_start_phis (first);
7820 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7821 gsi_next (&psi2), gsi_next (&psi1))
7822 {
7823 phi1 = gsi_stmt (psi1);
7824 phi2 = gsi_stmt (psi2);
7825 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7826 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7827 }
7828 }
7829
7830
7831 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7832    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7833    the destination of the ELSE part.  */
7834
7835 static void
7836 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7837 basic_block second_head ATTRIBUTE_UNUSED,
7838 basic_block cond_bb, void *cond_e)
7839 {
7840 gimple_stmt_iterator gsi;
7841 gimple new_cond_expr;
7842 tree cond_expr = (tree) cond_e;
7843 edge e0;
7844
7845 /* Build new conditional expr */
7846 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7847 NULL_TREE, NULL_TREE);
7848
7849 /* Add new cond in cond_bb. */
7850 gsi = gsi_last_bb (cond_bb);
7851 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7852
7853 /* Adjust edges appropriately to connect new head with first head
7854 as well as second head. */
7855 e0 = single_succ_edge (cond_bb);
7856 e0->flags &= ~EDGE_FALLTHRU;
7857 e0->flags |= EDGE_FALSE_VALUE;
7858 }
7859
7860
7861 /* Do book-keeping of basic block BB for the profile consistency checker.
7862    If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7863    do post-pass accounting.  Store the counts in RECORD.  */
7864 static void
7865 gimple_account_profile_record (basic_block bb, int after_pass,
7866 struct profile_record *record)
7867 {
7868 gimple_stmt_iterator i;
7869 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7870 {
7871 record->size[after_pass]
7872 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7873 if (profile_status == PROFILE_READ)
7874 record->time[after_pass]
7875 += estimate_num_insns (gsi_stmt (i),
7876 &eni_time_weights) * bb->count;
7877 else if (profile_status == PROFILE_GUESSED)
7878 record->time[after_pass]
7879 += estimate_num_insns (gsi_stmt (i),
7880 &eni_time_weights) * bb->frequency;
7881 }
7882 }
7883
7884 struct cfg_hooks gimple_cfg_hooks = {
7885 "gimple",
7886 gimple_verify_flow_info,
7887 gimple_dump_bb, /* dump_bb */
7888 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7889 create_bb, /* create_basic_block */
7890 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7891 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7892 gimple_can_remove_branch_p, /* can_remove_branch_p */
7893 remove_bb, /* delete_basic_block */
7894 gimple_split_block, /* split_block */
7895 gimple_move_block_after, /* move_block_after */
7896 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7897 gimple_merge_blocks, /* merge_blocks */
7898 gimple_predict_edge, /* predict_edge */
7899 gimple_predicted_by_p, /* predicted_by_p */
7900 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7901 gimple_duplicate_bb, /* duplicate_block */
7902 gimple_split_edge, /* split_edge */
7903   gimple_make_forwarder_block,	/* make_forwarder_block */
7904 NULL, /* tidy_fallthru_edge */
7905 NULL, /* force_nonfallthru */
7906 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7907 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7908 gimple_flow_call_edges_add, /* flow_call_edges_add */
7909 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7910 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7911 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7912 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7913 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7914 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7915 flush_pending_stmts, /* flush_pending_stmts */
7916 gimple_empty_block_p, /* block_empty_p */
7917 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7918 gimple_account_profile_record,
7919 };
7920
7921
7922 /* Split all critical edges. */
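/* For illustration only, an edge is critical when its source block has
   multiple successors and its destination block has multiple
   predecessors.  With edges A->C, B->C and B->D, the edge B->C is
   critical; splitting it yields a fresh empty block N:

     before:  A->C   B->C   B->D
     after:   A->C   B->N   N->C   B->D

   Statements can then be committed on the former edge by placing them
   in N, without disturbing A->C or B->D.  */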
7923
7924 static unsigned int
7925 split_critical_edges (void)
7926 {
7927 basic_block bb;
7928 edge e;
7929 edge_iterator ei;
7930
7931 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7932 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7933 mappings around the calls to split_edge. */
7934 start_recording_case_labels ();
7935 FOR_ALL_BB (bb)
7936 {
7937 FOR_EACH_EDGE (e, ei, bb->succs)
7938 {
7939 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7940 split_edge (e);
7941 /* PRE inserts statements on edges and expects that, because
7942 split_critical_edges was run beforehand, committing those edge
7943 insertions will not split any more edges.  In addition to critical
7944 edges we must therefore split edges whose source block ends in a
7945 control flow statement (such as RESX) and whose destination cannot
7946 absorb the insertion: it has multiple predecessors, PHI nodes, or is
7947 the exit block.  This matches the logic in gimple_find_edge_insert_loc.  */
7948 else if ((!single_pred_p (e->dest)
7949 || !gimple_seq_empty_p (phi_nodes (e->dest))
7950 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7951 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
7952 && !(e->flags & EDGE_ABNORMAL))
7953 {
7954 gimple_stmt_iterator gsi;
7955
7956 gsi = gsi_last_bb (e->src);
7957 if (!gsi_end_p (gsi)
7958 && stmt_ends_bb_p (gsi_stmt (gsi))
7959 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7960 && !gimple_call_builtin_p (gsi_stmt (gsi),
7961 BUILT_IN_RETURN)))
7962 split_edge (e);
7963 }
7964 }
7965 }
7966 end_recording_case_labels ();
7967 return 0;
7968 }
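
/* For illustration (editor's sketch): an edge is critical when its source
   has more than one successor and its destination has more than one
   predecessor, like the edge A->C below:

       A   B
      / \ /
     X   C

   split_edge inserts a fresh empty block N on A->C, yielding A->N->C, so
   that statements committed to the (former) edge get a block of their own
   and neither A nor C is disturbed.  */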
7969
7970 namespace {
7971
7972 const pass_data pass_data_split_crit_edges =
7973 {
7974 GIMPLE_PASS, /* type */
7975 "crited", /* name */
7976 OPTGROUP_NONE, /* optinfo_flags */
7977 false, /* has_gate */
7978 true, /* has_execute */
7979 TV_TREE_SPLIT_EDGES, /* tv_id */
7980 PROP_cfg, /* properties_required */
7981 PROP_no_crit_edges, /* properties_provided */
7982 0, /* properties_destroyed */
7983 0, /* todo_flags_start */
7984 TODO_verify_flow, /* todo_flags_finish */
7985 };
7986
7987 class pass_split_crit_edges : public gimple_opt_pass
7988 {
7989 public:
7990 pass_split_crit_edges (gcc::context *ctxt)
7991 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7992 {}
7993
7994 /* opt_pass methods: */
7995 unsigned int execute () { return split_critical_edges (); }
7996
7997 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7998 }; // class pass_split_crit_edges
7999
8000 } // anon namespace
8001
8002 gimple_opt_pass *
8003 make_pass_split_crit_edges (gcc::context *ctxt)
8004 {
8005 return new pass_split_crit_edges (ctxt);
8006 }
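
/* For illustration (editor's note): like the other pass factories in this
   file, this one is referenced by the pass manager; the pass list in
   passes.def instantiates it along the lines of

     NEXT_PASS (pass_split_crit_edges);

   after which execute () simply runs split_critical_edges.  */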
8007
8008
8009 /* Build a ternary operation and gimplify it. Emit code before GSI.
8010 Return the gimple_val holding the result. */
8011
8012 tree
8013 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8014 tree type, tree a, tree b, tree c)
8015 {
8016 tree ret;
8017 location_t loc = gimple_location (gsi_stmt (*gsi));
8018
8019 ret = fold_build3_loc (loc, code, type, a, b, c);
8020 STRIP_NOPS (ret);
8021
8022 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8023 GSI_SAME_STMT);
8024 }
8025
8026 /* Build a binary operation and gimplify it. Emit code before GSI.
8027 Return the gimple_val holding the result. */
8028
8029 tree
8030 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8031 tree type, tree a, tree b)
8032 {
8033 tree ret;
8034
8035 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8036 STRIP_NOPS (ret);
8037
8038 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8039 GSI_SAME_STMT);
8040 }
8041
8042 /* Build a unary operation and gimplify it. Emit code before GSI.
8043 Return the gimple_val holding the result. */
8044
8045 tree
8046 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8047 tree a)
8048 {
8049 tree ret;
8050
8051 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8052 STRIP_NOPS (ret);
8053
8054 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8055 GSI_SAME_STMT);
8056 }
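
/* For illustration (editor's sketch, hypothetical variables): a pass
   positioned at iterator GSI could lower "(a + b) * c" with

     tree sum  = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each helper folds the expression, forces it into GIMPLE form (emitting
   any auxiliary statements before the statement at GSI), and returns a
   gimple value usable as an operand of further statements.  */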
8057
8058
8059 \f
8060 /* Emit return warnings. */
8061
8062 static unsigned int
8063 execute_warn_function_return (void)
8064 {
8065 source_location location;
8066 gimple last;
8067 edge e;
8068 edge_iterator ei;
8069
8070 if (!targetm.warn_func_return (cfun->decl))
8071 return 0;
8072
8073 /* If we have a path to EXIT, then we do return. */
8074 if (TREE_THIS_VOLATILE (cfun->decl)
8075 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
8076 {
8077 location = UNKNOWN_LOCATION;
8078 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8079 {
8080 last = last_stmt (e->src);
8081 if ((gimple_code (last) == GIMPLE_RETURN
8082 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8083 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8084 break;
8085 }
8086 if (location == UNKNOWN_LOCATION)
8087 location = cfun->function_end_locus;
8088 warning_at (location, 0, "%<noreturn%> function does return");
8089 }
8090
8091 /* If we see "return;" in some basic block, then we do reach the end
8092 without returning a value. */
8093 else if (warn_return_type
8094 && !TREE_NO_WARNING (cfun->decl)
8095 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
8096 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
8097 {
8098 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8099 {
8100 gimple last = last_stmt (e->src);
8101 if (gimple_code (last) == GIMPLE_RETURN
8102 && gimple_return_retval (last) == NULL
8103 && !gimple_no_warning_p (last))
8104 {
8105 location = gimple_location (last);
8106 if (location == UNKNOWN_LOCATION)
8107 location = cfun->function_end_locus;
8108 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8109 TREE_NO_WARNING (cfun->decl) = 1;
8110 break;
8111 }
8112 }
8113 }
8114 return 0;
8115 }
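
/* For illustration (editor's sketch of user code, not part of GCC):

     __attribute__ ((noreturn)) void
     die (int x)
     {
       if (x)
         abort ();
     }

   Here the "if (x)" false path reaches EXIT, so the first branch above
   warns "'noreturn' function does return".  A non-void function with a
   reachable plain "return;" takes the -Wreturn-type branch instead.  */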
8116
8117
8118 /* Given a basic block B which ends with a conditional and has
8119 precisely two successors, determine which of the edges is taken if
8120 the conditional is true and which is taken if the conditional is
8121 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8122
8123 void
8124 extract_true_false_edges_from_block (basic_block b,
8125 edge *true_edge,
8126 edge *false_edge)
8127 {
8128 edge e = EDGE_SUCC (b, 0);
8129
8130 if (e->flags & EDGE_TRUE_VALUE)
8131 {
8132 *true_edge = e;
8133 *false_edge = EDGE_SUCC (b, 1);
8134 }
8135 else
8136 {
8137 *false_edge = e;
8138 *true_edge = EDGE_SUCC (b, 1);
8139 }
8140 }
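
/* For illustration (editor's sketch): a typical caller looks like

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge,
                                          &false_edge);

   after which true_edge->dest is the block reached when the GIMPLE_COND
   ending COND_BB evaluates to true.  */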
8141
8142 namespace {
8143
8144 const pass_data pass_data_warn_function_return =
8145 {
8146 GIMPLE_PASS, /* type */
8147 "*warn_function_return", /* name */
8148 OPTGROUP_NONE, /* optinfo_flags */
8149 false, /* has_gate */
8150 true, /* has_execute */
8151 TV_NONE, /* tv_id */
8152 PROP_cfg, /* properties_required */
8153 0, /* properties_provided */
8154 0, /* properties_destroyed */
8155 0, /* todo_flags_start */
8156 0, /* todo_flags_finish */
8157 };
8158
8159 class pass_warn_function_return : public gimple_opt_pass
8160 {
8161 public:
8162 pass_warn_function_return (gcc::context *ctxt)
8163 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8164 {}
8165
8166 /* opt_pass methods: */
8167 unsigned int execute () { return execute_warn_function_return (); }
8168
8169 }; // class pass_warn_function_return
8170
8171 } // anon namespace
8172
8173 gimple_opt_pass *
8174 make_pass_warn_function_return (gcc::context *ctxt)
8175 {
8176 return new pass_warn_function_return (ctxt);
8177 }
8178
8179 /* Walk a gimplified function and warn about calls whose return value is
8180 ignored although attribute ((warn_unused_result)) is set on the callee.
8181 This runs before inlining, so inlined call sites need no special care.  */
8182
8183 static void
8184 do_warn_unused_result (gimple_seq seq)
8185 {
8186 tree fdecl, ftype;
8187 gimple_stmt_iterator i;
8188
8189 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8190 {
8191 gimple g = gsi_stmt (i);
8192
8193 switch (gimple_code (g))
8194 {
8195 case GIMPLE_BIND:
8196 do_warn_unused_result (gimple_bind_body (g));
8197 break;
8198 case GIMPLE_TRY:
8199 do_warn_unused_result (gimple_try_eval (g));
8200 do_warn_unused_result (gimple_try_cleanup (g));
8201 break;
8202 case GIMPLE_CATCH:
8203 do_warn_unused_result (gimple_catch_handler (g));
8204 break;
8205 case GIMPLE_EH_FILTER:
8206 do_warn_unused_result (gimple_eh_filter_failure (g));
8207 break;
8208
8209 case GIMPLE_CALL:
8210 if (gimple_call_lhs (g))
8211 break;
8212 if (gimple_call_internal_p (g))
8213 break;
8214
8215 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8216 LHS. All calls whose value is ignored should be
8217 represented like this. Look for the attribute. */
8218 fdecl = gimple_call_fndecl (g);
8219 ftype = gimple_call_fntype (g);
8220
8221 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8222 {
8223 location_t loc = gimple_location (g);
8224
8225 if (fdecl)
8226 warning_at (loc, OPT_Wunused_result,
8227 "ignoring return value of %qD, "
8228 "declared with attribute warn_unused_result",
8229 fdecl);
8230 else
8231 warning_at (loc, OPT_Wunused_result,
8232 "ignoring return value of function "
8233 "declared with attribute warn_unused_result");
8234 }
8235 break;
8236
8237 default:
8238 /* Not a container, not a call, or a call whose value is used. */
8239 break;
8240 }
8241 }
8242 }
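
/* For illustration (editor's sketch of user code, not part of GCC):

     __attribute__ ((warn_unused_result)) int f (void);

     void
     g (void)
     {
       f ();
     }

   The call in g has no LHS and is not internal, and the attribute is
   found on f's type, so the code above emits "ignoring return value of
   'f', declared with attribute warn_unused_result".  */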
8243
8244 static unsigned int
8245 run_warn_unused_result (void)
8246 {
8247 do_warn_unused_result (gimple_body (current_function_decl));
8248 return 0;
8249 }
8250
8251 static bool
8252 gate_warn_unused_result (void)
8253 {
8254 return flag_warn_unused_result;
8255 }
8256
8257 namespace {
8258
8259 const pass_data pass_data_warn_unused_result =
8260 {
8261 GIMPLE_PASS, /* type */
8262 "*warn_unused_result", /* name */
8263 OPTGROUP_NONE, /* optinfo_flags */
8264 true, /* has_gate */
8265 true, /* has_execute */
8266 TV_NONE, /* tv_id */
8267 PROP_gimple_any, /* properties_required */
8268 0, /* properties_provided */
8269 0, /* properties_destroyed */
8270 0, /* todo_flags_start */
8271 0, /* todo_flags_finish */
8272 };
8273
8274 class pass_warn_unused_result : public gimple_opt_pass
8275 {
8276 public:
8277 pass_warn_unused_result (gcc::context *ctxt)
8278 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8279 {}
8280
8281 /* opt_pass methods: */
8282 bool gate () { return gate_warn_unused_result (); }
8283 unsigned int execute () { return run_warn_unused_result (); }
8284
8285 }; // class pass_warn_unused_result
8286
8287 } // anon namespace
8288
8289 gimple_opt_pass *
8290 make_pass_warn_unused_result (gcc::context *ctxt)
8291 {
8292 return new pass_warn_unused_result (ctxt);
8293 }
8294
8295 /* IPA passes, compilation of earlier functions, or inlining
8296 might have changed some properties, such as marking functions nothrow,
8297 pure, const, or noreturn.
8298 Remove redundant edges and basic blocks, and create new ones if necessary.
8299
8300 This pass can't be run as a standalone pass from the pass manager,
8301 because between inlining and this fixup verify_flow_info would fail.  */
8302
8303 unsigned int
8304 execute_fixup_cfg (void)
8305 {
8306 basic_block bb;
8307 gimple_stmt_iterator gsi;
8308 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8309 gcov_type count_scale;
8310 edge e;
8311 edge_iterator ei;
8312
8313 count_scale
8314 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8315 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8316
8317 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8318 cgraph_get_node (current_function_decl)->count;
8319 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8320 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8321 count_scale);
8322
8323 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8324 e->count = apply_scale (e->count, count_scale);
8325
8326 FOR_EACH_BB (bb)
8327 {
8328 bb->count = apply_scale (bb->count, count_scale);
8329 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8330 {
8331 gimple stmt = gsi_stmt (gsi);
8332 tree decl = is_gimple_call (stmt)
8333 ? gimple_call_fndecl (stmt)
8334 : NULL;
8335 if (decl)
8336 {
8337 int flags = gimple_call_flags (stmt);
8338 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8339 {
8340 if (gimple_purge_dead_abnormal_call_edges (bb))
8341 todo |= TODO_cleanup_cfg;
8342
8343 if (gimple_in_ssa_p (cfun))
8344 {
8345 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8346 update_stmt (stmt);
8347 }
8348 }
8349
8350 if (flags & ECF_NORETURN
8351 && fixup_noreturn_call (stmt))
8352 todo |= TODO_cleanup_cfg;
8353 }
8354
8355 if (maybe_clean_eh_stmt (stmt)
8356 && gimple_purge_dead_eh_edges (bb))
8357 todo |= TODO_cleanup_cfg;
8358 }
8359
8360 FOR_EACH_EDGE (e, ei, bb->succs)
8361 e->count = apply_scale (e->count, count_scale);
8362
8363 /* If we have a basic block with no successors that does not
8364 end with a control statement or a noreturn call, end it with
8365 a call to __builtin_unreachable ().  This situation can occur
8366 when inlining a noreturn call that does in fact return.  */
8367 if (EDGE_COUNT (bb->succs) == 0)
8368 {
8369 gimple stmt = last_stmt (bb);
8370 if (!stmt
8371 || (!is_ctrl_stmt (stmt)
8372 && (!is_gimple_call (stmt)
8373 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8374 {
8375 stmt = gimple_build_call
8376 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8377 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8378 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8379 }
8380 }
8381 }
8382 if (count_scale != REG_BR_PROB_BASE)
8383 compute_function_frequency ();
8384
8385 /* We just processed all calls. */
8386 if (cfun->gimple_df)
8387 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8388
8389 /* Dump a textual representation of the flowgraph. */
8390 if (dump_file)
8391 gimple_dump_cfg (dump_file, dump_flags);
8392
8393 if (current_loops
8394 && (todo & TODO_cleanup_cfg))
8395 loops_state_set (LOOPS_NEED_FIXUP);
8396
8397 return todo;
8398 }
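
/* For illustration (editor's sketch): with REG_BR_PROB_BASE == 10000, a
   cgraph node count of 500 against an entry block count of 1000 gives
   count_scale == GCOV_COMPUTE_SCALE (500, 1000) == 5000, and a block with
   count 300 is rescaled to apply_scale (300, 5000) == 150; i.e. every
   count in the body is halved to match the node's profile.  */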
8399
8400 namespace {
8401
8402 const pass_data pass_data_fixup_cfg =
8403 {
8404 GIMPLE_PASS, /* type */
8405 "*free_cfg_annotations", /* name */
8406 OPTGROUP_NONE, /* optinfo_flags */
8407 false, /* has_gate */
8408 true, /* has_execute */
8409 TV_NONE, /* tv_id */
8410 PROP_cfg, /* properties_required */
8411 0, /* properties_provided */
8412 0, /* properties_destroyed */
8413 0, /* todo_flags_start */
8414 0, /* todo_flags_finish */
8415 };
8416
8417 class pass_fixup_cfg : public gimple_opt_pass
8418 {
8419 public:
8420 pass_fixup_cfg (gcc::context *ctxt)
8421 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8422 {}
8423
8424 /* opt_pass methods: */
8425 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8426 unsigned int execute () { return execute_fixup_cfg (); }
8427
8428 }; // class pass_fixup_cfg
8429
8430 } // anon namespace
8431
8432 gimple_opt_pass *
8433 make_pass_fixup_cfg (gcc::context *ctxt)
8434 {
8435 return new pass_fixup_cfg (ctxt);
8436 }
8437
8438 /* Garbage collection support for edge_def. */
8439
8440 extern void gt_ggc_mx (tree&);
8441 extern void gt_ggc_mx (gimple&);
8442 extern void gt_ggc_mx (rtx&);
8443 extern void gt_ggc_mx (basic_block&);
8444
8445 void
8446 gt_ggc_mx (edge_def *e)
8447 {
8448 tree block = LOCATION_BLOCK (e->goto_locus);
8449 gt_ggc_mx (e->src);
8450 gt_ggc_mx (e->dest);
8451 if (current_ir_type () == IR_GIMPLE)
8452 gt_ggc_mx (e->insns.g);
8453 else
8454 gt_ggc_mx (e->insns.r);
8455 gt_ggc_mx (block);
8456 }
8457
8458 /* PCH support for edge_def. */
8459
8460 extern void gt_pch_nx (tree&);
8461 extern void gt_pch_nx (gimple&);
8462 extern void gt_pch_nx (rtx&);
8463 extern void gt_pch_nx (basic_block&);
8464
8465 void
8466 gt_pch_nx (edge_def *e)
8467 {
8468 tree block = LOCATION_BLOCK (e->goto_locus);
8469 gt_pch_nx (e->src);
8470 gt_pch_nx (e->dest);
8471 if (current_ir_type () == IR_GIMPLE)
8472 gt_pch_nx (e->insns.g);
8473 else
8474 gt_pch_nx (e->insns.r);
8475 gt_pch_nx (block);
8476 }
8477
8478 void
8479 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8480 {
8481 tree block = LOCATION_BLOCK (e->goto_locus);
8482 op (&(e->src), cookie);
8483 op (&(e->dest), cookie);
8484 if (current_ir_type () == IR_GIMPLE)
8485 op (&(e->insns.g), cookie);
8486 else
8487 op (&(e->insns.r), cookie);
8488 op (&(block), cookie);
8489 }
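
/* For illustration (editor's note, hedged): these are hand-written
   versions of the routines gengtype would emit for a GTY-ed type:
   gt_ggc_mx marks an edge's reachable sub-objects during garbage
   collection, gt_pch_nx records them when a precompiled header is
   written, and the overload taking a gt_pointer_operator applies OP to
   each contained pointer so PCH pointers can be relocated on read-in.  */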