Don't create out-of-bounds BIT_FIELD_REF.
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
47 #include "cgraph.h"
48 #include "tree-cfg.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
56 #include "expr.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
62 #include "except.h"
63 #include "cfgloop.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
67 #include "target.h"
68 #include "tree-ssa-live.h"
69 #include "omp-low.h"
70 #include "tree-cfgcleanup.h"
71
72 /* This file contains functions for building the Control Flow Graph (CFG)
73 for a function tree. */
74
75 /* Local declarations. */
76
77 /* Initial capacity for the basic block array. */
78 static const int initial_cfg_capacity = 20;
79
80 /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
81 which use a particular edge. The CASE_LABEL_EXPRs are chained together
82 via their CASE_CHAIN field, which we clear after we're done with the
83 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
84
85 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
86 update the case vector in response to edge redirections.
87
88 Right now this table is set up and torn down at key points in the
89 compilation process. It would be nice if we could make the table
90 more persistent. The key is getting notification of changes to
91 the CFG (particularly edge removal, creation and redirection). */
92
93 static struct pointer_map_t *edge_to_cases;
94
95 /* If we record edge_to_cases, this bitmap will hold indexes
96 of basic blocks that end in a GIMPLE_SWITCH which we touched
97 due to edge manipulations. */
98
99 static bitmap touched_switch_bbs;
100
101 /* CFG statistics. */
102 struct cfg_stats_d
103 {
104 long num_merged_labels;
105 };
106
107 static struct cfg_stats_d cfg_stats;
108
109 /* Nonzero if we found a computed goto while building basic blocks. */
110 static bool found_computed_goto;
111
112 /* Hash table to store last discriminator assigned for each locus. */
113 struct locus_discrim_map
114 {
115 location_t locus;
116 int discriminator;
117 };
118
119 /* Hashtable helpers. */
120
121 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
122 {
123 typedef locus_discrim_map value_type;
124 typedef locus_discrim_map compare_type;
125 static inline hashval_t hash (const value_type *);
126 static inline bool equal (const value_type *, const compare_type *);
127 };
128
129 /* Trivial hash function for a location_t. ITEM is a pointer to
130 a hash table entry that maps a location_t to a discriminator. */
131
132 inline hashval_t
133 locus_discrim_hasher::hash (const value_type *item)
134 {
135 return LOCATION_LINE (item->locus);
136 }
137
138 /* Equality function for the locus-to-discriminator map. A and B
139 point to the two hash table entries to compare. */
140
141 inline bool
142 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
143 {
144 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
145 }
146
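/* Note that both hash and equal look only at LOCATION_LINE, so two
   locations that differ only in their column collapse into a single
   entry.  For example (illustrative), the two arms of
   "if (a) x = 1; else x = 2;" written on one source line map to the
   same locus_discrim_map slot.  */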
147 static hash_table <locus_discrim_hasher> discriminator_per_locus;
148
149 /* Basic blocks and flowgraphs. */
150 static void make_blocks (gimple_seq);
151 static void factor_computed_gotos (void);
152
153 /* Edges. */
154 static void make_edges (void);
155 static void assign_discriminators (void);
156 static void make_cond_expr_edges (basic_block);
157 static void make_gimple_switch_edges (basic_block);
158 static void make_goto_expr_edges (basic_block);
159 static void make_gimple_asm_edges (basic_block);
160 static edge gimple_redirect_edge_and_branch (edge, basic_block);
161 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
162 static unsigned int split_critical_edges (void);
163
164 /* Various helpers. */
165 static inline bool stmt_starts_bb_p (gimple, gimple);
166 static int gimple_verify_flow_info (void);
167 static void gimple_make_forwarder_block (edge);
168 static gimple first_non_label_stmt (basic_block);
169 static bool verify_gimple_transaction (gimple);
170
171 /* Flowgraph optimization and cleanup. */
172 static void gimple_merge_blocks (basic_block, basic_block);
173 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
174 static void remove_bb (basic_block);
175 static edge find_taken_edge_computed_goto (basic_block, tree);
176 static edge find_taken_edge_cond_expr (basic_block, tree);
177 static edge find_taken_edge_switch_expr (basic_block, tree);
178 static tree find_case_label_for_value (gimple, tree);
179
180 void
181 init_empty_tree_cfg_for_function (struct function *fn)
182 {
183 /* Initialize the basic block array. */
184 init_flow (fn);
185 profile_status_for_function (fn) = PROFILE_ABSENT;
186 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
187 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
188 vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
189 vec_safe_grow_cleared (basic_block_info_for_function (fn),
190 initial_cfg_capacity);
191
192 /* Build a mapping of labels to their associated blocks. */
193 vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
194 vec_safe_grow_cleared (label_to_block_map_for_function (fn),
195 initial_cfg_capacity);
196
197 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
198 ENTRY_BLOCK_PTR_FOR_FN (fn));
199 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
200 EXIT_BLOCK_PTR_FOR_FN (fn));
201
202 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
203 = EXIT_BLOCK_PTR_FOR_FN (fn);
204 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
205 = ENTRY_BLOCK_PTR_FOR_FN (fn);
206 }
207
208 void
209 init_empty_tree_cfg (void)
210 {
211 init_empty_tree_cfg_for_function (cfun);
212 }
213
214 /*---------------------------------------------------------------------------
215 Create basic blocks
216 ---------------------------------------------------------------------------*/
217
218 /* Entry point to the CFG builder for trees. SEQ is the sequence of
219 statements to be added to the flowgraph. */
220
221 static void
222 build_gimple_cfg (gimple_seq seq)
223 {
224 /* Register specific gimple functions. */
225 gimple_register_cfg_hooks ();
226
227 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
228
229 init_empty_tree_cfg ();
230
231 found_computed_goto = 0;
232 make_blocks (seq);
233
234 /* Computed gotos are hell to deal with, especially if there are
235 lots of them with a large number of destinations. So we factor
236 them to a common computed goto location before we build the
237 edge list. After we convert back to normal form, we will un-factor
238 the computed gotos since factoring introduces an unwanted jump. */
239 if (found_computed_goto)
240 factor_computed_gotos ();
241
242 /* Make sure there is always at least one block, even if it's empty. */
243 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
244 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
245
246 /* Adjust the size of the array. */
247 if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
248 vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));
249
250 /* To speed up statement iterator walks, we first purge dead labels. */
251 cleanup_dead_labels ();
252
253 /* Group case nodes to reduce the number of edges.
254 We do this after cleaning up dead labels because otherwise we miss
255 a lot of obvious case merging opportunities. */
256 group_case_labels ();
257
258 /* Create the edges of the flowgraph. */
259 discriminator_per_locus.create (13);
260 make_edges ();
261 assign_discriminators ();
262 cleanup_dead_labels ();
263 discriminator_per_locus.dispose ();
264 }
265
266
267 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
268 it and set loop->safelen to INT_MAX. We assume that the annotation
269 comes immediately before the condition. */
270
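/* Illustrative sketch (not the exact gimplifier output): a loop
   preceded by "#pragma GCC ivdep" arrives here with its controlling
   condition computed along the lines of

       _t = ANNOTATE (<cond>, annot_expr_ivdep_kind);
       if (_t != 0) goto <body>; else goto <exit>;

   The ANNOTATE call is rewritten below into the plain copy
   "_t = <cond>" and loop->safelen is set to INT_MAX.  */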
271 static void
272 replace_loop_annotate ()
273 {
274 struct loop *loop;
275 basic_block bb;
276 gimple_stmt_iterator gsi;
277 gimple stmt;
278
279 FOR_EACH_LOOP (loop, 0)
280 {
281 gsi = gsi_last_bb (loop->header);
282 stmt = gsi_stmt (gsi);
283 if (stmt && gimple_code (stmt) == GIMPLE_COND)
284 {
285 gsi_prev_nondebug (&gsi);
286 if (gsi_end_p (gsi))
287 continue;
288 stmt = gsi_stmt (gsi);
289 if (gimple_code (stmt) != GIMPLE_CALL)
290 continue;
291 if (!gimple_call_internal_p (stmt)
292 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
293 continue;
294 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
295 != annot_expr_ivdep_kind)
296 continue;
297 stmt = gimple_build_assign (gimple_call_lhs (stmt),
298 gimple_call_arg (stmt, 0));
299 gsi_replace (&gsi, stmt, true);
300 loop->safelen = INT_MAX;
301 }
302 }
303
304 /* Remove any remaining IFN_ANNOTATE calls; this safeguards the case where loop->latch == NULL. */
305 FOR_EACH_BB (bb)
306 {
307 gsi = gsi_last_bb (bb);
308 stmt = gsi_stmt (gsi);
309 if (stmt && gimple_code (stmt) == GIMPLE_COND)
310 gsi_prev_nondebug (&gsi);
311 if (gsi_end_p (gsi))
312 continue;
313 stmt = gsi_stmt (gsi);
314 if (gimple_code (stmt) != GIMPLE_CALL)
315 continue;
316 if (!gimple_call_internal_p (stmt)
317 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
318 continue;
319 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
320 != annot_expr_ivdep_kind)
321 continue;
322 warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
323 "annotation");
324 stmt = gimple_build_assign (gimple_call_lhs (stmt),
325 gimple_call_arg (stmt, 0));
326 gsi_replace (&gsi, stmt, true);
327 }
328 }
329
330
331 static unsigned int
332 execute_build_cfg (void)
333 {
334 gimple_seq body = gimple_body (current_function_decl);
335
336 build_gimple_cfg (body);
337 gimple_set_body (current_function_decl, NULL);
338 if (dump_file && (dump_flags & TDF_DETAILS))
339 {
340 fprintf (dump_file, "Scope blocks:\n");
341 dump_scope_blocks (dump_file, dump_flags);
342 }
343 cleanup_tree_cfg ();
344 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
345 replace_loop_annotate ();
346 return 0;
347 }
348
349 namespace {
350
351 const pass_data pass_data_build_cfg =
352 {
353 GIMPLE_PASS, /* type */
354 "cfg", /* name */
355 OPTGROUP_NONE, /* optinfo_flags */
356 false, /* has_gate */
357 true, /* has_execute */
358 TV_TREE_CFG, /* tv_id */
359 PROP_gimple_leh, /* properties_required */
360 ( PROP_cfg | PROP_loops ), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 TODO_verify_stmts, /* todo_flags_finish */
364 };
365
366 class pass_build_cfg : public gimple_opt_pass
367 {
368 public:
369 pass_build_cfg (gcc::context *ctxt)
370 : gimple_opt_pass (pass_data_build_cfg, ctxt)
371 {}
372
373 /* opt_pass methods: */
374 unsigned int execute () { return execute_build_cfg (); }
375
376 }; // class pass_build_cfg
377
378 } // anon namespace
379
380 gimple_opt_pass *
381 make_pass_build_cfg (gcc::context *ctxt)
382 {
383 return new pass_build_cfg (ctxt);
384 }
385
386
387 /* Return true if T is a computed goto. */
388
389 static bool
390 computed_goto_p (gimple t)
391 {
392 return (gimple_code (t) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
394 }
395
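/* For example (illustrative): "goto *ptr;" has a destination that is
   not a LABEL_DECL and therefore is a computed goto, whereas
   "goto lab;" carries the LABEL_DECL of "lab" and is not.  */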
396 /* Returns true for edge E where E->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
399 <bb C>:
400 ...
401 if (something)
402 goto <bb N>;
403 else
404 goto <bb M>;
405 <bb N>:
406 __builtin_unreachable ();
407 <bb M>: */
408
409 bool
410 assert_unreachable_fallthru_edge_p (edge e)
411 {
412 basic_block pred_bb = e->src;
413 gimple last = last_stmt (pred_bb);
414 if (last && gimple_code (last) == GIMPLE_COND)
415 {
416 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
417 if (other_bb == e->dest)
418 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
419 if (EDGE_COUNT (other_bb->succs) == 0)
420 {
421 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
422 gimple stmt;
423
424 if (gsi_end_p (gsi))
425 return false;
426 stmt = gsi_stmt (gsi);
427 if (is_gimple_debug (stmt))
428 {
429 gsi_next_nondebug (&gsi);
430 if (gsi_end_p (gsi))
431 return false;
432 stmt = gsi_stmt (gsi);
433 }
434 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
435 }
436 }
437 return false;
438 }
439
440
441 /* Search the CFG for any computed gotos. If found, factor them to a
442 common computed goto site. Also record the location of that site so
443 that we can un-factor the gotos after we have converted back to
444 normal form. */
445
446 static void
447 factor_computed_gotos (void)
448 {
449 basic_block bb;
450 tree factored_label_decl = NULL;
451 tree var = NULL;
452 gimple factored_computed_goto_label = NULL;
453 gimple factored_computed_goto = NULL;
454
455 /* We know there are one or more computed gotos in this function.
456 Examine the last statement in each basic block to see if the block
457 ends with a computed goto. */
458
459 FOR_EACH_BB (bb)
460 {
461 gimple_stmt_iterator gsi = gsi_last_bb (bb);
462 gimple last;
463
464 if (gsi_end_p (gsi))
465 continue;
466
467 last = gsi_stmt (gsi);
468
469 /* Ignore the computed goto we create when we factor the original
470 computed gotos. */
471 if (last == factored_computed_goto)
472 continue;
473
474 /* If the last statement is a computed goto, factor it. */
475 if (computed_goto_p (last))
476 {
477 gimple assignment;
478
479 /* The first time we find a computed goto we need to create
480 the factored goto block and the variable each original
481 computed goto will use for its goto destination. */
482 if (!factored_computed_goto)
483 {
484 basic_block new_bb = create_empty_bb (bb);
485 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
486
487 /* Create the destination of the factored goto. Each original
488 computed goto will put its desired destination into this
489 variable and jump to the label we create immediately
490 below. */
491 var = create_tmp_var (ptr_type_node, "gotovar");
492
493 /* Build a label for the new block which will contain the
494 factored computed goto. */
495 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
496 factored_computed_goto_label
497 = gimple_build_label (factored_label_decl);
498 gsi_insert_after (&new_gsi, factored_computed_goto_label,
499 GSI_NEW_STMT);
500
501 /* Build our new computed goto. */
502 factored_computed_goto = gimple_build_goto (var);
503 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
504 }
505
506 /* Copy the original computed goto's destination into VAR. */
507 assignment = gimple_build_assign (var, gimple_goto_dest (last));
508 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
509
510 /* And re-vector the computed goto to the new destination. */
511 gimple_goto_set_dest (last, factored_label_decl);
512 }
513 }
514 }
515
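/* After factoring, each original computed goto has become (sketch;
   "factored_lab" stands for the artificial label created above)

       gotovar = <original destination>;
       goto factored_lab;

   and a single new block holds "factored_lab: goto *gotovar;", so only
   one block in the function carries the computed-goto out-edges.  */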
516
517 /* Build a flowgraph for the sequence of stmts SEQ. */
518
519 static void
520 make_blocks (gimple_seq seq)
521 {
522 gimple_stmt_iterator i = gsi_start (seq);
523 gimple stmt = NULL;
524 bool start_new_block = true;
525 bool first_stmt_of_seq = true;
526 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
527
528 while (!gsi_end_p (i))
529 {
530 gimple prev_stmt;
531
532 prev_stmt = stmt;
533 stmt = gsi_stmt (i);
534
535 /* If the statement starts a new basic block or if we have determined
536 in a previous pass that we need to create a new block for STMT, do
537 so now. */
538 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
539 {
540 if (!first_stmt_of_seq)
541 gsi_split_seq_before (&i, &seq);
542 bb = create_basic_block (seq, NULL, bb);
543 start_new_block = false;
544 }
545
546 /* Now add STMT to BB and create the subgraphs for special statement
547 codes. */
548 gimple_set_bb (stmt, bb);
549
550 if (computed_goto_p (stmt))
551 found_computed_goto = true;
552
553 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
554 next iteration. */
555 if (stmt_ends_bb_p (stmt))
556 {
557 /* If the stmt can make an abnormal goto, use a new temporary
558 for the assignment to the LHS. This makes sure the old value
559 of the LHS is available on the abnormal edge. Otherwise
560 we will end up with overlapping life-ranges for abnormal
561 SSA names. */
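/* Illustrative sketch: for a returns-twice call such as setjmp,

       x = setjmp (&env);

   becomes

       tmp = setjmp (&env);
       x = tmp;

   ("tmp" here names the anonymous temporary created below), so X is
   only assigned on the normal return path and the abnormal edge still
   sees the old value of X.  */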
562 if (gimple_has_lhs (stmt)
563 && stmt_can_make_abnormal_goto (stmt)
564 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
565 {
566 tree lhs = gimple_get_lhs (stmt);
567 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
568 gimple s = gimple_build_assign (lhs, tmp);
569 gimple_set_location (s, gimple_location (stmt));
570 gimple_set_block (s, gimple_block (stmt));
571 gimple_set_lhs (stmt, tmp);
572 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
573 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
574 DECL_GIMPLE_REG_P (tmp) = 1;
575 gsi_insert_after (&i, s, GSI_SAME_STMT);
576 }
577 start_new_block = true;
578 }
579
580 gsi_next (&i);
581 first_stmt_of_seq = false;
582 }
583 }
584
585
586 /* Create and return a new empty basic block after bb AFTER. */
587
588 static basic_block
589 create_bb (void *h, void *e, basic_block after)
590 {
591 basic_block bb;
592
593 gcc_assert (!e);
594
595 /* Create and initialize a new basic block. Since alloc_block uses
596 GC allocation that clears memory to allocate a basic block, we do
597 not have to clear the newly allocated basic block here. */
598 bb = alloc_block ();
599
600 bb->index = last_basic_block;
601 bb->flags = BB_NEW;
602 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
603
604 /* Add the new block to the linked list of blocks. */
605 link_block (bb, after);
606
607 /* Grow the basic block array if needed. */
608 if ((size_t) last_basic_block == basic_block_info->length ())
609 {
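/* Grow by about a quarter of the current size (rounded up), so
   creating N blocks reallocates the array only O(log N) times.  */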
610 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
611 vec_safe_grow_cleared (basic_block_info, new_size);
612 }
613
614 /* Add the newly created block to the array. */
615 SET_BASIC_BLOCK (last_basic_block, bb);
616
617 n_basic_blocks_for_fn (cfun)++;
618 last_basic_block++;
619
620 return bb;
621 }
622
623
624 /*---------------------------------------------------------------------------
625 Edge creation
626 ---------------------------------------------------------------------------*/
627
628 /* Fold COND_EXPR_COND of each COND_EXPR. */
629
630 void
631 fold_cond_expr_cond (void)
632 {
633 basic_block bb;
634
635 FOR_EACH_BB (bb)
636 {
637 gimple stmt = last_stmt (bb);
638
639 if (stmt && gimple_code (stmt) == GIMPLE_COND)
640 {
641 location_t loc = gimple_location (stmt);
642 tree cond;
643 bool zerop, onep;
644
645 fold_defer_overflow_warnings ();
646 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
647 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
648 if (cond)
649 {
650 zerop = integer_zerop (cond);
651 onep = integer_onep (cond);
652 }
653 else
654 zerop = onep = false;
655
656 fold_undefer_overflow_warnings (zerop || onep,
657 stmt,
658 WARN_STRICT_OVERFLOW_CONDITIONAL);
659 if (zerop)
660 gimple_cond_make_false (stmt);
661 else if (onep)
662 gimple_cond_make_true (stmt);
663 }
664 }
665 }
666
667 /* Join all the blocks in the flowgraph. */
668
669 static void
670 make_edges (void)
671 {
672 basic_block bb;
673 struct omp_region *cur_region = NULL;
674
675 /* Create an edge from entry to the first block with executable
676 statements in it. */
677 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
678 EDGE_FALLTHRU);
679
680 /* Traverse the basic block array placing edges. */
681 FOR_EACH_BB (bb)
682 {
683 gimple last = last_stmt (bb);
684 bool fallthru;
685
686 if (last)
687 {
688 enum gimple_code code = gimple_code (last);
689 switch (code)
690 {
691 case GIMPLE_GOTO:
692 make_goto_expr_edges (bb);
693 fallthru = false;
694 break;
695 case GIMPLE_RETURN:
696 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
697 fallthru = false;
698 break;
699 case GIMPLE_COND:
700 make_cond_expr_edges (bb);
701 fallthru = false;
702 break;
703 case GIMPLE_SWITCH:
704 make_gimple_switch_edges (bb);
705 fallthru = false;
706 break;
707 case GIMPLE_RESX:
708 make_eh_edges (last);
709 fallthru = false;
710 break;
711 case GIMPLE_EH_DISPATCH:
712 fallthru = make_eh_dispatch_edges (last);
713 break;
714
715 case GIMPLE_CALL:
716 /* If this function receives a nonlocal goto, then we need to
717 make edges from this call site to all the nonlocal goto
718 handlers. */
719 if (stmt_can_make_abnormal_goto (last))
720 make_abnormal_goto_edges (bb, true);
721
722 /* If this statement has reachable exception handlers, then
723 create abnormal edges to them. */
724 make_eh_edges (last);
725
726 /* BUILTIN_RETURN is really a return statement. */
727 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
728 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0),
729 fallthru = false;
730 /* Some calls are known not to return. */
731 else
732 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
733 break;
734
735 case GIMPLE_ASSIGN:
736 /* A GIMPLE_ASSIGN may throw internally and thus be considered
737 control-altering. */
738 if (is_ctrl_altering_stmt (last))
739 make_eh_edges (last);
740 fallthru = true;
741 break;
742
743 case GIMPLE_ASM:
744 make_gimple_asm_edges (bb);
745 fallthru = true;
746 break;
747
748 CASE_GIMPLE_OMP:
749 fallthru = make_gimple_omp_edges (bb, &cur_region);
750 break;
751
752 case GIMPLE_TRANSACTION:
753 {
754 tree abort_label = gimple_transaction_label (last);
755 if (abort_label)
756 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
757 fallthru = true;
758 }
759 break;
760
761 default:
762 gcc_assert (!stmt_ends_bb_p (last));
763 fallthru = true;
764 }
765 }
766 else
767 fallthru = true;
768
769 if (fallthru)
770 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
771 }
772
773 free_omp_regions ();
774
775 /* Fold COND_EXPR_COND of each COND_EXPR. */
776 fold_cond_expr_cond ();
777 }
778
779 /* Find the next available discriminator value for LOCUS. The
780 discriminator distinguishes among several basic blocks that
781 share a common locus, allowing for more accurate sample-based
782 profiling. */
783
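/* For example (illustrative): if an if-then-else written on a single
   source line yields two basic blocks sharing that line's locus, the
   first block queried here receives discriminator 1 and the second
   receives 2, letting a sample-based profiler attribute counts to
   each block separately.  */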
784 static int
785 next_discriminator_for_locus (location_t locus)
786 {
787 struct locus_discrim_map item;
788 struct locus_discrim_map **slot;
789
790 item.locus = locus;
791 item.discriminator = 0;
792 slot = discriminator_per_locus.find_slot_with_hash (
793 &item, LOCATION_LINE (locus), INSERT);
794 gcc_assert (slot);
795 if (*slot == HTAB_EMPTY_ENTRY)
796 {
797 *slot = XNEW (struct locus_discrim_map);
798 gcc_assert (*slot);
799 (*slot)->locus = locus;
800 (*slot)->discriminator = 0;
801 }
802 (*slot)->discriminator++;
803 return (*slot)->discriminator;
804 }
805
806 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
807
808 static bool
809 same_line_p (location_t locus1, location_t locus2)
810 {
811 expanded_location from, to;
812
813 if (locus1 == locus2)
814 return true;
815
816 from = expand_location (locus1);
817 to = expand_location (locus2);
818
819 if (from.line != to.line)
820 return false;
821 if (from.file == to.file)
822 return true;
823 return (from.file != NULL
824 && to.file != NULL
825 && filename_cmp (from.file, to.file) == 0);
826 }
827
828 /* Assign discriminators to each basic block. */
829
830 static void
831 assign_discriminators (void)
832 {
833 basic_block bb;
834
835 FOR_EACH_BB (bb)
836 {
837 edge e;
838 edge_iterator ei;
839 gimple last = last_stmt (bb);
840 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
841
842 if (locus == UNKNOWN_LOCATION)
843 continue;
844
845 FOR_EACH_EDGE (e, ei, bb->succs)
846 {
847 gimple first = first_non_label_stmt (e->dest);
848 gimple last = last_stmt (e->dest);
849 if ((first && same_line_p (locus, gimple_location (first)))
850 || (last && same_line_p (locus, gimple_location (last))))
851 {
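/* If E->dest already has a discriminator and BB does not,
   discriminate BB itself; otherwise assign (or reassign) a fresh
   discriminator to E->dest.  */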
852 if (e->dest->discriminator != 0 && bb->discriminator == 0)
853 bb->discriminator = next_discriminator_for_locus (locus);
854 else
855 e->dest->discriminator = next_discriminator_for_locus (locus);
856 }
857 }
858 }
859 }
860
861 /* Create the edges for a GIMPLE_COND starting at block BB. */
862
863 static void
864 make_cond_expr_edges (basic_block bb)
865 {
866 gimple entry = last_stmt (bb);
867 gimple then_stmt, else_stmt;
868 basic_block then_bb, else_bb;
869 tree then_label, else_label;
870 edge e;
871
872 gcc_assert (entry);
873 gcc_assert (gimple_code (entry) == GIMPLE_COND);
874
875 /* Entry basic blocks for each component. */
876 then_label = gimple_cond_true_label (entry);
877 else_label = gimple_cond_false_label (entry);
878 then_bb = label_to_block (then_label);
879 else_bb = label_to_block (else_label);
880 then_stmt = first_stmt (then_bb);
881 else_stmt = first_stmt (else_bb);
882
883 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
884 e->goto_locus = gimple_location (then_stmt);
885 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
886 if (e)
887 e->goto_locus = gimple_location (else_stmt);
888
889 /* We do not need the labels anymore. */
890 gimple_cond_set_true_label (entry, NULL_TREE);
891 gimple_cond_set_false_label (entry, NULL_TREE);
892 }
893
894
895 /* Called for each element in the edge-to-cases hash table as we
896 delete that table.
897
898 Clear all the TREE_CHAINs to prevent problems with copying of
899 SWITCH_EXPRs and structure sharing rules, then free the hash table
900 element. */
901
902 static bool
903 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
904 void *data ATTRIBUTE_UNUSED)
905 {
906 tree t, next;
907
908 for (t = (tree) *value; t; t = next)
909 {
910 next = CASE_CHAIN (t);
911 CASE_CHAIN (t) = NULL;
912 }
913
914 *value = NULL;
915 return true;
916 }
917
918 /* Start recording information mapping edges to case labels. */
919
920 void
921 start_recording_case_labels (void)
922 {
923 gcc_assert (edge_to_cases == NULL);
924 edge_to_cases = pointer_map_create ();
925 touched_switch_bbs = BITMAP_ALLOC (NULL);
926 }
927
928 /* Return nonzero if we are recording information for case labels. */
929
930 static bool
931 recording_case_labels_p (void)
932 {
933 return (edge_to_cases != NULL);
934 }
935
936 /* Stop recording information mapping edges to case labels and
937 remove any information we have recorded. */
938 void
939 end_recording_case_labels (void)
940 {
941 bitmap_iterator bi;
942 unsigned i;
943 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
944 pointer_map_destroy (edge_to_cases);
945 edge_to_cases = NULL;
946 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
947 {
948 basic_block bb = BASIC_BLOCK (i);
949 if (bb)
950 {
951 gimple stmt = last_stmt (bb);
952 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
953 group_case_labels_stmt (stmt);
954 }
955 }
956 BITMAP_FREE (touched_switch_bbs);
957 }
958
959 /* If we are inside a {start,end}_recording_cases block, then return
960 a chain of CASE_LABEL_EXPRs from T which reference E.
961
962 Otherwise return NULL. */
963
964 static tree
965 get_cases_for_edge (edge e, gimple t)
966 {
967 void **slot;
968 size_t i, n;
969
970 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
971 chains available. Return NULL so the caller can detect this case. */
972 if (!recording_case_labels_p ())
973 return NULL;
974
975 slot = pointer_map_contains (edge_to_cases, e);
976 if (slot)
977 return (tree) *slot;
978
979 /* If we did not find E in the hash table, then this must be the first
980 time we have been queried for information about E & T. Add all the
981 elements from T to the hash table then perform the query again. */
982
983 n = gimple_switch_num_labels (t);
984 for (i = 0; i < n; i++)
985 {
986 tree elt = gimple_switch_label (t, i);
987 tree lab = CASE_LABEL (elt);
988 basic_block label_bb = label_to_block (lab);
989 edge this_edge = find_edge (e->src, label_bb);
990
991 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
992 a new chain. */
993 slot = pointer_map_insert (edge_to_cases, this_edge);
994 CASE_CHAIN (elt) = (tree) *slot;
995 *slot = elt;
996 }
997
998 return (tree) *pointer_map_contains (edge_to_cases, e);
999 }
1000
1001 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1002
1003 static void
1004 make_gimple_switch_edges (basic_block bb)
1005 {
1006 gimple entry = last_stmt (bb);
1007 size_t i, n;
1008
1009 n = gimple_switch_num_labels (entry);
1010
1011 for (i = 0; i < n; ++i)
1012 {
1013 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1014 basic_block label_bb = label_to_block (lab);
1015 make_edge (bb, label_bb, 0);
1016 }
1017 }
1018
1019
1020 /* Return the basic block holding label DEST. */
1021
1022 basic_block
1023 label_to_block_fn (struct function *ifun, tree dest)
1024 {
1025 int uid = LABEL_DECL_UID (dest);
1026
1027 /* We would die hard when faced with an undefined label. Emit a label to
1028 the very first basic block. This will hopefully make even the dataflow
1029 and undefined-variable warnings come out right. */
1030 if (seen_error () && uid < 0)
1031 {
1032 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
1033 gimple stmt;
1034
1035 stmt = gimple_build_label (dest);
1036 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1037 uid = LABEL_DECL_UID (dest);
1038 }
1039 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1040 return NULL;
1041 return (*ifun->cfg->x_label_to_block_map)[uid];
1042 }
1043
1044 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1045 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1046
1047 void
1048 make_abnormal_goto_edges (basic_block bb, bool for_call)
1049 {
1050 basic_block target_bb;
1051 gimple_stmt_iterator gsi;
1052
1053 FOR_EACH_BB (target_bb)
1054 {
1055 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
1056 {
1057 gimple label_stmt = gsi_stmt (gsi);
1058 tree target;
1059
1060 if (gimple_code (label_stmt) != GIMPLE_LABEL)
1061 break;
1062
1063 target = gimple_label_label (label_stmt);
1064
1065 /* Make an edge to every label block that has been marked as a
1066 potential target for a computed goto or a non-local goto. */
1067 if ((FORCED_LABEL (target) && !for_call)
1068 || (DECL_NONLOCAL (target) && for_call))
1069 {
1070 make_edge (bb, target_bb, EDGE_ABNORMAL);
1071 break;
1072 }
1073 }
1074 if (!gsi_end_p (gsi)
1075 && is_gimple_debug (gsi_stmt (gsi)))
1076 gsi_next_nondebug (&gsi);
1077 if (!gsi_end_p (gsi))
1078 {
1079 /* Make an edge to every setjmp-like call. */
1080 gimple call_stmt = gsi_stmt (gsi);
1081 if (is_gimple_call (call_stmt)
1082 && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
1083 make_edge (bb, target_bb, EDGE_ABNORMAL);
1084 }
1085 }
1086 }
1087
1088 /* Create edges for a goto statement at block BB. */
1089
1090 static void
1091 make_goto_expr_edges (basic_block bb)
1092 {
1093 gimple_stmt_iterator last = gsi_last_bb (bb);
1094 gimple goto_t = gsi_stmt (last);
1095
1096 /* A simple GOTO creates normal edges. */
1097 if (simple_goto_p (goto_t))
1098 {
1099 tree dest = gimple_goto_dest (goto_t);
1100 basic_block label_bb = label_to_block (dest);
1101 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1102 e->goto_locus = gimple_location (goto_t);
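/* The edge now carries the destination and the locus, so the GOTO
   statement itself is redundant and can be removed.  */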
1103 gsi_remove (&last, true);
1104 return;
1105 }
1106
1107 /* A computed GOTO creates abnormal edges. */
1108 make_abnormal_goto_edges (bb, false);
1109 }
1110
1111 /* Create edges for an asm statement with labels at block BB. */
1112
1113 static void
1114 make_gimple_asm_edges (basic_block bb)
1115 {
1116 gimple stmt = last_stmt (bb);
1117 int i, n = gimple_asm_nlabels (stmt);
1118
1119 for (i = 0; i < n; ++i)
1120 {
1121 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1122 basic_block label_bb = label_to_block (label);
1123 make_edge (bb, label_bb, 0);
1124 }
1125 }
1126
1127 /*---------------------------------------------------------------------------
1128 Flowgraph analysis
1129 ---------------------------------------------------------------------------*/
1130
1131 /* Cleanup useless labels in basic blocks. This is something we wish
1132 to do early because it allows us to group case labels before creating
1133 the edges for the CFG, and it speeds up block statement iterators in
1134 all passes later on.
1135 We rerun this pass after the CFG is created, to get rid of the labels
1136 that are no longer referenced. After that we do not run it any more,
1137 since (almost) no new labels should be created. */
1138
1139 /* A map from basic block index to the leading label of that block. */
1140 static struct label_record
1141 {
1142 /* The label. */
1143 tree label;
1144
1145 /* True if the label is referenced from somewhere. */
1146 bool used;
1147 } *label_for_bb;
1148
1149 /* Given LABEL return the first label in the same basic block. */
1150
1151 static tree
1152 main_block_label (tree label)
1153 {
1154 basic_block bb = label_to_block (label);
1155 tree main_label = label_for_bb[bb->index].label;
1156
1157 /* label_to_block may have inserted an undefined label into the chain. */
1158 if (!main_label)
1159 {
1160 label_for_bb[bb->index].label = label;
1161 main_label = label;
1162 }
1163
1164 label_for_bb[bb->index].used = true;
1165 return main_label;
1166 }
1167
1168 /* Clean up redundant labels within the exception tree. */
1169
1170 static void
1171 cleanup_dead_labels_eh (void)
1172 {
1173 eh_landing_pad lp;
1174 eh_region r;
1175 tree lab;
1176 int i;
1177
1178 if (cfun->eh == NULL)
1179 return;
1180
1181 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1182 if (lp && lp->post_landing_pad)
1183 {
1184 lab = main_block_label (lp->post_landing_pad);
1185 if (lab != lp->post_landing_pad)
1186 {
1187 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1188 EH_LANDING_PAD_NR (lab) = lp->index;
1189 }
1190 }
1191
1192 FOR_ALL_EH_REGION (r)
1193 switch (r->type)
1194 {
1195 case ERT_CLEANUP:
1196 case ERT_MUST_NOT_THROW:
1197 break;
1198
1199 case ERT_TRY:
1200 {
1201 eh_catch c;
1202 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1203 {
1204 lab = c->label;
1205 if (lab)
1206 c->label = main_block_label (lab);
1207 }
1208 }
1209 break;
1210
1211 case ERT_ALLOWED_EXCEPTIONS:
1212 lab = r->u.allowed.label;
1213 if (lab)
1214 r->u.allowed.label = main_block_label (lab);
1215 break;
1216 }
1217 }
1218
1219
1220 /* Cleanup redundant labels. This is a three-step process:
1221 1) Find the leading label for each block.
1222 2) Redirect all references to labels to the leading labels.
1223 3) Cleanup all useless labels. */
1224
1225 void
1226 cleanup_dead_labels (void)
1227 {
1228 basic_block bb;
1229 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1230
1231 /* Find a suitable label for each block. We use the first user-defined
1232 label if there is one, or otherwise just the first label we see. */
1233 FOR_EACH_BB (bb)
1234 {
1235 gimple_stmt_iterator i;
1236
1237 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1238 {
1239 tree label;
1240 gimple stmt = gsi_stmt (i);
1241
1242 if (gimple_code (stmt) != GIMPLE_LABEL)
1243 break;
1244
1245 label = gimple_label_label (stmt);
1246
1247 /* If we have not yet seen a label for the current block,
1248 remember this one and see if there are more labels. */
1249 if (!label_for_bb[bb->index].label)
1250 {
1251 label_for_bb[bb->index].label = label;
1252 continue;
1253 }
1254
1255 /* If we did see a label for the current block already, but it
1256 is an artificially created label, replace it if the current
1257 label is a user defined label. */
1258 if (!DECL_ARTIFICIAL (label)
1259 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1260 {
1261 label_for_bb[bb->index].label = label;
1262 break;
1263 }
1264 }
1265 }
1266
1267 /* Now redirect all jumps/branches to the selected label.
1268 First do so for each block ending in a control statement. */
1269 FOR_EACH_BB (bb)
1270 {
1271 gimple stmt = last_stmt (bb);
1272 tree label, new_label;
1273
1274 if (!stmt)
1275 continue;
1276
1277 switch (gimple_code (stmt))
1278 {
1279 case GIMPLE_COND:
1280 label = gimple_cond_true_label (stmt);
1281 if (label)
1282 {
1283 new_label = main_block_label (label);
1284 if (new_label != label)
1285 gimple_cond_set_true_label (stmt, new_label);
1286 }
1287
1288 label = gimple_cond_false_label (stmt);
1289 if (label)
1290 {
1291 new_label = main_block_label (label);
1292 if (new_label != label)
1293 gimple_cond_set_false_label (stmt, new_label);
1294 }
1295 break;
1296
1297 case GIMPLE_SWITCH:
1298 {
1299 size_t i, n = gimple_switch_num_labels (stmt);
1300
1301 /* Replace all destination labels. */
1302 for (i = 0; i < n; ++i)
1303 {
1304 tree case_label = gimple_switch_label (stmt, i);
1305 label = CASE_LABEL (case_label);
1306 new_label = main_block_label (label);
1307 if (new_label != label)
1308 CASE_LABEL (case_label) = new_label;
1309 }
1310 break;
1311 }
1312
1313 case GIMPLE_ASM:
1314 {
1315 int i, n = gimple_asm_nlabels (stmt);
1316
1317 for (i = 0; i < n; ++i)
1318 {
1319 tree cons = gimple_asm_label_op (stmt, i);
1320 tree label = main_block_label (TREE_VALUE (cons));
1321 TREE_VALUE (cons) = label;
1322 }
1323 break;
1324 }
1325
1326 /* We have to handle gotos until they're removed, and we don't
1327 remove them until after we've created the CFG edges. */
1328 case GIMPLE_GOTO:
1329 if (!computed_goto_p (stmt))
1330 {
1331 label = gimple_goto_dest (stmt);
1332 new_label = main_block_label (label);
1333 if (new_label != label)
1334 gimple_goto_set_dest (stmt, new_label);
1335 }
1336 break;
1337
1338 case GIMPLE_TRANSACTION:
1339 {
1340 tree label = gimple_transaction_label (stmt);
1341 if (label)
1342 {
1343 tree new_label = main_block_label (label);
1344 if (new_label != label)
1345 gimple_transaction_set_label (stmt, new_label);
1346 }
1347 }
1348 break;
1349
1350 default:
1351 break;
1352 }
1353 }
1354
1355 /* Do the same for the exception region tree labels. */
1356 cleanup_dead_labels_eh ();
1357
1358 /* Finally, purge dead labels. All user-defined labels and labels that
1359 can be the target of non-local gotos and labels which have their
1360 address taken are preserved. */
1361 FOR_EACH_BB (bb)
1362 {
1363 gimple_stmt_iterator i;
1364 tree label_for_this_bb = label_for_bb[bb->index].label;
1365
1366 if (!label_for_this_bb)
1367 continue;
1368
1369 /* If the main label of the block is unused, we may still remove it. */
1370 if (!label_for_bb[bb->index].used)
1371 label_for_this_bb = NULL;
1372
1373 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1374 {
1375 tree label;
1376 gimple stmt = gsi_stmt (i);
1377
1378 if (gimple_code (stmt) != GIMPLE_LABEL)
1379 break;
1380
1381 label = gimple_label_label (stmt);
1382
1383 if (label == label_for_this_bb
1384 || !DECL_ARTIFICIAL (label)
1385 || DECL_NONLOCAL (label)
1386 || FORCED_LABEL (label))
1387 gsi_next (&i);
1388 else
1389 gsi_remove (&i, true);
1390 }
1391 }
1392
1393 free (label_for_bb);
1394 }
1395
1396 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1397 the ones jumping to the same label.
1398 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1399
1400 void
1401 group_case_labels_stmt (gimple stmt)
1402 {
1403 int old_size = gimple_switch_num_labels (stmt);
1404 int i, j, new_size = old_size;
1405 basic_block default_bb = NULL;
1406
1407 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1408
1409 /* Look for possible opportunities to merge cases. */
1410 i = 1;
1411 while (i < old_size)
1412 {
1413 tree base_case, base_high;
1414 basic_block base_bb;
1415
1416 base_case = gimple_switch_label (stmt, i);
1417
1418 gcc_assert (base_case);
1419 base_bb = label_to_block (CASE_LABEL (base_case));
1420
1421 /* Discard cases that have the same destination as the
1422 default case. */
1423 if (base_bb == default_bb)
1424 {
1425 gimple_switch_set_label (stmt, i, NULL_TREE);
1426 i++;
1427 new_size--;
1428 continue;
1429 }
1430
1431 base_high = CASE_HIGH (base_case)
1432 ? CASE_HIGH (base_case)
1433 : CASE_LOW (base_case);
1434 i++;
1435
1436 /* Try to merge case labels. Break out when we reach the end
1437 of the label vector or when we cannot merge the next case
1438 label with the current one. */
1439 while (i < old_size)
1440 {
1441 tree merge_case = gimple_switch_label (stmt, i);
1442 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1443 double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
1444
1445 /* Merge the cases if they jump to the same place,
1446 and their ranges are consecutive. */
1447 if (merge_bb == base_bb
1448 && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
1449 {
1450 base_high = CASE_HIGH (merge_case) ?
1451 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1452 CASE_HIGH (base_case) = base_high;
1453 gimple_switch_set_label (stmt, i, NULL_TREE);
1454 new_size--;
1455 i++;
1456 }
1457 else
1458 break;
1459 }
1460 }
1461
1462 /* Compress the case labels in the label vector, and adjust the
1463 length of the vector. */
1464 for (i = 0, j = 0; i < new_size; i++)
1465 {
1466 while (! gimple_switch_label (stmt, j))
1467 j++;
1468 gimple_switch_set_label (stmt, i,
1469 gimple_switch_label (stmt, j++));
1470 }
1471
1472 gcc_assert (new_size <= old_size);
1473 gimple_switch_set_num_labels (stmt, new_size);
1474 }
1475
1476 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1477 and scan the sorted vector of cases. Combine the ones jumping to the
1478 same label. */
1479
1480 void
1481 group_case_labels (void)
1482 {
1483 basic_block bb;
1484
1485 FOR_EACH_BB (bb)
1486 {
1487 gimple stmt = last_stmt (bb);
1488 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1489 group_case_labels_stmt (stmt);
1490 }
1491 }
1492
1493 /* Checks whether we can merge block B into block A. */
1494
1495 static bool
1496 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1497 {
1498 gimple stmt;
1499 gimple_stmt_iterator gsi;
1500
1501 if (!single_succ_p (a))
1502 return false;
1503
1504 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1505 return false;
1506
1507 if (single_succ (a) != b)
1508 return false;
1509
1510 if (!single_pred_p (b))
1511 return false;
1512
1513 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1514 return false;
1515
1516 /* If A ends with a statement causing exceptions or something similar, we
1517 cannot merge the blocks. */
1518 stmt = last_stmt (a);
1519 if (stmt && stmt_ends_bb_p (stmt))
1520 return false;
1521
1522 /* Do not allow a block with only a non-local label to be merged. */
1523 if (stmt
1524 && gimple_code (stmt) == GIMPLE_LABEL
1525 && DECL_NONLOCAL (gimple_label_label (stmt)))
1526 return false;
1527
1528 /* Examine the labels at the beginning of B. */
1529 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1530 {
1531 tree lab;
1532 stmt = gsi_stmt (gsi);
1533 if (gimple_code (stmt) != GIMPLE_LABEL)
1534 break;
1535 lab = gimple_label_label (stmt);
1536
1537 /* Do not remove user-forced labels, nor (at -O0) any user labels. */
1538 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1539 return false;
1540 }
1541
1542 /* Protect the loop latches. */
1543 if (current_loops && b->loop_father->latch == b)
1544 return false;
1545
1546 /* It must be possible to eliminate all phi nodes in B. If ssa form
1547 is not up-to-date and a name-mapping is registered, we cannot eliminate
1548 any phis. Symbols marked for renaming are never a problem though. */
1549 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1550 {
1551 gimple phi = gsi_stmt (gsi);
1552 /* Technically only new names matter. */
1553 if (name_registered_for_update_p (PHI_RESULT (phi)))
1554 return false;
1555 }
1556
1557 /* When not optimizing, don't merge if we'd lose goto_locus. */
1558 if (!optimize
1559 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1560 {
1561 location_t goto_locus = single_succ_edge (a)->goto_locus;
1562 gimple_stmt_iterator prev, next;
1563 prev = gsi_last_nondebug_bb (a);
1564 next = gsi_after_labels (b);
1565 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1566 gsi_next_nondebug (&next);
1567 if ((gsi_end_p (prev)
1568 || gimple_location (gsi_stmt (prev)) != goto_locus)
1569 && (gsi_end_p (next)
1570 || gimple_location (gsi_stmt (next)) != goto_locus))
1571 return false;
1572 }
1573
1574 return true;
1575 }
1576
1577 /* Replaces all uses of NAME by VAL. */
1578
1579 void
1580 replace_uses_by (tree name, tree val)
1581 {
1582 imm_use_iterator imm_iter;
1583 use_operand_p use;
1584 gimple stmt;
1585 edge e;
1586
1587 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1588 {
1589 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1590 {
1591 replace_exp (use, val);
1592
1593 if (gimple_code (stmt) == GIMPLE_PHI)
1594 {
1595 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1596 if (e->flags & EDGE_ABNORMAL)
1597 {
1598 /* This can only occur for virtual operands, since
1599 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1600 would prevent replacement. */
1601 gcc_checking_assert (virtual_operand_p (name));
1602 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1603 }
1604 }
1605 }
1606
1607 if (gimple_code (stmt) != GIMPLE_PHI)
1608 {
1609 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1610 gimple orig_stmt = stmt;
1611 size_t i;
1612
1613 /* Mark the block if we changed the last stmt in it. */
1614 if (cfgcleanup_altered_bbs
1615 && stmt_ends_bb_p (stmt))
1616 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1617
1618 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1619 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1620 only change something from non-invariant to invariant, and only
1621 when propagating constants. */
1622 if (is_gimple_min_invariant (val))
1623 for (i = 0; i < gimple_num_ops (stmt); i++)
1624 {
1625 tree op = gimple_op (stmt, i);
1626 /* Operands may be empty here. For example, the labels
1627 of a GIMPLE_COND are nulled out following the creation
1628 of the corresponding CFG edges. */
1629 if (op && TREE_CODE (op) == ADDR_EXPR)
1630 recompute_tree_invariant_for_addr_expr (op);
1631 }
1632
1633 if (fold_stmt (&gsi))
1634 stmt = gsi_stmt (gsi);
1635
1636 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1637 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1638
1639 update_stmt (stmt);
1640 }
1641 }
1642
1643 gcc_checking_assert (has_zero_uses (name));
1644
1645 /* Also update the trees stored in loop structures. */
1646 if (current_loops)
1647 {
1648 struct loop *loop;
1649
1650 FOR_EACH_LOOP (loop, 0)
1651 {
1652 substitute_in_loop_info (loop, name, val);
1653 }
1654 }
1655 }
1656
1657 /* Merge block B into block A. */
1658
1659 static void
1660 gimple_merge_blocks (basic_block a, basic_block b)
1661 {
1662 gimple_stmt_iterator last, gsi, psi;
1663
1664 if (dump_file)
1665 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1666
1667 /* Remove all single-valued PHI nodes from block B of the form
1668 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1669 gsi = gsi_last_bb (a);
1670 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1671 {
1672 gimple phi = gsi_stmt (psi);
1673 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1674 gimple copy;
1675 bool may_replace_uses = (virtual_operand_p (def)
1676 || may_propagate_copy (def, use));
1677
1678 /* In case we maintain loop closed ssa form, do not propagate arguments
1679 of loop exit phi nodes. */
1680 if (current_loops
1681 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1682 && !virtual_operand_p (def)
1683 && TREE_CODE (use) == SSA_NAME
1684 && a->loop_father != b->loop_father)
1685 may_replace_uses = false;
1686
1687 if (!may_replace_uses)
1688 {
1689 gcc_assert (!virtual_operand_p (def));
1690
1691 /* Note that just emitting the copies is fine -- there is no problem
1692 with ordering of phi nodes. This is because A is the single
1693 predecessor of B, therefore results of the phi nodes cannot
1694 appear as arguments of the phi nodes. */
1695 copy = gimple_build_assign (def, use);
1696 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1697 remove_phi_node (&psi, false);
1698 }
1699 else
1700 {
1701 /* If we deal with a PHI for virtual operands, we can simply
1702 propagate these without fussing with folding or updating
1703 the stmt. */
1704 if (virtual_operand_p (def))
1705 {
1706 imm_use_iterator iter;
1707 use_operand_p use_p;
1708 gimple stmt;
1709
1710 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1711 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1712 SET_USE (use_p, use);
1713
1714 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1715 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1716 }
1717 else
1718 replace_uses_by (def, use);
1719
1720 remove_phi_node (&psi, true);
1721 }
1722 }
1723
1724 /* Ensure that B follows A. */
1725 move_block_after (b, a);
1726
1727 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1728 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1729
1730 /* Remove labels from B and set gimple_bb to A for other statements. */
1731 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1732 {
1733 gimple stmt = gsi_stmt (gsi);
1734 if (gimple_code (stmt) == GIMPLE_LABEL)
1735 {
1736 tree label = gimple_label_label (stmt);
1737 int lp_nr;
1738
1739 gsi_remove (&gsi, false);
1740
1741 /* Now that we can thread computed gotos, we might have
1742 a situation where we have a forced label in block B.
1743 However, the label at the start of block B might still be
1744 used in other ways (think about the runtime checking for
1745 Fortran assigned gotos). So we cannot just delete the
1746 label. Instead we move the label to the start of block A. */
1747 if (FORCED_LABEL (label))
1748 {
1749 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1750 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1751 }
1752 /* Other user labels are kept around in the form of a debug stmt. */
1753 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1754 {
1755 gimple dbg = gimple_build_debug_bind (label,
1756 integer_zero_node,
1757 stmt);
1758 gimple_debug_bind_reset_value (dbg);
1759 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1760 }
1761
1762 lp_nr = EH_LANDING_PAD_NR (label);
1763 if (lp_nr)
1764 {
1765 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1766 lp->post_landing_pad = NULL;
1767 }
1768 }
1769 else
1770 {
1771 gimple_set_bb (stmt, a);
1772 gsi_next (&gsi);
1773 }
1774 }
1775
1776 /* Merge the sequences. */
1777 last = gsi_last_bb (a);
1778 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1779 set_bb_seq (b, NULL);
1780
1781 if (cfgcleanup_altered_bbs)
1782 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1783 }
1784
1785
1786 /* Return the one of BB's two successors that is not reached by a
1787 complex edge, if there is one; else return BB. We use
1788 this in optimizations that use post-dominators for their heuristics,
1789 to catch the cases in C++ where function calls are involved. */
1790
1791 basic_block
1792 single_noncomplex_succ (basic_block bb)
1793 {
1794 edge e0, e1;
1795 if (EDGE_COUNT (bb->succs) != 2)
1796 return bb;
1797
1798 e0 = EDGE_SUCC (bb, 0);
1799 e1 = EDGE_SUCC (bb, 1);
1800 if (e0->flags & EDGE_COMPLEX)
1801 return e1->dest;
1802 if (e1->flags & EDGE_COMPLEX)
1803 return e0->dest;
1804
1805 return bb;
1806 }
1807
1808 /* CALL is a call statement. Set the current_function_calls_* flags. */
1809
1810 void
1811 notice_special_calls (gimple call)
1812 {
1813 int flags = gimple_call_flags (call);
1814
1815 if (flags & ECF_MAY_BE_ALLOCA)
1816 cfun->calls_alloca = true;
1817 if (flags & ECF_RETURNS_TWICE)
1818 cfun->calls_setjmp = true;
1819 }
1820
1821
1822 /* Clear flags set by notice_special_calls. Used by dead code removal
1823 to update the flags. */
1824
1825 void
1826 clear_special_calls (void)
1827 {
1828 cfun->calls_alloca = false;
1829 cfun->calls_setjmp = false;
1830 }
1831
1832 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1833
1834 static void
1835 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1836 {
1837 /* Since this block is no longer reachable, we can just delete all
1838 of its PHI nodes. */
1839 remove_phi_nodes (bb);
1840
1841 /* Remove edges to BB's successors. */
1842 while (EDGE_COUNT (bb->succs) > 0)
1843 remove_edge (EDGE_SUCC (bb, 0));
1844 }
1845
1846
1847 /* Remove statements of basic block BB. */
1848
1849 static void
1850 remove_bb (basic_block bb)
1851 {
1852 gimple_stmt_iterator i;
1853
1854 if (dump_file)
1855 {
1856 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1857 if (dump_flags & TDF_DETAILS)
1858 {
1859 dump_bb (dump_file, bb, 0, dump_flags);
1860 fprintf (dump_file, "\n");
1861 }
1862 }
1863
1864 if (current_loops)
1865 {
1866 struct loop *loop = bb->loop_father;
1867
1868 /* If a loop gets removed, clean up the information associated
1869 with it. */
1870 if (loop->latch == bb
1871 || loop->header == bb)
1872 free_numbers_of_iterations_estimates_loop (loop);
1873 }
1874
1875 /* Remove all the instructions in the block. */
1876 if (bb_seq (bb) != NULL)
1877 {
1878 /* Walk backwards so as to get a chance to substitute all
1879 released DEFs into debug stmts. See
1880 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1881 details. */
1882 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1883 {
1884 gimple stmt = gsi_stmt (i);
1885 if (gimple_code (stmt) == GIMPLE_LABEL
1886 && (FORCED_LABEL (gimple_label_label (stmt))
1887 || DECL_NONLOCAL (gimple_label_label (stmt))))
1888 {
1889 basic_block new_bb;
1890 gimple_stmt_iterator new_gsi;
1891
1892 /* A non-reachable non-local label may still be referenced.
1893 But it no longer needs to carry the extra semantics of
1894 non-locality. */
1895 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1896 {
1897 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1898 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1899 }
1900
1901 new_bb = bb->prev_bb;
1902 new_gsi = gsi_start_bb (new_bb);
1903 gsi_remove (&i, false);
1904 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1905 }
1906 else
1907 {
1908 /* Release SSA definitions if we are in SSA. Note that we
1909 may be called when not in SSA. For example,
1910 final_cleanup calls this function via
1911 cleanup_tree_cfg. */
1912 if (gimple_in_ssa_p (cfun))
1913 release_defs (stmt);
1914
1915 gsi_remove (&i, true);
1916 }
1917
1918 if (gsi_end_p (i))
1919 i = gsi_last_bb (bb);
1920 else
1921 gsi_prev (&i);
1922 }
1923 }
1924
1925 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1926 bb->il.gimple.seq = NULL;
1927 bb->il.gimple.phi_nodes = NULL;
1928 }
1929
1930
1931 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1932 predicate VAL, return the edge that will be taken out of the block.
1933 If VAL does not match a unique edge, NULL is returned. */
1934
1935 edge
1936 find_taken_edge (basic_block bb, tree val)
1937 {
1938 gimple stmt;
1939
1940 stmt = last_stmt (bb);
1941
1942 gcc_assert (stmt);
1943 gcc_assert (is_ctrl_stmt (stmt));
1944
1945 if (val == NULL)
1946 return NULL;
1947
1948 if (!is_gimple_min_invariant (val))
1949 return NULL;
1950
1951 if (gimple_code (stmt) == GIMPLE_COND)
1952 return find_taken_edge_cond_expr (bb, val);
1953
1954 if (gimple_code (stmt) == GIMPLE_SWITCH)
1955 return find_taken_edge_switch_expr (bb, val);
1956
1957 if (computed_goto_p (stmt))
1958 {
1959 /* Only optimize if the argument is a label; if the argument is
1960 not a label then we cannot construct a proper CFG.
1961
1962 It may be the case that we only need to allow the LABEL_REF to
1963 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1964 appear inside a LABEL_EXPR just to be safe. */
1965 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1966 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1967 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1968 return NULL;
1969 }
1970
1971 gcc_unreachable ();
1972 }
1973
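/* For illustration, the computed goto handling above matches GNU C
   source such as (example names only):

	void
	dispatch (int i)
	{
	  static void *targets[] = { &&lab0, &&lab1 };
	  goto *targets[i];
	lab0: return;
	lab1: return;
	}

   "&&lab0" is an ADDR_EXPR wrapping a LABEL_DECL; only when the goto
   argument folds to such a constant can a unique outgoing edge be
   chosen.  */
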
1974 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1975 statement, determine which of the outgoing edges will be taken out of the
 1976    block.  Return NULL if any edge may be taken.  */
1977
1978 static edge
1979 find_taken_edge_computed_goto (basic_block bb, tree val)
1980 {
1981 basic_block dest;
1982 edge e = NULL;
1983
1984 dest = label_to_block (val);
1985 if (dest)
1986 {
1987 e = find_edge (bb, dest);
1988 gcc_assert (e != NULL);
1989 }
1990
1991 return e;
1992 }
1993
1994 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1995 statement, determine which of the two edges will be taken out of the
1996 block. Return NULL if either edge may be taken. */
1997
1998 static edge
1999 find_taken_edge_cond_expr (basic_block bb, tree val)
2000 {
2001 edge true_edge, false_edge;
2002
2003 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2004
2005 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2006 return (integer_zerop (val) ? false_edge : true_edge);
2007 }
2008
2009 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2010 statement, determine which edge will be taken out of the block. Return
2011 NULL if any edge may be taken. */
2012
2013 static edge
2014 find_taken_edge_switch_expr (basic_block bb, tree val)
2015 {
2016 basic_block dest_bb;
2017 edge e;
2018 gimple switch_stmt;
2019 tree taken_case;
2020
2021 switch_stmt = last_stmt (bb);
2022 taken_case = find_case_label_for_value (switch_stmt, val);
2023 dest_bb = label_to_block (CASE_LABEL (taken_case));
2024
2025 e = find_edge (bb, dest_bb);
2026 gcc_assert (e);
2027 return e;
2028 }
2029
2030
2031 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2032 We can make optimal use here of the fact that the case labels are
2033 sorted: We can do a binary search for a case matching VAL. */
2034
2035 static tree
2036 find_case_label_for_value (gimple switch_stmt, tree val)
2037 {
2038 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2039 tree default_case = gimple_switch_default_label (switch_stmt);
2040
2041 for (low = 0, high = n; high - low > 1; )
2042 {
2043 size_t i = (high + low) / 2;
2044 tree t = gimple_switch_label (switch_stmt, i);
2045 int cmp;
2046
2047 /* Cache the result of comparing CASE_LOW and val. */
2048 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2049
2050 if (cmp > 0)
2051 high = i;
2052 else
2053 low = i;
2054
2055 if (CASE_HIGH (t) == NULL)
2056 {
 2057 	  /* A single-valued case label.  */
2058 if (cmp == 0)
2059 return t;
2060 }
2061 else
2062 {
2063 /* A case range. We can only handle integer ranges. */
2064 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2065 return t;
2066 }
2067 }
2068
2069 return default_case;
2070 }
2071
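/* For example, given the sorted case labels of

     switch (x) { case 1: ... case 5 ... 9: ... case 12: ... default: ... }

   a search for VAL == 7 probes the "case 5 ... 9" label, keeps LOW there
   because CASE_LOW (5) <= 7, and returns it since CASE_HIGH (9) >= 7.
   A search for VAL == 10 narrows to the same label but fails the
   CASE_HIGH test, so the default label is returned.  */
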
2072
2073 /* Dump a basic block on stderr. */
2074
2075 void
2076 gimple_debug_bb (basic_block bb)
2077 {
2078 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2079 }
2080
2081
2082 /* Dump basic block with index N on stderr. */
2083
2084 basic_block
2085 gimple_debug_bb_n (int n)
2086 {
2087 gimple_debug_bb (BASIC_BLOCK (n));
2088 return BASIC_BLOCK (n);
2089 }
2090
2091
2092 /* Dump the CFG on stderr.
2093
 2094    FLAGS are the same as those used by the tree dumping functions
2095 (see TDF_* in dumpfile.h). */
2096
2097 void
2098 gimple_debug_cfg (int flags)
2099 {
2100 gimple_dump_cfg (stderr, flags);
2101 }
2102
2103
2104 /* Dump the program showing basic block boundaries on the given FILE.
2105
 2106    FLAGS are the same as those used by the tree dumping functions (see
 2107    TDF_* in dumpfile.h).  */
2108
2109 void
2110 gimple_dump_cfg (FILE *file, int flags)
2111 {
2112 if (flags & TDF_DETAILS)
2113 {
2114 dump_function_header (file, current_function_decl, flags);
2115 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2116 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2117 last_basic_block);
2118
2119 brief_dump_cfg (file, flags | TDF_COMMENT);
2120 fprintf (file, "\n");
2121 }
2122
2123 if (flags & TDF_STATS)
2124 dump_cfg_stats (file);
2125
2126 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2127 }
2128
2129
2130 /* Dump CFG statistics on FILE. */
2131
2132 void
2133 dump_cfg_stats (FILE *file)
2134 {
2135 static long max_num_merged_labels = 0;
2136 unsigned long size, total = 0;
2137 long num_edges;
2138 basic_block bb;
2139 const char * const fmt_str = "%-30s%-13s%12s\n";
2140 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2141 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2142 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2143 const char *funcname = current_function_name ();
2144
2145 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2146
2147 fprintf (file, "---------------------------------------------------------\n");
2148 fprintf (file, fmt_str, "", " Number of ", "Memory");
2149 fprintf (file, fmt_str, "", " instances ", "used ");
2150 fprintf (file, "---------------------------------------------------------\n");
2151
2152 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2153 total += size;
2154 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2155 SCALE (size), LABEL (size));
2156
2157 num_edges = 0;
2158 FOR_EACH_BB (bb)
2159 num_edges += EDGE_COUNT (bb->succs);
2160 size = num_edges * sizeof (struct edge_def);
2161 total += size;
2162 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2163
2164 fprintf (file, "---------------------------------------------------------\n");
2165 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2166 LABEL (total));
2167 fprintf (file, "---------------------------------------------------------\n");
2168 fprintf (file, "\n");
2169
2170 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2171 max_num_merged_labels = cfg_stats.num_merged_labels;
2172
2173 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2174 cfg_stats.num_merged_labels, max_num_merged_labels);
2175
2176 fprintf (file, "\n");
2177 }
2178
2179
2180 /* Dump CFG statistics on stderr. Keep extern so that it's always
2181 linked in the final executable. */
2182
2183 DEBUG_FUNCTION void
2184 debug_cfg_stats (void)
2185 {
2186 dump_cfg_stats (stderr);
2187 }
2188
2189 /*---------------------------------------------------------------------------
2190 Miscellaneous helpers
2191 ---------------------------------------------------------------------------*/
2192
2193 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2194 flow. Transfers of control flow associated with EH are excluded. */
2195
2196 static bool
2197 call_can_make_abnormal_goto (gimple t)
2198 {
2199 /* If the function has no non-local labels, then a call cannot make an
2200 abnormal transfer of control. */
2201 if (!cfun->has_nonlocal_label
2202 && !cfun->calls_setjmp)
2203 return false;
2204
2205 /* Likewise if the call has no side effects. */
2206 if (!gimple_has_side_effects (t))
2207 return false;
2208
2209 /* Likewise if the called function is leaf. */
2210 if (gimple_call_flags (t) & ECF_LEAF)
2211 return false;
2212
2213 return true;
2214 }
2215
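/* For illustration, the classic source of such a call is setjmp
   (example names only):

     #include <setjmp.h>

     static jmp_buf env;
     static void g (void) { longjmp (env, 1); }

     int
     f (void)
     {
       if (setjmp (env) == 0)
	 g ();
       return 0;
     }

   The call to g can transfer control back to the setjmp invocation in
   f, so the block containing that call needs an abnormal outgoing edge;
   the tests above prune calls that provably cannot do this.  */
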
2216
2217 /* Return true if T can make an abnormal transfer of control flow.
2218 Transfers of control flow associated with EH are excluded. */
2219
2220 bool
2221 stmt_can_make_abnormal_goto (gimple t)
2222 {
2223 if (computed_goto_p (t))
2224 return true;
2225 if (is_gimple_call (t))
2226 return call_can_make_abnormal_goto (t);
2227 return false;
2228 }
2229
2230
2231 /* Return true if T represents a stmt that always transfers control. */
2232
2233 bool
2234 is_ctrl_stmt (gimple t)
2235 {
2236 switch (gimple_code (t))
2237 {
2238 case GIMPLE_COND:
2239 case GIMPLE_SWITCH:
2240 case GIMPLE_GOTO:
2241 case GIMPLE_RETURN:
2242 case GIMPLE_RESX:
2243 return true;
2244 default:
2245 return false;
2246 }
2247 }
2248
2249
2250 /* Return true if T is a statement that may alter the flow of control
2251 (e.g., a call to a non-returning function). */
2252
2253 bool
2254 is_ctrl_altering_stmt (gimple t)
2255 {
2256 gcc_assert (t);
2257
2258 switch (gimple_code (t))
2259 {
2260 case GIMPLE_CALL:
2261 {
2262 int flags = gimple_call_flags (t);
2263
2264 /* A call alters control flow if it can make an abnormal goto. */
2265 if (call_can_make_abnormal_goto (t))
2266 return true;
2267
2268 /* A call also alters control flow if it does not return. */
2269 if (flags & ECF_NORETURN)
2270 return true;
2271
2272 /* TM ending statements have backedges out of the transaction.
2273 Return true so we split the basic block containing them.
2274 Note that the TM_BUILTIN test is merely an optimization. */
2275 if ((flags & ECF_TM_BUILTIN)
2276 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2277 return true;
2278
 2279 	/* A BUILT_IN_RETURN call is the same as a return statement.  */
2280 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2281 return true;
2282 }
2283 break;
2284
2285 case GIMPLE_EH_DISPATCH:
2286 /* EH_DISPATCH branches to the individual catch handlers at
2287 this level of a try or allowed-exceptions region. It can
2288 fallthru to the next statement as well. */
2289 return true;
2290
2291 case GIMPLE_ASM:
2292 if (gimple_asm_nlabels (t) > 0)
2293 return true;
2294 break;
2295
2296 CASE_GIMPLE_OMP:
2297 /* OpenMP directives alter control flow. */
2298 return true;
2299
2300 case GIMPLE_TRANSACTION:
2301 /* A transaction start alters control flow. */
2302 return true;
2303
2304 default:
2305 break;
2306 }
2307
2308 /* If a statement can throw, it alters control flow. */
2309 return stmt_can_throw_internal (t);
2310 }
2311
2312
2313 /* Return true if T is a simple local goto. */
2314
2315 bool
2316 simple_goto_p (gimple t)
2317 {
2318 return (gimple_code (t) == GIMPLE_GOTO
2319 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2320 }
2321
2322
2323 /* Return true if STMT should start a new basic block. PREV_STMT is
2324 the statement preceding STMT. It is used when STMT is a label or a
2325 case label. Labels should only start a new basic block if their
 2326    previous statement wasn't a label.  Otherwise, sequences of labels
2327 would generate unnecessary basic blocks that only contain a single
2328 label. */
2329
2330 static inline bool
2331 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2332 {
2333 if (stmt == NULL)
2334 return false;
2335
2336 /* Labels start a new basic block only if the preceding statement
2337 wasn't a label of the same type. This prevents the creation of
2338 consecutive blocks that have nothing but a single label. */
2339 if (gimple_code (stmt) == GIMPLE_LABEL)
2340 {
2341 /* Nonlocal and computed GOTO targets always start a new block. */
2342 if (DECL_NONLOCAL (gimple_label_label (stmt))
2343 || FORCED_LABEL (gimple_label_label (stmt)))
2344 return true;
2345
2346 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2347 {
2348 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2349 return true;
2350
2351 cfg_stats.num_merged_labels++;
2352 return false;
2353 }
2354 else
2355 return true;
2356 }
2357 else if (gimple_code (stmt) == GIMPLE_CALL
2358 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
 2359     /* setjmp acts similarly to a nonlocal GOTO target and thus should
2360 start a new block. */
2361 return true;
2362
2363 return false;
2364 }
2365
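/* For example, in source such as

     void
     f (int *p)
     {
     first:
     second:
       ++*p;
     }

   both labels mark the same program point, so only "first" starts a
   basic block; "second" is merged into it and counted in
   cfg_stats.num_merged_labels.  */
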
2366
2367 /* Return true if T should end a basic block. */
2368
2369 bool
2370 stmt_ends_bb_p (gimple t)
2371 {
2372 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2373 }
2374
2375 /* Remove block annotations and other data structures. */
2376
2377 void
2378 delete_tree_cfg_annotations (void)
2379 {
2380 vec_free (label_to_block_map);
2381 }
2382
2383
2384 /* Return the first statement in basic block BB. */
2385
2386 gimple
2387 first_stmt (basic_block bb)
2388 {
2389 gimple_stmt_iterator i = gsi_start_bb (bb);
2390 gimple stmt = NULL;
2391
2392 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2393 {
2394 gsi_next (&i);
2395 stmt = NULL;
2396 }
2397 return stmt;
2398 }
2399
2400 /* Return the first non-label statement in basic block BB. */
2401
2402 static gimple
2403 first_non_label_stmt (basic_block bb)
2404 {
2405 gimple_stmt_iterator i = gsi_start_bb (bb);
2406 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2407 gsi_next (&i);
2408 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2409 }
2410
2411 /* Return the last statement in basic block BB. */
2412
2413 gimple
2414 last_stmt (basic_block bb)
2415 {
2416 gimple_stmt_iterator i = gsi_last_bb (bb);
2417 gimple stmt = NULL;
2418
2419 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2420 {
2421 gsi_prev (&i);
2422 stmt = NULL;
2423 }
2424 return stmt;
2425 }
2426
2427 /* Return the last statement of an otherwise empty block. Return NULL
2428 if the block is totally empty, or if it contains more than one
2429 statement. */
2430
2431 gimple
2432 last_and_only_stmt (basic_block bb)
2433 {
2434 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2435 gimple last, prev;
2436
2437 if (gsi_end_p (i))
2438 return NULL;
2439
2440 last = gsi_stmt (i);
2441 gsi_prev_nondebug (&i);
2442 if (gsi_end_p (i))
2443 return last;
2444
2445 /* Empty statements should no longer appear in the instruction stream.
2446 Everything that might have appeared before should be deleted by
2447 remove_useless_stmts, and the optimizers should just gsi_remove
2448 instead of smashing with build_empty_stmt.
2449
2450 Thus the only thing that should appear here in a block containing
2451 one executable statement is a label. */
2452 prev = gsi_stmt (i);
2453 if (gimple_code (prev) == GIMPLE_LABEL)
2454 return last;
2455 else
2456 return NULL;
2457 }
2458
2459 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2460
2461 static void
2462 reinstall_phi_args (edge new_edge, edge old_edge)
2463 {
2464 edge_var_map_vector *v;
2465 edge_var_map *vm;
2466 int i;
2467 gimple_stmt_iterator phis;
2468
2469 v = redirect_edge_var_map_vector (old_edge);
2470 if (!v)
2471 return;
2472
2473 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2474 v->iterate (i, &vm) && !gsi_end_p (phis);
2475 i++, gsi_next (&phis))
2476 {
2477 gimple phi = gsi_stmt (phis);
2478 tree result = redirect_edge_var_map_result (vm);
2479 tree arg = redirect_edge_var_map_def (vm);
2480
2481 gcc_assert (result == gimple_phi_result (phi));
2482
2483 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2484 }
2485
2486 redirect_edge_var_map_clear (old_edge);
2487 }
2488
2489 /* Returns the basic block after which the new basic block created
2490 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2491 near its "logical" location. This is of most help to humans looking
2492 at debugging dumps. */
2493
2494 static basic_block
2495 split_edge_bb_loc (edge edge_in)
2496 {
2497 basic_block dest = edge_in->dest;
2498 basic_block dest_prev = dest->prev_bb;
2499
2500 if (dest_prev)
2501 {
2502 edge e = find_edge (dest_prev, dest);
2503 if (e && !(e->flags & EDGE_COMPLEX))
2504 return edge_in->src;
2505 }
2506 return dest_prev;
2507 }
2508
2509 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2510 Abort on abnormal edges. */
2511
2512 static basic_block
2513 gimple_split_edge (edge edge_in)
2514 {
2515 basic_block new_bb, after_bb, dest;
2516 edge new_edge, e;
2517
2518 /* Abnormal edges cannot be split. */
2519 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2520
2521 dest = edge_in->dest;
2522
2523 after_bb = split_edge_bb_loc (edge_in);
2524
2525 new_bb = create_empty_bb (after_bb);
2526 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2527 new_bb->count = edge_in->count;
2528 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2529 new_edge->probability = REG_BR_PROB_BASE;
2530 new_edge->count = edge_in->count;
2531
2532 e = redirect_edge_and_branch (edge_in, new_bb);
2533 gcc_assert (e == edge_in);
2534 reinstall_phi_args (new_edge, e);
2535
2536 return new_bb;
2537 }
2538
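/* Schematically, splitting the (critical) edge E from A to B

	A ---> B    becomes    A ---> N ---> B

   where N is the new empty block returned: E is redirected to N and a
   fallthru edge from N to B is created; PHI arguments in B that were
   associated with E are re-attached to the new edge by
   reinstall_phi_args.  */
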
2539
2540 /* Verify properties of the address expression T with base object BASE. */
2541
2542 static tree
2543 verify_address (tree t, tree base)
2544 {
2545 bool old_constant;
2546 bool old_side_effects;
2547 bool new_constant;
2548 bool new_side_effects;
2549
2550 old_constant = TREE_CONSTANT (t);
2551 old_side_effects = TREE_SIDE_EFFECTS (t);
2552
2553 recompute_tree_invariant_for_addr_expr (t);
2554 new_side_effects = TREE_SIDE_EFFECTS (t);
2555 new_constant = TREE_CONSTANT (t);
2556
2557 if (old_constant != new_constant)
2558 {
2559 error ("constant not recomputed when ADDR_EXPR changed");
2560 return t;
2561 }
2562 if (old_side_effects != new_side_effects)
2563 {
2564 error ("side effects not recomputed when ADDR_EXPR changed");
2565 return t;
2566 }
2567
2568 if (!(TREE_CODE (base) == VAR_DECL
2569 || TREE_CODE (base) == PARM_DECL
2570 || TREE_CODE (base) == RESULT_DECL))
2571 return NULL_TREE;
2572
2573 if (DECL_GIMPLE_REG_P (base))
2574 {
2575 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2576 return base;
2577 }
2578
2579 return NULL_TREE;
2580 }
2581
2582 /* Callback for walk_tree, check that all elements with address taken are
2583 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2584 inside a PHI node. */
2585
2586 static tree
2587 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2588 {
2589 tree t = *tp, x;
2590
2591 if (TYPE_P (t))
2592 *walk_subtrees = 0;
2593
2594 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2595 #define CHECK_OP(N, MSG) \
2596 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2597 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2598
2599 switch (TREE_CODE (t))
2600 {
2601 case SSA_NAME:
2602 if (SSA_NAME_IN_FREE_LIST (t))
2603 {
2604 error ("SSA name in freelist but still referenced");
2605 return *tp;
2606 }
2607 break;
2608
2609 case INDIRECT_REF:
2610 error ("INDIRECT_REF in gimple IL");
2611 return t;
2612
2613 case MEM_REF:
2614 x = TREE_OPERAND (t, 0);
2615 if (!POINTER_TYPE_P (TREE_TYPE (x))
2616 || !is_gimple_mem_ref_addr (x))
2617 {
2618 error ("invalid first operand of MEM_REF");
2619 return x;
2620 }
2621 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2622 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2623 {
2624 error ("invalid offset operand of MEM_REF");
2625 return TREE_OPERAND (t, 1);
2626 }
2627 if (TREE_CODE (x) == ADDR_EXPR
2628 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2629 return x;
2630 *walk_subtrees = 0;
2631 break;
2632
2633 case ASSERT_EXPR:
2634 x = fold (ASSERT_EXPR_COND (t));
2635 if (x == boolean_false_node)
2636 {
2637 error ("ASSERT_EXPR with an always-false condition");
2638 return *tp;
2639 }
2640 break;
2641
2642 case MODIFY_EXPR:
2643 error ("MODIFY_EXPR not expected while having tuples");
2644 return *tp;
2645
2646 case ADDR_EXPR:
2647 {
2648 tree tem;
2649
2650 gcc_assert (is_gimple_address (t));
2651
2652 /* Skip any references (they will be checked when we recurse down the
2653 tree) and ensure that any variable used as a prefix is marked
2654 addressable. */
2655 for (x = TREE_OPERAND (t, 0);
2656 handled_component_p (x);
2657 x = TREE_OPERAND (x, 0))
2658 ;
2659
2660 if ((tem = verify_address (t, x)))
2661 return tem;
2662
2663 if (!(TREE_CODE (x) == VAR_DECL
2664 || TREE_CODE (x) == PARM_DECL
2665 || TREE_CODE (x) == RESULT_DECL))
2666 return NULL;
2667
2668 if (!TREE_ADDRESSABLE (x))
2669 {
2670 error ("address taken, but ADDRESSABLE bit not set");
2671 return x;
2672 }
2673
2674 break;
2675 }
2676
2677 case COND_EXPR:
2678 x = COND_EXPR_COND (t);
2679 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2680 {
2681 error ("non-integral used in condition");
2682 return x;
2683 }
2684 if (!is_gimple_condexpr (x))
2685 {
2686 error ("invalid conditional operand");
2687 return x;
2688 }
2689 break;
2690
2691 case NON_LVALUE_EXPR:
2692 case TRUTH_NOT_EXPR:
2693 gcc_unreachable ();
2694
2695 CASE_CONVERT:
2696 case FIX_TRUNC_EXPR:
2697 case FLOAT_EXPR:
2698 case NEGATE_EXPR:
2699 case ABS_EXPR:
2700 case BIT_NOT_EXPR:
2701 CHECK_OP (0, "invalid operand to unary operator");
2702 break;
2703
2704 case REALPART_EXPR:
2705 case IMAGPART_EXPR:
2706 case BIT_FIELD_REF:
2707 if (!is_gimple_reg_type (TREE_TYPE (t)))
2708 {
2709 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2710 return t;
2711 }
2712
2713 if (TREE_CODE (t) == BIT_FIELD_REF)
2714 {
2715 tree t0 = TREE_OPERAND (t, 0);
2716 tree t1 = TREE_OPERAND (t, 1);
2717 tree t2 = TREE_OPERAND (t, 2);
2718 tree t0_type = TREE_TYPE (t0);
2719 unsigned HOST_WIDE_INT t0_size = 0;
2720
2721 if (tree_fits_uhwi_p (TYPE_SIZE (t0_type)))
2722 t0_size = tree_to_uhwi (TYPE_SIZE (t0_type));
2723 else
2724 {
2725 HOST_WIDE_INT t0_max_size = max_int_size_in_bytes (t0_type);
2726 if (t0_max_size > 0)
2727 t0_size = t0_max_size * BITS_PER_UNIT;
2728 }
2729 if (!tree_fits_uhwi_p (t1)
2730 || !tree_fits_uhwi_p (t2))
2731 {
2732 error ("invalid position or size operand to BIT_FIELD_REF");
2733 return t;
2734 }
2735 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2736 && (TYPE_PRECISION (TREE_TYPE (t))
2737 != tree_to_uhwi (t1)))
2738 {
2739 error ("integral result type precision does not match "
2740 "field size of BIT_FIELD_REF");
2741 return t;
2742 }
2743 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2744 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2745 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2746 != tree_to_uhwi (t1)))
2747 {
2748 error ("mode precision of non-integral result does not "
2749 "match field size of BIT_FIELD_REF");
2750 return t;
2751 }
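	  /* For example, a BIT_FIELD_REF extracting 16 bits starting at
	     bit position 24 from a 32-bit object would read past the
	     object (16 + 24 = 40 > 32 bits) and is rejected here.  */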
2752 if (t0_size != 0
2753 && tree_to_uhwi (t1) + tree_to_uhwi (t2) > t0_size)
2754 {
2755 error ("position plus size exceeds size of referenced object in "
2756 "BIT_FIELD_REF");
2757 return t;
2758 }
2759 }
2760 t = TREE_OPERAND (t, 0);
2761
2762 /* Fall-through. */
2763 case COMPONENT_REF:
2764 case ARRAY_REF:
2765 case ARRAY_RANGE_REF:
2766 case VIEW_CONVERT_EXPR:
2767 /* We have a nest of references. Verify that each of the operands
2768 that determine where to reference is either a constant or a variable,
2769 verify that the base is valid, and then show we've already checked
2770 the subtrees. */
2771 while (handled_component_p (t))
2772 {
2773 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2774 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2775 else if (TREE_CODE (t) == ARRAY_REF
2776 || TREE_CODE (t) == ARRAY_RANGE_REF)
2777 {
2778 CHECK_OP (1, "invalid array index");
2779 if (TREE_OPERAND (t, 2))
2780 CHECK_OP (2, "invalid array lower bound");
2781 if (TREE_OPERAND (t, 3))
2782 CHECK_OP (3, "invalid array stride");
2783 }
2784 else if (TREE_CODE (t) == BIT_FIELD_REF
2785 || TREE_CODE (t) == REALPART_EXPR
2786 || TREE_CODE (t) == IMAGPART_EXPR)
2787 {
2788 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2789 "REALPART_EXPR");
2790 return t;
2791 }
2792
2793 t = TREE_OPERAND (t, 0);
2794 }
2795
2796 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2797 {
2798 error ("invalid reference prefix");
2799 return t;
2800 }
2801 *walk_subtrees = 0;
2802 break;
2803 case PLUS_EXPR:
2804 case MINUS_EXPR:
 2805       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
 2806 	 should be done using POINTER_PLUS_EXPR.  */
2807 if (POINTER_TYPE_P (TREE_TYPE (t)))
2808 {
2809 error ("invalid operand to plus/minus, type is a pointer");
2810 return t;
2811 }
2812 CHECK_OP (0, "invalid operand to binary operator");
2813 CHECK_OP (1, "invalid operand to binary operator");
2814 break;
2815
2816 case POINTER_PLUS_EXPR:
2817 /* Check to make sure the first operand is a pointer or reference type. */
2818 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2819 {
2820 error ("invalid operand to pointer plus, first operand is not a pointer");
2821 return t;
2822 }
2823 /* Check to make sure the second operand is a ptrofftype. */
2824 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2825 {
2826 error ("invalid operand to pointer plus, second operand is not an "
2827 "integer type of appropriate width");
2828 return t;
2829 }
2830 /* FALLTHROUGH */
2831 case LT_EXPR:
2832 case LE_EXPR:
2833 case GT_EXPR:
2834 case GE_EXPR:
2835 case EQ_EXPR:
2836 case NE_EXPR:
2837 case UNORDERED_EXPR:
2838 case ORDERED_EXPR:
2839 case UNLT_EXPR:
2840 case UNLE_EXPR:
2841 case UNGT_EXPR:
2842 case UNGE_EXPR:
2843 case UNEQ_EXPR:
2844 case LTGT_EXPR:
2845 case MULT_EXPR:
2846 case TRUNC_DIV_EXPR:
2847 case CEIL_DIV_EXPR:
2848 case FLOOR_DIV_EXPR:
2849 case ROUND_DIV_EXPR:
2850 case TRUNC_MOD_EXPR:
2851 case CEIL_MOD_EXPR:
2852 case FLOOR_MOD_EXPR:
2853 case ROUND_MOD_EXPR:
2854 case RDIV_EXPR:
2855 case EXACT_DIV_EXPR:
2856 case MIN_EXPR:
2857 case MAX_EXPR:
2858 case LSHIFT_EXPR:
2859 case RSHIFT_EXPR:
2860 case LROTATE_EXPR:
2861 case RROTATE_EXPR:
2862 case BIT_IOR_EXPR:
2863 case BIT_XOR_EXPR:
2864 case BIT_AND_EXPR:
2865 CHECK_OP (0, "invalid operand to binary operator");
2866 CHECK_OP (1, "invalid operand to binary operator");
2867 break;
2868
2869 case CONSTRUCTOR:
2870 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2871 *walk_subtrees = 0;
2872 break;
2873
2874 case CASE_LABEL_EXPR:
2875 if (CASE_CHAIN (t))
2876 {
2877 error ("invalid CASE_CHAIN");
2878 return t;
2879 }
2880 break;
2881
2882 default:
2883 break;
2884 }
2885 return NULL;
2886
2887 #undef CHECK_OP
2888 }
2889
2890
2891 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2892 Returns true if there is an error, otherwise false. */
2893
2894 static bool
2895 verify_types_in_gimple_min_lval (tree expr)
2896 {
2897 tree op;
2898
2899 if (is_gimple_id (expr))
2900 return false;
2901
2902 if (TREE_CODE (expr) != TARGET_MEM_REF
2903 && TREE_CODE (expr) != MEM_REF)
2904 {
2905 error ("invalid expression for min lvalue");
2906 return true;
2907 }
2908
2909 /* TARGET_MEM_REFs are strange beasts. */
2910 if (TREE_CODE (expr) == TARGET_MEM_REF)
2911 return false;
2912
2913 op = TREE_OPERAND (expr, 0);
2914 if (!is_gimple_val (op))
2915 {
2916 error ("invalid operand in indirect reference");
2917 debug_generic_stmt (op);
2918 return true;
2919 }
2920 /* Memory references now generally can involve a value conversion. */
2921
2922 return false;
2923 }
2924
2925 /* Verify if EXPR is a valid GIMPLE reference expression. If
2926 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2927 if there is an error, otherwise false. */
2928
2929 static bool
2930 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2931 {
2932 while (handled_component_p (expr))
2933 {
2934 tree op = TREE_OPERAND (expr, 0);
2935
2936 if (TREE_CODE (expr) == ARRAY_REF
2937 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2938 {
2939 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2940 || (TREE_OPERAND (expr, 2)
2941 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2942 || (TREE_OPERAND (expr, 3)
2943 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2944 {
2945 error ("invalid operands to array reference");
2946 debug_generic_stmt (expr);
2947 return true;
2948 }
2949 }
2950
2951 /* Verify if the reference array element types are compatible. */
2952 if (TREE_CODE (expr) == ARRAY_REF
2953 && !useless_type_conversion_p (TREE_TYPE (expr),
2954 TREE_TYPE (TREE_TYPE (op))))
2955 {
2956 error ("type mismatch in array reference");
2957 debug_generic_stmt (TREE_TYPE (expr));
2958 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2959 return true;
2960 }
2961 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2962 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2963 TREE_TYPE (TREE_TYPE (op))))
2964 {
2965 error ("type mismatch in array range reference");
2966 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2967 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2968 return true;
2969 }
2970
2971 if ((TREE_CODE (expr) == REALPART_EXPR
2972 || TREE_CODE (expr) == IMAGPART_EXPR)
2973 && !useless_type_conversion_p (TREE_TYPE (expr),
2974 TREE_TYPE (TREE_TYPE (op))))
2975 {
2976 error ("type mismatch in real/imagpart reference");
2977 debug_generic_stmt (TREE_TYPE (expr));
2978 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2979 return true;
2980 }
2981
2982 if (TREE_CODE (expr) == COMPONENT_REF
2983 && !useless_type_conversion_p (TREE_TYPE (expr),
2984 TREE_TYPE (TREE_OPERAND (expr, 1))))
2985 {
2986 error ("type mismatch in component reference");
2987 debug_generic_stmt (TREE_TYPE (expr));
2988 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2989 return true;
2990 }
2991
2992 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2993 {
2994 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2995 that their operand is not an SSA name or an invariant when
 2996 	     requiring an lvalue (this usually means there is an SRA or IPA-SRA
 2997 	     bug).  Otherwise there is nothing to verify; gross mismatches at
2998 most invoke undefined behavior. */
2999 if (require_lvalue
3000 && (TREE_CODE (op) == SSA_NAME
3001 || is_gimple_min_invariant (op)))
3002 {
3003 error ("conversion of an SSA_NAME on the left hand side");
3004 debug_generic_stmt (expr);
3005 return true;
3006 }
3007 else if (TREE_CODE (op) == SSA_NAME
3008 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3009 {
3010 error ("conversion of register to a different size");
3011 debug_generic_stmt (expr);
3012 return true;
3013 }
3014 else if (!handled_component_p (op))
3015 return false;
3016 }
3017
3018 expr = op;
3019 }
3020
3021 if (TREE_CODE (expr) == MEM_REF)
3022 {
3023 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3024 {
3025 error ("invalid address operand in MEM_REF");
3026 debug_generic_stmt (expr);
3027 return true;
3028 }
3029 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3030 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3031 {
3032 error ("invalid offset operand in MEM_REF");
3033 debug_generic_stmt (expr);
3034 return true;
3035 }
3036 }
3037 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3038 {
3039 if (!TMR_BASE (expr)
3040 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3041 {
3042 error ("invalid address operand in TARGET_MEM_REF");
3043 return true;
3044 }
3045 if (!TMR_OFFSET (expr)
3046 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3047 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3048 {
3049 error ("invalid offset operand in TARGET_MEM_REF");
3050 debug_generic_stmt (expr);
3051 return true;
3052 }
3053 }
3054
3055 return ((require_lvalue || !is_gimple_min_invariant (expr))
3056 && verify_types_in_gimple_min_lval (expr));
3057 }
3058
 3059 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
 3060    list of pointer-to types that is trivially convertible to DEST.  */
3061
3062 static bool
3063 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3064 {
3065 tree src;
3066
3067 if (!TYPE_POINTER_TO (src_obj))
3068 return true;
3069
3070 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3071 if (useless_type_conversion_p (dest, src))
3072 return true;
3073
3074 return false;
3075 }
3076
3077 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3078 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3079
3080 static bool
3081 valid_fixed_convert_types_p (tree type1, tree type2)
3082 {
3083 return (FIXED_POINT_TYPE_P (type1)
3084 && (INTEGRAL_TYPE_P (type2)
3085 || SCALAR_FLOAT_TYPE_P (type2)
3086 || FIXED_POINT_TYPE_P (type2)));
3087 }
3088
3089 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3090 is a problem, otherwise false. */
3091
3092 static bool
3093 verify_gimple_call (gimple stmt)
3094 {
3095 tree fn = gimple_call_fn (stmt);
3096 tree fntype, fndecl;
3097 unsigned i;
3098
3099 if (gimple_call_internal_p (stmt))
3100 {
3101 if (fn)
3102 {
3103 error ("gimple call has two targets");
3104 debug_generic_stmt (fn);
3105 return true;
3106 }
3107 }
3108 else
3109 {
3110 if (!fn)
3111 {
3112 error ("gimple call has no target");
3113 return true;
3114 }
3115 }
3116
3117 if (fn && !is_gimple_call_addr (fn))
3118 {
3119 error ("invalid function in gimple call");
3120 debug_generic_stmt (fn);
3121 return true;
3122 }
3123
3124 if (fn
3125 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3126 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3127 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3128 {
3129 error ("non-function in gimple call");
3130 return true;
3131 }
3132
3133 fndecl = gimple_call_fndecl (stmt);
3134 if (fndecl
3135 && TREE_CODE (fndecl) == FUNCTION_DECL
3136 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3137 && !DECL_PURE_P (fndecl)
3138 && !TREE_READONLY (fndecl))
3139 {
3140 error ("invalid pure const state for function");
3141 return true;
3142 }
3143
3144 if (gimple_call_lhs (stmt)
3145 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3146 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3147 {
3148 error ("invalid LHS in gimple call");
3149 return true;
3150 }
3151
3152 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3153 {
3154 error ("LHS in noreturn call");
3155 return true;
3156 }
3157
3158 fntype = gimple_call_fntype (stmt);
3159 if (fntype
3160 && gimple_call_lhs (stmt)
3161 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3162 TREE_TYPE (fntype))
3163 /* ??? At least C++ misses conversions at assignments from
3164 void * call results.
3165 ??? Java is completely off. Especially with functions
3166 returning java.lang.Object.
3167 For now simply allow arbitrary pointer type conversions. */
3168 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3169 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3170 {
3171 error ("invalid conversion in gimple call");
3172 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3173 debug_generic_stmt (TREE_TYPE (fntype));
3174 return true;
3175 }
3176
3177 if (gimple_call_chain (stmt)
3178 && !is_gimple_val (gimple_call_chain (stmt)))
3179 {
3180 error ("invalid static chain in gimple call");
3181 debug_generic_stmt (gimple_call_chain (stmt));
3182 return true;
3183 }
3184
3185 /* If there is a static chain argument, this should not be an indirect
3186 call, and the decl should have DECL_STATIC_CHAIN set. */
3187 if (gimple_call_chain (stmt))
3188 {
3189 if (!gimple_call_fndecl (stmt))
3190 {
3191 error ("static chain in indirect gimple call");
3192 return true;
3193 }
3194 fn = TREE_OPERAND (fn, 0);
3195
3196 if (!DECL_STATIC_CHAIN (fn))
3197 {
3198 error ("static chain with function that doesn%'t use one");
3199 return true;
3200 }
3201 }
3202
3203 /* ??? The C frontend passes unpromoted arguments in case it
3204 didn't see a function declaration before the call. So for now
3205 leave the call arguments mostly unverified. Once we gimplify
3206 unit-at-a-time we have a chance to fix this. */
3207
3208 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3209 {
3210 tree arg = gimple_call_arg (stmt, i);
3211 if ((is_gimple_reg_type (TREE_TYPE (arg))
3212 && !is_gimple_val (arg))
3213 || (!is_gimple_reg_type (TREE_TYPE (arg))
3214 && !is_gimple_lvalue (arg)))
3215 {
3216 error ("invalid argument to gimple call");
3217 debug_generic_expr (arg);
3218 return true;
3219 }
3220 }
3221
3222 return false;
3223 }
3224
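/* For illustration, a static chain arises for GNU C nested functions
   that refer to locals of the enclosing function (example names only):

     int
     f (int x)
     {
       int add (int y) { return x + y; }
       return add (1);
     }

   The call to add passes a hidden static chain pointer so that add can
   reach x; such a call must be direct (its fndecl known) and the callee
   must have DECL_STATIC_CHAIN set, as verified above.  */
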
3225 /* Verifies the gimple comparison with the result type TYPE and
3226 the operands OP0 and OP1. */
3227
3228 static bool
3229 verify_gimple_comparison (tree type, tree op0, tree op1)
3230 {
3231 tree op0_type = TREE_TYPE (op0);
3232 tree op1_type = TREE_TYPE (op1);
3233
3234 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3235 {
3236 error ("invalid operands in gimple comparison");
3237 return true;
3238 }
3239
3240 /* For comparisons we do not have the operations type as the
3241 effective type the comparison is carried out in. Instead
3242 we require that either the first operand is trivially
3243 convertible into the second, or the other way around.
3244 Because we special-case pointers to void we allow
3245 comparisons of pointers with the same mode as well. */
3246 if (!useless_type_conversion_p (op0_type, op1_type)
3247 && !useless_type_conversion_p (op1_type, op0_type)
3248 && (!POINTER_TYPE_P (op0_type)
3249 || !POINTER_TYPE_P (op1_type)
3250 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3251 {
3252 error ("mismatching comparison operand types");
3253 debug_generic_expr (op0_type);
3254 debug_generic_expr (op1_type);
3255 return true;
3256 }
3257
3258 /* The resulting type of a comparison may be an effective boolean type. */
3259 if (INTEGRAL_TYPE_P (type)
3260 && (TREE_CODE (type) == BOOLEAN_TYPE
3261 || TYPE_PRECISION (type) == 1))
3262 {
3263 if (TREE_CODE (op0_type) == VECTOR_TYPE
3264 || TREE_CODE (op1_type) == VECTOR_TYPE)
3265 {
3266 error ("vector comparison returning a boolean");
3267 debug_generic_expr (op0_type);
3268 debug_generic_expr (op1_type);
3269 return true;
3270 }
3271 }
3272 /* Or an integer vector type with the same size and element count
3273 as the comparison operand types. */
3274 else if (TREE_CODE (type) == VECTOR_TYPE
3275 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3276 {
3277 if (TREE_CODE (op0_type) != VECTOR_TYPE
3278 || TREE_CODE (op1_type) != VECTOR_TYPE)
3279 {
3280 error ("non-vector operands in vector comparison");
3281 debug_generic_expr (op0_type);
3282 debug_generic_expr (op1_type);
3283 return true;
3284 }
3285
3286 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3287 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3288 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3289 /* The result of a vector comparison is of signed
3290 integral type. */
3291 || TYPE_UNSIGNED (TREE_TYPE (type)))
3292 {
3293 error ("invalid vector comparison resulting type");
3294 debug_generic_expr (type);
3295 return true;
3296 }
3297 }
3298 else
3299 {
3300 error ("bogus comparison result type");
3301 debug_generic_expr (type);
3302 return true;
3303 }
3304
3305 return false;
3306 }
3307
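/* For illustration, with the GNU C vector extension

     typedef int v4si __attribute__ ((vector_size (16)));

     v4si cmp (v4si a, v4si b) { return a < b; }

   the comparison a < b yields a signed integer vector with the same
   element count and element size as the operands (all-ones lanes for
   true, zero lanes for false), which is the shape enforced above.  */
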
3308 /* Verify a gimple assignment statement STMT with an unary rhs.
3309 Returns true if anything is wrong. */
3310
3311 static bool
3312 verify_gimple_assign_unary (gimple stmt)
3313 {
3314 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3315 tree lhs = gimple_assign_lhs (stmt);
3316 tree lhs_type = TREE_TYPE (lhs);
3317 tree rhs1 = gimple_assign_rhs1 (stmt);
3318 tree rhs1_type = TREE_TYPE (rhs1);
3319
3320 if (!is_gimple_reg (lhs))
3321 {
3322 error ("non-register as LHS of unary operation");
3323 return true;
3324 }
3325
3326 if (!is_gimple_val (rhs1))
3327 {
3328 error ("invalid operand in unary operation");
3329 return true;
3330 }
3331
3332 /* First handle conversions. */
3333 switch (rhs_code)
3334 {
3335 CASE_CONVERT:
3336 {
3337 /* Allow conversions from pointer type to integral type only if
3338 there is no sign or zero extension involved.
 3339 	 For targets where the precision of ptrofftype doesn't match that
3340 of pointers we need to allow arbitrary conversions to ptrofftype. */
3341 if ((POINTER_TYPE_P (lhs_type)
3342 && INTEGRAL_TYPE_P (rhs1_type))
3343 || (POINTER_TYPE_P (rhs1_type)
3344 && INTEGRAL_TYPE_P (lhs_type)
3345 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3346 || ptrofftype_p (sizetype))))
3347 return false;
3348
3349 /* Allow conversion from integral to offset type and vice versa. */
3350 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3351 && INTEGRAL_TYPE_P (rhs1_type))
3352 || (INTEGRAL_TYPE_P (lhs_type)
3353 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3354 return false;
3355
3356 /* Otherwise assert we are converting between types of the
3357 same kind. */
3358 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3359 {
3360 error ("invalid types in nop conversion");
3361 debug_generic_expr (lhs_type);
3362 debug_generic_expr (rhs1_type);
3363 return true;
3364 }
3365
3366 return false;
3367 }
3368
3369 case ADDR_SPACE_CONVERT_EXPR:
3370 {
3371 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3372 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3373 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3374 {
3375 error ("invalid types in address space conversion");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 return true;
3379 }
3380
3381 return false;
3382 }
3383
3384 case FIXED_CONVERT_EXPR:
3385 {
3386 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3387 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3388 {
3389 error ("invalid types in fixed-point conversion");
3390 debug_generic_expr (lhs_type);
3391 debug_generic_expr (rhs1_type);
3392 return true;
3393 }
3394
3395 return false;
3396 }
3397
3398 case FLOAT_EXPR:
3399 {
3400 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3401 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3402 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3403 {
3404 error ("invalid types in conversion to floating point");
3405 debug_generic_expr (lhs_type);
3406 debug_generic_expr (rhs1_type);
3407 return true;
3408 }
3409
3410 return false;
3411 }
3412
3413 case FIX_TRUNC_EXPR:
3414 {
3415 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3416 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3417 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3418 {
3419 error ("invalid types in conversion to integer");
3420 debug_generic_expr (lhs_type);
3421 debug_generic_expr (rhs1_type);
3422 return true;
3423 }
3424
3425 return false;
3426 }
3427
3428 case VEC_UNPACK_HI_EXPR:
3429 case VEC_UNPACK_LO_EXPR:
3430 case REDUC_MAX_EXPR:
3431 case REDUC_MIN_EXPR:
3432 case REDUC_PLUS_EXPR:
3433 case VEC_UNPACK_FLOAT_HI_EXPR:
3434 case VEC_UNPACK_FLOAT_LO_EXPR:
3435 /* FIXME. */
3436 return false;
3437
3438 case NEGATE_EXPR:
3439 case ABS_EXPR:
3440 case BIT_NOT_EXPR:
3441 case PAREN_EXPR:
3442 case NON_LVALUE_EXPR:
3443 case CONJ_EXPR:
3444 break;
3445
3446 default:
3447 gcc_unreachable ();
3448 }
3449
3450 /* For the remaining codes assert there is no conversion involved. */
3451 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3452 {
3453 error ("non-trivial conversion in unary operation");
3454 debug_generic_expr (lhs_type);
3455 debug_generic_expr (rhs1_type);
3456 return true;
3457 }
3458
3459 return false;
3460 }
3461
3462 /* Verify a gimple assignment statement STMT with a binary rhs.
3463 Returns true if anything is wrong. */
3464
3465 static bool
3466 verify_gimple_assign_binary (gimple stmt)
3467 {
3468 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3469 tree lhs = gimple_assign_lhs (stmt);
3470 tree lhs_type = TREE_TYPE (lhs);
3471 tree rhs1 = gimple_assign_rhs1 (stmt);
3472 tree rhs1_type = TREE_TYPE (rhs1);
3473 tree rhs2 = gimple_assign_rhs2 (stmt);
3474 tree rhs2_type = TREE_TYPE (rhs2);
3475
3476 if (!is_gimple_reg (lhs))
3477 {
3478 error ("non-register as LHS of binary operation");
3479 return true;
3480 }
3481
3482 if (!is_gimple_val (rhs1)
3483 || !is_gimple_val (rhs2))
3484 {
3485 error ("invalid operands in binary operation");
3486 return true;
3487 }
3488
3489 /* First handle operations that involve different types. */
3490 switch (rhs_code)
3491 {
3492 case COMPLEX_EXPR:
3493 {
3494 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3495 || !(INTEGRAL_TYPE_P (rhs1_type)
3496 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3497 || !(INTEGRAL_TYPE_P (rhs2_type)
3498 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3499 {
3500 error ("type mismatch in complex expression");
3501 debug_generic_expr (lhs_type);
3502 debug_generic_expr (rhs1_type);
3503 debug_generic_expr (rhs2_type);
3504 return true;
3505 }
3506
3507 return false;
3508 }
3509
3510 case LSHIFT_EXPR:
3511 case RSHIFT_EXPR:
3512 case LROTATE_EXPR:
3513 case RROTATE_EXPR:
3514 {
3515 /* Shifts and rotates are ok on integral types, fixed point
3516 types and integer vector types. */
3517 if ((!INTEGRAL_TYPE_P (rhs1_type)
3518 && !FIXED_POINT_TYPE_P (rhs1_type)
3519 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3520 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3521 || (!INTEGRAL_TYPE_P (rhs2_type)
3522 /* Vector shifts of vectors are also ok. */
3523 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3524 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3525 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3526 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3527 || !useless_type_conversion_p (lhs_type, rhs1_type))
3528 {
3529 error ("type mismatch in shift expression");
3530 debug_generic_expr (lhs_type);
3531 debug_generic_expr (rhs1_type);
3532 debug_generic_expr (rhs2_type);
3533 return true;
3534 }
3535
3536 return false;
3537 }
3538
3539 case VEC_LSHIFT_EXPR:
3540 case VEC_RSHIFT_EXPR:
3541 {
3542 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3543 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3544 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3545 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3546 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3547 || (!INTEGRAL_TYPE_P (rhs2_type)
3548 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3549 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3550 || !useless_type_conversion_p (lhs_type, rhs1_type))
3551 {
3552 error ("type mismatch in vector shift expression");
3553 debug_generic_expr (lhs_type);
3554 debug_generic_expr (rhs1_type);
3555 debug_generic_expr (rhs2_type);
3556 return true;
3557 }
3558 /* For shifting a vector of non-integral components we
3559 only allow shifting by a constant multiple of the element size. */
3560 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3561 && (TREE_CODE (rhs2) != INTEGER_CST
3562 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3563 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3564 {
3565 error ("non-element sized vector shift of floating point vector");
3566 return true;
3567 }
3568
3569 return false;
3570 }
3571
3572 case WIDEN_LSHIFT_EXPR:
3573 {
3574 if (!INTEGRAL_TYPE_P (lhs_type)
3575 || !INTEGRAL_TYPE_P (rhs1_type)
3576 || TREE_CODE (rhs2) != INTEGER_CST
3577 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3578 {
3579 error ("type mismatch in widening vector shift expression");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 debug_generic_expr (rhs2_type);
3583 return true;
3584 }
3585
3586 return false;
3587 }
3588
3589 case VEC_WIDEN_LSHIFT_HI_EXPR:
3590 case VEC_WIDEN_LSHIFT_LO_EXPR:
3591 {
3592 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3593 || TREE_CODE (lhs_type) != VECTOR_TYPE
3594 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3595 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3596 || TREE_CODE (rhs2) != INTEGER_CST
3597 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3598 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3599 {
3600 error ("type mismatch in widening vector shift expression");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3603 debug_generic_expr (rhs2_type);
3604 return true;
3605 }
3606
3607 return false;
3608 }
3609
3610 case PLUS_EXPR:
3611 case MINUS_EXPR:
3612 {
3613 tree lhs_etype = lhs_type;
3614 tree rhs1_etype = rhs1_type;
3615 tree rhs2_etype = rhs2_type;
3616 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3617 {
3618 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3619 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3620 {
3621 error ("invalid non-vector operands to vector valued plus");
3622 return true;
3623 }
3624 lhs_etype = TREE_TYPE (lhs_type);
3625 rhs1_etype = TREE_TYPE (rhs1_type);
3626 rhs2_etype = TREE_TYPE (rhs2_type);
3627 }
3628 if (POINTER_TYPE_P (lhs_etype)
3629 || POINTER_TYPE_P (rhs1_etype)
3630 || POINTER_TYPE_P (rhs2_etype))
3631 {
3632 error ("invalid (pointer) operands to plus/minus");
3633 return true;
3634 }
3635
3636 /* Continue with generic binary expression handling. */
3637 break;
3638 }
3639
3640 case POINTER_PLUS_EXPR:
3641 {
3642 if (!POINTER_TYPE_P (rhs1_type)
3643 || !useless_type_conversion_p (lhs_type, rhs1_type)
3644 || !ptrofftype_p (rhs2_type))
3645 {
3646 error ("type mismatch in pointer plus expression");
3647 debug_generic_stmt (lhs_type);
3648 debug_generic_stmt (rhs1_type);
3649 debug_generic_stmt (rhs2_type);
3650 return true;
3651 }
3652
3653 return false;
3654 }
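      /* For illustration, source-level pointer arithmetic such as
	 "p + i" with int *p is represented as
	 POINTER_PLUS_EXPR <p, (sizetype) i * 4> on a target with 4-byte
	 int: the first operand stays a pointer and the second is always
	 an unsigned byte offset of ptrofftype.  */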
3655
3656 case TRUTH_ANDIF_EXPR:
3657 case TRUTH_ORIF_EXPR:
3658 case TRUTH_AND_EXPR:
3659 case TRUTH_OR_EXPR:
3660 case TRUTH_XOR_EXPR:
3661
3662 gcc_unreachable ();
3663
3664 case LT_EXPR:
3665 case LE_EXPR:
3666 case GT_EXPR:
3667 case GE_EXPR:
3668 case EQ_EXPR:
3669 case NE_EXPR:
3670 case UNORDERED_EXPR:
3671 case ORDERED_EXPR:
3672 case UNLT_EXPR:
3673 case UNLE_EXPR:
3674 case UNGT_EXPR:
3675 case UNGE_EXPR:
3676 case UNEQ_EXPR:
3677 case LTGT_EXPR:
3678 /* Comparisons are also binary, but the result type is not
3679 connected to the operand types. */
3680 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3681
3682 case WIDEN_MULT_EXPR:
3683 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3684 return true;
3685 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3686 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3687
3688 case WIDEN_SUM_EXPR:
3689 case VEC_WIDEN_MULT_HI_EXPR:
3690 case VEC_WIDEN_MULT_LO_EXPR:
3691 case VEC_WIDEN_MULT_EVEN_EXPR:
3692 case VEC_WIDEN_MULT_ODD_EXPR:
3693 case VEC_PACK_TRUNC_EXPR:
3694 case VEC_PACK_SAT_EXPR:
3695 case VEC_PACK_FIX_TRUNC_EXPR:
3696 /* FIXME. */
3697 return false;
3698
3699 case MULT_EXPR:
3700 case MULT_HIGHPART_EXPR:
3701 case TRUNC_DIV_EXPR:
3702 case CEIL_DIV_EXPR:
3703 case FLOOR_DIV_EXPR:
3704 case ROUND_DIV_EXPR:
3705 case TRUNC_MOD_EXPR:
3706 case CEIL_MOD_EXPR:
3707 case FLOOR_MOD_EXPR:
3708 case ROUND_MOD_EXPR:
3709 case RDIV_EXPR:
3710 case EXACT_DIV_EXPR:
3711 case MIN_EXPR:
3712 case MAX_EXPR:
3713 case BIT_IOR_EXPR:
3714 case BIT_XOR_EXPR:
3715 case BIT_AND_EXPR:
3716 /* Continue with generic binary expression handling. */
3717 break;
3718
3719 default:
3720 gcc_unreachable ();
3721 }
3722
3723 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3724 || !useless_type_conversion_p (lhs_type, rhs2_type))
3725 {
3726 error ("type mismatch in binary expression");
3727 debug_generic_stmt (lhs_type);
3728 debug_generic_stmt (rhs1_type);
3729 debug_generic_stmt (rhs2_type);
3730 return true;
3731 }
3732
3733 return false;
3734 }
3735
3736 /* Verify a gimple assignment statement STMT with a ternary rhs.
3737 Returns true if anything is wrong. */
3738
3739 static bool
3740 verify_gimple_assign_ternary (gimple stmt)
3741 {
3742 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3743 tree lhs = gimple_assign_lhs (stmt);
3744 tree lhs_type = TREE_TYPE (lhs);
3745 tree rhs1 = gimple_assign_rhs1 (stmt);
3746 tree rhs1_type = TREE_TYPE (rhs1);
3747 tree rhs2 = gimple_assign_rhs2 (stmt);
3748 tree rhs2_type = TREE_TYPE (rhs2);
3749 tree rhs3 = gimple_assign_rhs3 (stmt);
3750 tree rhs3_type = TREE_TYPE (rhs3);
3751
3752 if (!is_gimple_reg (lhs))
3753 {
3754 error ("non-register as LHS of ternary operation");
3755 return true;
3756 }
3757
3758 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3759 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3760 || !is_gimple_val (rhs2)
3761 || !is_gimple_val (rhs3))
3762 {
3763 error ("invalid operands in ternary operation");
3764 return true;
3765 }
3766
3767 /* First handle operations that involve different types. */
3768 switch (rhs_code)
3769 {
3770 case WIDEN_MULT_PLUS_EXPR:
3771 case WIDEN_MULT_MINUS_EXPR:
3772 if ((!INTEGRAL_TYPE_P (rhs1_type)
3773 && !FIXED_POINT_TYPE_P (rhs1_type))
3774 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3775 || !useless_type_conversion_p (lhs_type, rhs3_type)
3776 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3777 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3778 {
3779 error ("type mismatch in widening multiply-accumulate expression");
3780 debug_generic_expr (lhs_type);
3781 debug_generic_expr (rhs1_type);
3782 debug_generic_expr (rhs2_type);
3783 debug_generic_expr (rhs3_type);
3784 return true;
3785 }
3786 break;
3787
3788 case FMA_EXPR:
3789 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3790 || !useless_type_conversion_p (lhs_type, rhs2_type)
3791 || !useless_type_conversion_p (lhs_type, rhs3_type))
3792 {
3793 error ("type mismatch in fused multiply-add expression");
3794 debug_generic_expr (lhs_type);
3795 debug_generic_expr (rhs1_type);
3796 debug_generic_expr (rhs2_type);
3797 debug_generic_expr (rhs3_type);
3798 return true;
3799 }
3800 break;
3801
3802 case COND_EXPR:
3803 case VEC_COND_EXPR:
3804 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3805 || !useless_type_conversion_p (lhs_type, rhs3_type))
3806 {
3807 error ("type mismatch in conditional expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs2_type);
3810 debug_generic_expr (rhs3_type);
3811 return true;
3812 }
3813 break;
3814
3815 case VEC_PERM_EXPR:
3816 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3817 || !useless_type_conversion_p (lhs_type, rhs2_type))
3818 {
3819 error ("type mismatch in vector permute expression");
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 debug_generic_expr (rhs2_type);
3823 debug_generic_expr (rhs3_type);
3824 return true;
3825 }
3826
3827 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3828 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3829 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3830 {
3831 error ("vector types expected in vector permute expression");
3832 debug_generic_expr (lhs_type);
3833 debug_generic_expr (rhs1_type);
3834 debug_generic_expr (rhs2_type);
3835 debug_generic_expr (rhs3_type);
3836 return true;
3837 }
3838
3839 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3840 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3841 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3842 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3843 != TYPE_VECTOR_SUBPARTS (lhs_type))
3844 {
3845 error ("vectors with different element number found "
3846 "in vector permute expression");
3847 debug_generic_expr (lhs_type);
3848 debug_generic_expr (rhs1_type);
3849 debug_generic_expr (rhs2_type);
3850 debug_generic_expr (rhs3_type);
3851 return true;
3852 }
3853
3854 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3855 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3856 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3857 {
3858 error ("invalid mask type in vector permute expression");
3859 debug_generic_expr (lhs_type);
3860 debug_generic_expr (rhs1_type);
3861 debug_generic_expr (rhs2_type);
3862 debug_generic_expr (rhs3_type);
3863 return true;
3864 }
3865
3866 return false;
3867
3868 case DOT_PROD_EXPR:
3869 case REALIGN_LOAD_EXPR:
3870 /* FIXME. */
3871 return false;
3872
3873 default:
3874 gcc_unreachable ();
3875 }
3876 return false;
3877 }
3878
3879 /* Verify a gimple assignment statement STMT with a single rhs.
3880 Returns true if anything is wrong. */
3881
3882 static bool
3883 verify_gimple_assign_single (gimple stmt)
3884 {
3885 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3886 tree lhs = gimple_assign_lhs (stmt);
3887 tree lhs_type = TREE_TYPE (lhs);
3888 tree rhs1 = gimple_assign_rhs1 (stmt);
3889 tree rhs1_type = TREE_TYPE (rhs1);
3890 bool res = false;
3891
3892 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3893 {
3894 error ("non-trivial conversion at assignment");
3895 debug_generic_expr (lhs_type);
3896 debug_generic_expr (rhs1_type);
3897 return true;
3898 }
3899
3900 if (gimple_clobber_p (stmt)
3901 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3902 {
3903 error ("non-decl/MEM_REF LHS in clobber statement");
3904 debug_generic_expr (lhs);
3905 return true;
3906 }
3907
3908 if (handled_component_p (lhs))
3909 res |= verify_types_in_gimple_reference (lhs, true);
3910
3911 /* Special codes we cannot handle via their class. */
3912 switch (rhs_code)
3913 {
3914 case ADDR_EXPR:
3915 {
3916 tree op = TREE_OPERAND (rhs1, 0);
3917 if (!is_gimple_addressable (op))
3918 {
3919 error ("invalid operand in unary expression");
3920 return true;
3921 }
3922
3923 /* Technically there is no longer a need for matching types, but
3924 gimple hygiene asks for this check. In LTO we can end up
3925 combining incompatible units and thus end up with addresses
3926 of globals that change their type to a common one. */
3927 if (!in_lto_p
3928 && !types_compatible_p (TREE_TYPE (op),
3929 TREE_TYPE (TREE_TYPE (rhs1)))
3930 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3931 TREE_TYPE (op)))
3932 {
3933 error ("type mismatch in address expression");
3934 debug_generic_stmt (TREE_TYPE (rhs1));
3935 debug_generic_stmt (TREE_TYPE (op));
3936 return true;
3937 }
3938
3939 return verify_types_in_gimple_reference (op, true);
3940 }
3941
3942 /* tcc_reference */
3943 case INDIRECT_REF:
3944 error ("INDIRECT_REF in gimple IL");
3945 return true;
3946
3947 case COMPONENT_REF:
3948 case BIT_FIELD_REF:
3949 case ARRAY_REF:
3950 case ARRAY_RANGE_REF:
3951 case VIEW_CONVERT_EXPR:
3952 case REALPART_EXPR:
3953 case IMAGPART_EXPR:
3954 case TARGET_MEM_REF:
3955 case MEM_REF:
3956 if (!is_gimple_reg (lhs)
3957 && is_gimple_reg_type (TREE_TYPE (lhs)))
3958 {
3959 error ("invalid rhs for gimple memory store");
3960 debug_generic_stmt (lhs);
3961 debug_generic_stmt (rhs1);
3962 return true;
3963 }
3964 return res || verify_types_in_gimple_reference (rhs1, false);
3965
3966 /* tcc_constant */
3967 case SSA_NAME:
3968 case INTEGER_CST:
3969 case REAL_CST:
3970 case FIXED_CST:
3971 case COMPLEX_CST:
3972 case VECTOR_CST:
3973 case STRING_CST:
3974 return res;
3975
3976 /* tcc_declaration */
3977 case CONST_DECL:
3978 return res;
3979 case VAR_DECL:
3980 case PARM_DECL:
3981 if (!is_gimple_reg (lhs)
3982 && !is_gimple_reg (rhs1)
3983 && is_gimple_reg_type (TREE_TYPE (lhs)))
3984 {
3985 error ("invalid rhs for gimple memory store");
3986 debug_generic_stmt (lhs);
3987 debug_generic_stmt (rhs1);
3988 return true;
3989 }
3990 return res;
3991
3992 case CONSTRUCTOR:
3993 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3994 {
3995 unsigned int i;
3996 tree elt_i, elt_v, elt_t = NULL_TREE;
3997
3998 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3999 return res;
4000 /* For vector CONSTRUCTORs we require that either the CONSTRUCTOR
4001 is empty, or that it consists of smaller vector elements (in
4002 which case the element count must exactly cover the whole outer
4003 vector and the index must be NULL on all elements), or that it
4004 consists of scalar elements, where as an exception we allow a
4005 smaller number of elements (assuming zero filling) and
4006 consecutive indexes as compared to NULL indexes (such
4007 CONSTRUCTORs can appear in the IL from FEs). */
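/* For instance (illustrative GIMPLE, not taken from any testcase):
     _5 = {_1, _2};       <- V8SI built from two V4SI values
     _7 = {_3, _4, _6};   <- V4SI from scalars, tail zero-filled
   both satisfy the checks below.  */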
4008 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4009 {
4010 if (elt_t == NULL_TREE)
4011 {
4012 elt_t = TREE_TYPE (elt_v);
4013 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4014 {
4016 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4017 TREE_TYPE (elt_t)))
4018 {
4019 error ("incorrect type of vector CONSTRUCTOR"
4020 " elements");
4021 debug_generic_stmt (rhs1);
4022 return true;
4023 }
4024 else if (CONSTRUCTOR_NELTS (rhs1)
4025 * TYPE_VECTOR_SUBPARTS (elt_t)
4026 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4027 {
4028 error ("incorrect number of vector CONSTRUCTOR"
4029 " elements");
4030 debug_generic_stmt (rhs1);
4031 return true;
4032 }
4033 }
4034 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4035 elt_t))
4036 {
4037 error ("incorrect type of vector CONSTRUCTOR elements");
4038 debug_generic_stmt (rhs1);
4039 return true;
4040 }
4041 else if (CONSTRUCTOR_NELTS (rhs1)
4042 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4043 {
4044 error ("incorrect number of vector CONSTRUCTOR elements");
4045 debug_generic_stmt (rhs1);
4046 return true;
4047 }
4048 }
4049 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4050 {
4051 error ("incorrect type of vector CONSTRUCTOR elements");
4052 debug_generic_stmt (rhs1);
4053 return true;
4054 }
4055 if (elt_i != NULL_TREE
4056 && (TREE_CODE (elt_t) == VECTOR_TYPE
4057 || TREE_CODE (elt_i) != INTEGER_CST
4058 || compare_tree_int (elt_i, i) != 0))
4059 {
4060 error ("vector CONSTRUCTOR with non-NULL element index");
4061 debug_generic_stmt (rhs1);
4062 return true;
4063 }
4064 }
4065 }
4066 return res;
4067 case OBJ_TYPE_REF:
4068 case ASSERT_EXPR:
4069 case WITH_SIZE_EXPR:
4070 /* FIXME. */
4071 return res;
4072
4073 default:;
4074 }
4075
4076 return res;
4077 }
4078
4079 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4080 is a problem, otherwise false. */
4081
4082 static bool
4083 verify_gimple_assign (gimple stmt)
4084 {
4085 switch (gimple_assign_rhs_class (stmt))
4086 {
4087 case GIMPLE_SINGLE_RHS:
4088 return verify_gimple_assign_single (stmt);
4089
4090 case GIMPLE_UNARY_RHS:
4091 return verify_gimple_assign_unary (stmt);
4092
4093 case GIMPLE_BINARY_RHS:
4094 return verify_gimple_assign_binary (stmt);
4095
4096 case GIMPLE_TERNARY_RHS:
4097 return verify_gimple_assign_ternary (stmt);
4098
4099 default:
4100 gcc_unreachable ();
4101 }
4102 }
4103
4104 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4105 is a problem, otherwise false. */
4106
4107 static bool
4108 verify_gimple_return (gimple stmt)
4109 {
4110 tree op = gimple_return_retval (stmt);
4111 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4112
4113 /* We cannot require a return value to be present, as we do not fix
4114 up missing return values from the original source. */
4115 if (op == NULL)
4116 return false;
4117
4118 if (!is_gimple_val (op)
4119 && TREE_CODE (op) != RESULT_DECL)
4120 {
4121 error ("invalid operand in return statement");
4122 debug_generic_stmt (op);
4123 return true;
4124 }
4125
4126 if ((TREE_CODE (op) == RESULT_DECL
4127 && DECL_BY_REFERENCE (op))
4128 || (TREE_CODE (op) == SSA_NAME
4129 && SSA_NAME_VAR (op)
4130 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4131 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4132 op = TREE_TYPE (op);
4133
4134 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4135 {
4136 error ("invalid conversion in return statement");
4137 debug_generic_stmt (restype);
4138 debug_generic_stmt (TREE_TYPE (op));
4139 return true;
4140 }
4141
4142 return false;
4143 }
4144
4145
4146 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4147 is a problem, otherwise false. */
4148
4149 static bool
4150 verify_gimple_goto (gimple stmt)
4151 {
4152 tree dest = gimple_goto_dest (stmt);
4153
4154 /* ??? We have two canonical forms of direct goto destinations, a
4155 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
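  /* E.g. a simple "goto lab;" carries the bare LABEL_DECL, while a
     computed "goto *p_1;" carries a pointer-valued gimple value
     (illustrative source forms).  */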
4156 if (TREE_CODE (dest) != LABEL_DECL
4157 && (!is_gimple_val (dest)
4158 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4159 {
4160 error ("goto destination is neither a label nor a pointer");
4161 return true;
4162 }
4163
4164 return false;
4165 }
4166
4167 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4168 is a problem, otherwise false. */
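/* For reference, a well-formed switch looks roughly like this in the
   GIMPLE dumps (an illustrative sketch):

     switch (i_1) <default: <L3>, case 0: <L0>, case 2 ... 5: <L1>>

   i.e. the default label comes first with no CASE_LOW/CASE_HIGH, and
   the remaining labels are sorted and non-overlapping.  */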
4169
4170 static bool
4171 verify_gimple_switch (gimple stmt)
4172 {
4173 unsigned int i, n;
4174 tree elt, prev_upper_bound = NULL_TREE;
4175 tree index_type, elt_type = NULL_TREE;
4176
4177 if (!is_gimple_val (gimple_switch_index (stmt)))
4178 {
4179 error ("invalid operand to switch statement");
4180 debug_generic_stmt (gimple_switch_index (stmt));
4181 return true;
4182 }
4183
4184 index_type = TREE_TYPE (gimple_switch_index (stmt));
4185 if (! INTEGRAL_TYPE_P (index_type))
4186 {
4187 error ("non-integral type switch statement");
4188 debug_generic_expr (index_type);
4189 return true;
4190 }
4191
4192 elt = gimple_switch_label (stmt, 0);
4193 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4194 {
4195 error ("invalid default case label in switch statement");
4196 debug_generic_expr (elt);
4197 return true;
4198 }
4199
4200 n = gimple_switch_num_labels (stmt);
4201 for (i = 1; i < n; i++)
4202 {
4203 elt = gimple_switch_label (stmt, i);
4204
4205 if (! CASE_LOW (elt))
4206 {
4207 error ("invalid case label in switch statement");
4208 debug_generic_expr (elt);
4209 return true;
4210 }
4211 if (CASE_HIGH (elt)
4212 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4213 {
4214 error ("invalid case range in switch statement");
4215 debug_generic_expr (elt);
4216 return true;
4217 }
4218
4219 if (elt_type)
4220 {
4221 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4222 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4223 {
4224 error ("type mismatch for case label in switch statement");
4225 debug_generic_expr (elt);
4226 return true;
4227 }
4228 }
4229 else
4230 {
4231 elt_type = TREE_TYPE (CASE_LOW (elt));
4232 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4233 {
4234 error ("type precision mismatch in switch statement");
4235 return true;
4236 }
4237 }
4238
4239 if (prev_upper_bound)
4240 {
4241 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4242 {
4243 error ("case labels not sorted in switch statement");
4244 return true;
4245 }
4246 }
4247
4248 prev_upper_bound = CASE_HIGH (elt);
4249 if (! prev_upper_bound)
4250 prev_upper_bound = CASE_LOW (elt);
4251 }
4252
4253 return false;
4254 }
4255
4256 /* Verify a gimple debug statement STMT.
4257 Returns true if anything is wrong. */
4258
4259 static bool
4260 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4261 {
4262 /* There isn't much that could be wrong in a gimple debug stmt. A
4263 gimple debug bind stmt, for example, maps a tree (usually a
4264 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4265 or member of an aggregate type) to another tree that can be an
4266 arbitrary expression. These stmts expand into debug insns, and
4267 are converted to debug notes by var-tracking.c. */
4268 return false;
4269 }
4270
4271 /* Verify a gimple label statement STMT.
4272 Returns true if anything is wrong. */
4273
4274 static bool
4275 verify_gimple_label (gimple stmt)
4276 {
4277 tree decl = gimple_label_label (stmt);
4278 int uid;
4279 bool err = false;
4280
4281 if (TREE_CODE (decl) != LABEL_DECL)
4282 return true;
4283 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4284 && DECL_CONTEXT (decl) != current_function_decl)
4285 {
4286 error ("label's context is not the current function decl");
4287 err |= true;
4288 }
4289
4290 uid = LABEL_DECL_UID (decl);
4291 if (cfun->cfg
4292 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4293 {
4294 error ("incorrect entry in label_to_block_map");
4295 err |= true;
4296 }
4297
4298 uid = EH_LANDING_PAD_NR (decl);
4299 if (uid)
4300 {
4301 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4302 if (decl != lp->post_landing_pad)
4303 {
4304 error ("incorrect setting of landing pad number");
4305 err |= true;
4306 }
4307 }
4308
4309 return err;
4310 }
4311
4312 /* Verify the GIMPLE statement STMT. Returns true if there is an
4313 error, otherwise false. */
4314
4315 static bool
4316 verify_gimple_stmt (gimple stmt)
4317 {
4318 switch (gimple_code (stmt))
4319 {
4320 case GIMPLE_ASSIGN:
4321 return verify_gimple_assign (stmt);
4322
4323 case GIMPLE_LABEL:
4324 return verify_gimple_label (stmt);
4325
4326 case GIMPLE_CALL:
4327 return verify_gimple_call (stmt);
4328
4329 case GIMPLE_COND:
4330 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4331 {
4332 error ("invalid comparison code in gimple cond");
4333 return true;
4334 }
4335 if (!(!gimple_cond_true_label (stmt)
4336 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4337 || !(!gimple_cond_false_label (stmt)
4338 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4339 {
4340 error ("invalid labels in gimple cond");
4341 return true;
4342 }
4343
4344 return verify_gimple_comparison (boolean_type_node,
4345 gimple_cond_lhs (stmt),
4346 gimple_cond_rhs (stmt));
4347
4348 case GIMPLE_GOTO:
4349 return verify_gimple_goto (stmt);
4350
4351 case GIMPLE_SWITCH:
4352 return verify_gimple_switch (stmt);
4353
4354 case GIMPLE_RETURN:
4355 return verify_gimple_return (stmt);
4356
4357 case GIMPLE_ASM:
4358 return false;
4359
4360 case GIMPLE_TRANSACTION:
4361 return verify_gimple_transaction (stmt);
4362
4363 /* Tuples that do not have tree operands. */
4364 case GIMPLE_NOP:
4365 case GIMPLE_PREDICT:
4366 case GIMPLE_RESX:
4367 case GIMPLE_EH_DISPATCH:
4368 case GIMPLE_EH_MUST_NOT_THROW:
4369 return false;
4370
4371 CASE_GIMPLE_OMP:
4372 /* OpenMP directives are validated by the FE and never operated
4373 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4374 non-gimple expressions when the main index variable has had
4375 its address taken. This does not affect the loop itself
4376 because the header of a GIMPLE_OMP_FOR is merely used to determine
4377 how to set up the parallel iteration. */
4378 return false;
4379
4380 case GIMPLE_DEBUG:
4381 return verify_gimple_debug (stmt);
4382
4383 default:
4384 gcc_unreachable ();
4385 }
4386 }
4387
4388 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4389 and false otherwise. */
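/* A well-formed PHI carries one argument per incoming edge, e.g.
   (illustrative): x_3 = PHI <x_1(2), x_2(4)>, and its result and
   arguments must agree on being virtual or not.  */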
4390
4391 static bool
4392 verify_gimple_phi (gimple phi)
4393 {
4394 bool err = false;
4395 unsigned i;
4396 tree phi_result = gimple_phi_result (phi);
4397 bool virtual_p;
4398
4399 if (!phi_result)
4400 {
4401 error ("invalid PHI result");
4402 return true;
4403 }
4404
4405 virtual_p = virtual_operand_p (phi_result);
4406 if (TREE_CODE (phi_result) != SSA_NAME
4407 || (virtual_p
4408 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4409 {
4410 error ("invalid PHI result");
4411 err = true;
4412 }
4413
4414 for (i = 0; i < gimple_phi_num_args (phi); i++)
4415 {
4416 tree t = gimple_phi_arg_def (phi, i);
4417
4418 if (!t)
4419 {
4420 error ("missing PHI def");
4421 err |= true;
4422 continue;
4423 }
4424 /* Addressable variables do have SSA_NAMEs but they
4425 are not considered gimple values. */
4426 else if ((TREE_CODE (t) == SSA_NAME
4427 && virtual_p != virtual_operand_p (t))
4428 || (virtual_p
4429 && (TREE_CODE (t) != SSA_NAME
4430 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4431 || (!virtual_p
4432 && !is_gimple_val (t)))
4433 {
4434 error ("invalid PHI argument");
4435 debug_generic_expr (t);
4436 err |= true;
4437 }
4438 #ifdef ENABLE_TYPES_CHECKING
4439 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4440 {
4441 error ("incompatible types in PHI argument %u", i);
4442 debug_generic_stmt (TREE_TYPE (phi_result));
4443 debug_generic_stmt (TREE_TYPE (t));
4444 err |= true;
4445 }
4446 #endif
4447 }
4448
4449 return err;
4450 }
4451
4452 /* Verify the GIMPLE statements inside the sequence STMTS. */
4453
4454 static bool
4455 verify_gimple_in_seq_2 (gimple_seq stmts)
4456 {
4457 gimple_stmt_iterator ittr;
4458 bool err = false;
4459
4460 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4461 {
4462 gimple stmt = gsi_stmt (ittr);
4463
4464 switch (gimple_code (stmt))
4465 {
4466 case GIMPLE_BIND:
4467 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4468 break;
4469
4470 case GIMPLE_TRY:
4471 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4472 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4473 break;
4474
4475 case GIMPLE_EH_FILTER:
4476 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4477 break;
4478
4479 case GIMPLE_EH_ELSE:
4480 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4481 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4482 break;
4483
4484 case GIMPLE_CATCH:
4485 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4486 break;
4487
4488 case GIMPLE_TRANSACTION:
4489 err |= verify_gimple_transaction (stmt);
4490 break;
4491
4492 default:
4493 {
4494 bool err2 = verify_gimple_stmt (stmt);
4495 if (err2)
4496 debug_gimple_stmt (stmt);
4497 err |= err2;
4498 }
4499 }
4500 }
4501
4502 return err;
4503 }
4504
4505 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4506 is a problem, otherwise false. */
4507
4508 static bool
4509 verify_gimple_transaction (gimple stmt)
4510 {
4511 tree lab = gimple_transaction_label (stmt);
4512 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4513 return true;
4514 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4515 }
4516
4517
4518 /* Verify the GIMPLE statements inside the statement list STMTS. */
4519
4520 DEBUG_FUNCTION void
4521 verify_gimple_in_seq (gimple_seq stmts)
4522 {
4523 timevar_push (TV_TREE_STMT_VERIFY);
4524 if (verify_gimple_in_seq_2 (stmts))
4525 internal_error ("verify_gimple failed");
4526 timevar_pop (TV_TREE_STMT_VERIFY);
4527 }
4528
4529 /* Return true when T can be shared. */
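/* (Types, decls, SSA names, constants and the like are deliberately
   shared across expressions; other GENERIC/GIMPLE trees must occur
   exactly once, which verify_node_sharing below enforces.)  */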
4530
4531 static bool
4532 tree_node_can_be_shared (tree t)
4533 {
4534 if (IS_TYPE_OR_DECL_P (t)
4535 || is_gimple_min_invariant (t)
4536 || TREE_CODE (t) == SSA_NAME
4537 || t == error_mark_node
4538 || TREE_CODE (t) == IDENTIFIER_NODE)
4539 return true;
4540
4541 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4542 return true;
4543 
4547 return false;
4548 }
4549
4550 /* Called via walk_tree. Verify tree sharing. */
4551
4552 static tree
4553 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4554 {
4555 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4556
4557 if (tree_node_can_be_shared (*tp))
4558 {
4559 *walk_subtrees = false;
4560 return NULL;
4561 }
4562
4563 if (pointer_set_insert (visited, *tp))
4564 return *tp;
4565
4566 return NULL;
4567 }
4568
4569 /* Called via walk_gimple_stmt. Verify tree sharing. */
4570
4571 static tree
4572 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4573 {
4574 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4575 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4576 }
4577
4578 static bool eh_error_found;
4579 static int
4580 verify_eh_throw_stmt_node (void **slot, void *data)
4581 {
4582 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4583 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4584
4585 if (!pointer_set_contains (visited, node->stmt))
4586 {
4587 error ("dead STMT in EH table");
4588 debug_gimple_stmt (node->stmt);
4589 eh_error_found = true;
4590 }
4591 return 1;
4592 }
4593
4594 /* Verify that the block of location LOC is in BLOCKS. */
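/* (A location may carry a BLOCK; the recursion below also validates
   the BLOCK_SOURCE_LOCATION of every enclosing block.)  */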
4595
4596 static bool
4597 verify_location (pointer_set_t *blocks, location_t loc)
4598 {
4599 tree block = LOCATION_BLOCK (loc);
4600 if (block != NULL_TREE
4601 && !pointer_set_contains (blocks, block))
4602 {
4603 error ("location references block not in block tree");
4604 return true;
4605 }
4606 if (block != NULL_TREE)
4607 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4608 return false;
4609 }
4610
4611 /* Called via walk_tree. Verify that expressions have no blocks. */
4612
4613 static tree
4614 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4615 {
4616 if (!EXPR_P (*tp))
4617 {
4618 *walk_subtrees = false;
4619 return NULL;
4620 }
4621
4622 location_t loc = EXPR_LOCATION (*tp);
4623 if (LOCATION_BLOCK (loc) != NULL)
4624 return *tp;
4625
4626 return NULL;
4627 }
4628
4629 /* Called via walk_tree. Verify locations of expressions. */
4630
4631 static tree
4632 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4633 {
4634 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4635
4636 if (TREE_CODE (*tp) == VAR_DECL
4637 && DECL_HAS_DEBUG_EXPR_P (*tp))
4638 {
4639 tree t = DECL_DEBUG_EXPR (*tp);
4640 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4641 if (addr)
4642 return addr;
4643 }
4644 if ((TREE_CODE (*tp) == VAR_DECL
4645 || TREE_CODE (*tp) == PARM_DECL
4646 || TREE_CODE (*tp) == RESULT_DECL)
4647 && DECL_HAS_VALUE_EXPR_P (*tp))
4648 {
4649 tree t = DECL_VALUE_EXPR (*tp);
4650 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4651 if (addr)
4652 return addr;
4653 }
4654
4655 if (!EXPR_P (*tp))
4656 {
4657 *walk_subtrees = false;
4658 return NULL;
4659 }
4660
4661 location_t loc = EXPR_LOCATION (*tp);
4662 if (verify_location (blocks, loc))
4663 return *tp;
4664
4665 return NULL;
4666 }
4667
4668 /* Called via walk_gimple_op. Verify locations of expressions. */
4669
4670 static tree
4671 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4672 {
4673 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4674 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4675 }
4676
4677 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4678
4679 static void
4680 collect_subblocks (pointer_set_t *blocks, tree block)
4681 {
4682 tree t;
4683 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4684 {
4685 pointer_set_insert (blocks, t);
4686 collect_subblocks (blocks, t);
4687 }
4688 }
4689
4690 /* Verify the GIMPLE statements in the CFG of FN. */
4691
4692 DEBUG_FUNCTION void
4693 verify_gimple_in_cfg (struct function *fn)
4694 {
4695 basic_block bb;
4696 bool err = false;
4697 struct pointer_set_t *visited, *visited_stmts, *blocks;
4698
4699 timevar_push (TV_TREE_STMT_VERIFY);
4700 visited = pointer_set_create ();
4701 visited_stmts = pointer_set_create ();
4702
4703 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4704 blocks = pointer_set_create ();
4705 if (DECL_INITIAL (fn->decl))
4706 {
4707 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4708 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4709 }
4710
4711 FOR_EACH_BB_FN (bb, fn)
4712 {
4713 gimple_stmt_iterator gsi;
4714
4715 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4716 {
4717 gimple phi = gsi_stmt (gsi);
4718 bool err2 = false;
4719 unsigned i;
4720
4721 pointer_set_insert (visited_stmts, phi);
4722
4723 if (gimple_bb (phi) != bb)
4724 {
4725 error ("gimple_bb (phi) is set to a wrong basic block");
4726 err2 = true;
4727 }
4728
4729 err2 |= verify_gimple_phi (phi);
4730
4731 /* Only PHI arguments have locations. */
4732 if (gimple_location (phi) != UNKNOWN_LOCATION)
4733 {
4734 error ("PHI node with location");
4735 err2 = true;
4736 }
4737
4738 for (i = 0; i < gimple_phi_num_args (phi); i++)
4739 {
4740 tree arg = gimple_phi_arg_def (phi, i);
4741 tree addr = walk_tree (&arg, verify_node_sharing_1,
4742 visited, NULL);
4743 if (addr)
4744 {
4745 error ("incorrect sharing of tree nodes");
4746 debug_generic_expr (addr);
4747 err2 |= true;
4748 }
4749 location_t loc = gimple_phi_arg_location (phi, i);
4750 if (virtual_operand_p (gimple_phi_result (phi))
4751 && loc != UNKNOWN_LOCATION)
4752 {
4753 error ("virtual PHI with argument locations");
4754 err2 = true;
4755 }
4756 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4757 if (addr)
4758 {
4759 debug_generic_expr (addr);
4760 err2 = true;
4761 }
4762 err2 |= verify_location (blocks, loc);
4763 }
4764
4765 if (err2)
4766 debug_gimple_stmt (phi);
4767 err |= err2;
4768 }
4769
4770 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4771 {
4772 gimple stmt = gsi_stmt (gsi);
4773 bool err2 = false;
4774 struct walk_stmt_info wi;
4775 tree addr;
4776 int lp_nr;
4777
4778 pointer_set_insert (visited_stmts, stmt);
4779
4780 if (gimple_bb (stmt) != bb)
4781 {
4782 error ("gimple_bb (stmt) is set to a wrong basic block");
4783 err2 = true;
4784 }
4785
4786 err2 |= verify_gimple_stmt (stmt);
4787 err2 |= verify_location (blocks, gimple_location (stmt));
4788
4789 memset (&wi, 0, sizeof (wi));
4790 wi.info = (void *) visited;
4791 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4792 if (addr)
4793 {
4794 error ("incorrect sharing of tree nodes");
4795 debug_generic_expr (addr);
4796 err2 |= true;
4797 }
4798
4799 memset (&wi, 0, sizeof (wi));
4800 wi.info = (void *) blocks;
4801 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4802 if (addr)
4803 {
4804 debug_generic_expr (addr);
4805 err2 |= true;
4806 }
4807
4808 /* ??? Instead of not checking these stmts at all the walker
4809 should know its context via wi. */
4810 if (!is_gimple_debug (stmt)
4811 && !is_gimple_omp (stmt))
4812 {
4813 memset (&wi, 0, sizeof (wi));
4814 addr = walk_gimple_op (stmt, verify_expr, &wi);
4815 if (addr)
4816 {
4817 debug_generic_expr (addr);
4818 inform (gimple_location (stmt), "in statement");
4819 err2 |= true;
4820 }
4821 }
4822
4823 /* If the statement is marked as part of an EH region, then it is
4824 expected that the statement could throw. Verify that when we
4825 have optimizations that simplify statements such that we prove
4826 that they cannot throw, that we update other data structures
4827 to match. */
4828 lp_nr = lookup_stmt_eh_lp (stmt);
4829 if (lp_nr != 0)
4830 {
4831 if (!stmt_could_throw_p (stmt))
4832 {
4833 error ("statement marked for throw, but doesn%'t");
4834 err2 |= true;
4835 }
4836 else if (lp_nr > 0
4837 && !gsi_one_before_end_p (gsi)
4838 && stmt_can_throw_internal (stmt))
4839 {
4840 error ("statement marked for throw in middle of block");
4841 err2 |= true;
4842 }
4843 }
4844
4845 if (err2)
4846 debug_gimple_stmt (stmt);
4847 err |= err2;
4848 }
4849 }
4850
4851 eh_error_found = false;
4852 if (get_eh_throw_stmt_table (cfun))
4853 htab_traverse (get_eh_throw_stmt_table (cfun),
4854 verify_eh_throw_stmt_node,
4855 visited_stmts);
4856
4857 if (err || eh_error_found)
4858 internal_error ("verify_gimple failed");
4859
4860 pointer_set_destroy (visited);
4861 pointer_set_destroy (visited_stmts);
4862 pointer_set_destroy (blocks);
4863 verify_histograms ();
4864 timevar_pop (TV_TREE_STMT_VERIFY);
4865 }
4866
4867
4868 /* Verifies that the flow information is OK. */
4869
4870 static int
4871 gimple_verify_flow_info (void)
4872 {
4873 int err = 0;
4874 basic_block bb;
4875 gimple_stmt_iterator gsi;
4876 gimple stmt;
4877 edge e;
4878 edge_iterator ei;
4879
4880 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4881 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4882 {
4883 error ("ENTRY_BLOCK has IL associated with it");
4884 err = 1;
4885 }
4886
4887 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4888 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4889 {
4890 error ("EXIT_BLOCK has IL associated with it");
4891 err = 1;
4892 }
4893
4894 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4895 if (e->flags & EDGE_FALLTHRU)
4896 {
4897 error ("fallthru to exit from bb %d", e->src->index);
4898 err = 1;
4899 }
4900
4901 FOR_EACH_BB (bb)
4902 {
4903 bool found_ctrl_stmt = false;
4904
4905 stmt = NULL;
4906
4907 /* Skip labels at the start of the basic block. */
4908 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4909 {
4910 tree label;
4911 gimple prev_stmt = stmt;
4912
4913 stmt = gsi_stmt (gsi);
4914
4915 if (gimple_code (stmt) != GIMPLE_LABEL)
4916 break;
4917
4918 label = gimple_label_label (stmt);
4919 if (prev_stmt && DECL_NONLOCAL (label))
4920 {
4921 error ("nonlocal label ");
4922 print_generic_expr (stderr, label, 0);
4923 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4924 bb->index);
4925 err = 1;
4926 }
4927
4928 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4929 {
4930 error ("EH landing pad label ");
4931 print_generic_expr (stderr, label, 0);
4932 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4933 bb->index);
4934 err = 1;
4935 }
4936
4937 if (label_to_block (label) != bb)
4938 {
4939 error ("label ");
4940 print_generic_expr (stderr, label, 0);
4941 fprintf (stderr, " to block does not match in bb %d",
4942 bb->index);
4943 err = 1;
4944 }
4945
4946 if (decl_function_context (label) != current_function_decl)
4947 {
4948 error ("label ");
4949 print_generic_expr (stderr, label, 0);
4950 fprintf (stderr, " has incorrect context in bb %d",
4951 bb->index);
4952 err = 1;
4953 }
4954 }
4955
4956 /* Verify that the body of basic block BB is free of control flow. */
4957 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4958 {
4959 gimple stmt = gsi_stmt (gsi);
4960
4961 if (found_ctrl_stmt)
4962 {
4963 error ("control flow in the middle of basic block %d",
4964 bb->index);
4965 err = 1;
4966 }
4967
4968 if (stmt_ends_bb_p (stmt))
4969 found_ctrl_stmt = true;
4970
4971 if (gimple_code (stmt) == GIMPLE_LABEL)
4972 {
4973 error ("label ");
4974 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4975 fprintf (stderr, " in the middle of basic block %d", bb->index);
4976 err = 1;
4977 }
4978 }
4979
4980 gsi = gsi_last_bb (bb);
4981 if (gsi_end_p (gsi))
4982 continue;
4983
4984 stmt = gsi_stmt (gsi);
4985
4986 if (gimple_code (stmt) == GIMPLE_LABEL)
4987 continue;
4988
4989 err |= verify_eh_edges (stmt);
4990
4991 if (is_ctrl_stmt (stmt))
4992 {
4993 FOR_EACH_EDGE (e, ei, bb->succs)
4994 if (e->flags & EDGE_FALLTHRU)
4995 {
4996 error ("fallthru edge after a control statement in bb %d",
4997 bb->index);
4998 err = 1;
4999 }
5000 }
5001
5002 if (gimple_code (stmt) != GIMPLE_COND)
5003 {
5004 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5005 after anything other than a GIMPLE_COND. */
5006 FOR_EACH_EDGE (e, ei, bb->succs)
5007 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5008 {
5009 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5010 bb->index);
5011 err = 1;
5012 }
5013 }
5014
5015 switch (gimple_code (stmt))
5016 {
5017 case GIMPLE_COND:
5018 {
5019 edge true_edge;
5020 edge false_edge;
5021
5022 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5023
5024 if (!true_edge
5025 || !false_edge
5026 || !(true_edge->flags & EDGE_TRUE_VALUE)
5027 || !(false_edge->flags & EDGE_FALSE_VALUE)
5028 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5029 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5030 || EDGE_COUNT (bb->succs) >= 3)
5031 {
5032 error ("wrong outgoing edge flags at end of bb %d",
5033 bb->index);
5034 err = 1;
5035 }
5036 }
5037 break;
5038
5039 case GIMPLE_GOTO:
5040 if (simple_goto_p (stmt))
5041 {
5042 error ("explicit goto at end of bb %d", bb->index);
5043 err = 1;
5044 }
5045 else
5046 {
5047 /* FIXME. We should double-check that the labels in the
5048 destination blocks have their address taken. */
5049 FOR_EACH_EDGE (e, ei, bb->succs)
5050 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5051 | EDGE_FALSE_VALUE))
5052 || !(e->flags & EDGE_ABNORMAL))
5053 {
5054 error ("wrong outgoing edge flags at end of bb %d",
5055 bb->index);
5056 err = 1;
5057 }
5058 }
5059 break;
5060
5061 case GIMPLE_CALL:
5062 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5063 break;
5064 /* ... fallthru ... */
5065 case GIMPLE_RETURN:
5066 if (!single_succ_p (bb)
5067 || (single_succ_edge (bb)->flags
5068 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5069 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5070 {
5071 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5072 err = 1;
5073 }
5074 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5075 {
5076 error ("return edge does not point to exit in bb %d",
5077 bb->index);
5078 err = 1;
5079 }
5080 break;
5081
5082 case GIMPLE_SWITCH:
5083 {
5084 tree prev;
5085 edge e;
5086 size_t i, n;
5087
5088 n = gimple_switch_num_labels (stmt);
5089
5090 /* Mark all the destination basic blocks. */
5091 for (i = 0; i < n; ++i)
5092 {
5093 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5094 basic_block label_bb = label_to_block (lab);
5095 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5096 label_bb->aux = (void *)1;
5097 }
5098
5099 /* Verify that the case labels are sorted. */
5100 prev = gimple_switch_label (stmt, 0);
5101 for (i = 1; i < n; ++i)
5102 {
5103 tree c = gimple_switch_label (stmt, i);
5104 if (!CASE_LOW (c))
5105 {
5106 error ("found default case not at the start of "
5107 "case vector");
5108 err = 1;
5109 continue;
5110 }
5111 if (CASE_LOW (prev)
5112 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5113 {
5114 error ("case labels not sorted: ");
5115 print_generic_expr (stderr, prev, 0);
5116 fprintf (stderr," is greater than ");
5117 print_generic_expr (stderr, c, 0);
5118 fprintf (stderr," but comes before it.\n");
5119 err = 1;
5120 }
5121 prev = c;
5122 }
5123 /* VRP will remove the default case if it can prove it will
5124 never be executed. So do not verify there always exists
5125 a default case here. */
5126
5127 FOR_EACH_EDGE (e, ei, bb->succs)
5128 {
5129 if (!e->dest->aux)
5130 {
5131 error ("extra outgoing edge %d->%d",
5132 bb->index, e->dest->index);
5133 err = 1;
5134 }
5135
5136 e->dest->aux = (void *)2;
5137 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5138 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5139 {
5140 error ("wrong outgoing edge flags at end of bb %d",
5141 bb->index);
5142 err = 1;
5143 }
5144 }
5145
5146 /* Check that we have all of them. */
5147 for (i = 0; i < n; ++i)
5148 {
5149 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5150 basic_block label_bb = label_to_block (lab);
5151
5152 if (label_bb->aux != (void *)2)
5153 {
5154 error ("missing edge %i->%i", bb->index, label_bb->index);
5155 err = 1;
5156 }
5157 }
5158
5159 FOR_EACH_EDGE (e, ei, bb->succs)
5160 e->dest->aux = (void *)0;
5161 }
5162 break;
5163
5164 case GIMPLE_EH_DISPATCH:
5165 err |= verify_eh_dispatch_edge (stmt);
5166 break;
5167
5168 default:
5169 break;
5170 }
5171 }
5172
5173 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5174 verify_dominators (CDI_DOMINATORS);
5175
5176 return err;
5177 }
5178
5179
5180 /* Updates phi nodes after creating a forwarder block joined
5181 by edge FALLTHRU. */
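/* (Sketch: the PHIs stay in the forwarder DUMMY with renamed results;
   for each of them a fresh PHI is created in BB that receives the
   renamed result over FALLTHRU, while the remaining predecessors of
   BB get their arguments from the pending stmts on the edges.)  */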
5182
5183 static void
5184 gimple_make_forwarder_block (edge fallthru)
5185 {
5186 edge e;
5187 edge_iterator ei;
5188 basic_block dummy, bb;
5189 tree var;
5190 gimple_stmt_iterator gsi;
5191
5192 dummy = fallthru->src;
5193 bb = fallthru->dest;
5194
5195 if (single_pred_p (bb))
5196 return;
5197
5198 /* If we redirected a branch we must create new PHI nodes at the
5199 start of BB. */
5200 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5201 {
5202 gimple phi, new_phi;
5203
5204 phi = gsi_stmt (gsi);
5205 var = gimple_phi_result (phi);
5206 new_phi = create_phi_node (var, bb);
5207 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5208 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5209 UNKNOWN_LOCATION);
5210 }
5211
5212 /* Add the arguments we have stored on edges. */
5213 FOR_EACH_EDGE (e, ei, bb->preds)
5214 {
5215 if (e == fallthru)
5216 continue;
5217
5218 flush_pending_stmts (e);
5219 }
5220 }
5221
5222
5223 /* Return a non-special label at the head of basic block BB.
5224 Create one if it doesn't exist. */
5225
5226 tree
5227 gimple_block_label (basic_block bb)
5228 {
5229 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5230 bool first = true;
5231 tree label;
5232 gimple stmt;
5233
5234 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5235 {
5236 stmt = gsi_stmt (i);
5237 if (gimple_code (stmt) != GIMPLE_LABEL)
5238 break;
5239 label = gimple_label_label (stmt);
5240 if (!DECL_NONLOCAL (label))
5241 {
5242 if (!first)
5243 gsi_move_before (&i, &s);
5244 return label;
5245 }
5246 }
5247
5248 label = create_artificial_label (UNKNOWN_LOCATION);
5249 stmt = gimple_build_label (label);
5250 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5251 return label;
5252 }
5253
5254
5255 /* Attempt to perform edge redirection by replacing a possibly complex
5256 control statement with a fallthru, i.e. by removing it entirely.
5257 This applies only if all edges now point to the same block. The
5258 parameters and return values are equivalent to
5259 redirect_edge_and_branch. */
5260
5261 static edge
5262 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5263 {
5264 basic_block src = e->src;
5265 gimple_stmt_iterator i;
5266 gimple stmt;
5267
5268 /* We can replace or remove a complex jump only when we have exactly
5269 two edges. */
5270 if (EDGE_COUNT (src->succs) != 2
5271 /* Verify that all targets will be TARGET. Specifically, the
5272 edge that is not E must also go to TARGET. */
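     /* (EDGE_SUCC (src, 0) == e evaluates to 0 or 1, so the expression
	below selects whichever successor edge is not E.)  */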
5273 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5274 return NULL;
5275
5276 i = gsi_last_bb (src);
5277 if (gsi_end_p (i))
5278 return NULL;
5279
5280 stmt = gsi_stmt (i);
5281
5282 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5283 {
5284 gsi_remove (&i, true);
5285 e = ssa_redirect_edge (e, target);
5286 e->flags = EDGE_FALLTHRU;
5287 return e;
5288 }
5289
5290 return NULL;
5291 }
5292
5293
5294 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5295 edge representing the redirected branch. */
5296
5297 static edge
5298 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5299 {
5300 basic_block bb = e->src;
5301 gimple_stmt_iterator gsi;
5302 edge ret;
5303 gimple stmt;
5304
5305 if (e->flags & EDGE_ABNORMAL)
5306 return NULL;
5307
5308 if (e->dest == dest)
5309 return NULL;
5310
5311 if (e->flags & EDGE_EH)
5312 return redirect_eh_edge (e, dest);
5313
5314 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5315 {
5316 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5317 if (ret)
5318 return ret;
5319 }
5320
5321 gsi = gsi_last_bb (bb);
5322 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5323
5324 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5325 {
5326 case GIMPLE_COND:
5327 /* For COND_EXPR, we only need to redirect the edge. */
5328 break;
5329
5330 case GIMPLE_GOTO:
5331 /* No non-abnormal edges should lead from a non-simple goto, and
5332 simple ones should be represented implicitly. */
5333 gcc_unreachable ();
5334
5335 case GIMPLE_SWITCH:
5336 {
5337 tree label = gimple_block_label (dest);
5338 tree cases = get_cases_for_edge (e, stmt);
5339
5340 /* If we have a list of cases associated with E, then use it
5341 as it's a lot faster than walking the entire case vector. */
5342 if (cases)
5343 {
5344 edge e2 = find_edge (e->src, dest);
5345 tree last, first;
5346
5347 first = cases;
5348 while (cases)
5349 {
5350 last = cases;
5351 CASE_LABEL (cases) = label;
5352 cases = CASE_CHAIN (cases);
5353 }
5354
5355 /* If there was already an edge in the CFG, then we need
5356 to move all the cases associated with E to E2. */
5357 if (e2)
5358 {
5359 tree cases2 = get_cases_for_edge (e2, stmt);
5360
5361 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5362 CASE_CHAIN (cases2) = first;
5363 }
5364 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5365 }
5366 else
5367 {
5368 size_t i, n = gimple_switch_num_labels (stmt);
5369
5370 for (i = 0; i < n; i++)
5371 {
5372 tree elt = gimple_switch_label (stmt, i);
5373 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5374 CASE_LABEL (elt) = label;
5375 }
5376 }
5377 }
5378 break;
5379
5380 case GIMPLE_ASM:
5381 {
5382 int i, n = gimple_asm_nlabels (stmt);
5383 tree label = NULL;
5384
5385 for (i = 0; i < n; ++i)
5386 {
5387 tree cons = gimple_asm_label_op (stmt, i);
5388 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5389 {
5390 if (!label)
5391 label = gimple_block_label (dest);
5392 TREE_VALUE (cons) = label;
5393 }
5394 }
5395
5396 /* If we didn't find any label matching the former edge in the
5397 asm labels, we must be redirecting the fallthrough
5398 edge. */
5399 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5400 }
5401 break;
5402
5403 case GIMPLE_RETURN:
5404 gsi_remove (&gsi, true);
5405 e->flags |= EDGE_FALLTHRU;
5406 break;
5407
5408 case GIMPLE_OMP_RETURN:
5409 case GIMPLE_OMP_CONTINUE:
5410 case GIMPLE_OMP_SECTIONS_SWITCH:
5411 case GIMPLE_OMP_FOR:
5412 /* The edges from OMP constructs can be simply redirected. */
5413 break;
5414
5415 case GIMPLE_EH_DISPATCH:
5416 if (!(e->flags & EDGE_FALLTHRU))
5417 redirect_eh_dispatch_edge (stmt, e, dest);
5418 break;
5419
5420 case GIMPLE_TRANSACTION:
5421 /* The ABORT edge has a stored label associated with it, otherwise
5422 the edges are simply redirectable. */
5423 if (e->flags == 0)
5424 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5425 break;
5426
5427 default:
5428 /* Otherwise it must be a fallthru edge, and we don't need to
5429 do anything besides redirecting it. */
5430 gcc_assert (e->flags & EDGE_FALLTHRU);
5431 break;
5432 }
5433
5434 /* Update/insert PHI nodes as necessary. */
5435
5436 /* Now update the edges in the CFG. */
5437 e = ssa_redirect_edge (e, dest);
5438
5439 return e;
5440 }
5441
5442 /* Returns true if it is possible to remove edge E by redirecting
5443 it to the destination of the other edge from E->src. */
5444
5445 static bool
5446 gimple_can_remove_branch_p (const_edge e)
5447 {
5448 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5449 return false;
5450
5451 return true;
5452 }
5453
5454 /* Simple wrapper, as we can always redirect fallthru edges. */
5455
5456 static basic_block
5457 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5458 {
5459 e = gimple_redirect_edge_and_branch (e, dest);
5460 gcc_assert (e);
5461
5462 return NULL;
5463 }
5464
5465
5466 /* Splits basic block BB after statement STMT (but at least after the
5467 labels). If STMT is NULL, BB is split just after the labels. */
5468
5469 static basic_block
5470 gimple_split_block (basic_block bb, void *stmt)
5471 {
5472 gimple_stmt_iterator gsi;
5473 gimple_stmt_iterator gsi_tgt;
5474 gimple act;
5475 gimple_seq list;
5476 basic_block new_bb;
5477 edge e;
5478 edge_iterator ei;
5479
5480 new_bb = create_empty_bb (bb);
5481
5482 /* Redirect the outgoing edges. */
5483 new_bb->succs = bb->succs;
5484 bb->succs = NULL;
5485 FOR_EACH_EDGE (e, ei, new_bb->succs)
5486 e->src = new_bb;
5487
5488 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5489 stmt = NULL;
5490
5491 /* Move everything from GSI to the new basic block. */
5492 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5493 {
5494 act = gsi_stmt (gsi);
5495 if (gimple_code (act) == GIMPLE_LABEL)
5496 continue;
5497
5498 if (!stmt)
5499 break;
5500
5501 if (stmt == act)
5502 {
5503 gsi_next (&gsi);
5504 break;
5505 }
5506 }
5507
5508 if (gsi_end_p (gsi))
5509 return new_bb;
5510
5511 /* Split the statement list - avoid re-creating new containers as this
5512 brings ugly quadratic memory consumption in the inliner.
5513 (We are still quadratic since we need to update stmt BB pointers,
5514 sadly.) */
5515 gsi_split_seq_before (&gsi, &list);
5516 set_bb_seq (new_bb, list);
5517 for (gsi_tgt = gsi_start (list);
5518 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5519 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5520
5521 return new_bb;
5522 }
5523
5524
5525 /* Moves basic block BB after block AFTER. */
5526
5527 static bool
5528 gimple_move_block_after (basic_block bb, basic_block after)
5529 {
5530 if (bb->prev_bb == after)
5531 return true;
5532
5533 unlink_block (bb);
5534 link_block (bb, after);
5535
5536 return true;
5537 }
5538
5539
5540 /* Return TRUE if block BB has no executable statements, otherwise return
5541 FALSE. */
5542
5543 static bool
5544 gimple_empty_block_p (basic_block bb)
5545 {
5546 /* BB must have no executable statements. */
5547 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5548 if (phi_nodes (bb))
5549 return false;
5550 if (gsi_end_p (gsi))
5551 return true;
5552 if (is_gimple_debug (gsi_stmt (gsi)))
5553 gsi_next_nondebug (&gsi);
5554 return gsi_end_p (gsi);
5555 }
5556
5557
5558 /* Split a basic block if it ends with a conditional branch and if the
5559 other part of the block is not empty. */
5560
5561 static basic_block
5562 gimple_split_block_before_cond_jump (basic_block bb)
5563 {
5564 gimple last, split_point;
5565 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5566 if (gsi_end_p (gsi))
5567 return NULL;
5568 last = gsi_stmt (gsi);
5569 if (gimple_code (last) != GIMPLE_COND
5570 && gimple_code (last) != GIMPLE_SWITCH)
5571 return NULL;
5572 gsi_prev_nondebug (&gsi);
5573 split_point = gsi_stmt (gsi);
5574 return split_block (bb, split_point)->dest;
5575 }
5576
5577
5578 /* Return true if basic_block can be duplicated. */
5579
5580 static bool
5581 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5582 {
5583 return true;
5584 }
5585
5586 /* Create a duplicate of the basic block BB. NOTE: This does not
5587 preserve SSA form. */
5588
5589 static basic_block
5590 gimple_duplicate_bb (basic_block bb)
5591 {
5592 basic_block new_bb;
5593 gimple_stmt_iterator gsi, gsi_tgt;
5594 gimple_seq phis = phi_nodes (bb);
5595 gimple phi, stmt, copy;
5596
5597 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5598
5599 /* Copy the PHI nodes. We ignore PHI node arguments here because
5600 the incoming edges have not been set up yet. */
5601 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5602 {
5603 phi = gsi_stmt (gsi);
5604 copy = create_phi_node (NULL_TREE, new_bb);
5605 create_new_def_for (gimple_phi_result (phi), copy,
5606 gimple_phi_result_ptr (copy));
5607 gimple_set_uid (copy, gimple_uid (phi));
5608 }
5609
5610 gsi_tgt = gsi_start_bb (new_bb);
5611 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5612 {
5613 def_operand_p def_p;
5614 ssa_op_iter op_iter;
5615 tree lhs;
5616
5617 stmt = gsi_stmt (gsi);
5618 if (gimple_code (stmt) == GIMPLE_LABEL)
5619 continue;
5620
5621 /* Don't duplicate label debug stmts. */
5622 if (gimple_debug_bind_p (stmt)
5623 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5624 == LABEL_DECL)
5625 continue;
5626
5627 /* Create a new copy of STMT and duplicate STMT's virtual
5628 operands. */
5629 copy = gimple_copy (stmt);
5630 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5631
5632 maybe_duplicate_eh_stmt (copy, stmt);
5633 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5634
5635 /* When copying around a stmt writing into a local non-user
5636 aggregate, make sure it won't share a stack slot with other
5637 vars. */
5638 lhs = gimple_get_lhs (stmt);
5639 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5640 {
5641 tree base = get_base_address (lhs);
5642 if (base
5643 && (TREE_CODE (base) == VAR_DECL
5644 || TREE_CODE (base) == RESULT_DECL)
5645 && DECL_IGNORED_P (base)
5646 && !TREE_STATIC (base)
5647 && !DECL_EXTERNAL (base)
5648 && (TREE_CODE (base) != VAR_DECL
5649 || !DECL_HAS_VALUE_EXPR_P (base)))
5650 DECL_NONSHAREABLE (base) = 1;
5651 }
5652
5653 /* Create new names for all the definitions created by COPY and
5654 add replacement mappings for each new name. */
5655 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5656 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5657 }
5658
5659 return new_bb;
5660 }
5661
5662 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5663
5664 static void
5665 add_phi_args_after_copy_edge (edge e_copy)
5666 {
5667 basic_block bb, bb_copy = e_copy->src, dest;
5668 edge e;
5669 edge_iterator ei;
5670 gimple phi, phi_copy;
5671 tree def;
5672 gimple_stmt_iterator psi, psi_copy;
5673
5674 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5675 return;
5676
5677 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5678
5679 if (e_copy->dest->flags & BB_DUPLICATED)
5680 dest = get_bb_original (e_copy->dest);
5681 else
5682 dest = e_copy->dest;
5683
5684 e = find_edge (bb, dest);
5685 if (!e)
5686 {
5687 /* During loop unrolling the target of the latch edge is copied.
5688 In this case we are not looking for the edge to DEST, but for
5689 the edge to the duplicated block whose original was DEST. */
5690 FOR_EACH_EDGE (e, ei, bb->succs)
5691 {
5692 if ((e->dest->flags & BB_DUPLICATED)
5693 && get_bb_original (e->dest) == dest)
5694 break;
5695 }
5696
5697 gcc_assert (e != NULL);
5698 }
5699
5700 for (psi = gsi_start_phis (e->dest),
5701 psi_copy = gsi_start_phis (e_copy->dest);
5702 !gsi_end_p (psi);
5703 gsi_next (&psi), gsi_next (&psi_copy))
5704 {
5705 phi = gsi_stmt (psi);
5706 phi_copy = gsi_stmt (psi_copy);
5707 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5708 add_phi_arg (phi_copy, def, e_copy,
5709 gimple_phi_arg_location_from_edge (phi, e));
5710 }
5711 }
5712
5713
5714 /* Basic block BB_COPY was created by code duplication. Add phi node
5715 arguments for edges going out of BB_COPY. The blocks that were
5716 duplicated have BB_DUPLICATED set. */
5717
5718 void
5719 add_phi_args_after_copy_bb (basic_block bb_copy)
5720 {
5721 edge e_copy;
5722 edge_iterator ei;
5723
5724 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5725 {
5726 add_phi_args_after_copy_edge (e_copy);
5727 }
5728 }
5729
5730 /* Blocks in REGION_COPY array of length N_REGION were created by
5731 duplication of basic blocks. Add phi node arguments for edges
5732 going from these blocks. If E_COPY is not NULL, also add
5733 phi node arguments for its destination. */
5734
5735 void
5736 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5737 edge e_copy)
5738 {
5739 unsigned i;
5740
5741 for (i = 0; i < n_region; i++)
5742 region_copy[i]->flags |= BB_DUPLICATED;
5743
5744 for (i = 0; i < n_region; i++)
5745 add_phi_args_after_copy_bb (region_copy[i]);
5746 if (e_copy)
5747 add_phi_args_after_copy_edge (e_copy);
5748
5749 for (i = 0; i < n_region; i++)
5750 region_copy[i]->flags &= ~BB_DUPLICATED;
5751 }
5752
5753 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5754 important exit edge EXIT. By important we mean that no SSA name defined
5755 inside region is live over the other exit edges of the region. All entry
5756 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5757 to the duplicate of the region. Dominance and loop information is
5758 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5759 UPDATE_DOMINANCE is false then we assume that the caller will update the
5760 dominance information after calling this function. The new basic
5761 blocks are stored to REGION_COPY in the same order as they had in REGION,
5762 provided that REGION_COPY is not NULL.
5763 The function returns false if it is unable to copy the region,
5764 true otherwise. */
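/* For the primary use, loop header copying, this rewrites
   (illustratively)

     while (cond) body;

   into

     if (cond) { do body; while (cond); }

   by duplicating the header block and redirecting ENTRY to the copy.  */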
5765
5766 bool
5767 gimple_duplicate_sese_region (edge entry, edge exit,
5768 basic_block *region, unsigned n_region,
5769 basic_block *region_copy,
5770 bool update_dominance)
5771 {
5772 unsigned i;
5773 bool free_region_copy = false, copying_header = false;
5774 struct loop *loop = entry->dest->loop_father;
5775 edge exit_copy;
5776 vec<basic_block> doms;
5777 edge redirected;
5778 int total_freq = 0, entry_freq = 0;
5779 gcov_type total_count = 0, entry_count = 0;
5780
5781 if (!can_copy_bbs_p (region, n_region))
5782 return false;
5783
5784 /* Some sanity checking. Note that we do not check for all possible
5785 misuses of the function. That is, if you ask to copy something weird,
5786 it will work, but the state of the structures probably will not be
5787 correct. */
5788 for (i = 0; i < n_region; i++)
5789 {
5790 /* We do not handle subloops, i.e. all the blocks must belong to the
5791 same loop. */
5792 if (region[i]->loop_father != loop)
5793 return false;
5794
5795 if (region[i] != entry->dest
5796 && region[i] == loop->header)
5797 return false;
5798 }
5799
5800 set_loop_copy (loop, loop);
5801
5802 /* In case the function is used for loop header copying (which is the primary
5803 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5804 if (loop->header == entry->dest)
5805 {
5806 copying_header = true;
5807 set_loop_copy (loop, loop_outer (loop));
5808
5809 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5810 return false;
5811
5812 for (i = 0; i < n_region; i++)
5813 if (region[i] != exit->src
5814 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5815 return false;
5816 }
5817
5818 if (!region_copy)
5819 {
5820 region_copy = XNEWVEC (basic_block, n_region);
5821 free_region_copy = true;
5822 }
5823
5824 initialize_original_copy_tables ();
5825
5826 /* Record blocks outside the region that are dominated by something
5827 inside. */
5828 if (update_dominance)
5829 {
5830 doms.create (0);
5831 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5832 }
5833
5834 if (entry->dest->count)
5835 {
5836 total_count = entry->dest->count;
5837 entry_count = entry->count;
5838 /* Fix up corner cases, to avoid division by zero or creation of negative
5839 frequencies. */
5840 if (entry_count > total_count)
5841 entry_count = total_count;
5842 }
5843 else
5844 {
5845 total_freq = entry->dest->frequency;
5846 entry_freq = EDGE_FREQUENCY (entry);
5847 /* Fix up corner cases, to avoid division by zero or creation of negative
5848 frequencies. */
5849 if (total_freq == 0)
5850 total_freq = 1;
5851 else if (entry_freq > total_freq)
5852 entry_freq = total_freq;
5853 }
5854
5855 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5856 split_edge_bb_loc (entry), update_dominance);
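  /* Rescale the profile: e.g. if ENTRY carries 30 of the 100 counts
     reaching the region head, the original blocks keep 70/100 of their
     counts and the copies receive 30/100 (illustrative numbers).  */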
5857 if (total_count)
5858 {
5859 scale_bbs_frequencies_gcov_type (region, n_region,
5860 total_count - entry_count,
5861 total_count);
5862 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5863 total_count);
5864 }
5865 else
5866 {
5867 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5868 total_freq);
5869 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5870 }
5871
5872 if (copying_header)
5873 {
5874 loop->header = exit->dest;
5875 loop->latch = exit->src;
5876 }
5877
5878 /* Redirect the entry and add the phi node arguments. */
5879 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5880 gcc_assert (redirected != NULL);
5881 flush_pending_stmts (entry);
5882
5883 /* Concerning updating of dominators: We must recount dominators
5884 for the entry block and its copy. Anything outside of the
5885 region that was dominated by something inside needs recounting
5886 as well. */
5887 if (update_dominance)
5888 {
5889 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5890 doms.safe_push (get_bb_original (entry->dest));
5891 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5892 doms.release ();
5893 }
5894
5895 /* Add the other PHI node arguments. */
5896 add_phi_args_after_copy (region_copy, n_region, NULL);
5897
5898 if (free_region_copy)
5899 free (region_copy);
5900
5901 free_original_copy_tables ();
5902 return true;
5903 }
5904
5905 /* Checks if BB is part of the region defined by N_REGION BBS. */
5906 static bool
5907 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5908 {
5909 unsigned int n;
5910
5911 for (n = 0; n < n_region; n++)
5912 {
5913 if (bb == bbs[n])
5914 return true;
5915 }
5916 return false;
5917 }
5918
5919 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5920 are stored to REGION_COPY in the same order as they appear
5921 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5922 the region, EXIT an exit from it. The condition guarding EXIT
5923 is moved to ENTRY. Returns true if duplication succeeds, false
5924 otherwise.
5925
5926 For example,
5927
5928 some_code;
5929 if (cond)
5930 A;
5931 else
5932 B;
5933
5934 is transformed to
5935
5936 if (cond)
5937 {
5938 some_code;
5939 A;
5940 }
5941 else
5942 {
5943 some_code;
5944 B;
5945 }
5946 */
5947
5948 bool
5949 gimple_duplicate_sese_tail (edge entry, edge exit,
5950 basic_block *region, unsigned n_region,
5951 basic_block *region_copy)
5952 {
5953 unsigned i;
5954 bool free_region_copy = false;
5955 struct loop *loop = exit->dest->loop_father;
5956 struct loop *orig_loop = entry->dest->loop_father;
5957 basic_block switch_bb, entry_bb, nentry_bb;
5958 vec<basic_block> doms;
5959 int total_freq = 0, exit_freq = 0;
5960 gcov_type total_count = 0, exit_count = 0;
5961 edge exits[2], nexits[2], e;
5962 gimple_stmt_iterator gsi;
5963 gimple cond_stmt;
5964 edge sorig, snew;
5965 basic_block exit_bb;
5966 gimple_stmt_iterator psi;
5967 gimple phi;
5968 tree def;
5969 struct loop *target, *aloop, *cloop;
5970
5971 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5972 exits[0] = exit;
5973 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5974
5975 if (!can_copy_bbs_p (region, n_region))
5976 return false;
5977
5978 initialize_original_copy_tables ();
5979 set_loop_copy (orig_loop, loop);
5980
5981 target = loop;
5982 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5983 {
5984 if (bb_part_of_region_p (aloop->header, region, n_region))
5985 {
5986 cloop = duplicate_loop (aloop, target);
5987 duplicate_subloops (aloop, cloop);
5988 }
5989 }
5990
5991 if (!region_copy)
5992 {
5993 region_copy = XNEWVEC (basic_block, n_region);
5994 free_region_copy = true;
5995 }
5996
5997 gcc_assert (!need_ssa_update_p (cfun));
5998
5999 /* Record blocks outside the region that are dominated by something
6000 inside. */
6001 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6002
6003 if (exit->src->count)
6004 {
6005 total_count = exit->src->count;
6006 exit_count = exit->count;
6007 /* Fix up corner cases, to avoid division by zero or creation of negative
6008 frequencies. */
6009 if (exit_count > total_count)
6010 exit_count = total_count;
6011 }
6012 else
6013 {
6014 total_freq = exit->src->frequency;
6015 exit_freq = EDGE_FREQUENCY (exit);
6016 /* Fix up corner cases, to avoid division by zero or creation of negative
6017 frequencies. */
6018 if (total_freq == 0)
6019 total_freq = 1;
6020 if (exit_freq > total_freq)
6021 exit_freq = total_freq;
6022 }
6023
6024 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6025 split_edge_bb_loc (exit), true);
6026 if (total_count)
6027 {
6028 scale_bbs_frequencies_gcov_type (region, n_region,
6029 total_count - exit_count,
6030 total_count);
6031 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6032 total_count);
6033 }
6034 else
6035 {
6036 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6037 total_freq);
6038 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6039 }
6040
6041 /* Create the switch block, and put the exit condition into it. */
6042 entry_bb = entry->dest;
6043 nentry_bb = get_bb_copy (entry_bb);
6044 if (!last_stmt (entry->src)
6045 || !stmt_ends_bb_p (last_stmt (entry->src)))
6046 switch_bb = entry->src;
6047 else
6048 switch_bb = split_edge (entry);
6049 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6050
6051 gsi = gsi_last_bb (switch_bb);
6052 cond_stmt = last_stmt (exit->src);
6053 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6054 cond_stmt = gimple_copy (cond_stmt);
6055
6056 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6057
6058 sorig = single_succ_edge (switch_bb);
6059 sorig->flags = exits[1]->flags;
6060 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6061
6062 /* Register the new edge from SWITCH_BB in loop exit lists. */
6063 rescan_loop_exit (snew, true, false);
6064
6065 /* Add the PHI node arguments. */
6066 add_phi_args_after_copy (region_copy, n_region, snew);
6067
6068 /* Get rid of now superfluous conditions and associated edges (and phi node
6069 arguments). */
6070 exit_bb = exit->dest;
6071
6072 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6073 PENDING_STMT (e) = NULL;
6074
6075 /* The latch of ORIG_LOOP was copied, and so was the backedge
6076 to the original header. We redirect this backedge to EXIT_BB. */
6077 for (i = 0; i < n_region; i++)
6078 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6079 {
6080 gcc_assert (single_succ_edge (region_copy[i]));
6081 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6082 PENDING_STMT (e) = NULL;
6083 for (psi = gsi_start_phis (exit_bb);
6084 !gsi_end_p (psi);
6085 gsi_next (&psi))
6086 {
6087 phi = gsi_stmt (psi);
6088 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6089 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6090 }
6091 }
6092 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6093 PENDING_STMT (e) = NULL;
6094
6095 /* Anything that is outside of the region, but was dominated by something
6096 inside, needs its dominance info updated.  */
6097 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6098 doms.release ();
6099 /* Update the SSA web. */
6100 update_ssa (TODO_update_ssa);
6101
6102 if (free_region_copy)
6103 free (region_copy);
6104
6105 free_original_copy_tables ();
6106 return true;
6107 }
6108
6109 /* Add all the blocks dominated by ENTRY to the array BBS_P.  EXIT
6110 itself is added, but the traversal does not descend below it.  This
6111 function silently assumes that ENTRY strictly dominates EXIT. */
6112
6113 void
6114 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6115 vec<basic_block> *bbs_p)
6116 {
6117 basic_block son;
6118
6119 for (son = first_dom_son (CDI_DOMINATORS, entry);
6120 son;
6121 son = next_dom_son (CDI_DOMINATORS, son))
6122 {
6123 bbs_p->safe_push (son);
6124 if (son != exit)
6125 gather_blocks_in_sese_region (son, exit, bbs_p);
6126 }
6127 }
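
/* For example, a caller that wants the blocks of an SESE region seeds
   the vector with ENTRY itself (which the recursion above never
   pushes) and releases it afterwards, along the lines of:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();

   move_sese_region_to_fn below collects its region exactly this way.  */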
6128
6129 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6130 The duplicates are recorded in VARS_MAP. */
6131
6132 static void
6133 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6134 tree to_context)
6135 {
6136 tree t = *tp, new_t;
6137 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6138 void **loc;
6139
6140 if (DECL_CONTEXT (t) == to_context)
6141 return;
6142
6143 loc = pointer_map_contains (vars_map, t);
6144
6145 if (!loc)
6146 {
6147 loc = pointer_map_insert (vars_map, t);
6148
6149 if (SSA_VAR_P (t))
6150 {
6151 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6152 add_local_decl (f, new_t);
6153 }
6154 else
6155 {
6156 gcc_assert (TREE_CODE (t) == CONST_DECL);
6157 new_t = copy_node (t);
6158 }
6159 DECL_CONTEXT (new_t) = to_context;
6160
6161 *loc = new_t;
6162 }
6163 else
6164 new_t = (tree) *loc;
6165
6166 *tp = new_t;
6167 }
6168
6169
6170 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6171 VARS_MAP maps old ssa names and var_decls to the new ones. */
6172
6173 static tree
6174 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6175 tree to_context)
6176 {
6177 void **loc;
6178 tree new_name;
6179
6180 gcc_assert (!virtual_operand_p (name));
6181
6182 loc = pointer_map_contains (vars_map, name);
6183
6184 if (!loc)
6185 {
6186 tree decl = SSA_NAME_VAR (name);
6187 if (decl)
6188 {
6189 replace_by_duplicate_decl (&decl, vars_map, to_context);
6190 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6191 decl, SSA_NAME_DEF_STMT (name));
6192 if (SSA_NAME_IS_DEFAULT_DEF (name))
6193 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6194 decl, new_name);
6195 }
6196 else
6197 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6198 name, SSA_NAME_DEF_STMT (name));
6199
6200 loc = pointer_map_insert (vars_map, name);
6201 *loc = new_name;
6202 }
6203 else
6204 new_name = (tree) *loc;
6205
6206 return new_name;
6207 }
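
/* Note the caching: if NAME occurs several times in the moved region,
   only the first call allocates a duplicate; later calls return the
   tree recorded in VARS_MAP, so that, roughly,

     tree n1 = replace_ssa_name (name, vars_map, dest_fn_decl);
     tree n2 = replace_ssa_name (name, vars_map, dest_fn_decl);
     gcc_assert (n1 == n2);

   holds for any NAME (dest_fn_decl standing for the new context).  */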
6208
6209 struct move_stmt_d
6210 {
6211 tree orig_block;
6212 tree new_block;
6213 tree from_context;
6214 tree to_context;
6215 struct pointer_map_t *vars_map;
6216 htab_t new_label_map;
6217 struct pointer_map_t *eh_map;
6218 bool remap_decls_p;
6219 };
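
/* A walker fills this in the way move_sese_region_to_fn does below,
   e.g. roughly:

     struct move_stmt_d d;
     memset (&d, 0, sizeof (d));
     d.orig_block = orig_block;
     d.new_block = DECL_INITIAL (dest_cfun->decl);
     d.from_context = cfun->decl;
     d.to_context = dest_cfun->decl;
     d.vars_map = vars_map;
     d.remap_decls_p = true;

   and then hands it to walk_gimple_stmt through walk_stmt_info.info.  */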
6220
6221 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6222 contained in *TP that previously had ORIG_BLOCK, and change the
6223 DECL_CONTEXT of every local variable referenced in *TP. */
6224
6225 static tree
6226 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6227 {
6228 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6229 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6230 tree t = *tp;
6231
6232 if (EXPR_P (t))
6233 {
6234 tree block = TREE_BLOCK (t);
6235 if (block == p->orig_block
6236 || (p->orig_block == NULL_TREE
6237 && block != NULL_TREE))
6238 TREE_SET_BLOCK (t, p->new_block);
6239 #ifdef ENABLE_CHECKING
6240 else if (block != NULL_TREE)
6241 {
6242 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6243 block = BLOCK_SUPERCONTEXT (block);
6244 gcc_assert (block == p->orig_block);
6245 }
6246 #endif
6247 }
6248 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6249 {
6250 if (TREE_CODE (t) == SSA_NAME)
6251 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6252 else if (TREE_CODE (t) == LABEL_DECL)
6253 {
6254 if (p->new_label_map)
6255 {
6256 struct tree_map in, *out;
6257 in.base.from = t;
6258 out = (struct tree_map *)
6259 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6260 if (out)
6261 *tp = t = out->to;
6262 }
6263
6264 DECL_CONTEXT (t) = p->to_context;
6265 }
6266 else if (p->remap_decls_p)
6267 {
6268 /* Replace T with its duplicate. T should no longer appear in the
6269 parent function, so this looks wasteful; however, it may appear
6270 in referenced_vars, and more importantly, as virtual operands of
6271 statements, and in alias lists of other variables. It would be
6272 quite difficult to expunge it from all those places. ??? It might
6273 suffice to do this for addressable variables. */
6274 if ((TREE_CODE (t) == VAR_DECL
6275 && !is_global_var (t))
6276 || TREE_CODE (t) == CONST_DECL)
6277 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6278 }
6279 *walk_subtrees = 0;
6280 }
6281 else if (TYPE_P (t))
6282 *walk_subtrees = 0;
6283
6284 return NULL_TREE;
6285 }
6286
6287 /* Helper for move_stmt_r. Given an EH region number for the source
6288 function, map that to the duplicate EH region number in the dest. */
6289
6290 static int
6291 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6292 {
6293 eh_region old_r, new_r;
6294 void **slot;
6295
6296 old_r = get_eh_region_from_number (old_nr);
6297 slot = pointer_map_contains (p->eh_map, old_r);
6298 new_r = (eh_region) *slot;
6299
6300 return new_r->index;
6301 }
6302
6303 /* Similar, but operate on INTEGER_CSTs. */
6304
6305 static tree
6306 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6307 {
6308 int old_nr, new_nr;
6309
6310 old_nr = tree_to_shwi (old_t_nr);
6311 new_nr = move_stmt_eh_region_nr (old_nr, p);
6312
6313 return build_int_cst (integer_type_node, new_nr);
6314 }
6315
6316 /* Like move_stmt_op, but for gimple statements.
6317
6318 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6319 contained in the current statement in *GSI_P and change the
6320 DECL_CONTEXT of every local variable referenced in the current
6321 statement. */
6322
6323 static tree
6324 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6325 struct walk_stmt_info *wi)
6326 {
6327 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6328 gimple stmt = gsi_stmt (*gsi_p);
6329 tree block = gimple_block (stmt);
6330
6331 if (block == p->orig_block
6332 || (p->orig_block == NULL_TREE
6333 && block != NULL_TREE))
6334 gimple_set_block (stmt, p->new_block);
6335
6336 switch (gimple_code (stmt))
6337 {
6338 case GIMPLE_CALL:
6339 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6340 {
6341 tree r, fndecl = gimple_call_fndecl (stmt);
6342 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6343 switch (DECL_FUNCTION_CODE (fndecl))
6344 {
6345 case BUILT_IN_EH_COPY_VALUES:
6346 r = gimple_call_arg (stmt, 1);
6347 r = move_stmt_eh_region_tree_nr (r, p);
6348 gimple_call_set_arg (stmt, 1, r);
6349 /* FALLTHRU */
6350
6351 case BUILT_IN_EH_POINTER:
6352 case BUILT_IN_EH_FILTER:
6353 r = gimple_call_arg (stmt, 0);
6354 r = move_stmt_eh_region_tree_nr (r, p);
6355 gimple_call_set_arg (stmt, 0, r);
6356 break;
6357
6358 default:
6359 break;
6360 }
6361 }
6362 break;
6363
6364 case GIMPLE_RESX:
6365 {
6366 int r = gimple_resx_region (stmt);
6367 r = move_stmt_eh_region_nr (r, p);
6368 gimple_resx_set_region (stmt, r);
6369 }
6370 break;
6371
6372 case GIMPLE_EH_DISPATCH:
6373 {
6374 int r = gimple_eh_dispatch_region (stmt);
6375 r = move_stmt_eh_region_nr (r, p);
6376 gimple_eh_dispatch_set_region (stmt, r);
6377 }
6378 break;
6379
6380 case GIMPLE_OMP_RETURN:
6381 case GIMPLE_OMP_CONTINUE:
6382 break;
6383 default:
6384 if (is_gimple_omp (stmt))
6385 {
6386 /* Do not remap variables inside OMP directives. Variables
6387 referenced in clauses and directive header belong to the
6388 parent function and should not be moved into the child
6389 function. */
6390 bool save_remap_decls_p = p->remap_decls_p;
6391 p->remap_decls_p = false;
6392 *handled_ops_p = true;
6393
6394 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6395 move_stmt_op, wi);
6396
6397 p->remap_decls_p = save_remap_decls_p;
6398 }
6399 break;
6400 }
6401
6402 return NULL_TREE;
6403 }
6404
6405 /* Move basic block BB from function CFUN to function DEST_FN. The
6406 block is moved out of the original linked list and placed after
6407 block AFTER in the new list. Also, the block is removed from the
6408 original array of blocks and placed in DEST_FN's array of blocks.
6409 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6410 updated to reflect the moved edges.
6411
6412 The local variables are remapped to new instances; VARS_MAP is used
6413 to record the mapping. */
6414
6415 static void
6416 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6417 basic_block after, bool update_edge_count_p,
6418 struct move_stmt_d *d)
6419 {
6420 struct control_flow_graph *cfg;
6421 edge_iterator ei;
6422 edge e;
6423 gimple_stmt_iterator si;
6424 unsigned old_len, new_len;
6425
6426 /* Remove BB from dominance structures. */
6427 delete_from_dominance_info (CDI_DOMINATORS, bb);
6428
6429 /* Move BB from its current loop to the copy in the new function. */
6430 if (current_loops)
6431 {
6432 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6433 if (new_loop)
6434 bb->loop_father = new_loop;
6435 }
6436
6437 /* Link BB to the new linked list. */
6438 move_block_after (bb, after);
6439
6440 /* Update the edge count in the corresponding flowgraphs. */
6441 if (update_edge_count_p)
6442 FOR_EACH_EDGE (e, ei, bb->succs)
6443 {
6444 cfun->cfg->x_n_edges--;
6445 dest_cfun->cfg->x_n_edges++;
6446 }
6447
6448 /* Remove BB from the original basic block array. */
6449 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6450 cfun->cfg->x_n_basic_blocks--;
6451
6452 /* Grow DEST_CFUN's basic block array if needed. */
6453 cfg = dest_cfun->cfg;
6454 cfg->x_n_basic_blocks++;
6455 if (bb->index >= cfg->x_last_basic_block)
6456 cfg->x_last_basic_block = bb->index + 1;
6457
6458 old_len = vec_safe_length (cfg->x_basic_block_info);
6459 if ((unsigned) cfg->x_last_basic_block >= old_len)
6460 {
6461 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6462 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6463 }
6464
6465 (*cfg->x_basic_block_info)[bb->index] = bb;
6466
6467 /* Remap the variables in phi nodes. */
6468 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6469 {
6470 gimple phi = gsi_stmt (si);
6471 use_operand_p use;
6472 tree op = PHI_RESULT (phi);
6473 ssa_op_iter oi;
6474 unsigned i;
6475
6476 if (virtual_operand_p (op))
6477 {
6478 /* Remove the phi nodes for virtual operands (alias analysis will be
6479 run for the new function, anyway). */
6480 remove_phi_node (&si, true);
6481 continue;
6482 }
6483
6484 SET_PHI_RESULT (phi,
6485 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6486 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6487 {
6488 op = USE_FROM_PTR (use);
6489 if (TREE_CODE (op) == SSA_NAME)
6490 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6491 }
6492
6493 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6494 {
6495 location_t locus = gimple_phi_arg_location (phi, i);
6496 tree block = LOCATION_BLOCK (locus);
6497
6498 if (locus == UNKNOWN_LOCATION)
6499 continue;
6500 if (d->orig_block == NULL_TREE || block == d->orig_block)
6501 {
6502 if (d->new_block == NULL_TREE)
6503 locus = LOCATION_LOCUS (locus);
6504 else
6505 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6506 gimple_phi_arg_set_location (phi, i, locus);
6507 }
6508 }
6509
6510 gsi_next (&si);
6511 }
6512
6513 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6514 {
6515 gimple stmt = gsi_stmt (si);
6516 struct walk_stmt_info wi;
6517
6518 memset (&wi, 0, sizeof (wi));
6519 wi.info = d;
6520 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6521
6522 if (gimple_code (stmt) == GIMPLE_LABEL)
6523 {
6524 tree label = gimple_label_label (stmt);
6525 int uid = LABEL_DECL_UID (label);
6526
6527 gcc_assert (uid > -1);
6528
6529 old_len = vec_safe_length (cfg->x_label_to_block_map);
6530 if (old_len <= (unsigned) uid)
6531 {
6532 new_len = 3 * uid / 2 + 1;
6533 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6534 }
6535
6536 (*cfg->x_label_to_block_map)[uid] = bb;
6537 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6538
6539 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6540
6541 if (uid >= dest_cfun->cfg->last_label_uid)
6542 dest_cfun->cfg->last_label_uid = uid + 1;
6543 }
6544
6545 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6546 remove_stmt_from_eh_lp_fn (cfun, stmt);
6547
6548 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6549 gimple_remove_stmt_histograms (cfun, stmt);
6550
6551 /* We cannot leave any operands allocated from the operand caches of
6552 the current function. */
6553 free_stmt_operands (stmt);
6554 push_cfun (dest_cfun);
6555 update_stmt (stmt);
6556 pop_cfun ();
6557 }
6558
6559 FOR_EACH_EDGE (e, ei, bb->succs)
6560 if (e->goto_locus != UNKNOWN_LOCATION)
6561 {
6562 tree block = LOCATION_BLOCK (e->goto_locus);
6563 if (d->orig_block == NULL_TREE
6564 || block == d->orig_block)
6565 e->goto_locus = d->new_block ?
6566 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6567 LOCATION_LOCUS (e->goto_locus);
6568 }
6569 }
6570
6571 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6572 the outermost EH region. Use REGION as the incoming base EH region. */
6573
6574 static eh_region
6575 find_outermost_region_in_block (struct function *src_cfun,
6576 basic_block bb, eh_region region)
6577 {
6578 gimple_stmt_iterator si;
6579
6580 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6581 {
6582 gimple stmt = gsi_stmt (si);
6583 eh_region stmt_region;
6584 int lp_nr;
6585
6586 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6587 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6588 if (stmt_region)
6589 {
6590 if (region == NULL)
6591 region = stmt_region;
6592 else if (stmt_region != region)
6593 {
6594 region = eh_region_outermost (src_cfun, stmt_region, region);
6595 gcc_assert (region != NULL);
6596 }
6597 }
6598 }
6599
6600 return region;
6601 }
6602
6603 static tree
6604 new_label_mapper (tree decl, void *data)
6605 {
6606 htab_t hash = (htab_t) data;
6607 struct tree_map *m;
6608 void **slot;
6609
6610 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6611
6612 m = XNEW (struct tree_map);
6613 m->hash = DECL_UID (decl);
6614 m->base.from = decl;
6615 m->to = create_artificial_label (UNKNOWN_LOCATION);
6616 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6617 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6618 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6619
6620 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6621 gcc_assert (*slot == NULL);
6622
6623 *slot = m;
6624
6625 return m->to;
6626 }
6627
6628 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6629 subblocks. */
6630
6631 static void
6632 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6633 tree to_context)
6634 {
6635 tree *tp, t;
6636
6637 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6638 {
6639 t = *tp;
6640 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6641 continue;
6642 replace_by_duplicate_decl (&t, vars_map, to_context);
6643 if (t != *tp)
6644 {
6645 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6646 {
6647 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6648 DECL_HAS_VALUE_EXPR_P (t) = 1;
6649 }
6650 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6651 *tp = t;
6652 }
6653 }
6654
6655 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6656 replace_block_vars_by_duplicates (block, vars_map, to_context);
6657 }
6658
6659 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6660 from FN1 to FN2. */
6661
6662 static void
6663 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6664 struct loop *loop)
6665 {
6666 /* Discard it from the old loop array. */
6667 (*get_loops (fn1))[loop->num] = NULL;
6668
6669 /* Place it in the new loop array, assigning it a new number. */
6670 loop->num = number_of_loops (fn2);
6671 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6672
6673 /* Recurse to children. */
6674 for (loop = loop->inner; loop; loop = loop->next)
6675 fixup_loop_arrays_after_move (fn1, fn2, loop);
6676 }
6677
6678 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6679 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6680 single basic block in the original CFG and the new basic block is
6681 returned. DEST_CFUN must not have a CFG yet.
6682
6683 Note that the region need not be a pure SESE region. Blocks inside
6684 the region may contain calls to abort/exit. The only restriction
6685 is that ENTRY_BB should be the only entry point and it must
6686 dominate EXIT_BB.
6687
6688 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6689 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6690 to the new function.
6691
6692 All local variables referenced in the region are assumed to be in
6693 the corresponding BLOCK_VARS and unexpanded variable lists
6694 associated with DEST_CFUN. */
6695
6696 basic_block
6697 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6698 basic_block exit_bb, tree orig_block)
6699 {
6700 vec<basic_block> bbs, dom_bbs;
6701 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6702 basic_block after, bb, *entry_pred, *exit_succ, abb;
6703 struct function *saved_cfun = cfun;
6704 int *entry_flag, *exit_flag;
6705 unsigned *entry_prob, *exit_prob;
6706 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6707 edge e;
6708 edge_iterator ei;
6709 htab_t new_label_map;
6710 struct pointer_map_t *vars_map, *eh_map;
6711 struct loop *loop = entry_bb->loop_father;
6712 struct loop *loop0 = get_loop (saved_cfun, 0);
6713 struct move_stmt_d d;
6714
6715 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6716 region. */
6717 gcc_assert (entry_bb != exit_bb
6718 && (!exit_bb
6719 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6720
6721 /* Collect all the blocks in the region. Manually add ENTRY_BB
6722 because it won't be added by dfs_enumerate_from. */
6723 bbs.create (0);
6724 bbs.safe_push (entry_bb);
6725 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6726
6727 /* The blocks that used to be dominated by something in BBS will now be
6728 dominated by the new block. */
6729 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6730 bbs.address (),
6731 bbs.length ());
6732
6733 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6734 the predecessor edges to ENTRY_BB and the successor edges to
6735 EXIT_BB so that we can re-attach them to the new basic block that
6736 will replace the region. */
6737 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6738 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6739 entry_flag = XNEWVEC (int, num_entry_edges);
6740 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6741 i = 0;
6742 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6743 {
6744 entry_prob[i] = e->probability;
6745 entry_flag[i] = e->flags;
6746 entry_pred[i++] = e->src;
6747 remove_edge (e);
6748 }
6749
6750 if (exit_bb)
6751 {
6752 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6753 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6754 exit_flag = XNEWVEC (int, num_exit_edges);
6755 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6756 i = 0;
6757 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6758 {
6759 exit_prob[i] = e->probability;
6760 exit_flag[i] = e->flags;
6761 exit_succ[i++] = e->dest;
6762 remove_edge (e);
6763 }
6764 }
6765 else
6766 {
6767 num_exit_edges = 0;
6768 exit_succ = NULL;
6769 exit_flag = NULL;
6770 exit_prob = NULL;
6771 }
6772
6773 /* Switch context to the child function to initialize DEST_FN's CFG. */
6774 gcc_assert (dest_cfun->cfg == NULL);
6775 push_cfun (dest_cfun);
6776
6777 init_empty_tree_cfg ();
6778
6779 /* Initialize EH information for the new function. */
6780 eh_map = NULL;
6781 new_label_map = NULL;
6782 if (saved_cfun->eh)
6783 {
6784 eh_region region = NULL;
6785
6786 FOR_EACH_VEC_ELT (bbs, i, bb)
6787 region = find_outermost_region_in_block (saved_cfun, bb, region);
6788
6789 init_eh_for_function ();
6790 if (region != NULL)
6791 {
6792 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6793 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6794 new_label_mapper, new_label_map);
6795 }
6796 }
6797
6798 /* Initialize an empty loop tree. */
6799 struct loops *loops = ggc_alloc_cleared_loops ();
6800 init_loops_structure (dest_cfun, loops, 1);
6801 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6802 set_loops_for_fn (dest_cfun, loops);
6803
6804 /* Move the outlined loop tree part. */
6805 num_nodes = bbs.length ();
6806 FOR_EACH_VEC_ELT (bbs, i, bb)
6807 {
6808 if (bb->loop_father->header == bb)
6809 {
6810 struct loop *this_loop = bb->loop_father;
6811 struct loop *outer = loop_outer (this_loop);
6812 if (outer == loop
6813 /* If the SESE region contains some bbs ending with
6814 a noreturn call, those are considered to belong
6815 to the outermost loop in saved_cfun, rather than
6816 the entry_bb's loop_father. */
6817 || outer == loop0)
6818 {
6819 if (outer != loop)
6820 num_nodes -= this_loop->num_nodes;
6821 flow_loop_tree_node_remove (bb->loop_father);
6822 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6823 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6824 }
6825 }
6826 else if (bb->loop_father == loop0 && loop0 != loop)
6827 num_nodes--;
6828
6829 /* Remove loop exits from the outlined region. */
6830 if (loops_for_fn (saved_cfun)->exits)
6831 FOR_EACH_EDGE (e, ei, bb->succs)
6832 {
6833 void **slot = htab_find_slot_with_hash
6834 (loops_for_fn (saved_cfun)->exits, e,
6835 htab_hash_pointer (e), NO_INSERT);
6836 if (slot)
6837 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6838 }
6839 }
6840
6841
6842 /* Adjust the number of blocks in the tree root of the outlined part. */
6843 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6844
6845 /* Set up a mapping to be used by move_block_to_fn.  */
6846 loop->aux = current_loops->tree_root;
6847 loop0->aux = current_loops->tree_root;
6848
6849 pop_cfun ();
6850
6851 /* Move blocks from BBS into DEST_CFUN. */
6852 gcc_assert (bbs.length () >= 2);
6853 after = dest_cfun->cfg->x_entry_block_ptr;
6854 vars_map = pointer_map_create ();
6855
6856 memset (&d, 0, sizeof (d));
6857 d.orig_block = orig_block;
6858 d.new_block = DECL_INITIAL (dest_cfun->decl);
6859 d.from_context = cfun->decl;
6860 d.to_context = dest_cfun->decl;
6861 d.vars_map = vars_map;
6862 d.new_label_map = new_label_map;
6863 d.eh_map = eh_map;
6864 d.remap_decls_p = true;
6865
6866 FOR_EACH_VEC_ELT (bbs, i, bb)
6867 {
6868 /* No need to update edge counts on the last block.  They have
6869 already been updated earlier when we detached the region from
6870 the original CFG. */
6871 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6872 after = bb;
6873 }
6874
6875 loop->aux = NULL;
6876 loop0->aux = NULL;
6877 /* Loop sizes are no longer correct; fix them up.  */
6878 loop->num_nodes -= num_nodes;
6879 for (struct loop *outer = loop_outer (loop);
6880 outer; outer = loop_outer (outer))
6881 outer->num_nodes -= num_nodes;
6882 loop0->num_nodes -= bbs.length () - num_nodes;
6883
6884 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6885 {
6886 struct loop *aloop;
6887 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6888 if (aloop != NULL)
6889 {
6890 if (aloop->simduid)
6891 {
6892 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6893 d.to_context);
6894 dest_cfun->has_simduid_loops = true;
6895 }
6896 if (aloop->force_vect)
6897 dest_cfun->has_force_vect_loops = true;
6898 }
6899 }
6900
6901 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6902 if (orig_block)
6903 {
6904 tree block;
6905 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6906 == NULL_TREE);
6907 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6908 = BLOCK_SUBBLOCKS (orig_block);
6909 for (block = BLOCK_SUBBLOCKS (orig_block);
6910 block; block = BLOCK_CHAIN (block))
6911 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6912 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6913 }
6914
6915 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6916 vars_map, dest_cfun->decl);
6917
6918 if (new_label_map)
6919 htab_delete (new_label_map);
6920 if (eh_map)
6921 pointer_map_destroy (eh_map);
6922 pointer_map_destroy (vars_map);
6923
6924 /* Rewire the entry and exit blocks. The successor to the entry
6925 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6926 the child function. Similarly, the predecessor of DEST_FN's
6927 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6928 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6929 various CFG manipulation functions get to the right CFG.
6930
6931 FIXME, this is silly. The CFG ought to become a parameter to
6932 these helpers. */
6933 push_cfun (dest_cfun);
6934 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
6935 if (exit_bb)
6936 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
6937 pop_cfun ();
6938
6939 /* Back in the original function, the SESE region has disappeared;
6940 create a new basic block in its place. */
6941 bb = create_empty_bb (entry_pred[0]);
6942 if (current_loops)
6943 add_bb_to_loop (bb, loop);
6944 for (i = 0; i < num_entry_edges; i++)
6945 {
6946 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6947 e->probability = entry_prob[i];
6948 }
6949
6950 for (i = 0; i < num_exit_edges; i++)
6951 {
6952 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6953 e->probability = exit_prob[i];
6954 }
6955
6956 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6957 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6958 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6959 dom_bbs.release ();
6960
6961 if (exit_bb)
6962 {
6963 free (exit_prob);
6964 free (exit_flag);
6965 free (exit_succ);
6966 }
6967 free (entry_prob);
6968 free (entry_flag);
6969 free (entry_pred);
6970 bbs.release ();
6971
6972 return bb;
6973 }
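
/* A typical caller outlines the body of a parallel construct: the OMP
   expansion code, for instance, creates a CHILD_CFUN for the new
   function and then does, roughly,

     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   after which NEW_BB stands in for the whole region in the parent's
   CFG and the child function owns the moved blocks.  */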
6974
6975
6976 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6977 dumpfile.h).  */
6978
6979 void
6980 dump_function_to_file (tree fndecl, FILE *file, int flags)
6981 {
6982 tree arg, var, old_current_fndecl = current_function_decl;
6983 struct function *dsf;
6984 bool ignore_topmost_bind = false, any_var = false;
6985 basic_block bb;
6986 tree chain;
6987 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6988 && decl_is_tm_clone (fndecl));
6989 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6990
6991 current_function_decl = fndecl;
6992 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6993
6994 arg = DECL_ARGUMENTS (fndecl);
6995 while (arg)
6996 {
6997 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6998 fprintf (file, " ");
6999 print_generic_expr (file, arg, dump_flags);
7000 if (flags & TDF_VERBOSE)
7001 print_node (file, "", arg, 4);
7002 if (DECL_CHAIN (arg))
7003 fprintf (file, ", ");
7004 arg = DECL_CHAIN (arg);
7005 }
7006 fprintf (file, ")\n");
7007
7008 if (flags & TDF_VERBOSE)
7009 print_node (file, "", fndecl, 2);
7010
7011 dsf = DECL_STRUCT_FUNCTION (fndecl);
7012 if (dsf && (flags & TDF_EH))
7013 dump_eh_tree (file, dsf);
7014
7015 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7016 {
7017 dump_node (fndecl, TDF_SLIM | flags, file);
7018 current_function_decl = old_current_fndecl;
7019 return;
7020 }
7021
7022 /* When GIMPLE is lowered, the variables are no longer available in
7023 BIND_EXPRs, so display them separately. */
7024 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7025 {
7026 unsigned ix;
7027 ignore_topmost_bind = true;
7028
7029 fprintf (file, "{\n");
7030 if (!vec_safe_is_empty (fun->local_decls))
7031 FOR_EACH_LOCAL_DECL (fun, ix, var)
7032 {
7033 print_generic_decl (file, var, flags);
7034 if (flags & TDF_VERBOSE)
7035 print_node (file, "", var, 4);
7036 fprintf (file, "\n");
7037
7038 any_var = true;
7039 }
7040 if (gimple_in_ssa_p (cfun))
7041 for (ix = 1; ix < num_ssa_names; ++ix)
7042 {
7043 tree name = ssa_name (ix);
7044 if (name && !SSA_NAME_VAR (name))
7045 {
7046 fprintf (file, " ");
7047 print_generic_expr (file, TREE_TYPE (name), flags);
7048 fprintf (file, " ");
7049 print_generic_expr (file, name, flags);
7050 fprintf (file, ";\n");
7051
7052 any_var = true;
7053 }
7054 }
7055 }
7056
7057 if (fun && fun->decl == fndecl
7058 && fun->cfg
7059 && basic_block_info_for_function (fun))
7060 {
7061 /* If the CFG has been built, emit a CFG-based dump. */
7062 if (!ignore_topmost_bind)
7063 fprintf (file, "{\n");
7064
7065 if (any_var && n_basic_blocks_for_fn (fun))
7066 fprintf (file, "\n");
7067
7068 FOR_EACH_BB_FN (bb, fun)
7069 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7070
7071 fprintf (file, "}\n");
7072 }
7073 else if (DECL_SAVED_TREE (fndecl) == NULL)
7074 {
7075 /* The function is now in GIMPLE form but the CFG has not been
7076 built yet. Emit the single sequence of GIMPLE statements
7077 that make up its body. */
7078 gimple_seq body = gimple_body (fndecl);
7079
7080 if (gimple_seq_first_stmt (body)
7081 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7082 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7083 print_gimple_seq (file, body, 0, flags);
7084 else
7085 {
7086 if (!ignore_topmost_bind)
7087 fprintf (file, "{\n");
7088
7089 if (any_var)
7090 fprintf (file, "\n");
7091
7092 print_gimple_seq (file, body, 2, flags);
7093 fprintf (file, "}\n");
7094 }
7095 }
7096 else
7097 {
7098 int indent;
7099
7100 /* Make a tree-based dump.  */
7101 chain = DECL_SAVED_TREE (fndecl);
7102 if (chain && TREE_CODE (chain) == BIND_EXPR)
7103 {
7104 if (ignore_topmost_bind)
7105 {
7106 chain = BIND_EXPR_BODY (chain);
7107 indent = 2;
7108 }
7109 else
7110 indent = 0;
7111 }
7112 else
7113 {
7114 if (!ignore_topmost_bind)
7115 fprintf (file, "{\n");
7116 indent = 2;
7117 }
7118
7119 if (any_var)
7120 fprintf (file, "\n");
7121
7122 print_generic_stmt_indented (file, chain, flags, indent);
7123 if (ignore_topmost_bind)
7124 fprintf (file, "}\n");
7125 }
7126
7127 if (flags & TDF_ENUMERATE_LOCALS)
7128 dump_enumerated_decls (file, flags);
7129 fprintf (file, "\n\n");
7130
7131 current_function_decl = old_current_fndecl;
7132 }
7133
7134 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7135
7136 DEBUG_FUNCTION void
7137 debug_function (tree fn, int flags)
7138 {
7139 dump_function_to_file (fn, stderr, flags);
7140 }
7141
7142
7143 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7144
7145 static void
7146 print_pred_bbs (FILE *file, basic_block bb)
7147 {
7148 edge e;
7149 edge_iterator ei;
7150
7151 FOR_EACH_EDGE (e, ei, bb->preds)
7152 fprintf (file, "bb_%d ", e->src->index);
7153 }
7154
7155
7156 /* Print on FILE the indexes for the successors of basic_block BB. */
7157
7158 static void
7159 print_succ_bbs (FILE *file, basic_block bb)
7160 {
7161 edge e;
7162 edge_iterator ei;
7163
7164 FOR_EACH_EDGE (e, ei, bb->succs)
7165 fprintf (file, "bb_%d ", e->dest->index);
7166 }
7167
7168 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
7169
7170 void
7171 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7172 {
7173 char *s_indent = (char *) alloca ((size_t) indent + 1);
7174 memset ((void *) s_indent, ' ', (size_t) indent);
7175 s_indent[indent] = '\0';
7176
7177 /* Print basic_block's header. */
7178 if (verbosity >= 2)
7179 {
7180 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7181 print_pred_bbs (file, bb);
7182 fprintf (file, "}, succs = {");
7183 print_succ_bbs (file, bb);
7184 fprintf (file, "})\n");
7185 }
7186
7187 /* Print basic_block's body. */
7188 if (verbosity >= 3)
7189 {
7190 fprintf (file, "%s {\n", s_indent);
7191 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7192 fprintf (file, "%s }\n", s_indent);
7193 }
7194 }
7195
7196 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7197
7198 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
7199 VERBOSITY level, this outputs the contents of the loop, or just its
7200 structure. */
7201
7202 static void
7203 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7204 {
7205 char *s_indent;
7206 basic_block bb;
7207
7208 if (loop == NULL)
7209 return;
7210
7211 s_indent = (char *) alloca ((size_t) indent + 1);
7212 memset ((void *) s_indent, ' ', (size_t) indent);
7213 s_indent[indent] = '\0';
7214
7215 /* Print loop's header. */
7216 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7217 if (loop->header)
7218 fprintf (file, "header = %d", loop->header->index);
7219 else
7220 {
7221 fprintf (file, "deleted)\n");
7222 return;
7223 }
7224 if (loop->latch)
7225 fprintf (file, ", latch = %d", loop->latch->index);
7226 else
7227 fprintf (file, ", multiple latches");
7228 fprintf (file, ", niter = ");
7229 print_generic_expr (file, loop->nb_iterations, 0);
7230
7231 if (loop->any_upper_bound)
7232 {
7233 fprintf (file, ", upper_bound = ");
7234 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7235 }
7236
7237 if (loop->any_estimate)
7238 {
7239 fprintf (file, ", estimate = ");
7240 dump_double_int (file, loop->nb_iterations_estimate, true);
7241 }
7242 fprintf (file, ")\n");
7243
7244 /* Print loop's body. */
7245 if (verbosity >= 1)
7246 {
7247 fprintf (file, "%s{\n", s_indent);
7248 FOR_EACH_BB (bb)
7249 if (bb->loop_father == loop)
7250 print_loops_bb (file, bb, indent, verbosity);
7251
7252 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7253 fprintf (file, "%s}\n", s_indent);
7254 }
7255 }
7256
7257 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7258 spaces.  Depending on the VERBOSITY level, this outputs the contents of the
7259 loop, or just its structure. */
7260
7261 static void
7262 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7263 int verbosity)
7264 {
7265 if (loop == NULL)
7266 return;
7267
7268 print_loop (file, loop, indent, verbosity);
7269 print_loop_and_siblings (file, loop->next, indent, verbosity);
7270 }
7271
7272 /* Follow a CFG edge from the entry point of the program, and on entry
7273 of a loop, pretty print the loop structure on FILE. */
7274
7275 void
7276 print_loops (FILE *file, int verbosity)
7277 {
7278 basic_block bb;
7279
7280 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7281 if (bb && bb->loop_father)
7282 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7283 }
7284
7285 /* Dump a loop. */
7286
7287 DEBUG_FUNCTION void
7288 debug (struct loop &ref)
7289 {
7290 print_loop (stderr, &ref, 0, /*verbosity*/0);
7291 }
7292
7293 DEBUG_FUNCTION void
7294 debug (struct loop *ptr)
7295 {
7296 if (ptr)
7297 debug (*ptr);
7298 else
7299 fprintf (stderr, "<nil>\n");
7300 }
7301
7302 /* Dump a loop verbosely. */
7303
7304 DEBUG_FUNCTION void
7305 debug_verbose (struct loop &ref)
7306 {
7307 print_loop (stderr, &ref, 0, /*verbosity*/3);
7308 }
7309
7310 DEBUG_FUNCTION void
7311 debug_verbose (struct loop *ptr)
7312 {
7313 if (ptr)
7314 debug (*ptr);
7315 else
7316 fprintf (stderr, "<nil>\n");
7317 }
7318
7319
7320 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7321
7322 DEBUG_FUNCTION void
7323 debug_loops (int verbosity)
7324 {
7325 print_loops (stderr, verbosity);
7326 }
7327
7328 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7329
7330 DEBUG_FUNCTION void
7331 debug_loop (struct loop *loop, int verbosity)
7332 {
7333 print_loop (stderr, loop, 0, verbosity);
7334 }
7335
7336 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7337 level. */
7338
7339 DEBUG_FUNCTION void
7340 debug_loop_num (unsigned num, int verbosity)
7341 {
7342 debug_loop (get_loop (cfun, num), verbosity);
7343 }
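
/* The debug_* entry points above are meant for interactive use from a
   debugger; e.g., from gdb one might type

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)

   to print the whole loop tree, then loop number 1 at verbosity 2.  */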
7344
7345 /* Return true if BB ends with a call, possibly followed by some
7346 instructions that must stay with the call.  Return false
7347 otherwise.  */
7348
7349 static bool
7350 gimple_block_ends_with_call_p (basic_block bb)
7351 {
7352 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7353 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7354 }
7355
7356
7357 /* Return true if BB ends with a conditional branch.  Return false
7358 otherwise.  */
7359
7360 static bool
7361 gimple_block_ends_with_condjump_p (const_basic_block bb)
7362 {
7363 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7364 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7365 }
7366
7367
7368 /* Return true if we need to add fake edge to exit at statement T.
7369 Helper function for gimple_flow_call_edges_add. */
7370
7371 static bool
7372 need_fake_edge_p (gimple t)
7373 {
7374 tree fndecl = NULL_TREE;
7375 int call_flags = 0;
7376
7377 /* NORETURN and LONGJMP calls already have an edge to exit.
7378 CONST and PURE calls do not need one.
7379 We don't currently check for CONST and PURE here, although
7380 it would be a good idea, because those attributes are
7381 figured out from the RTL in mark_constant_function, and
7382 the counter incrementation code from -fprofile-arcs
7383 leads to different results from -fbranch-probabilities. */
7384 if (is_gimple_call (t))
7385 {
7386 fndecl = gimple_call_fndecl (t);
7387 call_flags = gimple_call_flags (t);
7388 }
7389
7390 if (is_gimple_call (t)
7391 && fndecl
7392 && DECL_BUILT_IN (fndecl)
7393 && (call_flags & ECF_NOTHROW)
7394 && !(call_flags & ECF_RETURNS_TWICE)
7395 /* fork() doesn't really return twice, but the effect of
7396 wrapping it in __gcov_fork() which calls __gcov_flush()
7397 and clears the counters before forking has the same
7398 effect as returning twice. Force a fake edge. */
7399 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7400 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7401 return false;
7402
7403 if (is_gimple_call (t))
7404 {
7405 edge_iterator ei;
7406 edge e;
7407 basic_block bb;
7408
7409 if (!(call_flags & ECF_NORETURN))
7410 return true;
7411
7412 bb = gimple_bb (t);
7413 FOR_EACH_EDGE (e, ei, bb->succs)
7414 if ((e->flags & EDGE_FAKE) == 0)
7415 return true;
7416 }
7417
7418 if (gimple_code (t) == GIMPLE_ASM
7419 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7420 return true;
7421
7422 return false;
7423 }
7424
7425
7426 /* Add fake edges to the function exit for any non-constant and
7427 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7428 for volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7429 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7430 that were split.
7431
7432 The goal is to expose cases in which entering a basic block does
7433 not imply that all subsequent instructions must be executed. */
7434
7435 static int
7436 gimple_flow_call_edges_add (sbitmap blocks)
7437 {
7438 int i;
7439 int blocks_split = 0;
7440 int last_bb = last_basic_block;
7441 bool check_last_block = false;
7442
7443 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7444 return 0;
7445
7446 if (! blocks)
7447 check_last_block = true;
7448 else
7449 check_last_block = bitmap_bit_p (blocks,
7450 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7451
7452 /* In the last basic block, before epilogue generation, there will be
7453 a fallthru edge to EXIT. Special care is required if the last insn
7454 of the last basic block is a call because make_edge folds duplicate
7455 edges, which would result in the fallthru edge also being marked
7456 fake, which would result in the fallthru edge being removed by
7457 remove_fake_edges, which would result in an invalid CFG.
7458
7459 Moreover, we can't elide the outgoing fake edge, since the block
7460 profiler needs to take this into account in order to solve the minimal
7461 spanning tree in the case that the call doesn't return.
7462
7463 Handle this by adding a dummy instruction in a new last basic block. */
7464 if (check_last_block)
7465 {
7466 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7467 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7468 gimple t = NULL;
7469
7470 if (!gsi_end_p (gsi))
7471 t = gsi_stmt (gsi);
7472
7473 if (t && need_fake_edge_p (t))
7474 {
7475 edge e;
7476
7477 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7478 if (e)
7479 {
7480 gsi_insert_on_edge (e, gimple_build_nop ());
7481 gsi_commit_edge_inserts ();
7482 }
7483 }
7484 }
7485
7486 /* Now add fake edges to the function exit for any non-constant
7487 calls since there is no way that we can determine if they will
7488 return or not... */
7489 for (i = 0; i < last_bb; i++)
7490 {
7491 basic_block bb = BASIC_BLOCK (i);
7492 gimple_stmt_iterator gsi;
7493 gimple stmt, last_stmt;
7494
7495 if (!bb)
7496 continue;
7497
7498 if (blocks && !bitmap_bit_p (blocks, i))
7499 continue;
7500
7501 gsi = gsi_last_nondebug_bb (bb);
7502 if (!gsi_end_p (gsi))
7503 {
7504 last_stmt = gsi_stmt (gsi);
7505 do
7506 {
7507 stmt = gsi_stmt (gsi);
7508 if (need_fake_edge_p (stmt))
7509 {
7510 edge e;
7511
7512 /* The handling above of the final block before the
7513 epilogue should be enough to verify that there is
7514 no edge to the exit block in CFG already.
7515 Calling make_edge in such case would cause us to
7516 mark that edge as fake and remove it later. */
7517 #ifdef ENABLE_CHECKING
7518 if (stmt == last_stmt)
7519 {
7520 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7521 gcc_assert (e == NULL);
7522 }
7523 #endif
7524
7525 /* Note that the following may create a new basic block
7526 and renumber the existing basic blocks. */
7527 if (stmt != last_stmt)
7528 {
7529 e = split_block (bb, stmt);
7530 if (e)
7531 blocks_split++;
7532 }
7533 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7534 }
7535 gsi_prev (&gsi);
7536 }
7537 while (!gsi_end_p (gsi));
7538 }
7539 }
7540
7541 if (blocks_split)
7542 verify_flow_info ();
7543
7544 return blocks_split;
7545 }
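
/* For example, given a block

     foo ();
     bar ();

   where foo is not known to return, the loop above splits the block
   after the call and adds an EDGE_FAKE edge to the exit block:

     bb_2: foo ();   with a fake edge bb_2 -> EXIT
     bb_3: bar ();

   so that entering bb_2 no longer implies that bar () executes.  */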
7546
7547 /* Removes edge E and all the blocks dominated by it, and updates dominance
7548 information. The IL in E->src needs to be updated separately.
7549 If dominance info is not available, only the edge E is removed.  */
7550
7551 void
7552 remove_edge_and_dominated_blocks (edge e)
7553 {
7554 vec<basic_block> bbs_to_remove = vNULL;
7555 vec<basic_block> bbs_to_fix_dom = vNULL;
7556 bitmap df, df_idom;
7557 edge f;
7558 edge_iterator ei;
7559 bool none_removed = false;
7560 unsigned i;
7561 basic_block bb, dbb;
7562 bitmap_iterator bi;
7563
7564 if (!dom_info_available_p (CDI_DOMINATORS))
7565 {
7566 remove_edge (e);
7567 return;
7568 }
7569
7570 /* No updating is needed for edges to exit. */
7571 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7572 {
7573 if (cfgcleanup_altered_bbs)
7574 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7575 remove_edge (e);
7576 return;
7577 }
7578
7579 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7580 that is not dominated by E->dest, then this set is empty. Otherwise,
7581 all the basic blocks dominated by E->dest are removed.
7582
7583 Also, to DF_IDOM we store the immediate dominators of the blocks in
7584 the dominance frontier of E (i.e., of the successors of the
7585 removed blocks, if there are any, and of E->dest otherwise). */
7586 FOR_EACH_EDGE (f, ei, e->dest->preds)
7587 {
7588 if (f == e)
7589 continue;
7590
7591 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7592 {
7593 none_removed = true;
7594 break;
7595 }
7596 }
7597
7598 df = BITMAP_ALLOC (NULL);
7599 df_idom = BITMAP_ALLOC (NULL);
7600
7601 if (none_removed)
7602 bitmap_set_bit (df_idom,
7603 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7604 else
7605 {
7606 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7607 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7608 {
7609 FOR_EACH_EDGE (f, ei, bb->succs)
7610 {
7611 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7612 bitmap_set_bit (df, f->dest->index);
7613 }
7614 }
7615 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7616 bitmap_clear_bit (df, bb->index);
7617
7618 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7619 {
7620 bb = BASIC_BLOCK (i);
7621 bitmap_set_bit (df_idom,
7622 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7623 }
7624 }
7625
7626 if (cfgcleanup_altered_bbs)
7627 {
7628 /* Record the set of the altered basic blocks. */
7629 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7630 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7631 }
7632
7633 /* Remove E and the cancelled blocks. */
7634 if (none_removed)
7635 remove_edge (e);
7636 else
7637 {
7638 /* Walk backwards so as to get a chance to substitute all
7639 released DEFs into debug stmts. See
7640 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7641 details. */
7642 for (i = bbs_to_remove.length (); i-- > 0; )
7643 delete_basic_block (bbs_to_remove[i]);
7644 }
7645
7646 /* Update the dominance information. The immediate dominator may change only
7647 for blocks whose immediate dominator belongs to DF_IDOM:
7648
7649 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7650 removal.  Let Z be the block such that idom(Z) = Y and
7651 Z dominates X after the removal. Before removal, there exists a path P
7652 from Y to X that avoids Z. Let F be the last edge on P that is
7653 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7654 dominates W, and because of P, Z does not dominate W), and W belongs to
7655 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
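  /* As a concrete illustration, consider removing E = A->B in

	 A --> B --> C
	  \_________^

     B is dominated by E->dest, so it is deleted; its successor C lies
     in the dominance frontier of E, and idom(C) = A is recorded in
     DF_IDOM.  The walk below then revisits A's dominator children,
     among them C, and recomputes their immediate dominators.  */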
7656 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7657 {
7658 bb = BASIC_BLOCK (i);
7659 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7660 dbb;
7661 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7662 bbs_to_fix_dom.safe_push (dbb);
7663 }
7664
7665 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7666
7667 BITMAP_FREE (df);
7668 BITMAP_FREE (df_idom);
7669 bbs_to_remove.release ();
7670 bbs_to_fix_dom.release ();
7671 }
7672
7673 /* Purge dead EH edges from basic block BB. */
7674
7675 bool
7676 gimple_purge_dead_eh_edges (basic_block bb)
7677 {
7678 bool changed = false;
7679 edge e;
7680 edge_iterator ei;
7681 gimple stmt = last_stmt (bb);
7682
7683 if (stmt && stmt_can_throw_internal (stmt))
7684 return false;
7685
7686 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7687 {
7688 if (e->flags & EDGE_EH)
7689 {
7690 remove_edge_and_dominated_blocks (e);
7691 changed = true;
7692 }
7693 else
7694 ei_next (&ei);
7695 }
7696
7697 return changed;
7698 }
7699
7700 /* Purge dead EH edges from basic blocks listed in BLOCKS.  */
7701
7702 bool
7703 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7704 {
7705 bool changed = false;
7706 unsigned i;
7707 bitmap_iterator bi;
7708
7709 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7710 {
7711 basic_block bb = BASIC_BLOCK (i);
7712
7713 /* Earlier gimple_purge_dead_eh_edges could have removed
7714 this basic block already. */
7715 gcc_assert (bb || changed);
7716 if (bb != NULL)
7717 changed |= gimple_purge_dead_eh_edges (bb);
7718 }
7719
7720 return changed;
7721 }
7722
7723 /* Purge dead abnormal call edges from basic block BB. */
7724
7725 bool
7726 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7727 {
7728 bool changed = false;
7729 edge e;
7730 edge_iterator ei;
7731 gimple stmt = last_stmt (bb);
7732
7733 if (!cfun->has_nonlocal_label
7734 && !cfun->calls_setjmp)
7735 return false;
7736
7737 if (stmt && stmt_can_make_abnormal_goto (stmt))
7738 return false;
7739
7740 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7741 {
7742 if (e->flags & EDGE_ABNORMAL)
7743 {
7744 if (e->flags & EDGE_FALLTHRU)
7745 e->flags &= ~EDGE_ABNORMAL;
7746 else
7747 remove_edge_and_dominated_blocks (e);
7748 changed = true;
7749 }
7750 else
7751 ei_next (&ei);
7752 }
7753
7754 return changed;
7755 }
7756
7757 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS.  */
7758
7759 bool
7760 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7761 {
7762 bool changed = false;
7763 unsigned i;
7764 bitmap_iterator bi;
7765
7766 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7767 {
7768 basic_block bb = BASIC_BLOCK (i);
7769
7770 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7771 this basic block already. */
7772 gcc_assert (bb || changed);
7773 if (bb != NULL)
7774 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7775 }
7776
7777 return changed;
7778 }
7779
7780 /* This function is called whenever a new edge is created or
7781 redirected. */
7782
7783 static void
7784 gimple_execute_on_growing_pred (edge e)
7785 {
7786 basic_block bb = e->dest;
7787
7788 if (!gimple_seq_empty_p (phi_nodes (bb)))
7789 reserve_phi_args_for_new_edge (bb);
7790 }
7791
7792 /* This function is called immediately before edge E is removed from
7793 the edge vector E->dest->preds. */
7794
7795 static void
7796 gimple_execute_on_shrinking_pred (edge e)
7797 {
7798 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7799 remove_phi_args (e);
7800 }
7801
7802 /*---------------------------------------------------------------------------
7803 Helper functions for Loop versioning
7804 ---------------------------------------------------------------------------*/
7805
7806 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
7807 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
7808 'new_head' was created by splitting 'second's incoming edge, it received
7809 phi arguments on that edge from split_edge ().  Later, an additional edge
7810 'e' was created to connect 'new_head' and 'first'.  This routine now adds,
7811 on this additional edge 'e', the phi args that the edge from 'new_head' to
7812 'second' received as part of the edge splitting.  */
7813
7814 static void
7815 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7816 basic_block new_head, edge e)
7817 {
7818 gimple phi1, phi2;
7819 gimple_stmt_iterator psi1, psi2;
7820 tree def;
7821 edge e2 = find_edge (new_head, second);
7822
7823 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7824 edge, we should always have an edge from NEW_HEAD to SECOND. */
7825 gcc_assert (e2 != NULL);
7826
7827 /* Browse all 'second' basic block phi nodes and add phi args to
7828 edge 'e' for 'first' head. PHI args are always in correct order. */
7829
7830 for (psi2 = gsi_start_phis (second),
7831 psi1 = gsi_start_phis (first);
7832 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7833 gsi_next (&psi2), gsi_next (&psi1))
7834 {
7835 phi1 = gsi_stmt (psi1);
7836 phi2 = gsi_stmt (psi2);
7837 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7838 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7839 }
7840 }
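
/* Pictorially, the versioned CFG this helper operates on looks like

	    new_head
	    /      \
	first      second
	(else)     (then)

   'second' already has phi arguments on the edge new_head -> second
   from the earlier split_edge ().  The loop above gives each phi in
   'first' the argument that the corresponding phi in 'second' carries
   on that edge.  */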
7841
7842
7843 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7844 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7845 the destination of the ELSE part. */
7846
7847 static void
7848 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7849 basic_block second_head ATTRIBUTE_UNUSED,
7850 basic_block cond_bb, void *cond_e)
7851 {
7852 gimple_stmt_iterator gsi;
7853 gimple new_cond_expr;
7854 tree cond_expr = (tree) cond_e;
7855 edge e0;
7856
7857 /* Build the new conditional expression.  */
7858 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7859 NULL_TREE, NULL_TREE);
7860
7861 /* Add new cond in cond_bb. */
7862 gsi = gsi_last_bb (cond_bb);
7863 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7864
7865 /* Adjust edges appropriately to connect new head with first head
7866 as well as second head. */
7867 e0 = single_succ_edge (cond_bb);
7868 e0->flags &= ~EDGE_FALLTHRU;
7869 e0->flags |= EDGE_FALSE_VALUE;
7870 }
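
/* After this runs, COND_BB ends in the new GIMPLE_COND and its old
   single fallthru edge (towards FIRST_HEAD, the else destination) is
   remarked EDGE_FALSE_VALUE; the caller, presumably the generic loop
   versioning machinery, is expected to add the EDGE_TRUE_VALUE edge
   towards SECOND_HEAD.  */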
7871
7872
7873 /* Do book-keeping of basic block BB for the profile consistency checker.
7874 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7875 then do post-pass accounting.  Store the counts in RECORD.  */
7876 static void
7877 gimple_account_profile_record (basic_block bb, int after_pass,
7878 struct profile_record *record)
7879 {
7880 gimple_stmt_iterator i;
7881 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7882 {
7883 record->size[after_pass]
7884 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7885 if (profile_status == PROFILE_READ)
7886 record->time[after_pass]
7887 += estimate_num_insns (gsi_stmt (i),
7888 &eni_time_weights) * bb->count;
7889 else if (profile_status == PROFILE_GUESSED)
7890 record->time[after_pass]
7891 += estimate_num_insns (gsi_stmt (i),
7892 &eni_time_weights) * bb->frequency;
7893 }
7894 }
7895
7896 struct cfg_hooks gimple_cfg_hooks = {
7897 "gimple",
7898 gimple_verify_flow_info,
7899 gimple_dump_bb, /* dump_bb */
7900 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7901 create_bb, /* create_basic_block */
7902 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7903 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7904 gimple_can_remove_branch_p, /* can_remove_branch_p */
7905 remove_bb, /* delete_basic_block */
7906 gimple_split_block, /* split_block */
7907 gimple_move_block_after, /* move_block_after */
7908 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7909 gimple_merge_blocks, /* merge_blocks */
7910 gimple_predict_edge, /* predict_edge */
7911 gimple_predicted_by_p, /* predicted_by_p */
7912 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7913 gimple_duplicate_bb, /* duplicate_block */
7914 gimple_split_edge, /* split_edge */
7915 gimple_make_forwarder_block, /* make_forwarder_block */
7916 NULL, /* tidy_fallthru_edge */
7917 NULL, /* force_nonfallthru */
7918 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7919 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7920 gimple_flow_call_edges_add, /* flow_call_edges_add */
7921 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7922 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7923 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7924 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7925 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7926 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7927 flush_pending_stmts, /* flush_pending_stmts */
7928 gimple_empty_block_p, /* block_empty_p */
7929 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7930 gimple_account_profile_record,
7931 };


/* Split all critical edges.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements onto edges and expects that, because
             split_critical_edges was run beforehand, committing edge
             insertions will not split more edges.  In addition to
             critical edges we must also split edges whose source block
             ends in a control flow statement, such as RESX, when the
             destination cannot hold the insertion directly (it has PHI
             nodes, multiple predecessors, or is the exit block).  Go
             ahead and split them too.  This matches the logic in
             gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
                    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
                      && !gimple_call_builtin_p (gsi_stmt (gsi),
                                                 BUILT_IN_RETURN)))
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}
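
/* Illustrative sketch (helper name assumed; not part of the original
   source): an edge is critical iff its source block branches and its
   destination block joins, which is what EDGE_CRITICAL_P tests above.
   Such an edge cannot hold code insertions without being split first.  */

static bool ATTRIBUTE_UNUSED
edge_is_critical_sketch (edge e)
{
  /* More than one successor at the source and more than one
     predecessor at the destination.  */
  return (EDGE_COUNT (e->src->succs) > 1
          && EDGE_COUNT (e->dest->preds) > 1);
}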

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
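
/* Illustrative sketch (function name assumed; not part of the original
   source): using the helpers above to emit A * B + C before GSI.  Each
   call folds, strips conversions, and forces the result into a gimple
   value, inserting any new statements before *GSI.  */

static tree ATTRIBUTE_UNUSED
example_build_mul_add (gimple_stmt_iterator *gsi, tree type,
                       tree a, tree b, tree c)
{
  tree prod = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
  return gimplify_build2 (gsi, PLUS_EXPR, type, prod, c);
}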


\f
/* Emit return warnings.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}
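
/* For illustration (example assumed; not from the original source), the
   "'noreturn' function does return" warning above fires for code such as

     void __attribute__ ((noreturn))
     fatal (int err)
     {
       if (err)
         abort ();
     }

   because when ERR is zero control falls off the end, leaving a path to
   the exit block even though the function promises never to return.  */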


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
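
/* Illustrative sketch (helper name assumed; not part of the original
   source): fetch the block reached when COND_BB's condition evaluates
   to true, using the extractor above.  */

static basic_block ATTRIBUTE_UNUSED
example_true_dest (basic_block cond_bb)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return true_edge->dest;
}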

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn about calls whose return value is
   ignored when the callee is declared with attribute ((warn_unused_result)).
   This is done before inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
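
/* For illustration (example assumed; not from the original source), the
   source-level pattern this walk diagnoses looks like

     int must_check (void) __attribute__ ((warn_unused_result));
     ...
     must_check ();

   where the call's value is discarded, so it is gimplified as a naked
   GIMPLE_CALL without an LHS and triggers -Wunused-result.  */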

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions, or inlining might have
   changed some properties, such as marking functions nothrow, pure,
   const, or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass
   manager, because between inlining and this fixup verify_flow_info
   would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
    = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                          ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
                 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);
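
  /* Worked example (numbers assumed for illustration; not from the
     original source): counts are rescaled in fixed point relative to
     REG_BR_PROB_BASE.  If the cgraph node's count is 500 and the entry
     block's count is 1000, COUNT_SCALE is REG_BR_PROB_BASE / 2, and
     apply_scale (800, count_scale) yields 400, halving every count
     scaled below.  */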

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              stmt = gimple_build_call
                (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}