/* Control flow functions for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int.h"
#include "wide-int-print.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

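/* An illustrative sketch (not code from this file): for a switch such as

     switch (x)
       {
       case 1:  goto L1;
       case 2:  goto L1;
       default: goto L2;
       }

   the edge from the switch block to L1's block maps to the chain
   "case 2 -> case 1" (linked through CASE_CHAIN), so redirecting that
   edge lets us update both case labels without rescanning the whole
   case vector.  */
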
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}


/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

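/* For illustration (a sketch, not code from this pass): a loop written as

     #pragma GCC ivdep
     while (test) body;

   reaches us in roughly this shape, with the annotation wrapping the
   condition value just before the GIMPLE_COND:

     _2 = ANNOTATE (_1, annot_expr_ivdep_kind);   <- internal call IFN_ANNOTATE
     if (_2 != 0) goto <body>; else goto <exit>;

   The code below replaces the call with the plain copy _2 = _1 and
   records the annotation on the loop itself (for ivdep, by setting
   loop->safelen to INT_MAX).  */
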
static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
        continue;
      for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
              loop->safelen = INT_MAX;
              break;
            case annot_expr_no_vector_kind:
              loop->dont_vectorize = true;
              break;
            case annot_expr_vector_kind:
              loop->force_vectorize = true;
              cfun->has_force_vectorize_loops = true;
              break;
            default:
              gcc_unreachable ();
            }
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }
          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
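          /* Illustrative sketch (not from this file): for a call such as

               x = setjmp (env);

             which can return abnormally, the rewrite below produces

               tmp = setjmp (env);
               x = tmp;

             so the previous value of x is still live on the abnormal
             edge, keeping abnormal SSA life-ranges disjoint.  */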
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO
   is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              if (make_goto_expr_edges (bb))
                ab_edge_goto.safe_push (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              {
                edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                e->goto_locus = gimple_location (last);
                fallthru = false;
              }
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                ab_edge_call.safe_push (bb);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region,
                                                &cur_omp_region_idx);
              if (cur_region && bb_to_omp_idx == NULL)
                bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
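  /* A sketch of the factoring (illustrative, not code from this file):

       goto *p1;   ...   goto *p2;

     becomes, with a single factored dispatch site,

       gotovar = p1; goto factored;   ...   gotovar = p2; goto factored;
       factored: goto *gotovar;

     so each original goto contributes one edge to the dispatcher block
     instead of one edge per potential target label.  */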
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple label_stmt = gsi_stmt (gsi);
              tree target;

              if (gimple_code (label_stmt) != GIMPLE_LABEL)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

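/* For example (a sketch): in

     for (i = 0; i < n; i++) total += f (i);

   the loop header, body and latch blocks can all carry the same line
   number; giving each block a distinct discriminator lets a sample-based
   profiler attribute counts to the right block on that line.  */
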
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the edge-to-cases hash table as we
   delete that table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels, or, when not optimizing,
         any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
          && stmt_ends_bb_p (stmt))
        bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
1888 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1889 {
1890 gimple dbg = gimple_build_debug_bind (label,
1891 integer_zero_node,
1892 stmt);
1893 gimple_debug_bind_reset_value (dbg);
1894 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1895 }
1896
1897 lp_nr = EH_LANDING_PAD_NR (label);
1898 if (lp_nr)
1899 {
1900 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1901 lp->post_landing_pad = NULL;
1902 }
1903 }
1904 else
1905 {
1906 gimple_set_bb (stmt, a);
1907 gsi_next (&gsi);
1908 }
1909 }
1910
1911 /* When merging two BBs, if their counts are different, the larger count
1912 is selected as the new bb count. This is to handle inconsistent
1913 profiles. */
1914 if (a->loop_father == b->loop_father)
1915 {
1916 a->count = MAX (a->count, b->count);
1917 a->frequency = MAX (a->frequency, b->frequency);
1918 }
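  /* For example, if an inconsistent profile says a->count == 1000 while
     b->count == 1200 (impossible in an exact profile, since A is B's only
     predecessor), the merged block keeps the larger value, 1200.  */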
1919
1920 /* Merge the sequences. */
1921 last = gsi_last_bb (a);
1922 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1923 set_bb_seq (b, NULL);
1924
1925 if (cfgcleanup_altered_bbs)
1926 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1927 }
1928
1929
1930 /* Return the one of the two successors of BB that is not reachable by a
1931 complex edge, if there is one. Else, return BB. We use
1932 this in optimizations that use post-dominators for their heuristics,
1933 to catch the cases in C++ where function calls are involved. */
1934
1935 basic_block
1936 single_noncomplex_succ (basic_block bb)
1937 {
1938 edge e0, e1;
1939 if (EDGE_COUNT (bb->succs) != 2)
1940 return bb;
1941
1942 e0 = EDGE_SUCC (bb, 0);
1943 e1 = EDGE_SUCC (bb, 1);
1944 if (e0->flags & EDGE_COMPLEX)
1945 return e1->dest;
1946 if (e1->flags & EDGE_COMPLEX)
1947 return e0->dest;
1948
1949 return bb;
1950 }
1951
1952 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
1953
1954 void
1955 notice_special_calls (gimple call)
1956 {
1957 int flags = gimple_call_flags (call);
1958
1959 if (flags & ECF_MAY_BE_ALLOCA)
1960 cfun->calls_alloca = true;
1961 if (flags & ECF_RETURNS_TWICE)
1962 cfun->calls_setjmp = true;
1963 }
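/* For example, a call to __builtin_alloca carries ECF_MAY_BE_ALLOCA and
   sets cfun->calls_alloca, while a call to setjmp carries ECF_RETURNS_TWICE
   and sets cfun->calls_setjmp.  */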
1964
1965
1966 /* Clear flags set by notice_special_calls. Used by dead code removal
1967 to update the flags. */
1968
1969 void
1970 clear_special_calls (void)
1971 {
1972 cfun->calls_alloca = false;
1973 cfun->calls_setjmp = false;
1974 }
1975
1976 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1977
1978 static void
1979 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1980 {
1981 /* Since this block is no longer reachable, we can just delete all
1982 of its PHI nodes. */
1983 remove_phi_nodes (bb);
1984
1985 /* Remove edges to BB's successors. */
1986 while (EDGE_COUNT (bb->succs) > 0)
1987 remove_edge (EDGE_SUCC (bb, 0));
1988 }
1989
1990
1991 /* Remove statements of basic block BB. */
1992
1993 static void
1994 remove_bb (basic_block bb)
1995 {
1996 gimple_stmt_iterator i;
1997
1998 if (dump_file)
1999 {
2000 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2001 if (dump_flags & TDF_DETAILS)
2002 {
2003 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2004 fprintf (dump_file, "\n");
2005 }
2006 }
2007
2008 if (current_loops)
2009 {
2010 struct loop *loop = bb->loop_father;
2011
2012 /* If a loop gets removed, clean up the information associated
2013 with it. */
2014 if (loop->latch == bb
2015 || loop->header == bb)
2016 free_numbers_of_iterations_estimates_loop (loop);
2017 }
2018
2019 /* Remove all the instructions in the block. */
2020 if (bb_seq (bb) != NULL)
2021 {
2022 /* Walk backwards so as to get a chance to substitute all
2023 released DEFs into debug stmts. See
2024 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2025 details. */
2026 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2027 {
2028 gimple stmt = gsi_stmt (i);
2029 if (gimple_code (stmt) == GIMPLE_LABEL
2030 && (FORCED_LABEL (gimple_label_label (stmt))
2031 || DECL_NONLOCAL (gimple_label_label (stmt))))
2032 {
2033 basic_block new_bb;
2034 gimple_stmt_iterator new_gsi;
2035
2036 /* A non-reachable non-local label may still be referenced.
2037 But it no longer needs to carry the extra semantics of
2038 non-locality. */
2039 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2040 {
2041 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2042 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2043 }
2044
2045 new_bb = bb->prev_bb;
2046 new_gsi = gsi_start_bb (new_bb);
2047 gsi_remove (&i, false);
2048 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2049 }
2050 else
2051 {
2052 /* Release SSA definitions if we are in SSA. Note that we
2053 may be called when not in SSA. For example,
2054 final_cleanup calls this function via
2055 cleanup_tree_cfg. */
2056 if (gimple_in_ssa_p (cfun))
2057 release_defs (stmt);
2058
2059 gsi_remove (&i, true);
2060 }
2061
2062 if (gsi_end_p (i))
2063 i = gsi_last_bb (bb);
2064 else
2065 gsi_prev (&i);
2066 }
2067 }
2068
2069 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2070 bb->il.gimple.seq = NULL;
2071 bb->il.gimple.phi_nodes = NULL;
2072 }
2073
2074
2075 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or computed
2076 GOTO, and a predicate VAL, return the edge that will be taken out of
2077 the block. If VAL does not match a unique edge, NULL is returned. */
2078
2079 edge
2080 find_taken_edge (basic_block bb, tree val)
2081 {
2082 gimple stmt;
2083
2084 stmt = last_stmt (bb);
2085
2086 gcc_assert (stmt);
2087 gcc_assert (is_ctrl_stmt (stmt));
2088
2089 if (val == NULL)
2090 return NULL;
2091
2092 if (!is_gimple_min_invariant (val))
2093 return NULL;
2094
2095 if (gimple_code (stmt) == GIMPLE_COND)
2096 return find_taken_edge_cond_expr (bb, val);
2097
2098 if (gimple_code (stmt) == GIMPLE_SWITCH)
2099 return find_taken_edge_switch_expr (bb, val);
2100
2101 if (computed_goto_p (stmt))
2102 {
2103 /* Only optimize if the argument is a label; if the argument is
2104 not a label, then we cannot construct a proper CFG.
2105
2106 It may be the case that we only need to allow the LABEL_REF to
2107 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2108 appear inside a LABEL_EXPR just to be safe. */
2109 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2110 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2111 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2112 return NULL;
2113 }
2114
2115 gcc_unreachable ();
2116 }
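/* For example, if BB ends in the GIMPLE condition "if (x_1 != 0)" and the
   caller knows that VAL, the value of the predicate, is integer_zero_node,
   the false edge is returned; a NULL or non-invariant VAL yields NULL,
   since either outgoing edge may then be taken.  */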
2117
2118 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2119 statement, determine which of the outgoing edges will be taken out of the
2120 block. Return NULL if any edge may be taken. */
2121
2122 static edge
2123 find_taken_edge_computed_goto (basic_block bb, tree val)
2124 {
2125 basic_block dest;
2126 edge e = NULL;
2127
2128 dest = label_to_block (val);
2129 if (dest)
2130 {
2131 e = find_edge (bb, dest);
2132 gcc_assert (e != NULL);
2133 }
2134
2135 return e;
2136 }
2137
2138 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2139 statement, determine which of the two edges will be taken out of the
2140 block. Return NULL if either edge may be taken. */
2141
2142 static edge
2143 find_taken_edge_cond_expr (basic_block bb, tree val)
2144 {
2145 edge true_edge, false_edge;
2146
2147 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2148
2149 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2150 return (integer_zerop (val) ? false_edge : true_edge);
2151 }
2152
2153 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2154 statement, determine which edge will be taken out of the block. Return
2155 NULL if any edge may be taken. */
2156
2157 static edge
2158 find_taken_edge_switch_expr (basic_block bb, tree val)
2159 {
2160 basic_block dest_bb;
2161 edge e;
2162 gimple switch_stmt;
2163 tree taken_case;
2164
2165 switch_stmt = last_stmt (bb);
2166 taken_case = find_case_label_for_value (switch_stmt, val);
2167 dest_bb = label_to_block (CASE_LABEL (taken_case));
2168
2169 e = find_edge (bb, dest_bb);
2170 gcc_assert (e);
2171 return e;
2172 }
2173
2174
2175 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2176 We can make optimal use here of the fact that the case labels are
2177 sorted: We can do a binary search for a case matching VAL. */
2178
2179 static tree
2180 find_case_label_for_value (gimple switch_stmt, tree val)
2181 {
2182 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2183 tree default_case = gimple_switch_default_label (switch_stmt);
2184
2185 for (low = 0, high = n; high - low > 1; )
2186 {
2187 size_t i = (high + low) / 2;
2188 tree t = gimple_switch_label (switch_stmt, i);
2189 int cmp;
2190
2191 /* Cache the result of comparing CASE_LOW and val. */
2192 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2193
2194 if (cmp > 0)
2195 high = i;
2196 else
2197 low = i;
2198
2199 if (CASE_HIGH (t) == NULL)
2200 {
2201 /* A single-valued case label. */
2202 if (cmp == 0)
2203 return t;
2204 }
2205 else
2206 {
2207 /* A case range. We can only handle integer ranges. */
2208 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2209 return t;
2210 }
2211 }
2212
2213 return default_case;
2214 }
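/* Illustrative sketch (not part of GCC): the same binary search expressed
   over a plain sorted array of integer ranges.  The struct and function
   names below are hypothetical; slot 0 plays the role of the default
   label, and slots 1..N-1 are sorted by their lower bound, mirroring the
   gimple_switch_label vector used above.  */
#if 0
struct case_range { int low, high; };	/* high == low for single values.  */

static int
find_case_index (const struct case_range *labels, int n, int val)
{
  int low = 0, high = n;
  while (high - low > 1)
    {
      int i = (high + low) / 2;
      /* Three-way compare of labels[i].low against val.  */
      int cmp = (labels[i].low > val) - (labels[i].low < val);
      if (cmp > 0)
	high = i;
      else
	low = i;
      if (cmp == 0 || (cmp < 0 && val <= labels[i].high))
	return i;
    }
  return 0;	/* Fall back to the default slot, as above.  */
}
#endif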
2215
2216
2217 /* Dump a basic block on stderr. */
2218
2219 void
2220 gimple_debug_bb (basic_block bb)
2221 {
2222 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2223 }
2224
2225
2226 /* Dump basic block with index N on stderr. */
2227
2228 basic_block
2229 gimple_debug_bb_n (int n)
2230 {
2231 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2232 return BASIC_BLOCK_FOR_FN (cfun, n);
2233 }
2234
2235
2236 /* Dump the CFG on stderr.
2237
2238 FLAGS are the same as those used by the tree dumping functions
2239 (see TDF_* in dumpfile.h). */
2240
2241 void
2242 gimple_debug_cfg (int flags)
2243 {
2244 gimple_dump_cfg (stderr, flags);
2245 }
2246
2247
2248 /* Dump the program showing basic block boundaries on the given FILE.
2249
2250 FLAGS are the same as those used by the tree dumping functions (see
2251 TDF_* in dumpfile.h). */
2252
2253 void
2254 gimple_dump_cfg (FILE *file, int flags)
2255 {
2256 if (flags & TDF_DETAILS)
2257 {
2258 dump_function_header (file, current_function_decl, flags);
2259 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2260 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2261 last_basic_block_for_fn (cfun));
2262
2263 brief_dump_cfg (file, flags | TDF_COMMENT);
2264 fprintf (file, "\n");
2265 }
2266
2267 if (flags & TDF_STATS)
2268 dump_cfg_stats (file);
2269
2270 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2271 }
2272
2273
2274 /* Dump CFG statistics on FILE. */
2275
2276 void
2277 dump_cfg_stats (FILE *file)
2278 {
2279 static long max_num_merged_labels = 0;
2280 unsigned long size, total = 0;
2281 long num_edges;
2282 basic_block bb;
2283 const char * const fmt_str = "%-30s%-13s%12s\n";
2284 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2285 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2286 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2287 const char *funcname = current_function_name ();
2288
2289 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2290
2291 fprintf (file, "---------------------------------------------------------\n");
2292 fprintf (file, fmt_str, "", " Number of ", "Memory");
2293 fprintf (file, fmt_str, "", " instances ", "used ");
2294 fprintf (file, "---------------------------------------------------------\n");
2295
2296 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2297 total += size;
2298 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2299 SCALE (size), LABEL (size));
2300
2301 num_edges = 0;
2302 FOR_EACH_BB_FN (bb, cfun)
2303 num_edges += EDGE_COUNT (bb->succs);
2304 size = num_edges * sizeof (struct edge_def);
2305 total += size;
2306 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2307
2308 fprintf (file, "---------------------------------------------------------\n");
2309 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2310 LABEL (total));
2311 fprintf (file, "---------------------------------------------------------\n");
2312 fprintf (file, "\n");
2313
2314 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2315 max_num_merged_labels = cfg_stats.num_merged_labels;
2316
2317 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2318 cfg_stats.num_merged_labels, max_num_merged_labels);
2319
2320 fprintf (file, "\n");
2321 }
2322
2323
2324 /* Dump CFG statistics on stderr. Keep extern so that it's always
2325 linked in the final executable. */
2326
2327 DEBUG_FUNCTION void
2328 debug_cfg_stats (void)
2329 {
2330 dump_cfg_stats (stderr);
2331 }
2332
2333 /*---------------------------------------------------------------------------
2334 Miscellaneous helpers
2335 ---------------------------------------------------------------------------*/
2336
2337 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2338 flow. Transfers of control flow associated with EH are excluded. */
2339
2340 static bool
2341 call_can_make_abnormal_goto (gimple t)
2342 {
2343 /* If the function has no non-local labels and makes no setjmp-like
2344 calls, then a call cannot make an abnormal transfer of control. */
2345 if (!cfun->has_nonlocal_label
2346 && !cfun->calls_setjmp)
2347 return false;
2348
2349 /* Likewise if the call has no side effects. */
2350 if (!gimple_has_side_effects (t))
2351 return false;
2352
2353 /* Likewise if the called function is leaf. */
2354 if (gimple_call_flags (t) & ECF_LEAF)
2355 return false;
2356
2357 return true;
2358 }
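/* A source-level illustration (an assumption for exposition, not code from
   this file): given

     #include <setjmp.h>
     jmp_buf env;
     extern void foo (void);

     void
     f (void)
     {
       if (setjmp (env) == 0)
	 foo ();	/* foo may longjmp back to the setjmp above.  */
     }

   cfun->calls_setjmp is true for f, so the call to foo -- which has side
   effects and is not ECF_LEAF -- is considered able to make an abnormal
   transfer of control.  */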
2359
2360
2361 /* Return true if T can make an abnormal transfer of control flow.
2362 Transfers of control flow associated with EH are excluded. */
2363
2364 bool
2365 stmt_can_make_abnormal_goto (gimple t)
2366 {
2367 if (computed_goto_p (t))
2368 return true;
2369 if (is_gimple_call (t))
2370 return call_can_make_abnormal_goto (t);
2371 return false;
2372 }
2373
2374
2375 /* Return true if T represents a stmt that always transfers control. */
2376
2377 bool
2378 is_ctrl_stmt (gimple t)
2379 {
2380 switch (gimple_code (t))
2381 {
2382 case GIMPLE_COND:
2383 case GIMPLE_SWITCH:
2384 case GIMPLE_GOTO:
2385 case GIMPLE_RETURN:
2386 case GIMPLE_RESX:
2387 return true;
2388 default:
2389 return false;
2390 }
2391 }
2392
2393
2394 /* Return true if T is a statement that may alter the flow of control
2395 (e.g., a call to a non-returning function). */
2396
2397 bool
2398 is_ctrl_altering_stmt (gimple t)
2399 {
2400 gcc_assert (t);
2401
2402 switch (gimple_code (t))
2403 {
2404 case GIMPLE_CALL:
2405 /* The per-stmt call flag indicates whether the call could alter
2406 control flow. */
2407 if (gimple_call_ctrl_altering_p (t))
2408 return true;
2409 break;
2410
2411 case GIMPLE_EH_DISPATCH:
2412 /* EH_DISPATCH branches to the individual catch handlers at
2413 this level of a try or allowed-exceptions region. It can
2414 fallthru to the next statement as well. */
2415 return true;
2416
2417 case GIMPLE_ASM:
2418 if (gimple_asm_nlabels (t) > 0)
2419 return true;
2420 break;
2421
2422 CASE_GIMPLE_OMP:
2423 /* OpenMP directives alter control flow. */
2424 return true;
2425
2426 case GIMPLE_TRANSACTION:
2427 /* A transaction start alters control flow. */
2428 return true;
2429
2430 default:
2431 break;
2432 }
2433
2434 /* If a statement can throw, it alters control flow. */
2435 return stmt_can_throw_internal (t);
2436 }
2437
2438
2439 /* Return true if T is a simple local goto. */
2440
2441 bool
2442 simple_goto_p (gimple t)
2443 {
2444 return (gimple_code (t) == GIMPLE_GOTO
2445 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2446 }
2447
2448
2449 /* Return true if STMT should start a new basic block. PREV_STMT is
2450 the statement preceding STMT. It is used when STMT is a label or a
2451 case label. Labels should only start a new basic block if their
2452 previous statement wasn't a label. Otherwise, a sequence of labels
2453 would generate unnecessary basic blocks that contain only a single
2454 label. */
2455
2456 static inline bool
2457 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2458 {
2459 if (stmt == NULL)
2460 return false;
2461
2462 /* Labels start a new basic block only if the preceding statement
2463 wasn't a label of the same type. This prevents the creation of
2464 consecutive blocks that have nothing but a single label. */
2465 if (gimple_code (stmt) == GIMPLE_LABEL)
2466 {
2467 /* Nonlocal and computed GOTO targets always start a new block. */
2468 if (DECL_NONLOCAL (gimple_label_label (stmt))
2469 || FORCED_LABEL (gimple_label_label (stmt)))
2470 return true;
2471
2472 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2473 {
2474 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2475 return true;
2476
2477 cfg_stats.num_merged_labels++;
2478 return false;
2479 }
2480 else
2481 return true;
2482 }
2483 else if (gimple_code (stmt) == GIMPLE_CALL
2484 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2485 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2486 start a new block. */
2487 return true;
2488
2489 return false;
2490 }
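/* For instance, given the GIMPLE sequence

     L1:
     L2:
       x_1 = a_2 + 1;
     L3:

   L1 starts a new basic block, L2 is merged into L1's block (and
   num_merged_labels is bumped), and L3 starts another block because the
   statement preceding it is not a label.  */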
2491
2492
2493 /* Return true if T should end a basic block. */
2494
2495 bool
2496 stmt_ends_bb_p (gimple t)
2497 {
2498 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2499 }
2500
2501 /* Remove block annotations and other data structures. */
2502
2503 void
2504 delete_tree_cfg_annotations (void)
2505 {
2506 vec_free (label_to_block_map_for_fn (cfun));
2507 }
2508
2509
2510 /* Return the first statement in basic block BB. */
2511
2512 gimple
2513 first_stmt (basic_block bb)
2514 {
2515 gimple_stmt_iterator i = gsi_start_bb (bb);
2516 gimple stmt = NULL;
2517
2518 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2519 {
2520 gsi_next (&i);
2521 stmt = NULL;
2522 }
2523 return stmt;
2524 }
2525
2526 /* Return the first non-label statement in basic block BB. */
2527
2528 static gimple
2529 first_non_label_stmt (basic_block bb)
2530 {
2531 gimple_stmt_iterator i = gsi_start_bb (bb);
2532 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2533 gsi_next (&i);
2534 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2535 }
2536
2537 /* Return the last statement in basic block BB. */
2538
2539 gimple
2540 last_stmt (basic_block bb)
2541 {
2542 gimple_stmt_iterator i = gsi_last_bb (bb);
2543 gimple stmt = NULL;
2544
2545 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2546 {
2547 gsi_prev (&i);
2548 stmt = NULL;
2549 }
2550 return stmt;
2551 }
2552
2553 /* Return the last statement of an otherwise empty block. Return NULL
2554 if the block is totally empty, or if it contains more than one
2555 statement. */
2556
2557 gimple
2558 last_and_only_stmt (basic_block bb)
2559 {
2560 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2561 gimple last, prev;
2562
2563 if (gsi_end_p (i))
2564 return NULL;
2565
2566 last = gsi_stmt (i);
2567 gsi_prev_nondebug (&i);
2568 if (gsi_end_p (i))
2569 return last;
2570
2571 /* Empty statements should no longer appear in the instruction stream.
2572 Everything that might have appeared before should be deleted by
2573 remove_useless_stmts, and the optimizers should just gsi_remove
2574 instead of smashing with build_empty_stmt.
2575
2576 Thus the only thing that should appear here in a block containing
2577 one executable statement is a label. */
2578 prev = gsi_stmt (i);
2579 if (gimple_code (prev) == GIMPLE_LABEL)
2580 return last;
2581 else
2582 return NULL;
2583 }
2584
2585 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2586
2587 static void
2588 reinstall_phi_args (edge new_edge, edge old_edge)
2589 {
2590 edge_var_map *vm;
2591 int i;
2592 gimple_stmt_iterator phis;
2593
2594 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2595 if (!v)
2596 return;
2597
2598 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2599 v->iterate (i, &vm) && !gsi_end_p (phis);
2600 i++, gsi_next (&phis))
2601 {
2602 gimple phi = gsi_stmt (phis);
2603 tree result = redirect_edge_var_map_result (vm);
2604 tree arg = redirect_edge_var_map_def (vm);
2605
2606 gcc_assert (result == gimple_phi_result (phi));
2607
2608 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2609 }
2610
2611 redirect_edge_var_map_clear (old_edge);
2612 }
2613
2614 /* Returns the basic block after which the new basic block created
2615 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2616 near its "logical" location. This is of most help to humans looking
2617 at debugging dumps. */
2618
2619 static basic_block
2620 split_edge_bb_loc (edge edge_in)
2621 {
2622 basic_block dest = edge_in->dest;
2623 basic_block dest_prev = dest->prev_bb;
2624
2625 if (dest_prev)
2626 {
2627 edge e = find_edge (dest_prev, dest);
2628 if (e && !(e->flags & EDGE_COMPLEX))
2629 return edge_in->src;
2630 }
2631 return dest_prev;
2632 }
2633
2634 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2635 Abort on abnormal edges. */
2636
2637 static basic_block
2638 gimple_split_edge (edge edge_in)
2639 {
2640 basic_block new_bb, after_bb, dest;
2641 edge new_edge, e;
2642
2643 /* Abnormal edges cannot be split. */
2644 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2645
2646 dest = edge_in->dest;
2647
2648 after_bb = split_edge_bb_loc (edge_in);
2649
2650 new_bb = create_empty_bb (after_bb);
2651 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2652 new_bb->count = edge_in->count;
2653 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2654 new_edge->probability = REG_BR_PROB_BASE;
2655 new_edge->count = edge_in->count;
2656
2657 e = redirect_edge_and_branch (edge_in, new_bb);
2658 gcc_assert (e == edge_in);
2659 reinstall_phi_args (new_edge, e);
2660
2661 return new_bb;
2662 }
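/* Schematically, splitting the (critical) edge E from A to B

       A --E--> B    becomes    A --E--> NEW --fallthru--> B

   where NEW inherits E's count and frequency, and the PHI arguments
   queued on E by the redirection are re-installed on the new fallthru
   edge by reinstall_phi_args.  */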
2663
2664
2665 /* Verify properties of the address expression T with base object BASE. */
2666
2667 static tree
2668 verify_address (tree t, tree base)
2669 {
2670 bool old_constant;
2671 bool old_side_effects;
2672 bool new_constant;
2673 bool new_side_effects;
2674
2675 old_constant = TREE_CONSTANT (t);
2676 old_side_effects = TREE_SIDE_EFFECTS (t);
2677
2678 recompute_tree_invariant_for_addr_expr (t);
2679 new_side_effects = TREE_SIDE_EFFECTS (t);
2680 new_constant = TREE_CONSTANT (t);
2681
2682 if (old_constant != new_constant)
2683 {
2684 error ("constant not recomputed when ADDR_EXPR changed");
2685 return t;
2686 }
2687 if (old_side_effects != new_side_effects)
2688 {
2689 error ("side effects not recomputed when ADDR_EXPR changed");
2690 return t;
2691 }
2692
2693 if (!(TREE_CODE (base) == VAR_DECL
2694 || TREE_CODE (base) == PARM_DECL
2695 || TREE_CODE (base) == RESULT_DECL))
2696 return NULL_TREE;
2697
2698 if (DECL_GIMPLE_REG_P (base))
2699 {
2700 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2701 return base;
2702 }
2703
2704 return NULL_TREE;
2705 }
2706
2707 /* Callback for walk_tree; check that all elements with their address
2708 taken are properly noticed as such. DATA is unused (note the
2709 ATTRIBUTE_UNUSED in the signature below). */
2710
2711 static tree
2712 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2713 {
2714 tree t = *tp, x;
2715
2716 if (TYPE_P (t))
2717 *walk_subtrees = 0;
2718
2719 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2720 #define CHECK_OP(N, MSG) \
2721 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2722 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2723
2724 switch (TREE_CODE (t))
2725 {
2726 case SSA_NAME:
2727 if (SSA_NAME_IN_FREE_LIST (t))
2728 {
2729 error ("SSA name in freelist but still referenced");
2730 return *tp;
2731 }
2732 break;
2733
2734 case INDIRECT_REF:
2735 error ("INDIRECT_REF in gimple IL");
2736 return t;
2737
2738 case MEM_REF:
2739 x = TREE_OPERAND (t, 0);
2740 if (!POINTER_TYPE_P (TREE_TYPE (x))
2741 || !is_gimple_mem_ref_addr (x))
2742 {
2743 error ("invalid first operand of MEM_REF");
2744 return x;
2745 }
2746 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2747 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2748 {
2749 error ("invalid offset operand of MEM_REF");
2750 return TREE_OPERAND (t, 1);
2751 }
2752 if (TREE_CODE (x) == ADDR_EXPR
2753 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2754 return x;
2755 *walk_subtrees = 0;
2756 break;
2757
2758 case ASSERT_EXPR:
2759 x = fold (ASSERT_EXPR_COND (t));
2760 if (x == boolean_false_node)
2761 {
2762 error ("ASSERT_EXPR with an always-false condition");
2763 return *tp;
2764 }
2765 break;
2766
2767 case MODIFY_EXPR:
2768 error ("MODIFY_EXPR not expected while having tuples");
2769 return *tp;
2770
2771 case ADDR_EXPR:
2772 {
2773 tree tem;
2774
2775 gcc_assert (is_gimple_address (t));
2776
2777 /* Skip any references (they will be checked when we recurse down the
2778 tree) and ensure that any variable used as a prefix is marked
2779 addressable. */
2780 for (x = TREE_OPERAND (t, 0);
2781 handled_component_p (x);
2782 x = TREE_OPERAND (x, 0))
2783 ;
2784
2785 if ((tem = verify_address (t, x)))
2786 return tem;
2787
2788 if (!(TREE_CODE (x) == VAR_DECL
2789 || TREE_CODE (x) == PARM_DECL
2790 || TREE_CODE (x) == RESULT_DECL))
2791 return NULL;
2792
2793 if (!TREE_ADDRESSABLE (x))
2794 {
2795 error ("address taken, but ADDRESSABLE bit not set");
2796 return x;
2797 }
2798
2799 break;
2800 }
2801
2802 case COND_EXPR:
2803 x = COND_EXPR_COND (t);
2804 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2805 {
2806 error ("non-integral used in condition");
2807 return x;
2808 }
2809 if (!is_gimple_condexpr (x))
2810 {
2811 error ("invalid conditional operand");
2812 return x;
2813 }
2814 break;
2815
2816 case NON_LVALUE_EXPR:
2817 case TRUTH_NOT_EXPR:
2818 gcc_unreachable ();
2819
2820 CASE_CONVERT:
2821 case FIX_TRUNC_EXPR:
2822 case FLOAT_EXPR:
2823 case NEGATE_EXPR:
2824 case ABS_EXPR:
2825 case BIT_NOT_EXPR:
2826 CHECK_OP (0, "invalid operand to unary operator");
2827 break;
2828
2829 case REALPART_EXPR:
2830 case IMAGPART_EXPR:
2831 case BIT_FIELD_REF:
2832 if (!is_gimple_reg_type (TREE_TYPE (t)))
2833 {
2834 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2835 return t;
2836 }
2837
2838 if (TREE_CODE (t) == BIT_FIELD_REF)
2839 {
2840 tree t0 = TREE_OPERAND (t, 0);
2841 tree t1 = TREE_OPERAND (t, 1);
2842 tree t2 = TREE_OPERAND (t, 2);
2843 if (!tree_fits_uhwi_p (t1)
2844 || !tree_fits_uhwi_p (t2))
2845 {
2846 error ("invalid position or size operand to BIT_FIELD_REF");
2847 return t;
2848 }
2849 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2850 && (TYPE_PRECISION (TREE_TYPE (t))
2851 != tree_to_uhwi (t1)))
2852 {
2853 error ("integral result type precision does not match "
2854 "field size of BIT_FIELD_REF");
2855 return t;
2856 }
2857 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2858 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2859 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2860 != tree_to_uhwi (t1)))
2861 {
2862 error ("mode precision of non-integral result does not "
2863 "match field size of BIT_FIELD_REF");
2864 return t;
2865 }
2866 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2867 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2868 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2869 {
2870 error ("position plus size exceeds size of referenced object in "
2871 "BIT_FIELD_REF");
2872 return t;
2873 }
2874 }
2875 t = TREE_OPERAND (t, 0);
2876
2877 /* Fall-through. */
2878 case COMPONENT_REF:
2879 case ARRAY_REF:
2880 case ARRAY_RANGE_REF:
2881 case VIEW_CONVERT_EXPR:
2882 /* We have a nest of references. Verify that each of the operands
2883 that determines where to reference is either a constant or a variable,
2884 verify that the base is valid, and then flag that we've already
2885 checked the subtrees. */
2886 while (handled_component_p (t))
2887 {
2888 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2889 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2890 else if (TREE_CODE (t) == ARRAY_REF
2891 || TREE_CODE (t) == ARRAY_RANGE_REF)
2892 {
2893 CHECK_OP (1, "invalid array index");
2894 if (TREE_OPERAND (t, 2))
2895 CHECK_OP (2, "invalid array lower bound");
2896 if (TREE_OPERAND (t, 3))
2897 CHECK_OP (3, "invalid array stride");
2898 }
2899 else if (TREE_CODE (t) == BIT_FIELD_REF
2900 || TREE_CODE (t) == REALPART_EXPR
2901 || TREE_CODE (t) == IMAGPART_EXPR)
2902 {
2903 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2904 "REALPART_EXPR");
2905 return t;
2906 }
2907
2908 t = TREE_OPERAND (t, 0);
2909 }
2910
2911 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2912 {
2913 error ("invalid reference prefix");
2914 return t;
2915 }
2916 *walk_subtrees = 0;
2917 break;
2918 case PLUS_EXPR:
2919 case MINUS_EXPR:
2920 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2921 should be done using POINTER_PLUS_EXPR. */
2922 if (POINTER_TYPE_P (TREE_TYPE (t)))
2923 {
2924 error ("invalid operand to plus/minus, type is a pointer");
2925 return t;
2926 }
2927 CHECK_OP (0, "invalid operand to binary operator");
2928 CHECK_OP (1, "invalid operand to binary operator");
2929 break;
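  /* For example, given "int *p", the GIMPLE form of p + 4 is a
     POINTER_PLUS_EXPR "p_2 = p_1 + 16" whose second operand is a sizetype
     byte offset (16, assuming a 4-byte int) -- never a PLUS_EXPR on the
     pointer type.  */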
2930
2931 case POINTER_PLUS_EXPR:
2932 /* Check to make sure the first operand is a pointer or reference type. */
2933 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2934 {
2935 error ("invalid operand to pointer plus, first operand is not a pointer");
2936 return t;
2937 }
2938 /* Check to make sure the second operand is a ptrofftype. */
2939 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2940 {
2941 error ("invalid operand to pointer plus, second operand is not an "
2942 "integer type of appropriate width");
2943 return t;
2944 }
2945 /* FALLTHROUGH */
2946 case LT_EXPR:
2947 case LE_EXPR:
2948 case GT_EXPR:
2949 case GE_EXPR:
2950 case EQ_EXPR:
2951 case NE_EXPR:
2952 case UNORDERED_EXPR:
2953 case ORDERED_EXPR:
2954 case UNLT_EXPR:
2955 case UNLE_EXPR:
2956 case UNGT_EXPR:
2957 case UNGE_EXPR:
2958 case UNEQ_EXPR:
2959 case LTGT_EXPR:
2960 case MULT_EXPR:
2961 case TRUNC_DIV_EXPR:
2962 case CEIL_DIV_EXPR:
2963 case FLOOR_DIV_EXPR:
2964 case ROUND_DIV_EXPR:
2965 case TRUNC_MOD_EXPR:
2966 case CEIL_MOD_EXPR:
2967 case FLOOR_MOD_EXPR:
2968 case ROUND_MOD_EXPR:
2969 case RDIV_EXPR:
2970 case EXACT_DIV_EXPR:
2971 case MIN_EXPR:
2972 case MAX_EXPR:
2973 case LSHIFT_EXPR:
2974 case RSHIFT_EXPR:
2975 case LROTATE_EXPR:
2976 case RROTATE_EXPR:
2977 case BIT_IOR_EXPR:
2978 case BIT_XOR_EXPR:
2979 case BIT_AND_EXPR:
2980 CHECK_OP (0, "invalid operand to binary operator");
2981 CHECK_OP (1, "invalid operand to binary operator");
2982 break;
2983
2984 case CONSTRUCTOR:
2985 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2986 *walk_subtrees = 0;
2987 break;
2988
2989 case CASE_LABEL_EXPR:
2990 if (CASE_CHAIN (t))
2991 {
2992 error ("invalid CASE_CHAIN");
2993 return t;
2994 }
2995 break;
2996
2997 default:
2998 break;
2999 }
3000 return NULL;
3001
3002 #undef CHECK_OP
3003 }
3004
3005
3006 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3007 Returns true if there is an error, otherwise false. */
3008
3009 static bool
3010 verify_types_in_gimple_min_lval (tree expr)
3011 {
3012 tree op;
3013
3014 if (is_gimple_id (expr))
3015 return false;
3016
3017 if (TREE_CODE (expr) != TARGET_MEM_REF
3018 && TREE_CODE (expr) != MEM_REF)
3019 {
3020 error ("invalid expression for min lvalue");
3021 return true;
3022 }
3023
3024 /* TARGET_MEM_REFs are strange beasts. */
3025 if (TREE_CODE (expr) == TARGET_MEM_REF)
3026 return false;
3027
3028 op = TREE_OPERAND (expr, 0);
3029 if (!is_gimple_val (op))
3030 {
3031 error ("invalid operand in indirect reference");
3032 debug_generic_stmt (op);
3033 return true;
3034 }
3035 /* Memory references can now generally involve a value conversion. */
3036
3037 return false;
3038 }
3039
3040 /* Verify if EXPR is a valid GIMPLE reference expression. If
3041 REQUIRE_LVALUE is true, verify that it is an lvalue. Returns true
3042 if there is an error, otherwise false. */
3043
3044 static bool
3045 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3046 {
3047 while (handled_component_p (expr))
3048 {
3049 tree op = TREE_OPERAND (expr, 0);
3050
3051 if (TREE_CODE (expr) == ARRAY_REF
3052 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3053 {
3054 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3055 || (TREE_OPERAND (expr, 2)
3056 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3057 || (TREE_OPERAND (expr, 3)
3058 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3059 {
3060 error ("invalid operands to array reference");
3061 debug_generic_stmt (expr);
3062 return true;
3063 }
3064 }
3065
3066 /* Verify if the reference array element types are compatible. */
3067 if (TREE_CODE (expr) == ARRAY_REF
3068 && !useless_type_conversion_p (TREE_TYPE (expr),
3069 TREE_TYPE (TREE_TYPE (op))))
3070 {
3071 error ("type mismatch in array reference");
3072 debug_generic_stmt (TREE_TYPE (expr));
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3074 return true;
3075 }
3076 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3077 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3078 TREE_TYPE (TREE_TYPE (op))))
3079 {
3080 error ("type mismatch in array range reference");
3081 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3082 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3083 return true;
3084 }
3085
3086 if ((TREE_CODE (expr) == REALPART_EXPR
3087 || TREE_CODE (expr) == IMAGPART_EXPR)
3088 && !useless_type_conversion_p (TREE_TYPE (expr),
3089 TREE_TYPE (TREE_TYPE (op))))
3090 {
3091 error ("type mismatch in real/imagpart reference");
3092 debug_generic_stmt (TREE_TYPE (expr));
3093 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3094 return true;
3095 }
3096
3097 if (TREE_CODE (expr) == COMPONENT_REF
3098 && !useless_type_conversion_p (TREE_TYPE (expr),
3099 TREE_TYPE (TREE_OPERAND (expr, 1))))
3100 {
3101 error ("type mismatch in component reference");
3102 debug_generic_stmt (TREE_TYPE (expr));
3103 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3104 return true;
3105 }
3106
3107 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3108 {
3109 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
3110 that their operand is not an SSA name or an invariant when
3111 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3112 bug). Otherwise there is nothing to verify; gross mismatches at
3113 most invoke undefined behavior. */
3114 if (require_lvalue
3115 && (TREE_CODE (op) == SSA_NAME
3116 || is_gimple_min_invariant (op)))
3117 {
3118 error ("conversion of an SSA_NAME on the left hand side");
3119 debug_generic_stmt (expr);
3120 return true;
3121 }
3122 else if (TREE_CODE (op) == SSA_NAME
3123 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3124 {
3125 error ("conversion of register to a different size");
3126 debug_generic_stmt (expr);
3127 return true;
3128 }
3129 else if (!handled_component_p (op))
3130 return false;
3131 }
3132
3133 expr = op;
3134 }
3135
3136 if (TREE_CODE (expr) == MEM_REF)
3137 {
3138 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3139 {
3140 error ("invalid address operand in MEM_REF");
3141 debug_generic_stmt (expr);
3142 return true;
3143 }
3144 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3145 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3146 {
3147 error ("invalid offset operand in MEM_REF");
3148 debug_generic_stmt (expr);
3149 return true;
3150 }
3151 }
3152 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3153 {
3154 if (!TMR_BASE (expr)
3155 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3156 {
3157 error ("invalid address operand in TARGET_MEM_REF");
3158 return true;
3159 }
3160 if (!TMR_OFFSET (expr)
3161 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3162 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3163 {
3164 error ("invalid offset operand in TARGET_MEM_REF");
3165 debug_generic_stmt (expr);
3166 return true;
3167 }
3168 }
3169
3170 return ((require_lvalue || !is_gimple_min_invariant (expr))
3171 && verify_types_in_gimple_min_lval (expr));
3172 }
3173
3174 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3175 list of pointer-to types that is trivially convertible to DEST. */
3176
3177 static bool
3178 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3179 {
3180 tree src;
3181
3182 if (!TYPE_POINTER_TO (src_obj))
3183 return true;
3184
3185 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3186 if (useless_type_conversion_p (dest, src))
3187 return true;
3188
3189 return false;
3190 }
3191
3192 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3193 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3194
3195 static bool
3196 valid_fixed_convert_types_p (tree type1, tree type2)
3197 {
3198 return (FIXED_POINT_TYPE_P (type1)
3199 && (INTEGRAL_TYPE_P (type2)
3200 || SCALAR_FLOAT_TYPE_P (type2)
3201 || FIXED_POINT_TYPE_P (type2)));
3202 }
3203
3204 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3205 is a problem, otherwise false. */
3206
3207 static bool
3208 verify_gimple_call (gimple stmt)
3209 {
3210 tree fn = gimple_call_fn (stmt);
3211 tree fntype, fndecl;
3212 unsigned i;
3213
3214 if (gimple_call_internal_p (stmt))
3215 {
3216 if (fn)
3217 {
3218 error ("gimple call has two targets");
3219 debug_generic_stmt (fn);
3220 return true;
3221 }
3222 }
3223 else
3224 {
3225 if (!fn)
3226 {
3227 error ("gimple call has no target");
3228 return true;
3229 }
3230 }
3231
3232 if (fn && !is_gimple_call_addr (fn))
3233 {
3234 error ("invalid function in gimple call");
3235 debug_generic_stmt (fn);
3236 return true;
3237 }
3238
3239 if (fn
3240 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3241 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3242 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3243 {
3244 error ("non-function in gimple call");
3245 return true;
3246 }
3247
3248 fndecl = gimple_call_fndecl (stmt);
3249 if (fndecl
3250 && TREE_CODE (fndecl) == FUNCTION_DECL
3251 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3252 && !DECL_PURE_P (fndecl)
3253 && !TREE_READONLY (fndecl))
3254 {
3255 error ("invalid pure const state for function");
3256 return true;
3257 }
3258
3259 if (gimple_call_lhs (stmt)
3260 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3261 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3262 {
3263 error ("invalid LHS in gimple call");
3264 return true;
3265 }
3266
3267 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3268 {
3269 error ("LHS in noreturn call");
3270 return true;
3271 }
3272
3273 fntype = gimple_call_fntype (stmt);
3274 if (fntype
3275 && gimple_call_lhs (stmt)
3276 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3277 TREE_TYPE (fntype))
3278 /* ??? At least C++ misses conversions at assignments from
3279 void * call results.
3280 ??? Java is completely off. Especially with functions
3281 returning java.lang.Object.
3282 For now simply allow arbitrary pointer type conversions. */
3283 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3284 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3285 {
3286 error ("invalid conversion in gimple call");
3287 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3288 debug_generic_stmt (TREE_TYPE (fntype));
3289 return true;
3290 }
3291
3292 if (gimple_call_chain (stmt)
3293 && !is_gimple_val (gimple_call_chain (stmt)))
3294 {
3295 error ("invalid static chain in gimple call");
3296 debug_generic_stmt (gimple_call_chain (stmt));
3297 return true;
3298 }
3299
3300 /* If there is a static chain argument, this should not be an indirect
3301 call, and the decl should have DECL_STATIC_CHAIN set. */
3302 if (gimple_call_chain (stmt))
3303 {
3304 if (!gimple_call_fndecl (stmt))
3305 {
3306 error ("static chain in indirect gimple call");
3307 return true;
3308 }
3309 fn = TREE_OPERAND (fn, 0);
3310
3311 if (!DECL_STATIC_CHAIN (fn))
3312 {
3313 error ("static chain with function that doesn%'t use one");
3314 return true;
3315 }
3316 }
3317
3318 /* ??? The C frontend passes unpromoted arguments in case it
3319 didn't see a function declaration before the call. So for now
3320 leave the call arguments mostly unverified. Once we gimplify
3321 unit-at-a-time we have a chance to fix this. */
3322
3323 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3324 {
3325 tree arg = gimple_call_arg (stmt, i);
3326 if ((is_gimple_reg_type (TREE_TYPE (arg))
3327 && !is_gimple_val (arg))
3328 || (!is_gimple_reg_type (TREE_TYPE (arg))
3329 && !is_gimple_lvalue (arg)))
3330 {
3331 error ("invalid argument to gimple call");
3332 debug_generic_expr (arg);
3333 return true;
3334 }
3335 }
3336
3337 return false;
3338 }
3339
3340 /* Verifies the gimple comparison with the result type TYPE and
3341 the operands OP0 and OP1. */
3342
3343 static bool
3344 verify_gimple_comparison (tree type, tree op0, tree op1)
3345 {
3346 tree op0_type = TREE_TYPE (op0);
3347 tree op1_type = TREE_TYPE (op1);
3348
3349 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3350 {
3351 error ("invalid operands in gimple comparison");
3352 return true;
3353 }
3354
3355 /* For comparisons we do not record an operation type giving the
3356 effective type the comparison is carried out in. Instead
3357 we require that either the first operand is trivially
3358 convertible into the second, or the other way around.
3359 Because we special-case pointers to void we allow
3360 comparisons of pointers with the same mode as well. */
3361 if (!useless_type_conversion_p (op0_type, op1_type)
3362 && !useless_type_conversion_p (op1_type, op0_type)
3363 && (!POINTER_TYPE_P (op0_type)
3364 || !POINTER_TYPE_P (op1_type)
3365 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3366 {
3367 error ("mismatching comparison operand types");
3368 debug_generic_expr (op0_type);
3369 debug_generic_expr (op1_type);
3370 return true;
3371 }
3372
3373 /* The resulting type of a comparison may be an effective boolean type. */
3374 if (INTEGRAL_TYPE_P (type)
3375 && (TREE_CODE (type) == BOOLEAN_TYPE
3376 || TYPE_PRECISION (type) == 1))
3377 {
3378 if (TREE_CODE (op0_type) == VECTOR_TYPE
3379 || TREE_CODE (op1_type) == VECTOR_TYPE)
3380 {
3381 error ("vector comparison returning a boolean");
3382 debug_generic_expr (op0_type);
3383 debug_generic_expr (op1_type);
3384 return true;
3385 }
3386 }
3387 /* Or an integer vector type with the same size and element count
3388 as the comparison operand types. */
3389 else if (TREE_CODE (type) == VECTOR_TYPE
3390 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3391 {
3392 if (TREE_CODE (op0_type) != VECTOR_TYPE
3393 || TREE_CODE (op1_type) != VECTOR_TYPE)
3394 {
3395 error ("non-vector operands in vector comparison");
3396 debug_generic_expr (op0_type);
3397 debug_generic_expr (op1_type);
3398 return true;
3399 }
3400
3401 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3402 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3403 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3404 /* The result of a vector comparison is of signed
3405 integral type. */
3406 || TYPE_UNSIGNED (TREE_TYPE (type)))
3407 {
3408 error ("invalid vector comparison resulting type");
3409 debug_generic_expr (type);
3410 return true;
3411 }
3412 }
3413 else
3414 {
3415 error ("bogus comparison result type");
3416 debug_generic_expr (type);
3417 return true;
3418 }
3419
3420 return false;
3421 }
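/* For example, comparing an "int *" with a "void *" value is accepted here
   because both pointer types have the same mode, while an "int" vs "int *"
   comparison fails the checks above with "mismatching comparison operand
   types".  */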
3422
3423 /* Verify a gimple assignment statement STMT with a unary rhs.
3424 Returns true if anything is wrong. */
3425
3426 static bool
3427 verify_gimple_assign_unary (gimple stmt)
3428 {
3429 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3430 tree lhs = gimple_assign_lhs (stmt);
3431 tree lhs_type = TREE_TYPE (lhs);
3432 tree rhs1 = gimple_assign_rhs1 (stmt);
3433 tree rhs1_type = TREE_TYPE (rhs1);
3434
3435 if (!is_gimple_reg (lhs))
3436 {
3437 error ("non-register as LHS of unary operation");
3438 return true;
3439 }
3440
3441 if (!is_gimple_val (rhs1))
3442 {
3443 error ("invalid operand in unary operation");
3444 return true;
3445 }
3446
3447 /* First handle conversions. */
3448 switch (rhs_code)
3449 {
3450 CASE_CONVERT:
3451 {
3452 /* Allow conversions from pointer type to integral type only if
3453 there is no sign or zero extension involved.
3454 For targets where the precision of ptrofftype doesn't match that
3455 of pointers we need to allow arbitrary conversions to ptrofftype. */
3456 if ((POINTER_TYPE_P (lhs_type)
3457 && INTEGRAL_TYPE_P (rhs1_type))
3458 || (POINTER_TYPE_P (rhs1_type)
3459 && INTEGRAL_TYPE_P (lhs_type)
3460 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3461 || ptrofftype_p (sizetype))))
3462 return false;
3463
3464 /* Allow conversion from integral to offset type and vice versa. */
3465 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3466 && INTEGRAL_TYPE_P (rhs1_type))
3467 || (INTEGRAL_TYPE_P (lhs_type)
3468 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3469 return false;
3470
3471 /* Otherwise assert we are converting between types of the
3472 same kind. */
3473 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3474 {
3475 error ("invalid types in nop conversion");
3476 debug_generic_expr (lhs_type);
3477 debug_generic_expr (rhs1_type);
3478 return true;
3479 }
3480
3481 return false;
3482 }
3483
3484 case ADDR_SPACE_CONVERT_EXPR:
3485 {
3486 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3487 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3488 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3489 {
3490 error ("invalid types in address space conversion");
3491 debug_generic_expr (lhs_type);
3492 debug_generic_expr (rhs1_type);
3493 return true;
3494 }
3495
3496 return false;
3497 }
3498
3499 case FIXED_CONVERT_EXPR:
3500 {
3501 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3502 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3503 {
3504 error ("invalid types in fixed-point conversion");
3505 debug_generic_expr (lhs_type);
3506 debug_generic_expr (rhs1_type);
3507 return true;
3508 }
3509
3510 return false;
3511 }
3512
3513 case FLOAT_EXPR:
3514 {
3515 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3516 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3517 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3518 {
3519 error ("invalid types in conversion to floating point");
3520 debug_generic_expr (lhs_type);
3521 debug_generic_expr (rhs1_type);
3522 return true;
3523 }
3524
3525 return false;
3526 }
3527
3528 case FIX_TRUNC_EXPR:
3529 {
3530 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3531 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3532 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3533 {
3534 error ("invalid types in conversion to integer");
3535 debug_generic_expr (lhs_type);
3536 debug_generic_expr (rhs1_type);
3537 return true;
3538 }
3539
3540 return false;
3541 }
3542
3543 case VEC_UNPACK_HI_EXPR:
3544 case VEC_UNPACK_LO_EXPR:
3545 case REDUC_MAX_EXPR:
3546 case REDUC_MIN_EXPR:
3547 case REDUC_PLUS_EXPR:
3548 case VEC_UNPACK_FLOAT_HI_EXPR:
3549 case VEC_UNPACK_FLOAT_LO_EXPR:
3550 /* FIXME. */
3551 return false;
3552
3553 case NEGATE_EXPR:
3554 case ABS_EXPR:
3555 case BIT_NOT_EXPR:
3556 case PAREN_EXPR:
3557 case CONJ_EXPR:
3558 break;
3559
3560 default:
3561 gcc_unreachable ();
3562 }
3563
3564 /* For the remaining codes assert there is no conversion involved. */
3565 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3566 {
3567 error ("non-trivial conversion in unary operation");
3568 debug_generic_expr (lhs_type);
3569 debug_generic_expr (rhs1_type);
3570 return true;
3571 }
3572
3573 return false;
3574 }
3575
3576 /* Verify a gimple assignment statement STMT with a binary rhs.
3577 Returns true if anything is wrong. */
3578
3579 static bool
3580 verify_gimple_assign_binary (gimple stmt)
3581 {
3582 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3583 tree lhs = gimple_assign_lhs (stmt);
3584 tree lhs_type = TREE_TYPE (lhs);
3585 tree rhs1 = gimple_assign_rhs1 (stmt);
3586 tree rhs1_type = TREE_TYPE (rhs1);
3587 tree rhs2 = gimple_assign_rhs2 (stmt);
3588 tree rhs2_type = TREE_TYPE (rhs2);
3589
3590 if (!is_gimple_reg (lhs))
3591 {
3592 error ("non-register as LHS of binary operation");
3593 return true;
3594 }
3595
3596 if (!is_gimple_val (rhs1)
3597 || !is_gimple_val (rhs2))
3598 {
3599 error ("invalid operands in binary operation");
3600 return true;
3601 }
3602
3603 /* First handle operations that involve different types. */
3604 switch (rhs_code)
3605 {
3606 case COMPLEX_EXPR:
3607 {
3608 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3609 || !(INTEGRAL_TYPE_P (rhs1_type)
3610 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3611 || !(INTEGRAL_TYPE_P (rhs2_type)
3612 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3613 {
3614 error ("type mismatch in complex expression");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 debug_generic_expr (rhs2_type);
3618 return true;
3619 }
3620
3621 return false;
3622 }
3623
3624 case LSHIFT_EXPR:
3625 case RSHIFT_EXPR:
3626 case LROTATE_EXPR:
3627 case RROTATE_EXPR:
3628 {
3629 /* Shifts and rotates are ok on integral types, fixed point
3630 types and integer vector types. */
3631 if ((!INTEGRAL_TYPE_P (rhs1_type)
3632 && !FIXED_POINT_TYPE_P (rhs1_type)
3633 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3634 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3635 || (!INTEGRAL_TYPE_P (rhs2_type)
3636 /* Vector shifts of vectors are also ok. */
3637 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3638 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3639 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3640 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3641 || !useless_type_conversion_p (lhs_type, rhs1_type))
3642 {
3643 error ("type mismatch in shift expression");
3644 debug_generic_expr (lhs_type);
3645 debug_generic_expr (rhs1_type);
3646 debug_generic_expr (rhs2_type);
3647 return true;
3648 }
3649
3650 return false;
3651 }
3652
3653 case VEC_LSHIFT_EXPR:
3654 case VEC_RSHIFT_EXPR:
3655 {
3656 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3657 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3658 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3659 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3660 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3661 || (!INTEGRAL_TYPE_P (rhs2_type)
3662 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3663 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3664 || !useless_type_conversion_p (lhs_type, rhs1_type))
3665 {
3666 error ("type mismatch in vector shift expression");
3667 debug_generic_expr (lhs_type);
3668 debug_generic_expr (rhs1_type);
3669 debug_generic_expr (rhs2_type);
3670 return true;
3671 }
3672 /* For shifting a vector of non-integral components we
3673 only allow shifting by a constant multiple of the element size. */
3674 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3675 && (TREE_CODE (rhs2) != INTEGER_CST
3676 || !div_if_zero_remainder (rhs2,
3677 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3678 {
3679 error ("non-element sized vector shift of floating point vector");
3680 return true;
3681 }
3682
3683 return false;
3684 }
3685
3686 case WIDEN_LSHIFT_EXPR:
3687 {
3688 if (!INTEGRAL_TYPE_P (lhs_type)
3689 || !INTEGRAL_TYPE_P (rhs1_type)
3690 || TREE_CODE (rhs2) != INTEGER_CST
3691 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3692 {
3693 error ("type mismatch in widening vector shift expression");
3694 debug_generic_expr (lhs_type);
3695 debug_generic_expr (rhs1_type);
3696 debug_generic_expr (rhs2_type);
3697 return true;
3698 }
3699
3700 return false;
3701 }
3702
3703 case VEC_WIDEN_LSHIFT_HI_EXPR:
3704 case VEC_WIDEN_LSHIFT_LO_EXPR:
3705 {
3706 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3707 || TREE_CODE (lhs_type) != VECTOR_TYPE
3708 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3709 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3710 || TREE_CODE (rhs2) != INTEGER_CST
3711 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3712 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3713 {
3714 error ("type mismatch in widening vector shift expression");
3715 debug_generic_expr (lhs_type);
3716 debug_generic_expr (rhs1_type);
3717 debug_generic_expr (rhs2_type);
3718 return true;
3719 }
3720
3721 return false;
3722 }
3723
3724 case PLUS_EXPR:
3725 case MINUS_EXPR:
3726 {
3727 tree lhs_etype = lhs_type;
3728 tree rhs1_etype = rhs1_type;
3729 tree rhs2_etype = rhs2_type;
3730 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3731 {
3732 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3733 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3734 {
3735 error ("invalid non-vector operands to vector valued plus");
3736 return true;
3737 }
3738 lhs_etype = TREE_TYPE (lhs_type);
3739 rhs1_etype = TREE_TYPE (rhs1_type);
3740 rhs2_etype = TREE_TYPE (rhs2_type);
3741 }
3742 if (POINTER_TYPE_P (lhs_etype)
3743 || POINTER_TYPE_P (rhs1_etype)
3744 || POINTER_TYPE_P (rhs2_etype))
3745 {
3746 error ("invalid (pointer) operands to plus/minus");
3747 return true;
3748 }
3749
3750 /* Continue with generic binary expression handling. */
3751 break;
3752 }
3753
3754 case POINTER_PLUS_EXPR:
3755 {
3756 if (!POINTER_TYPE_P (rhs1_type)
3757 || !useless_type_conversion_p (lhs_type, rhs1_type)
3758 || !ptrofftype_p (rhs2_type))
3759 {
3760 error ("type mismatch in pointer plus expression");
3761 debug_generic_stmt (lhs_type);
3762 debug_generic_stmt (rhs1_type);
3763 debug_generic_stmt (rhs2_type);
3764 return true;
3765 }
3766
3767 return false;
3768 }
3769
3770 case TRUTH_ANDIF_EXPR:
3771 case TRUTH_ORIF_EXPR:
3772 case TRUTH_AND_EXPR:
3773 case TRUTH_OR_EXPR:
3774 case TRUTH_XOR_EXPR:
3775
3776 gcc_unreachable ();
3777
3778 case LT_EXPR:
3779 case LE_EXPR:
3780 case GT_EXPR:
3781 case GE_EXPR:
3782 case EQ_EXPR:
3783 case NE_EXPR:
3784 case UNORDERED_EXPR:
3785 case ORDERED_EXPR:
3786 case UNLT_EXPR:
3787 case UNLE_EXPR:
3788 case UNGT_EXPR:
3789 case UNGE_EXPR:
3790 case UNEQ_EXPR:
3791 case LTGT_EXPR:
3792 /* Comparisons are also binary, but the result type is not
3793 connected to the operand types. */
3794 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3795
3796 case WIDEN_MULT_EXPR:
3797 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3798 return true;
3799 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3800 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3801
3802 case WIDEN_SUM_EXPR:
3803 case VEC_WIDEN_MULT_HI_EXPR:
3804 case VEC_WIDEN_MULT_LO_EXPR:
3805 case VEC_WIDEN_MULT_EVEN_EXPR:
3806 case VEC_WIDEN_MULT_ODD_EXPR:
3807 case VEC_PACK_TRUNC_EXPR:
3808 case VEC_PACK_SAT_EXPR:
3809 case VEC_PACK_FIX_TRUNC_EXPR:
3810 /* FIXME. */
3811 return false;
3812
3813 case MULT_EXPR:
3814 case MULT_HIGHPART_EXPR:
3815 case TRUNC_DIV_EXPR:
3816 case CEIL_DIV_EXPR:
3817 case FLOOR_DIV_EXPR:
3818 case ROUND_DIV_EXPR:
3819 case TRUNC_MOD_EXPR:
3820 case CEIL_MOD_EXPR:
3821 case FLOOR_MOD_EXPR:
3822 case ROUND_MOD_EXPR:
3823 case RDIV_EXPR:
3824 case EXACT_DIV_EXPR:
3825 case MIN_EXPR:
3826 case MAX_EXPR:
3827 case BIT_IOR_EXPR:
3828 case BIT_XOR_EXPR:
3829 case BIT_AND_EXPR:
3830 /* Continue with generic binary expression handling. */
3831 break;
3832
3833 default:
3834 gcc_unreachable ();
3835 }
3836
3837 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3838 || !useless_type_conversion_p (lhs_type, rhs2_type))
3839 {
3840 error ("type mismatch in binary expression");
3841 debug_generic_stmt (lhs_type);
3842 debug_generic_stmt (rhs1_type);
3843 debug_generic_stmt (rhs2_type);
3844 return true;
3845 }
3846
3847 return false;
3848 }
3849
3850 /* Verify a gimple assignment statement STMT with a ternary rhs.
3851 Returns true if anything is wrong. */
3852
3853 static bool
3854 verify_gimple_assign_ternary (gimple stmt)
3855 {
3856 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3857 tree lhs = gimple_assign_lhs (stmt);
3858 tree lhs_type = TREE_TYPE (lhs);
3859 tree rhs1 = gimple_assign_rhs1 (stmt);
3860 tree rhs1_type = TREE_TYPE (rhs1);
3861 tree rhs2 = gimple_assign_rhs2 (stmt);
3862 tree rhs2_type = TREE_TYPE (rhs2);
3863 tree rhs3 = gimple_assign_rhs3 (stmt);
3864 tree rhs3_type = TREE_TYPE (rhs3);
3865
3866 if (!is_gimple_reg (lhs))
3867 {
3868 error ("non-register as LHS of ternary operation");
3869 return true;
3870 }
3871
3872 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3873 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3874 || !is_gimple_val (rhs2)
3875 || !is_gimple_val (rhs3))
3876 {
3877 error ("invalid operands in ternary operation");
3878 return true;
3879 }
3880
3881 /* First handle operations that involve different types. */
3882 switch (rhs_code)
3883 {
3884 case WIDEN_MULT_PLUS_EXPR:
3885 case WIDEN_MULT_MINUS_EXPR:
3886 if ((!INTEGRAL_TYPE_P (rhs1_type)
3887 && !FIXED_POINT_TYPE_P (rhs1_type))
3888 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3889 || !useless_type_conversion_p (lhs_type, rhs3_type)
3890 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3891 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3892 {
3893 error ("type mismatch in widening multiply-accumulate expression");
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 debug_generic_expr (rhs2_type);
3897 debug_generic_expr (rhs3_type);
3898 return true;
3899 }
3900 break;
3901
3902 case FMA_EXPR:
3903 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3904 || !useless_type_conversion_p (lhs_type, rhs2_type)
3905 || !useless_type_conversion_p (lhs_type, rhs3_type))
3906 {
3907 error ("type mismatch in fused multiply-add expression");
3908 debug_generic_expr (lhs_type);
3909 debug_generic_expr (rhs1_type);
3910 debug_generic_expr (rhs2_type);
3911 debug_generic_expr (rhs3_type);
3912 return true;
3913 }
3914 break;
3915
3916 case COND_EXPR:
3917 case VEC_COND_EXPR:
3918 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3919 || !useless_type_conversion_p (lhs_type, rhs3_type))
3920 {
3921 error ("type mismatch in conditional expression");
3922 debug_generic_expr (lhs_type);
3923 debug_generic_expr (rhs2_type);
3924 debug_generic_expr (rhs3_type);
3925 return true;
3926 }
3927 break;
3928
3929 case VEC_PERM_EXPR:
3930 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3931 || !useless_type_conversion_p (lhs_type, rhs2_type))
3932 {
3933 error ("type mismatch in vector permute expression");
3934 debug_generic_expr (lhs_type);
3935 debug_generic_expr (rhs1_type);
3936 debug_generic_expr (rhs2_type);
3937 debug_generic_expr (rhs3_type);
3938 return true;
3939 }
3940
3941 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3942 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3943 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3944 {
3945 error ("vector types expected in vector permute expression");
3946 debug_generic_expr (lhs_type);
3947 debug_generic_expr (rhs1_type);
3948 debug_generic_expr (rhs2_type);
3949 debug_generic_expr (rhs3_type);
3950 return true;
3951 }
3952
3953 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3954 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3955 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3956 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3957 != TYPE_VECTOR_SUBPARTS (lhs_type))
3958 {
3959 error ("vectors with different element number found "
3960 "in vector permute expression");
3961 debug_generic_expr (lhs_type);
3962 debug_generic_expr (rhs1_type);
3963 debug_generic_expr (rhs2_type);
3964 debug_generic_expr (rhs3_type);
3965 return true;
3966 }
3967
3968 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3969 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3970 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3971 {
3972 error ("invalid mask type in vector permute expression");
3973 debug_generic_expr (lhs_type);
3974 debug_generic_expr (rhs1_type);
3975 debug_generic_expr (rhs2_type);
3976 debug_generic_expr (rhs3_type);
3977 return true;
3978 }
3979
3980 return false;
3981
3982 case SAD_EXPR:
3983 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
3984 || !useless_type_conversion_p (lhs_type, rhs3_type)
3985 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
3986 (TYPE_MODE (TREE_TYPE (rhs1_type))))
3987 > GET_MODE_BITSIZE (GET_MODE_INNER
3988 (TYPE_MODE (TREE_TYPE (lhs_type)))))
3989 {
3990 error ("type mismatch in sad expression");
3991 debug_generic_expr (lhs_type);
3992 debug_generic_expr (rhs1_type);
3993 debug_generic_expr (rhs2_type);
3994 debug_generic_expr (rhs3_type);
3995 return true;
3996 }
3997
3998 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3999 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4000 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4001 {
4002 error ("vector types expected in sad expression");
4003 debug_generic_expr (lhs_type);
4004 debug_generic_expr (rhs1_type);
4005 debug_generic_expr (rhs2_type);
4006 debug_generic_expr (rhs3_type);
4007 return true;
4008 }
4009
4010 return false;
4011
4012 case DOT_PROD_EXPR:
4013 case REALIGN_LOAD_EXPR:
4014 /* FIXME. */
4015 return false;
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020 return false;
4021 }
4022
4023 /* Verify a gimple assignment statement STMT with a single rhs.
4024 Returns true if anything is wrong. */
4025
4026 static bool
4027 verify_gimple_assign_single (gimple stmt)
4028 {
4029 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4030 tree lhs = gimple_assign_lhs (stmt);
4031 tree lhs_type = TREE_TYPE (lhs);
4032 tree rhs1 = gimple_assign_rhs1 (stmt);
4033 tree rhs1_type = TREE_TYPE (rhs1);
4034 bool res = false;
4035
4036 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4037 {
4038 error ("non-trivial conversion at assignment");
4039 debug_generic_expr (lhs_type);
4040 debug_generic_expr (rhs1_type);
4041 return true;
4042 }
4043
4044 if (gimple_clobber_p (stmt)
4045 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4046 {
4047 error ("non-decl/MEM_REF LHS in clobber statement");
4048 debug_generic_expr (lhs);
4049 return true;
4050 }
4051
4052 if (handled_component_p (lhs)
4053 || TREE_CODE (lhs) == MEM_REF
4054 || TREE_CODE (lhs) == TARGET_MEM_REF)
4055 res |= verify_types_in_gimple_reference (lhs, true);
4056
4057 /* Special codes we cannot handle via their class. */
4058 switch (rhs_code)
4059 {
4060 case ADDR_EXPR:
4061 {
4062 tree op = TREE_OPERAND (rhs1, 0);
4063 if (!is_gimple_addressable (op))
4064 {
4065 error ("invalid operand in unary expression");
4066 return true;
4067 }
4068
4069 /* Technically there is no longer a need for matching types, but
4070 gimple hygiene asks for this check. In LTO we can end up
4071 combining incompatible units and thus end up with addresses
4072 of globals that change their type to a common one. */
4073 if (!in_lto_p
4074 && !types_compatible_p (TREE_TYPE (op),
4075 TREE_TYPE (TREE_TYPE (rhs1)))
4076 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4077 TREE_TYPE (op)))
4078 {
4079 error ("type mismatch in address expression");
4080 debug_generic_stmt (TREE_TYPE (rhs1));
4081 debug_generic_stmt (TREE_TYPE (op));
4082 return true;
4083 }
4084
4085 return verify_types_in_gimple_reference (op, true);
4086 }
4087
4088 /* tcc_reference */
4089 case INDIRECT_REF:
4090 error ("INDIRECT_REF in gimple IL");
4091 return true;
4092
4093 case COMPONENT_REF:
4094 case BIT_FIELD_REF:
4095 case ARRAY_REF:
4096 case ARRAY_RANGE_REF:
4097 case VIEW_CONVERT_EXPR:
4098 case REALPART_EXPR:
4099 case IMAGPART_EXPR:
4100 case TARGET_MEM_REF:
4101 case MEM_REF:
4102 if (!is_gimple_reg (lhs)
4103 && is_gimple_reg_type (TREE_TYPE (lhs)))
4104 {
4105 error ("invalid rhs for gimple memory store");
4106 debug_generic_stmt (lhs);
4107 debug_generic_stmt (rhs1);
4108 return true;
4109 }
4110 return res || verify_types_in_gimple_reference (rhs1, false);
4111
4112 /* tcc_constant */
4113 case SSA_NAME:
4114 case INTEGER_CST:
4115 case REAL_CST:
4116 case FIXED_CST:
4117 case COMPLEX_CST:
4118 case VECTOR_CST:
4119 case STRING_CST:
4120 return res;
4121
4122 /* tcc_declaration */
4123 case CONST_DECL:
4124 return res;
4125 case VAR_DECL:
4126 case PARM_DECL:
4127 if (!is_gimple_reg (lhs)
4128 && !is_gimple_reg (rhs1)
4129 && is_gimple_reg_type (TREE_TYPE (lhs)))
4130 {
4131 error ("invalid rhs for gimple memory store");
4132 debug_generic_stmt (lhs);
4133 debug_generic_stmt (rhs1);
4134 return true;
4135 }
4136 return res;
4137
4138 case CONSTRUCTOR:
4139 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4140 {
4141 unsigned int i;
4142 tree elt_i, elt_v, elt_t = NULL_TREE;
4143
4144 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4145 return res;
4146 /* For vector CONSTRUCTORs we require that either it is an empty
4147 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4148 (then the element count must be correct to cover the whole
4149 outer vector and the index must be NULL on all elements), or it is
4150 a CONSTRUCTOR of scalar elements, where as an exception we allow
4151 a smaller number of elements (assuming zero filling) and
4152 consecutive indexes as an alternative to NULL indexes (such
4153 CONSTRUCTORs can appear in the IL from FEs). */
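/* A sketch of the accepted forms for a four-lane vector destination
   (illustrative only; names are hypothetical):

     v4si_var = {};                    <-- empty CONSTRUCTOR
     v4si_var = {a_1, b_2, c_3, d_4};  <-- scalars, NULL indexes
     v4si_var = {a_1, b_2};            <-- fewer scalars, zero filled
     v4si_var = {v2si_1, v2si_2};      <-- smaller vectors covering
                                           all four lanes. */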
4154 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4155 {
4156 if (elt_t == NULL_TREE)
4157 {
4158 elt_t = TREE_TYPE (elt_v);
4159 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4160 {
4161 /* ELT_T is itself a vector type here. */
4162 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4163 TREE_TYPE (elt_t)))
4164 {
4165 error ("incorrect type of vector CONSTRUCTOR"
4166 " elements");
4167 debug_generic_stmt (rhs1);
4168 return true;
4169 }
4170 else if (CONSTRUCTOR_NELTS (rhs1)
4171 * TYPE_VECTOR_SUBPARTS (elt_t)
4172 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4173 {
4174 error ("incorrect number of vector CONSTRUCTOR"
4175 " elements");
4176 debug_generic_stmt (rhs1);
4177 return true;
4178 }
4179 }
4180 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4181 elt_t))
4182 {
4183 error ("incorrect type of vector CONSTRUCTOR elements");
4184 debug_generic_stmt (rhs1);
4185 return true;
4186 }
4187 else if (CONSTRUCTOR_NELTS (rhs1)
4188 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4189 {
4190 error ("incorrect number of vector CONSTRUCTOR elements");
4191 debug_generic_stmt (rhs1);
4192 return true;
4193 }
4194 }
4195 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4196 {
4197 error ("incorrect type of vector CONSTRUCTOR elements");
4198 debug_generic_stmt (rhs1);
4199 return true;
4200 }
4201 if (elt_i != NULL_TREE
4202 && (TREE_CODE (elt_t) == VECTOR_TYPE
4203 || TREE_CODE (elt_i) != INTEGER_CST
4204 || compare_tree_int (elt_i, i) != 0))
4205 {
4206 error ("vector CONSTRUCTOR with non-NULL element index");
4207 debug_generic_stmt (rhs1);
4208 return true;
4209 }
4210 }
4211 }
4212 return res;
4213 case OBJ_TYPE_REF:
4214 case ASSERT_EXPR:
4215 case WITH_SIZE_EXPR:
4216 /* FIXME. */
4217 return res;
4218
4219 default:;
4220 }
4221
4222 return res;
4223 }
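
/* Illustration (not from the original sources): the "invalid rhs for
   gimple memory store" checks above require a memory store of
   register type to take its value from a register, so

     *p_1 = s_var.field;         <-- rejected: memory-to-memory form

   must be split by the gimplifier into

     tmp_2 = s_var.field;
     *p_1 = tmp_2;               <-- accepted

   where tmp_2 is an SSA register (names hypothetical). */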
4224
4225 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4226 is a problem, otherwise false. */
4227
4228 static bool
4229 verify_gimple_assign (gimple stmt)
4230 {
4231 switch (gimple_assign_rhs_class (stmt))
4232 {
4233 case GIMPLE_SINGLE_RHS:
4234 return verify_gimple_assign_single (stmt);
4235
4236 case GIMPLE_UNARY_RHS:
4237 return verify_gimple_assign_unary (stmt);
4238
4239 case GIMPLE_BINARY_RHS:
4240 return verify_gimple_assign_binary (stmt);
4241
4242 case GIMPLE_TERNARY_RHS:
4243 return verify_gimple_assign_ternary (stmt);
4244
4245 default:
4246 gcc_unreachable ();
4247 }
4248 }
4249
4250 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4251 is a problem, otherwise false. */
4252
4253 static bool
4254 verify_gimple_return (gimple stmt)
4255 {
4256 tree op = gimple_return_retval (stmt);
4257 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4258
4259 /* We cannot test for present return values as we do not fix up missing
4260 return values from the original source. */
4261 if (op == NULL)
4262 return false;
4263
4264 if (!is_gimple_val (op)
4265 && TREE_CODE (op) != RESULT_DECL)
4266 {
4267 error ("invalid operand in return statement");
4268 debug_generic_stmt (op);
4269 return true;
4270 }
4271
4272 if ((TREE_CODE (op) == RESULT_DECL
4273 && DECL_BY_REFERENCE (op))
4274 || (TREE_CODE (op) == SSA_NAME
4275 && SSA_NAME_VAR (op)
4276 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4277 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4278 op = TREE_TYPE (op);
4279
4280 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4281 {
4282 error ("invalid conversion in return statement");
4283 debug_generic_stmt (restype);
4284 debug_generic_stmt (TREE_TYPE (op));
4285 return true;
4286 }
4287
4288 return false;
4289 }
4290
4291
4292 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4293 is a problem, otherwise false. */
4294
4295 static bool
4296 verify_gimple_goto (gimple stmt)
4297 {
4298 tree dest = gimple_goto_dest (stmt);
4299
4300 /* ??? We have two canonical forms of direct goto destinations, a
4301 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4302 if (TREE_CODE (dest) != LABEL_DECL
4303 && (!is_gimple_val (dest)
4304 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4305 {
4306 error ("goto destination is neither a label nor a pointer");
4307 return true;
4308 }
4309
4310 return false;
4311 }
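
/* For illustration: both canonical destination forms pass the check
   above,

     goto lab;        <-- DEST is a bare LABEL_DECL
     goto *ptr_1;     <-- DEST is a pointer-valued gimple value

   while a non-pointer, non-label destination is rejected. */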
4312
4313 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4314 is a problem, otherwise false. */
4315
4316 static bool
4317 verify_gimple_switch (gimple stmt)
4318 {
4319 unsigned int i, n;
4320 tree elt, prev_upper_bound = NULL_TREE;
4321 tree index_type, elt_type = NULL_TREE;
4322
4323 if (!is_gimple_val (gimple_switch_index (stmt)))
4324 {
4325 error ("invalid operand to switch statement");
4326 debug_generic_stmt (gimple_switch_index (stmt));
4327 return true;
4328 }
4329
4330 index_type = TREE_TYPE (gimple_switch_index (stmt));
4331 if (! INTEGRAL_TYPE_P (index_type))
4332 {
4333 error ("non-integral type switch statement");
4334 debug_generic_expr (index_type);
4335 return true;
4336 }
4337
4338 elt = gimple_switch_label (stmt, 0);
4339 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4340 {
4341 error ("invalid default case label in switch statement");
4342 debug_generic_expr (elt);
4343 return true;
4344 }
4345
4346 n = gimple_switch_num_labels (stmt);
4347 for (i = 1; i < n; i++)
4348 {
4349 elt = gimple_switch_label (stmt, i);
4350
4351 if (! CASE_LOW (elt))
4352 {
4353 error ("invalid case label in switch statement");
4354 debug_generic_expr (elt);
4355 return true;
4356 }
4357 if (CASE_HIGH (elt)
4358 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4359 {
4360 error ("invalid case range in switch statement");
4361 debug_generic_expr (elt);
4362 return true;
4363 }
4364
4365 if (elt_type)
4366 {
4367 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4368 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4369 {
4370 error ("type mismatch for case label in switch statement");
4371 debug_generic_expr (elt);
4372 return true;
4373 }
4374 }
4375 else
4376 {
4377 elt_type = TREE_TYPE (CASE_LOW (elt));
4378 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4379 {
4380 error ("type precision mismatch in switch statement");
4381 return true;
4382 }
4383 }
4384
4385 if (prev_upper_bound)
4386 {
4387 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4388 {
4389 error ("case labels not sorted in switch statement");
4390 return true;
4391 }
4392 }
4393
4394 prev_upper_bound = CASE_HIGH (elt);
4395 if (! prev_upper_bound)
4396 prev_upper_bound = CASE_LOW (elt);
4397 }
4398
4399 return false;
4400 }
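
/* A case vector the checks above accept looks like (illustrative):

     switch (i_2) <default: L0, case 1: L1, case 3 ... 7: L2>

   i.e. the default label comes first with no CASE_LOW or CASE_HIGH,
   every other label has a CASE_LOW, ranges satisfy
   CASE_LOW < CASE_HIGH, all labels share one type, and labels are
   sorted with no overlap. */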
4401
4402 /* Verify a gimple debug statement STMT.
4403 Returns true if anything is wrong. */
4404
4405 static bool
4406 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4407 {
4408 /* There isn't much that could be wrong in a gimple debug stmt. A
4409 gimple debug bind stmt, for example, maps a tree (usually a
4410 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4411 member of an aggregate type) to another tree that can be an
4412 arbitrary expression. These stmts expand into debug
4413 insns, and are converted to debug notes by var-tracking.c. */
4414 return false;
4415 }
4416
4417 /* Verify a gimple label statement STMT.
4418 Returns true if anything is wrong. */
4419
4420 static bool
4421 verify_gimple_label (gimple stmt)
4422 {
4423 tree decl = gimple_label_label (stmt);
4424 int uid;
4425 bool err = false;
4426
4427 if (TREE_CODE (decl) != LABEL_DECL)
4428 return true;
4429 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4430 && DECL_CONTEXT (decl) != current_function_decl)
4431 {
4432 error ("label's context is not the current function decl");
4433 err |= true;
4434 }
4435
4436 uid = LABEL_DECL_UID (decl);
4437 if (cfun->cfg
4438 && (uid == -1
4439 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4440 {
4441 error ("incorrect entry in label_to_block_map");
4442 err |= true;
4443 }
4444
4445 uid = EH_LANDING_PAD_NR (decl);
4446 if (uid)
4447 {
4448 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4449 if (decl != lp->post_landing_pad)
4450 {
4451 error ("incorrect setting of landing pad number");
4452 err |= true;
4453 }
4454 }
4455
4456 return err;
4457 }
4458
4459 /* Verify the GIMPLE statement STMT. Returns true if there is an
4460 error, otherwise false. */
4461
4462 static bool
4463 verify_gimple_stmt (gimple stmt)
4464 {
4465 switch (gimple_code (stmt))
4466 {
4467 case GIMPLE_ASSIGN:
4468 return verify_gimple_assign (stmt);
4469
4470 case GIMPLE_LABEL:
4471 return verify_gimple_label (stmt);
4472
4473 case GIMPLE_CALL:
4474 return verify_gimple_call (stmt);
4475
4476 case GIMPLE_COND:
4477 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4478 {
4479 error ("invalid comparison code in gimple cond");
4480 return true;
4481 }
4482 if (!(!gimple_cond_true_label (stmt)
4483 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4484 || !(!gimple_cond_false_label (stmt)
4485 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4486 {
4487 error ("invalid labels in gimple cond");
4488 return true;
4489 }
4490
4491 return verify_gimple_comparison (boolean_type_node,
4492 gimple_cond_lhs (stmt),
4493 gimple_cond_rhs (stmt));
4494
4495 case GIMPLE_GOTO:
4496 return verify_gimple_goto (stmt);
4497
4498 case GIMPLE_SWITCH:
4499 return verify_gimple_switch (stmt);
4500
4501 case GIMPLE_RETURN:
4502 return verify_gimple_return (stmt);
4503
4504 case GIMPLE_ASM:
4505 return false;
4506
4507 case GIMPLE_TRANSACTION:
4508 return verify_gimple_transaction (stmt);
4509
4510 /* Tuples that do not have tree operands. */
4511 case GIMPLE_NOP:
4512 case GIMPLE_PREDICT:
4513 case GIMPLE_RESX:
4514 case GIMPLE_EH_DISPATCH:
4515 case GIMPLE_EH_MUST_NOT_THROW:
4516 return false;
4517
4518 CASE_GIMPLE_OMP:
4519 /* OpenMP directives are validated by the FE and never operated
4520 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4521 non-gimple expressions when the main index variable has had
4522 its address taken. This does not affect the loop itself
4523 because the header of a GIMPLE_OMP_FOR is merely used to determine
4524 how to set up the parallel iteration. */
4525 return false;
4526
4527 case GIMPLE_DEBUG:
4528 return verify_gimple_debug (stmt);
4529
4530 default:
4531 gcc_unreachable ();
4532 }
4533 }
4534
4535 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4536 and false otherwise. */
4537
4538 static bool
4539 verify_gimple_phi (gimple phi)
4540 {
4541 bool err = false;
4542 unsigned i;
4543 tree phi_result = gimple_phi_result (phi);
4544 bool virtual_p;
4545
4546 if (!phi_result)
4547 {
4548 error ("invalid PHI result");
4549 return true;
4550 }
4551
4552 virtual_p = virtual_operand_p (phi_result);
4553 if (TREE_CODE (phi_result) != SSA_NAME
4554 || (virtual_p
4555 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4556 {
4557 error ("invalid PHI result");
4558 err = true;
4559 }
4560
4561 for (i = 0; i < gimple_phi_num_args (phi); i++)
4562 {
4563 tree t = gimple_phi_arg_def (phi, i);
4564
4565 if (!t)
4566 {
4567 error ("missing PHI def");
4568 err |= true;
4569 continue;
4570 }
4571 /* Addressable variables do have SSA_NAMEs but they
4572 are not considered gimple values. */
4573 else if ((TREE_CODE (t) == SSA_NAME
4574 && virtual_p != virtual_operand_p (t))
4575 || (virtual_p
4576 && (TREE_CODE (t) != SSA_NAME
4577 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4578 || (!virtual_p
4579 && !is_gimple_val (t)))
4580 {
4581 error ("invalid PHI argument");
4582 debug_generic_expr (t);
4583 err |= true;
4584 }
4585 #ifdef ENABLE_TYPES_CHECKING
4586 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4587 {
4588 error ("incompatible types in PHI argument %u", i);
4589 debug_generic_stmt (TREE_TYPE (phi_result));
4590 debug_generic_stmt (TREE_TYPE (t));
4591 err |= true;
4592 }
4593 #endif
4594 }
4595
4596 return err;
4597 }
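
/* Illustrative GIMPLE (hypothetical names): the checks above accept

     # x_3 = PHI <x_1(2), 5(3)>

   where every argument is a gimple value compatible with x_3, and
   reject a PHI that mixes a virtual result with non-virtual
   arguments, or vice versa. */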
4598
4599 /* Verify the GIMPLE statements inside the sequence STMTS. */
4600
4601 static bool
4602 verify_gimple_in_seq_2 (gimple_seq stmts)
4603 {
4604 gimple_stmt_iterator ittr;
4605 bool err = false;
4606
4607 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4608 {
4609 gimple stmt = gsi_stmt (ittr);
4610
4611 switch (gimple_code (stmt))
4612 {
4613 case GIMPLE_BIND:
4614 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4615 break;
4616
4617 case GIMPLE_TRY:
4618 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4619 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4620 break;
4621
4622 case GIMPLE_EH_FILTER:
4623 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4624 break;
4625
4626 case GIMPLE_EH_ELSE:
4627 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4628 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4629 break;
4630
4631 case GIMPLE_CATCH:
4632 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4633 break;
4634
4635 case GIMPLE_TRANSACTION:
4636 err |= verify_gimple_transaction (stmt);
4637 break;
4638
4639 default:
4640 {
4641 bool err2 = verify_gimple_stmt (stmt);
4642 if (err2)
4643 debug_gimple_stmt (stmt);
4644 err |= err2;
4645 }
4646 }
4647 }
4648
4649 return err;
4650 }
4651
4652 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4653 is a problem, otherwise false. */
4654
4655 static bool
4656 verify_gimple_transaction (gimple stmt)
4657 {
4658 tree lab = gimple_transaction_label (stmt);
4659 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4660 return true;
4661 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4662 }
4663
4664
4665 /* Verify the GIMPLE statements inside the statement list STMTS. */
4666
4667 DEBUG_FUNCTION void
4668 verify_gimple_in_seq (gimple_seq stmts)
4669 {
4670 timevar_push (TV_TREE_STMT_VERIFY);
4671 if (verify_gimple_in_seq_2 (stmts))
4672 internal_error ("verify_gimple failed");
4673 timevar_pop (TV_TREE_STMT_VERIFY);
4674 }
4675
4676 /* Return true when T can be shared. */
4677
4678 static bool
4679 tree_node_can_be_shared (tree t)
4680 {
4681 if (IS_TYPE_OR_DECL_P (t)
4682 || is_gimple_min_invariant (t)
4683 || TREE_CODE (t) == SSA_NAME
4684 || t == error_mark_node
4685 || TREE_CODE (t) == IDENTIFIER_NODE)
4686 return true;
4687
4688 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4689 return true;
4690
4691 if (DECL_P (t))
4692 return true;
4693
4694 return false;
4695 }
4696
4697 /* Called via walk_tree. Verify tree sharing. */
4698
4699 static tree
4700 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4701 {
4702 hash_set<void *> *visited = (hash_set<void *> *) data;
4703
4704 if (tree_node_can_be_shared (*tp))
4705 {
4706 *walk_subtrees = false;
4707 return NULL;
4708 }
4709
4710 if (visited->add (*tp))
4711 return *tp;
4712
4713 return NULL;
4714 }
4715
4716 /* Called via walk_gimple_stmt. Verify tree sharing. */
4717
4718 static tree
4719 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4720 {
4721 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4722 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4723 }
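
/* A minimal usage sketch, mirroring the call sites further below:

     hash_set<void *> visited;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = (void *) &visited;
     tree addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
     if (addr)
       error ("incorrect sharing of tree nodes");

   ADDR is the first tree node reached twice, if any. */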
4724
4725 static bool eh_error_found;
4726 bool
4727 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4728 hash_set<gimple> *visited)
4729 {
4730 if (!visited->contains (stmt))
4731 {
4732 error ("dead STMT in EH table");
4733 debug_gimple_stmt (stmt);
4734 eh_error_found = true;
4735 }
4736 return true;
4737 }
4738
4739 /* Verify that the block of location LOC, if any, is in the block tree BLOCKS. */
4740
4741 static bool
4742 verify_location (hash_set<tree> *blocks, location_t loc)
4743 {
4744 tree block = LOCATION_BLOCK (loc);
4745 if (block != NULL_TREE
4746 && !blocks->contains (block))
4747 {
4748 error ("location references block not in block tree");
4749 return true;
4750 }
4751 if (block != NULL_TREE)
4752 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4753 return false;
4754 }
4755
4756 /* Called via walk_tree. Verify that expressions have no blocks. */
4757
4758 static tree
4759 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4760 {
4761 if (!EXPR_P (*tp))
4762 {
4763 *walk_subtrees = false;
4764 return NULL;
4765 }
4766
4767 location_t loc = EXPR_LOCATION (*tp);
4768 if (LOCATION_BLOCK (loc) != NULL)
4769 return *tp;
4770
4771 return NULL;
4772 }
4773
4774 /* Called via walk_tree. Verify locations of expressions. */
4775
4776 static tree
4777 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4778 {
4779 hash_set<tree> *blocks = (hash_set<tree> *) data;
4780
4781 if (TREE_CODE (*tp) == VAR_DECL
4782 && DECL_HAS_DEBUG_EXPR_P (*tp))
4783 {
4784 tree t = DECL_DEBUG_EXPR (*tp);
4785 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4786 if (addr)
4787 return addr;
4788 }
4789 if ((TREE_CODE (*tp) == VAR_DECL
4790 || TREE_CODE (*tp) == PARM_DECL
4791 || TREE_CODE (*tp) == RESULT_DECL)
4792 && DECL_HAS_VALUE_EXPR_P (*tp))
4793 {
4794 tree t = DECL_VALUE_EXPR (*tp);
4795 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4796 if (addr)
4797 return addr;
4798 }
4799
4800 if (!EXPR_P (*tp))
4801 {
4802 *walk_subtrees = false;
4803 return NULL;
4804 }
4805
4806 location_t loc = EXPR_LOCATION (*tp);
4807 if (verify_location (blocks, loc))
4808 return *tp;
4809
4810 return NULL;
4811 }
4812
4813 /* Called via walk_gimple_op. Verify locations of expressions. */
4814
4815 static tree
4816 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4817 {
4818 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4819 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4820 }
4821
4822 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4823
4824 static void
4825 collect_subblocks (hash_set<tree> *blocks, tree block)
4826 {
4827 tree t;
4828 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4829 {
4830 blocks->add (t);
4831 collect_subblocks (blocks, t);
4832 }
4833 }
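
/* Usage sketch, mirroring verify_gimple_in_cfg below: collect the
   whole block tree rooted at the function's DECL_INITIAL so that
   verify_location can test membership:

     hash_set<tree> blocks;
     if (DECL_INITIAL (fn->decl))
       {
         blocks.add (DECL_INITIAL (fn->decl));
         collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
       }
*/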
4834
4835 /* Verify the GIMPLE statements in the CFG of FN. */
4836
4837 DEBUG_FUNCTION void
4838 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4839 {
4840 basic_block bb;
4841 bool err = false;
4842
4843 timevar_push (TV_TREE_STMT_VERIFY);
4844 hash_set<void *> visited;
4845 hash_set<gimple> visited_stmts;
4846
4847 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4848 hash_set<tree> blocks;
4849 if (DECL_INITIAL (fn->decl))
4850 {
4851 blocks.add (DECL_INITIAL (fn->decl));
4852 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4853 }
4854
4855 FOR_EACH_BB_FN (bb, fn)
4856 {
4857 gimple_stmt_iterator gsi;
4858
4859 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4860 {
4861 gimple phi = gsi_stmt (gsi);
4862 bool err2 = false;
4863 unsigned i;
4864
4865 visited_stmts.add (phi);
4866
4867 if (gimple_bb (phi) != bb)
4868 {
4869 error ("gimple_bb (phi) is set to a wrong basic block");
4870 err2 = true;
4871 }
4872
4873 err2 |= verify_gimple_phi (phi);
4874
4875 /* Only PHI arguments have locations. */
4876 if (gimple_location (phi) != UNKNOWN_LOCATION)
4877 {
4878 error ("PHI node with location");
4879 err2 = true;
4880 }
4881
4882 for (i = 0; i < gimple_phi_num_args (phi); i++)
4883 {
4884 tree arg = gimple_phi_arg_def (phi, i);
4885 tree addr = walk_tree (&arg, verify_node_sharing_1,
4886 &visited, NULL);
4887 if (addr)
4888 {
4889 error ("incorrect sharing of tree nodes");
4890 debug_generic_expr (addr);
4891 err2 |= true;
4892 }
4893 location_t loc = gimple_phi_arg_location (phi, i);
4894 if (virtual_operand_p (gimple_phi_result (phi))
4895 && loc != UNKNOWN_LOCATION)
4896 {
4897 error ("virtual PHI with argument locations");
4898 err2 = true;
4899 }
4900 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4901 if (addr)
4902 {
4903 debug_generic_expr (addr);
4904 err2 = true;
4905 }
4906 err2 |= verify_location (&blocks, loc);
4907 }
4908
4909 if (err2)
4910 debug_gimple_stmt (phi);
4911 err |= err2;
4912 }
4913
4914 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4915 {
4916 gimple stmt = gsi_stmt (gsi);
4917 bool err2 = false;
4918 struct walk_stmt_info wi;
4919 tree addr;
4920 int lp_nr;
4921
4922 visited_stmts.add (stmt);
4923
4924 if (gimple_bb (stmt) != bb)
4925 {
4926 error ("gimple_bb (stmt) is set to a wrong basic block");
4927 err2 = true;
4928 }
4929
4930 err2 |= verify_gimple_stmt (stmt);
4931 err2 |= verify_location (&blocks, gimple_location (stmt));
4932
4933 memset (&wi, 0, sizeof (wi));
4934 wi.info = (void *) &visited;
4935 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4936 if (addr)
4937 {
4938 error ("incorrect sharing of tree nodes");
4939 debug_generic_expr (addr);
4940 err2 |= true;
4941 }
4942
4943 memset (&wi, 0, sizeof (wi));
4944 wi.info = (void *) &blocks;
4945 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4946 if (addr)
4947 {
4948 debug_generic_expr (addr);
4949 err2 |= true;
4950 }
4951
4952 /* ??? Instead of not checking these stmts at all the walker
4953 should know its context via wi. */
4954 if (!is_gimple_debug (stmt)
4955 && !is_gimple_omp (stmt))
4956 {
4957 memset (&wi, 0, sizeof (wi));
4958 addr = walk_gimple_op (stmt, verify_expr, &wi);
4959 if (addr)
4960 {
4961 debug_generic_expr (addr);
4962 inform (gimple_location (stmt), "in statement");
4963 err2 |= true;
4964 }
4965 }
4966
4967 /* If the statement is marked as part of an EH region, then it is
4968 expected that the statement could throw. Verify that when we
4969 have optimizations that simplify statements such that we prove
4970 that they cannot throw, that we update other data structures
4971 to match. */
4972 lp_nr = lookup_stmt_eh_lp (stmt);
4973 if (lp_nr > 0)
4974 {
4975 if (!stmt_could_throw_p (stmt))
4976 {
4977 if (verify_nothrow)
4978 {
4979 error ("statement marked for throw, but doesn%'t");
4980 err2 |= true;
4981 }
4982 }
4983 else if (!gsi_one_before_end_p (gsi))
4984 {
4985 error ("statement marked for throw in middle of block");
4986 err2 |= true;
4987 }
4988 }
4989
4990 if (err2)
4991 debug_gimple_stmt (stmt);
4992 err |= err2;
4993 }
4994 }
4995
4996 eh_error_found = false;
4997 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
4998 if (eh_table)
4999 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5000 (&visited_stmts);
5001
5002 if (err || eh_error_found)
5003 internal_error ("verify_gimple failed");
5004
5005 verify_histograms ();
5006 timevar_pop (TV_TREE_STMT_VERIFY);
5007 }
5008
5009
5010 /* Verifies that the flow information is OK. */
5011
5012 static int
5013 gimple_verify_flow_info (void)
5014 {
5015 int err = 0;
5016 basic_block bb;
5017 gimple_stmt_iterator gsi;
5018 gimple stmt;
5019 edge e;
5020 edge_iterator ei;
5021
5022 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5023 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5024 {
5025 error ("ENTRY_BLOCK has IL associated with it");
5026 err = 1;
5027 }
5028
5029 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5030 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5031 {
5032 error ("EXIT_BLOCK has IL associated with it");
5033 err = 1;
5034 }
5035
5036 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5037 if (e->flags & EDGE_FALLTHRU)
5038 {
5039 error ("fallthru to exit from bb %d", e->src->index);
5040 err = 1;
5041 }
5042
5043 FOR_EACH_BB_FN (bb, cfun)
5044 {
5045 bool found_ctrl_stmt = false;
5046
5047 stmt = NULL;
5048
5049 /* Skip the labels at the start of the basic block. */
5050 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5051 {
5052 tree label;
5053 gimple prev_stmt = stmt;
5054
5055 stmt = gsi_stmt (gsi);
5056
5057 if (gimple_code (stmt) != GIMPLE_LABEL)
5058 break;
5059
5060 label = gimple_label_label (stmt);
5061 if (prev_stmt && DECL_NONLOCAL (label))
5062 {
5063 error ("nonlocal label ");
5064 print_generic_expr (stderr, label, 0);
5065 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5066 bb->index);
5067 err = 1;
5068 }
5069
5070 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5071 {
5072 error ("EH landing pad label ");
5073 print_generic_expr (stderr, label, 0);
5074 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5075 bb->index);
5076 err = 1;
5077 }
5078
5079 if (label_to_block (label) != bb)
5080 {
5081 error ("label ");
5082 print_generic_expr (stderr, label, 0);
5083 fprintf (stderr, " to block does not match in bb %d",
5084 bb->index);
5085 err = 1;
5086 }
5087
5088 if (decl_function_context (label) != current_function_decl)
5089 {
5090 error ("label ");
5091 print_generic_expr (stderr, label, 0);
5092 fprintf (stderr, " has incorrect context in bb %d",
5093 bb->index);
5094 err = 1;
5095 }
5096 }
5097
5098 /* Verify that the body of basic block BB is free of control flow. */
5099 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5100 {
5101 gimple stmt = gsi_stmt (gsi);
5102
5103 if (found_ctrl_stmt)
5104 {
5105 error ("control flow in the middle of basic block %d",
5106 bb->index);
5107 err = 1;
5108 }
5109
5110 if (stmt_ends_bb_p (stmt))
5111 found_ctrl_stmt = true;
5112
5113 if (gimple_code (stmt) == GIMPLE_LABEL)
5114 {
5115 error ("label ");
5116 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5117 fprintf (stderr, " in the middle of basic block %d", bb->index);
5118 err = 1;
5119 }
5120 }
5121
5122 gsi = gsi_last_bb (bb);
5123 if (gsi_end_p (gsi))
5124 continue;
5125
5126 stmt = gsi_stmt (gsi);
5127
5128 if (gimple_code (stmt) == GIMPLE_LABEL)
5129 continue;
5130
5131 err |= verify_eh_edges (stmt);
5132
5133 if (is_ctrl_stmt (stmt))
5134 {
5135 FOR_EACH_EDGE (e, ei, bb->succs)
5136 if (e->flags & EDGE_FALLTHRU)
5137 {
5138 error ("fallthru edge after a control statement in bb %d",
5139 bb->index);
5140 err = 1;
5141 }
5142 }
5143
5144 if (gimple_code (stmt) != GIMPLE_COND)
5145 {
5146 /* Verify that there are no edges with EDGE_TRUE_VALUE or
5147 EDGE_FALSE_VALUE set after anything other than a GIMPLE_COND. */
5148 FOR_EACH_EDGE (e, ei, bb->succs)
5149 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5150 {
5151 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5152 bb->index);
5153 err = 1;
5154 }
5155 }
5156
5157 switch (gimple_code (stmt))
5158 {
5159 case GIMPLE_COND:
5160 {
5161 edge true_edge;
5162 edge false_edge;
5163
5164 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5165
5166 if (!true_edge
5167 || !false_edge
5168 || !(true_edge->flags & EDGE_TRUE_VALUE)
5169 || !(false_edge->flags & EDGE_FALSE_VALUE)
5170 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5171 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5172 || EDGE_COUNT (bb->succs) >= 3)
5173 {
5174 error ("wrong outgoing edge flags at end of bb %d",
5175 bb->index);
5176 err = 1;
5177 }
5178 }
5179 break;
5180
5181 case GIMPLE_GOTO:
5182 if (simple_goto_p (stmt))
5183 {
5184 error ("explicit goto at end of bb %d", bb->index);
5185 err = 1;
5186 }
5187 else
5188 {
5189 /* FIXME. We should double check that the labels in the
5190 destination blocks have their address taken. */
5191 FOR_EACH_EDGE (e, ei, bb->succs)
5192 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5193 | EDGE_FALSE_VALUE))
5194 || !(e->flags & EDGE_ABNORMAL))
5195 {
5196 error ("wrong outgoing edge flags at end of bb %d",
5197 bb->index);
5198 err = 1;
5199 }
5200 }
5201 break;
5202
5203 case GIMPLE_CALL:
5204 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5205 break;
5206 /* ... fallthru ... */
5207 case GIMPLE_RETURN:
5208 if (!single_succ_p (bb)
5209 || (single_succ_edge (bb)->flags
5210 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5211 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5212 {
5213 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5214 err = 1;
5215 }
5216 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5217 {
5218 error ("return edge does not point to exit in bb %d",
5219 bb->index);
5220 err = 1;
5221 }
5222 break;
5223
5224 case GIMPLE_SWITCH:
5225 {
5226 tree prev;
5227 edge e;
5228 size_t i, n;
5229
5230 n = gimple_switch_num_labels (stmt);
5231
5232 /* Mark all the destination basic blocks. */
5233 for (i = 0; i < n; ++i)
5234 {
5235 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5236 basic_block label_bb = label_to_block (lab);
5237 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5238 label_bb->aux = (void *)1;
5239 }
5240
5241 /* Verify that the case labels are sorted. */
5242 prev = gimple_switch_label (stmt, 0);
5243 for (i = 1; i < n; ++i)
5244 {
5245 tree c = gimple_switch_label (stmt, i);
5246 if (!CASE_LOW (c))
5247 {
5248 error ("found default case not at the start of "
5249 "case vector");
5250 err = 1;
5251 continue;
5252 }
5253 if (CASE_LOW (prev)
5254 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5255 {
5256 error ("case labels not sorted: ");
5257 print_generic_expr (stderr, prev, 0);
5258 fprintf (stderr, " is greater than ");
5259 print_generic_expr (stderr, c, 0);
5260 fprintf (stderr, " but comes before it.\n");
5261 err = 1;
5262 }
5263 prev = c;
5264 }
5265 /* VRP will remove the default case if it can prove it will
5266 never be executed. So do not verify there always exists
5267 a default case here. */
5268
5269 FOR_EACH_EDGE (e, ei, bb->succs)
5270 {
5271 if (!e->dest->aux)
5272 {
5273 error ("extra outgoing edge %d->%d",
5274 bb->index, e->dest->index);
5275 err = 1;
5276 }
5277
5278 e->dest->aux = (void *)2;
5279 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5280 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5281 {
5282 error ("wrong outgoing edge flags at end of bb %d",
5283 bb->index);
5284 err = 1;
5285 }
5286 }
5287
5288 /* Check that we have all of them. */
5289 for (i = 0; i < n; ++i)
5290 {
5291 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5292 basic_block label_bb = label_to_block (lab);
5293
5294 if (label_bb->aux != (void *)2)
5295 {
5296 error ("missing edge %i->%i", bb->index, label_bb->index);
5297 err = 1;
5298 }
5299 }
5300
5301 FOR_EACH_EDGE (e, ei, bb->succs)
5302 e->dest->aux = (void *)0;
5303 }
5304 break;
5305
5306 case GIMPLE_EH_DISPATCH:
5307 err |= verify_eh_dispatch_edge (stmt);
5308 break;
5309
5310 default:
5311 break;
5312 }
5313 }
5314
5315 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5316 verify_dominators (CDI_DOMINATORS);
5317
5318 return err;
5319 }
5320
5321
5322 /* Updates phi nodes after creating a forwarder block joined
5323 by edge FALLTHRU. */
5324
5325 static void
5326 gimple_make_forwarder_block (edge fallthru)
5327 {
5328 edge e;
5329 edge_iterator ei;
5330 basic_block dummy, bb;
5331 tree var;
5332 gimple_stmt_iterator gsi;
5333
5334 dummy = fallthru->src;
5335 bb = fallthru->dest;
5336
5337 if (single_pred_p (bb))
5338 return;
5339
5340 /* If we redirected a branch we must create new PHI nodes at the
5341 start of BB. */
5342 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5343 {
5344 gimple phi, new_phi;
5345
5346 phi = gsi_stmt (gsi);
5347 var = gimple_phi_result (phi);
5348 new_phi = create_phi_node (var, bb);
5349 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5350 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5351 UNKNOWN_LOCATION);
5352 }
5353
5354 /* Add the arguments we have stored on edges. */
5355 FOR_EACH_EDGE (e, ei, bb->preds)
5356 {
5357 if (e == fallthru)
5358 continue;
5359
5360 flush_pending_stmts (e);
5361 }
5362 }
5363
5364
5365 /* Return a non-special label at the head of basic block BB.
5366 Create one if it doesn't exist. */
5367
5368 tree
5369 gimple_block_label (basic_block bb)
5370 {
5371 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5372 bool first = true;
5373 tree label;
5374 gimple stmt;
5375
5376 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5377 {
5378 stmt = gsi_stmt (i);
5379 if (gimple_code (stmt) != GIMPLE_LABEL)
5380 break;
5381 label = gimple_label_label (stmt);
5382 if (!DECL_NONLOCAL (label))
5383 {
5384 if (!first)
5385 gsi_move_before (&i, &s);
5386 return label;
5387 }
5388 }
5389
5390 label = create_artificial_label (UNKNOWN_LOCATION);
5391 stmt = gimple_build_label (label);
5392 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5393 return label;
5394 }
5395
5396
5397 /* Attempt to perform edge redirection by replacing a possibly complex
5398 jump instruction by a goto or by removing the jump completely.
5399 This can apply only if all edges now point to the same block. The
5400 parameters and return values are equivalent to
5401 redirect_edge_and_branch. */
5402
5403 static edge
5404 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5405 {
5406 basic_block src = e->src;
5407 gimple_stmt_iterator i;
5408 gimple stmt;
5409
5410 /* We can replace or remove a complex jump only when we have exactly
5411 two edges. */
5412 if (EDGE_COUNT (src->succs) != 2
5413 /* Verify that all targets will be TARGET. Specifically, the
5414 edge that is not E must also go to TARGET. */
5415 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5416 return NULL;
5417
5418 i = gsi_last_bb (src);
5419 if (gsi_end_p (i))
5420 return NULL;
5421
5422 stmt = gsi_stmt (i);
5423
5424 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5425 {
5426 gsi_remove (&i, true);
5427 e = ssa_redirect_edge (e, target);
5428 e->flags = EDGE_FALLTHRU;
5429 return e;
5430 }
5431
5432 return NULL;
5433 }
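
/* Illustration (not from the original sources): when both outgoing
   edges of

     if (x_1 != 0) goto <bb 3>; else goto <bb 3>;

   already reach TARGET, the GIMPLE_COND is removed and the redirected
   edge becomes a plain fallthru edge to <bb 3>. */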
5434
5435
5436 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5437 edge representing the redirected branch. */
5438
5439 static edge
5440 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5441 {
5442 basic_block bb = e->src;
5443 gimple_stmt_iterator gsi;
5444 edge ret;
5445 gimple stmt;
5446
5447 if (e->flags & EDGE_ABNORMAL)
5448 return NULL;
5449
5450 if (e->dest == dest)
5451 return NULL;
5452
5453 if (e->flags & EDGE_EH)
5454 return redirect_eh_edge (e, dest);
5455
5456 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5457 {
5458 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5459 if (ret)
5460 return ret;
5461 }
5462
5463 gsi = gsi_last_bb (bb);
5464 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5465
5466 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5467 {
5468 case GIMPLE_COND:
5469 /* For COND_EXPR, we only need to redirect the edge. */
5470 break;
5471
5472 case GIMPLE_GOTO:
5473 /* No non-abnormal edges should lead from a non-simple goto, and
5474 simple ones should be represented implicitly. */
5475 gcc_unreachable ();
5476
5477 case GIMPLE_SWITCH:
5478 {
5479 tree label = gimple_block_label (dest);
5480 tree cases = get_cases_for_edge (e, stmt);
5481
5482 /* If we have a list of cases associated with E, then use it
5483 as it's a lot faster than walking the entire case vector. */
5484 if (cases)
5485 {
5486 edge e2 = find_edge (e->src, dest);
5487 tree last, first;
5488
5489 first = cases;
5490 while (cases)
5491 {
5492 last = cases;
5493 CASE_LABEL (cases) = label;
5494 cases = CASE_CHAIN (cases);
5495 }
5496
5497 /* If there was already an edge in the CFG, then we need
5498 to move all the cases associated with E to E2. */
5499 if (e2)
5500 {
5501 tree cases2 = get_cases_for_edge (e2, stmt);
5502
5503 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5504 CASE_CHAIN (cases2) = first;
5505 }
5506 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5507 }
5508 else
5509 {
5510 size_t i, n = gimple_switch_num_labels (stmt);
5511
5512 for (i = 0; i < n; i++)
5513 {
5514 tree elt = gimple_switch_label (stmt, i);
5515 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5516 CASE_LABEL (elt) = label;
5517 }
5518 }
5519 }
5520 break;
5521
5522 case GIMPLE_ASM:
5523 {
5524 int i, n = gimple_asm_nlabels (stmt);
5525 tree label = NULL;
5526
5527 for (i = 0; i < n; ++i)
5528 {
5529 tree cons = gimple_asm_label_op (stmt, i);
5530 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5531 {
5532 if (!label)
5533 label = gimple_block_label (dest);
5534 TREE_VALUE (cons) = label;
5535 }
5536 }
5537
5538 /* If we didn't find any label matching the former edge in the
5539 asm labels, we must be redirecting the fallthrough
5540 edge. */
5541 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5542 }
5543 break;
5544
5545 case GIMPLE_RETURN:
5546 gsi_remove (&gsi, true);
5547 e->flags |= EDGE_FALLTHRU;
5548 break;
5549
5550 case GIMPLE_OMP_RETURN:
5551 case GIMPLE_OMP_CONTINUE:
5552 case GIMPLE_OMP_SECTIONS_SWITCH:
5553 case GIMPLE_OMP_FOR:
5554 /* The edges from OMP constructs can be simply redirected. */
5555 break;
5556
5557 case GIMPLE_EH_DISPATCH:
5558 if (!(e->flags & EDGE_FALLTHRU))
5559 redirect_eh_dispatch_edge (stmt, e, dest);
5560 break;
5561
5562 case GIMPLE_TRANSACTION:
5563 /* The ABORT edge has a stored label associated with it, otherwise
5564 the edges are simply redirectable. */
5565 if (e->flags == 0)
5566 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5567 break;
5568
5569 default:
5570 /* Otherwise it must be a fallthru edge, and we don't need to
5571 do anything besides redirecting it. */
5572 gcc_assert (e->flags & EDGE_FALLTHRU);
5573 break;
5574 }
5575
5576 /* Update/insert PHI nodes as necessary. */
5577
5578 /* Now update the edges in the CFG. */
5579 e = ssa_redirect_edge (e, dest);
5580
5581 return e;
5582 }
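
/* For example (illustrative): redirecting a switch edge rewrites every
   CASE_LABEL that pointed at the old destination,

     switch (i_2) <case 3: Lold>   -->   switch (i_2) <case 3: Lnew>

   using the cached edge_to_cases chain when available instead of
   scanning the whole case vector. */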
5583
5584 /* Returns true if it is possible to remove edge E by redirecting
5585 it to the destination of the other edge from E->src. */
5586
5587 static bool
5588 gimple_can_remove_branch_p (const_edge e)
5589 {
5590 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5591 return false;
5592
5593 return true;
5594 }
5595
5596 /* Simple wrapper, as we can always redirect fallthru edges. */
5597
5598 static basic_block
5599 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5600 {
5601 e = gimple_redirect_edge_and_branch (e, dest);
5602 gcc_assert (e);
5603
5604 return NULL;
5605 }
5606
5607
5608 /* Splits basic block BB after statement STMT (but at least after the
5609 labels). If STMT is NULL, BB is split just after the labels. */
5610
5611 static basic_block
5612 gimple_split_block (basic_block bb, void *stmt)
5613 {
5614 gimple_stmt_iterator gsi;
5615 gimple_stmt_iterator gsi_tgt;
5616 gimple act;
5617 gimple_seq list;
5618 basic_block new_bb;
5619 edge e;
5620 edge_iterator ei;
5621
5622 new_bb = create_empty_bb (bb);
5623
5624 /* Redirect the outgoing edges. */
5625 new_bb->succs = bb->succs;
5626 bb->succs = NULL;
5627 FOR_EACH_EDGE (e, ei, new_bb->succs)
5628 e->src = new_bb;
5629
5630 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5631 stmt = NULL;
5632
5633 /* Move everything from GSI to the new basic block. */
5634 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5635 {
5636 act = gsi_stmt (gsi);
5637 if (gimple_code (act) == GIMPLE_LABEL)
5638 continue;
5639
5640 if (!stmt)
5641 break;
5642
5643 if (stmt == act)
5644 {
5645 gsi_next (&gsi);
5646 break;
5647 }
5648 }
5649
5650 if (gsi_end_p (gsi))
5651 return new_bb;
5652
5653 /* Split the statement list - avoid re-creating new containers as this
5654 brings ugly quadratic memory consumption in the inliner.
5655 (We are still quadratic since we need to update stmt BB pointers,
5656 sadly.) */
5657 gsi_split_seq_before (&gsi, &list);
5658 set_bb_seq (new_bb, list);
5659 for (gsi_tgt = gsi_start (list);
5660 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5661 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5662
5663 return new_bb;
5664 }
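
/* Illustration (hypothetical statements): splitting

     <bb 2>:  a_1 = f ();  b_2 = a_1 + 1;

   after "a_1 = f ()" keeps the call in bb 2 and moves "b_2 = a_1 + 1"
   into the new block, which also inherits bb 2's outgoing edges. */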
5665
5666
5667 /* Moves basic block BB after block AFTER. */
5668
5669 static bool
5670 gimple_move_block_after (basic_block bb, basic_block after)
5671 {
5672 if (bb->prev_bb == after)
5673 return true;
5674
5675 unlink_block (bb);
5676 link_block (bb, after);
5677
5678 return true;
5679 }
5680
5681
5682 /* Return TRUE if block BB has no executable statements, otherwise return
5683 FALSE. */
5684
5685 static bool
5686 gimple_empty_block_p (basic_block bb)
5687 {
5688 /* BB must have no executable statements. */
5689 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5690 if (phi_nodes (bb))
5691 return false;
5692 if (gsi_end_p (gsi))
5693 return true;
5694 if (is_gimple_debug (gsi_stmt (gsi)))
5695 gsi_next_nondebug (&gsi);
5696 return gsi_end_p (gsi);
5697 }
5698
5699
5700 /* Split a basic block if it ends with a conditional branch and if the
5701 other part of the block is not empty. */
5702
5703 static basic_block
5704 gimple_split_block_before_cond_jump (basic_block bb)
5705 {
5706 gimple last, split_point;
5707 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5708 if (gsi_end_p (gsi))
5709 return NULL;
5710 last = gsi_stmt (gsi);
5711 if (gimple_code (last) != GIMPLE_COND
5712 && gimple_code (last) != GIMPLE_SWITCH)
5713 return NULL;
5714 gsi_prev_nondebug (&gsi);
5715 split_point = gsi_stmt (gsi);
5716 return split_block (bb, split_point)->dest;
5717 }
5718
5719
5720 /* Return true if basic_block can be duplicated. */
5721
5722 static bool
5723 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5724 {
5725 return true;
5726 }
5727
5728 /* Create a duplicate of the basic block BB. NOTE: This does not
5729 preserve SSA form. */
5730
5731 static basic_block
5732 gimple_duplicate_bb (basic_block bb)
5733 {
5734 basic_block new_bb;
5735 gimple_stmt_iterator gsi, gsi_tgt;
5736 gimple_seq phis = phi_nodes (bb);
5737 gimple phi, stmt, copy;
5738
5739 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5740
5741 /* Copy the PHI nodes. We ignore PHI node arguments here because
5742 the incoming edges have not been setup yet. */
5743 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5744 {
5745 phi = gsi_stmt (gsi);
5746 copy = create_phi_node (NULL_TREE, new_bb);
5747 create_new_def_for (gimple_phi_result (phi), copy,
5748 gimple_phi_result_ptr (copy));
5749 gimple_set_uid (copy, gimple_uid (phi));
5750 }
5751
5752 gsi_tgt = gsi_start_bb (new_bb);
5753 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5754 {
5755 def_operand_p def_p;
5756 ssa_op_iter op_iter;
5757 tree lhs;
5758
5759 stmt = gsi_stmt (gsi);
5760 if (gimple_code (stmt) == GIMPLE_LABEL)
5761 continue;
5762
5763 /* Don't duplicate label debug stmts. */
5764 if (gimple_debug_bind_p (stmt)
5765 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5766 == LABEL_DECL)
5767 continue;
5768
5769 /* Create a new copy of STMT and duplicate STMT's virtual
5770 operands. */
5771 copy = gimple_copy (stmt);
5772 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5773
5774 maybe_duplicate_eh_stmt (copy, stmt);
5775 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5776
5777 /* When copying around a stmt writing into a local non-user
5778 aggregate, make sure it won't share stack slot with other
5779 vars. */
5780 lhs = gimple_get_lhs (stmt);
5781 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5782 {
5783 tree base = get_base_address (lhs);
5784 if (base
5785 && (TREE_CODE (base) == VAR_DECL
5786 || TREE_CODE (base) == RESULT_DECL)
5787 && DECL_IGNORED_P (base)
5788 && !TREE_STATIC (base)
5789 && !DECL_EXTERNAL (base)
5790 && (TREE_CODE (base) != VAR_DECL
5791 || !DECL_HAS_VALUE_EXPR_P (base)))
5792 DECL_NONSHAREABLE (base) = 1;
5793 }
5794
5795 /* Create new names for all the definitions created by COPY and
5796 add replacement mappings for each new name. */
5797 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5798 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5799 }
5800
5801 return new_bb;
5802 }
5803
5804 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5805
5806 static void
5807 add_phi_args_after_copy_edge (edge e_copy)
5808 {
5809 basic_block bb, bb_copy = e_copy->src, dest;
5810 edge e;
5811 edge_iterator ei;
5812 gimple phi, phi_copy;
5813 tree def;
5814 gimple_stmt_iterator psi, psi_copy;
5815
5816 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5817 return;
5818
5819 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5820
5821 if (e_copy->dest->flags & BB_DUPLICATED)
5822 dest = get_bb_original (e_copy->dest);
5823 else
5824 dest = e_copy->dest;
5825
5826 e = find_edge (bb, dest);
5827 if (!e)
5828 {
5829 /* During loop unrolling the target of the latch edge is copied.
5830 In this case we are not looking for the edge to DEST, but for the
5831 edge to the duplicated block whose original was DEST. */
5832 FOR_EACH_EDGE (e, ei, bb->succs)
5833 {
5834 if ((e->dest->flags & BB_DUPLICATED)
5835 && get_bb_original (e->dest) == dest)
5836 break;
5837 }
5838
5839 gcc_assert (e != NULL);
5840 }
5841
5842 for (psi = gsi_start_phis (e->dest),
5843 psi_copy = gsi_start_phis (e_copy->dest);
5844 !gsi_end_p (psi);
5845 gsi_next (&psi), gsi_next (&psi_copy))
5846 {
5847 phi = gsi_stmt (psi);
5848 phi_copy = gsi_stmt (psi_copy);
5849 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5850 add_phi_arg (phi_copy, def, e_copy,
5851 gimple_phi_arg_location_from_edge (phi, e));
5852 }
5853 }
5854
5855
5856 /* Basic block BB_COPY was created by code duplication. Add phi node
5857 arguments for edges going out of BB_COPY. The blocks that were
5858 duplicated have BB_DUPLICATED set. */
5859
5860 void
5861 add_phi_args_after_copy_bb (basic_block bb_copy)
5862 {
5863 edge e_copy;
5864 edge_iterator ei;
5865
5866 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5867 {
5868 add_phi_args_after_copy_edge (e_copy);
5869 }
5870 }
5871
5872 /* Blocks in REGION_COPY array of length N_REGION were created by
5873 duplication of basic blocks. Add phi node arguments for edges
5874 going from these blocks. If E_COPY is not NULL, also add
5875 phi node arguments for its destination. */
5876
5877 void
5878 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5879 edge e_copy)
5880 {
5881 unsigned i;
5882
5883 for (i = 0; i < n_region; i++)
5884 region_copy[i]->flags |= BB_DUPLICATED;
5885
5886 for (i = 0; i < n_region; i++)
5887 add_phi_args_after_copy_bb (region_copy[i]);
5888 if (e_copy)
5889 add_phi_args_after_copy_edge (e_copy);
5890
5891 for (i = 0; i < n_region; i++)
5892 region_copy[i]->flags &= ~BB_DUPLICATED;
5893 }
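
/* Usage sketch (hypothetical caller): after copy_bbs has filled
   REGION_COPY,

     add_phi_args_after_copy (region_copy, n_region, NULL);

   adds the PHI arguments on edges leaving the copies, relying on the
   temporarily set BB_DUPLICATED flag to map copies to originals. */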
5894
5895 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5896 important exit edge EXIT. By important we mean that no SSA name defined
5897 inside the region is live over the other exit edges of the region. All entry
5898 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5899 to the duplicate of the region. Dominance and loop information is
5900 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5901 UPDATE_DOMINANCE is false then we assume that the caller will update the
5902 dominance information after calling this function. The new basic
5903 blocks are stored to REGION_COPY in the same order as they had in REGION,
5904 provided that REGION_COPY is not NULL.
5905 The function returns false if it is unable to copy the region,
5906 true otherwise. */
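/* For the primary use, loop header copying, the effect is roughly
   (illustrative):

     while (cond) { body; }   -->   if (cond)
                                      do { body; } while (cond);

   i.e. a copy of the header guards entry and the original header
   becomes the latch test. */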
5907
5908 bool
5909 gimple_duplicate_sese_region (edge entry, edge exit,
5910 basic_block *region, unsigned n_region,
5911 basic_block *region_copy,
5912 bool update_dominance)
5913 {
5914 unsigned i;
5915 bool free_region_copy = false, copying_header = false;
5916 struct loop *loop = entry->dest->loop_father;
5917 edge exit_copy;
5918 vec<basic_block> doms;
5919 edge redirected;
5920 int total_freq = 0, entry_freq = 0;
5921 gcov_type total_count = 0, entry_count = 0;
5922
5923 if (!can_copy_bbs_p (region, n_region))
5924 return false;
5925
5926 /* Some sanity checking. Note that we do not check for all possible
5927 misuses of the functions. I.e. if you ask to copy something weird,
5928 it will work, but the state of the structures probably will not be
5929 correct. */
5930 for (i = 0; i < n_region; i++)
5931 {
5932 /* We do not handle subloops, i.e. all the blocks must belong to the
5933 same loop. */
5934 if (region[i]->loop_father != loop)
5935 return false;
5936
5937 if (region[i] != entry->dest
5938 && region[i] == loop->header)
5939 return false;
5940 }
5941
5942 /* In case the function is used for loop header copying (which is the primary
5943 use), ensure that EXIT and its copy will become the new latch and entry edges. */
5944 if (loop->header == entry->dest)
5945 {
5946 copying_header = true;
5947
5948 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5949 return false;
5950
5951 for (i = 0; i < n_region; i++)
5952 if (region[i] != exit->src
5953 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5954 return false;
5955 }
5956
5957 initialize_original_copy_tables ();
5958
5959 if (copying_header)
5960 set_loop_copy (loop, loop_outer (loop));
5961 else
5962 set_loop_copy (loop, loop);
5963
5964 if (!region_copy)
5965 {
5966 region_copy = XNEWVEC (basic_block, n_region);
5967 free_region_copy = true;
5968 }
5969
5970 /* Record blocks outside the region that are dominated by something
5971 inside. */
5972 if (update_dominance)
5973 {
5974 doms.create (0);
5975 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5976 }
5977
5978 if (entry->dest->count)
5979 {
5980 total_count = entry->dest->count;
5981 entry_count = entry->count;
5982 /* Fix up corner cases, to avoid division by zero or creation of negative
5983 frequencies. */
5984 if (entry_count > total_count)
5985 entry_count = total_count;
5986 }
5987 else
5988 {
5989 total_freq = entry->dest->frequency;
5990 entry_freq = EDGE_FREQUENCY (entry);
5991 /* Fix up corner cases, to avoid division by zero or creation of negative
5992 frequencies. */
5993 if (total_freq == 0)
5994 total_freq = 1;
5995 else if (entry_freq > total_freq)
5996 entry_freq = total_freq;
5997 }
5998
5999 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6000 split_edge_bb_loc (entry), update_dominance);
6001 if (total_count)
6002 {
6003 scale_bbs_frequencies_gcov_type (region, n_region,
6004 total_count - entry_count,
6005 total_count);
6006 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6007 total_count);
6008 }
6009 else
6010 {
6011 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6012 total_freq);
6013 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6014 }
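
/* Scaling sketch with illustrative numbers: if total_freq is 1000 and
   entry_freq is 300, the original blocks keep 700/1000 of their
   frequency and the copies receive 300/1000, so the combined profile
   stays consistent. */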
6015
6016 if (copying_header)
6017 {
6018 loop->header = exit->dest;
6019 loop->latch = exit->src;
6020 }
6021
6022 /* Redirect the entry and add the phi node arguments. */
6023 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6024 gcc_assert (redirected != NULL);
6025 flush_pending_stmts (entry);
6026
6027 /* Concerning updating of dominators: We must recount dominators
6028 for the entry block and its copy. Anything that is outside of the
6029 region, but was dominated by something inside needs recounting as
6030 well. */
6031 if (update_dominance)
6032 {
6033 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6034 doms.safe_push (get_bb_original (entry->dest));
6035 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6036 doms.release ();
6037 }
6038
6039 /* Add the other PHI node arguments. */
6040 add_phi_args_after_copy (region_copy, n_region, NULL);
6041
6042 if (free_region_copy)
6043 free (region_copy);
6044
6045 free_original_copy_tables ();
6046 return true;
6047 }
6048
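/* Editor's usage sketch (illustrative, not part of the original
   source).  Loop-header copying, the primary client, invokes this
   roughly as:

       edge entry = loop_preheader_edge (loop);
       edge exit = ...;    the exit edge that becomes the new latch edge
       if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
                                         copied_bbs, true))
         ...

   Passing NULL for REGION_COPY instead makes the function allocate
   (and free) the copy array itself.  */
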
6049 /* Checks if BB is part of the region defined by N_REGION BBS. */
6050 static bool
6051 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6052 {
6053 unsigned int n;
6054
6055 for (n = 0; n < n_region; n++)
6056 {
6057 if (bb == bbs[n])
6058 return true;
6059 }
6060 return false;
6061 }
6062
6063 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6064 are stored to REGION_COPY in the same order as they appear
6065 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6066 the region, EXIT an exit from it. The condition guarding EXIT
6067 is moved to ENTRY. Returns true if duplication succeeds, false
6068 otherwise.
6069
6070 For example,
6071
6072 some_code;
6073 if (cond)
6074 A;
6075 else
6076 B;
6077
6078 is transformed to
6079
6080 if (cond)
6081 {
6082 some_code;
6083 A;
6084 }
6085 else
6086 {
6087 some_code;
6088 B;
6089 }
6090 */
6091
6092 bool
6093 gimple_duplicate_sese_tail (edge entry, edge exit,
6094 basic_block *region, unsigned n_region,
6095 basic_block *region_copy)
6096 {
6097 unsigned i;
6098 bool free_region_copy = false;
6099 struct loop *loop = exit->dest->loop_father;
6100 struct loop *orig_loop = entry->dest->loop_father;
6101 basic_block switch_bb, entry_bb, nentry_bb;
6102 vec<basic_block> doms;
6103 int total_freq = 0, exit_freq = 0;
6104 gcov_type total_count = 0, exit_count = 0;
6105 edge exits[2], nexits[2], e;
6106 gimple_stmt_iterator gsi;
6107 gimple cond_stmt;
6108 edge sorig, snew;
6109 basic_block exit_bb;
6110 gimple_stmt_iterator psi;
6111 gimple phi;
6112 tree def;
6113 struct loop *target, *aloop, *cloop;
6114
6115 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6116 exits[0] = exit;
6117 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6118
6119 if (!can_copy_bbs_p (region, n_region))
6120 return false;
6121
6122 initialize_original_copy_tables ();
6123 set_loop_copy (orig_loop, loop);
6124
6125 target = loop;
6126 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6127 {
6128 if (bb_part_of_region_p (aloop->header, region, n_region))
6129 {
6130 cloop = duplicate_loop (aloop, target);
6131 duplicate_subloops (aloop, cloop);
6132 }
6133 }
6134
6135 if (!region_copy)
6136 {
6137 region_copy = XNEWVEC (basic_block, n_region);
6138 free_region_copy = true;
6139 }
6140
6141 gcc_assert (!need_ssa_update_p (cfun));
6142
6143 /* Record blocks outside the region that are dominated by something
6144 inside. */
6145 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6146
6147 if (exit->src->count)
6148 {
6149 total_count = exit->src->count;
6150 exit_count = exit->count;
6151 /* Fix up corner cases, to avoid division by zero or creation of negative
6152 frequencies. */
6153 if (exit_count > total_count)
6154 exit_count = total_count;
6155 }
6156 else
6157 {
6158 total_freq = exit->src->frequency;
6159 exit_freq = EDGE_FREQUENCY (exit);
6160 /* Fix up corner cases, to avoid division by zero or creation of negative
6161 frequencies. */
6162 if (total_freq == 0)
6163 total_freq = 1;
6164 if (exit_freq > total_freq)
6165 exit_freq = total_freq;
6166 }
6167
6168 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6169 split_edge_bb_loc (exit), true);
6170 if (total_count)
6171 {
6172 scale_bbs_frequencies_gcov_type (region, n_region,
6173 total_count - exit_count,
6174 total_count);
6175 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6176 total_count);
6177 }
6178 else
6179 {
6180 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6181 total_freq);
6182 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6183 }
6184
6185 /* Create the switch block, and put the exit condition to it. */
6186 entry_bb = entry->dest;
6187 nentry_bb = get_bb_copy (entry_bb);
6188 if (!last_stmt (entry->src)
6189 || !stmt_ends_bb_p (last_stmt (entry->src)))
6190 switch_bb = entry->src;
6191 else
6192 switch_bb = split_edge (entry);
6193 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6194
6195 gsi = gsi_last_bb (switch_bb);
6196 cond_stmt = last_stmt (exit->src);
6197 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6198 cond_stmt = gimple_copy (cond_stmt);
6199
6200 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6201
6202 sorig = single_succ_edge (switch_bb);
6203 sorig->flags = exits[1]->flags;
6204 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6205
6206 /* Register the new edge from SWITCH_BB in loop exit lists. */
6207 rescan_loop_exit (snew, true, false);
6208
6209 /* Add the PHI node arguments. */
6210 add_phi_args_after_copy (region_copy, n_region, snew);
6211
6212 /* Get rid of the now-superfluous conditions and associated edges (and phi node
6213 arguments). */
6214 exit_bb = exit->dest;
6215
6216 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6217 PENDING_STMT (e) = NULL;
6218
6219 /* The latch of ORIG_LOOP was copied, and so was the backedge
6220 to the original header. We redirect this backedge to EXIT_BB. */
6221 for (i = 0; i < n_region; i++)
6222 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6223 {
6224 gcc_assert (single_succ_edge (region_copy[i]));
6225 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6226 PENDING_STMT (e) = NULL;
6227 for (psi = gsi_start_phis (exit_bb);
6228 !gsi_end_p (psi);
6229 gsi_next (&psi))
6230 {
6231 phi = gsi_stmt (psi);
6232 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6233 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6234 }
6235 }
6236 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6237 PENDING_STMT (e) = NULL;
6238
6239 /* Anything that is outside of the region, but was dominated by something
6240 inside needs to update dominance info. */
6241 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6242 doms.release ();
6243 /* Update the SSA web. */
6244 update_ssa (TODO_update_ssa);
6245
6246 if (free_region_copy)
6247 free (region_copy);
6248
6249 free_original_copy_tables ();
6250 return true;
6251 }
6252
6253 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6254 adding blocks when the dominator traversal reaches EXIT. This
6255 function silently assumes that ENTRY strictly dominates EXIT. */
6256
6257 void
6258 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6259 vec<basic_block> *bbs_p)
6260 {
6261 basic_block son;
6262
6263 for (son = first_dom_son (CDI_DOMINATORS, entry);
6264 son;
6265 son = next_dom_son (CDI_DOMINATORS, son))
6266 {
6267 bbs_p->safe_push (son);
6268 if (son != exit)
6269 gather_blocks_in_sese_region (son, exit, bbs_p);
6270 }
6271 }
6272
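/* Editor's note: move_sese_region_to_fn below uses this to collect its
   region, seeding the vector with the entry block (which the dominator
   walk itself would not push):

       bbs.create (0);
       bbs.safe_push (entry_bb);
       gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
*/
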
6273 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6274 The duplicates are recorded in VARS_MAP. */
6275
6276 static void
6277 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6278 tree to_context)
6279 {
6280 tree t = *tp, new_t;
6281 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6282
6283 if (DECL_CONTEXT (t) == to_context)
6284 return;
6285
6286 bool existed;
6287 tree &loc = vars_map->get_or_insert (t, &existed);
6288
6289 if (!existed)
6290 {
6291 if (SSA_VAR_P (t))
6292 {
6293 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6294 add_local_decl (f, new_t);
6295 }
6296 else
6297 {
6298 gcc_assert (TREE_CODE (t) == CONST_DECL);
6299 new_t = copy_node (t);
6300 }
6301 DECL_CONTEXT (new_t) = to_context;
6302
6303 loc = new_t;
6304 }
6305 else
6306 new_t = loc;
6307
6308 *tp = new_t;
6309 }
6310
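/* Editor's sketch of the hash_map idiom used above (illustrative; the
   helper name build_replacement is a placeholder):

       bool existed;
       tree &slot = vars_map->get_or_insert (key, &existed);
       if (!existed)
         slot = build_replacement (key);

   get_or_insert returns a reference into the table, so assigning to
   LOC stores the new decl without a second lookup.  */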
6311
6312 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6313 VARS_MAP maps old ssa names and var_decls to the new ones. */
6314
6315 static tree
6316 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6317 tree to_context)
6318 {
6319 tree new_name;
6320
6321 gcc_assert (!virtual_operand_p (name));
6322
6323 tree *loc = vars_map->get (name);
6324
6325 if (!loc)
6326 {
6327 tree decl = SSA_NAME_VAR (name);
6328 if (decl)
6329 {
6330 replace_by_duplicate_decl (&decl, vars_map, to_context);
6331 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6332 decl, SSA_NAME_DEF_STMT (name));
6333 if (SSA_NAME_IS_DEFAULT_DEF (name))
6334 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6335 decl, new_name);
6336 }
6337 else
6338 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6339 name, SSA_NAME_DEF_STMT (name));
6340
6341 vars_map->put (name, new_name);
6342 }
6343 else
6344 new_name = *loc;
6345
6346 return new_name;
6347 }
6348
6349 struct move_stmt_d
6350 {
6351 tree orig_block;
6352 tree new_block;
6353 tree from_context;
6354 tree to_context;
6355 hash_map<tree, tree> *vars_map;
6356 htab_t new_label_map;
6357 hash_map<void *, void *> *eh_map;
6358 bool remap_decls_p;
6359 };
6360
6361 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6362 contained in *TP if it was previously ORIG_BLOCK and change the
6363 DECL_CONTEXT of every local variable referenced in *TP. */
6364
6365 static tree
6366 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6367 {
6368 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6369 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6370 tree t = *tp;
6371
6372 if (EXPR_P (t))
6373 {
6374 tree block = TREE_BLOCK (t);
6375 if (block == p->orig_block
6376 || (p->orig_block == NULL_TREE
6377 && block != NULL_TREE))
6378 TREE_SET_BLOCK (t, p->new_block);
6379 #ifdef ENABLE_CHECKING
6380 else if (block != NULL_TREE)
6381 {
6382 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6383 block = BLOCK_SUPERCONTEXT (block);
6384 gcc_assert (block == p->orig_block);
6385 }
6386 #endif
6387 }
6388 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6389 {
6390 if (TREE_CODE (t) == SSA_NAME)
6391 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6392 else if (TREE_CODE (t) == LABEL_DECL)
6393 {
6394 if (p->new_label_map)
6395 {
6396 struct tree_map in, *out;
6397 in.base.from = t;
6398 out = (struct tree_map *)
6399 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6400 if (out)
6401 *tp = t = out->to;
6402 }
6403
6404 DECL_CONTEXT (t) = p->to_context;
6405 }
6406 else if (p->remap_decls_p)
6407 {
6408 /* Replace T with its duplicate. T should no longer appear in the
6409 parent function, so this looks wasteful; however, it may appear
6410 in referenced_vars, and more importantly, as virtual operands of
6411 statements, and in alias lists of other variables. It would be
6412 quite difficult to expunge it from all those places. ??? It might
6413 suffice to do this for addressable variables. */
6414 if ((TREE_CODE (t) == VAR_DECL
6415 && !is_global_var (t))
6416 || TREE_CODE (t) == CONST_DECL)
6417 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6418 }
6419 *walk_subtrees = 0;
6420 }
6421 else if (TYPE_P (t))
6422 *walk_subtrees = 0;
6423
6424 return NULL_TREE;
6425 }
6426
6427 /* Helper for move_stmt_r. Given an EH region number for the source
6428 function, map that to the duplicate EH region number in the dest. */
6429
6430 static int
6431 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6432 {
6433 eh_region old_r, new_r;
6434
6435 old_r = get_eh_region_from_number (old_nr);
6436 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6437
6438 return new_r->index;
6439 }
6440
6441 /* Similar, but operate on INTEGER_CSTs. */
6442
6443 static tree
6444 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6445 {
6446 int old_nr, new_nr;
6447
6448 old_nr = tree_to_shwi (old_t_nr);
6449 new_nr = move_stmt_eh_region_nr (old_nr, p);
6450
6451 return build_int_cst (integer_type_node, new_nr);
6452 }
6453
6454 /* Like move_stmt_op, but for gimple statements.
6455
6456 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6457 contained in the current statement in *GSI_P and change the
6458 DECL_CONTEXT of every local variable referenced in the current
6459 statement. */
6460
6461 static tree
6462 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6463 struct walk_stmt_info *wi)
6464 {
6465 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6466 gimple stmt = gsi_stmt (*gsi_p);
6467 tree block = gimple_block (stmt);
6468
6469 if (block == p->orig_block
6470 || (p->orig_block == NULL_TREE
6471 && block != NULL_TREE))
6472 gimple_set_block (stmt, p->new_block);
6473
6474 switch (gimple_code (stmt))
6475 {
6476 case GIMPLE_CALL:
6477 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6478 {
6479 tree r, fndecl = gimple_call_fndecl (stmt);
6480 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6481 switch (DECL_FUNCTION_CODE (fndecl))
6482 {
6483 case BUILT_IN_EH_COPY_VALUES:
6484 r = gimple_call_arg (stmt, 1);
6485 r = move_stmt_eh_region_tree_nr (r, p);
6486 gimple_call_set_arg (stmt, 1, r);
6487 /* FALLTHRU */
6488
6489 case BUILT_IN_EH_POINTER:
6490 case BUILT_IN_EH_FILTER:
6491 r = gimple_call_arg (stmt, 0);
6492 r = move_stmt_eh_region_tree_nr (r, p);
6493 gimple_call_set_arg (stmt, 0, r);
6494 break;
6495
6496 default:
6497 break;
6498 }
6499 }
6500 break;
6501
6502 case GIMPLE_RESX:
6503 {
6504 int r = gimple_resx_region (stmt);
6505 r = move_stmt_eh_region_nr (r, p);
6506 gimple_resx_set_region (stmt, r);
6507 }
6508 break;
6509
6510 case GIMPLE_EH_DISPATCH:
6511 {
6512 int r = gimple_eh_dispatch_region (stmt);
6513 r = move_stmt_eh_region_nr (r, p);
6514 gimple_eh_dispatch_set_region (stmt, r);
6515 }
6516 break;
6517
6518 case GIMPLE_OMP_RETURN:
6519 case GIMPLE_OMP_CONTINUE:
6520 break;
6521 default:
6522 if (is_gimple_omp (stmt))
6523 {
6524 /* Do not remap variables inside OMP directives. Variables
6525 referenced in clauses and directive header belong to the
6526 parent function and should not be moved into the child
6527 function. */
6528 bool save_remap_decls_p = p->remap_decls_p;
6529 p->remap_decls_p = false;
6530 *handled_ops_p = true;
6531
6532 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6533 move_stmt_op, wi);
6534
6535 p->remap_decls_p = save_remap_decls_p;
6536 }
6537 break;
6538 }
6539
6540 return NULL_TREE;
6541 }
6542
6543 /* Move basic block BB from function CFUN to function DEST_FN. The
6544 block is moved out of the original linked list and placed after
6545 block AFTER in the new list. Also, the block is removed from the
6546 original array of blocks and placed in DEST_FN's array of blocks.
6547 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6548 updated to reflect the moved edges.
6549
6550 The local variables are remapped to new instances, VARS_MAP is used
6551 to record the mapping. */
6552
6553 static void
6554 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6555 basic_block after, bool update_edge_count_p,
6556 struct move_stmt_d *d)
6557 {
6558 struct control_flow_graph *cfg;
6559 edge_iterator ei;
6560 edge e;
6561 gimple_stmt_iterator si;
6562 unsigned old_len, new_len;
6563
6564 /* Remove BB from dominance structures. */
6565 delete_from_dominance_info (CDI_DOMINATORS, bb);
6566
6567 /* Move BB from its current loop to the copy in the new function. */
6568 if (current_loops)
6569 {
6570 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6571 if (new_loop)
6572 bb->loop_father = new_loop;
6573 }
6574
6575 /* Link BB to the new linked list. */
6576 move_block_after (bb, after);
6577
6578 /* Update the edge count in the corresponding flowgraphs. */
6579 if (update_edge_count_p)
6580 FOR_EACH_EDGE (e, ei, bb->succs)
6581 {
6582 cfun->cfg->x_n_edges--;
6583 dest_cfun->cfg->x_n_edges++;
6584 }
6585
6586 /* Remove BB from the original basic block array. */
6587 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6588 cfun->cfg->x_n_basic_blocks--;
6589
6590 /* Grow DEST_CFUN's basic block array if needed. */
6591 cfg = dest_cfun->cfg;
6592 cfg->x_n_basic_blocks++;
6593 if (bb->index >= cfg->x_last_basic_block)
6594 cfg->x_last_basic_block = bb->index + 1;
6595
6596 old_len = vec_safe_length (cfg->x_basic_block_info);
6597 if ((unsigned) cfg->x_last_basic_block >= old_len)
6598 {
6599 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6600 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6601 }
6602
6603 (*cfg->x_basic_block_info)[bb->index] = bb;
6604
6605 /* Remap the variables in phi nodes. */
6606 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6607 {
6608 gimple phi = gsi_stmt (si);
6609 use_operand_p use;
6610 tree op = PHI_RESULT (phi);
6611 ssa_op_iter oi;
6612 unsigned i;
6613
6614 if (virtual_operand_p (op))
6615 {
6616 /* Remove the phi nodes for virtual operands (alias analysis will be
6617 run for the new function, anyway). */
6618 remove_phi_node (&si, true);
6619 continue;
6620 }
6621
6622 SET_PHI_RESULT (phi,
6623 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6624 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6625 {
6626 op = USE_FROM_PTR (use);
6627 if (TREE_CODE (op) == SSA_NAME)
6628 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6629 }
6630
6631 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6632 {
6633 location_t locus = gimple_phi_arg_location (phi, i);
6634 tree block = LOCATION_BLOCK (locus);
6635
6636 if (locus == UNKNOWN_LOCATION)
6637 continue;
6638 if (d->orig_block == NULL_TREE || block == d->orig_block)
6639 {
6640 if (d->new_block == NULL_TREE)
6641 locus = LOCATION_LOCUS (locus);
6642 else
6643 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6644 gimple_phi_arg_set_location (phi, i, locus);
6645 }
6646 }
6647
6648 gsi_next (&si);
6649 }
6650
6651 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6652 {
6653 gimple stmt = gsi_stmt (si);
6654 struct walk_stmt_info wi;
6655
6656 memset (&wi, 0, sizeof (wi));
6657 wi.info = d;
6658 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6659
6660 if (gimple_code (stmt) == GIMPLE_LABEL)
6661 {
6662 tree label = gimple_label_label (stmt);
6663 int uid = LABEL_DECL_UID (label);
6664
6665 gcc_assert (uid > -1);
6666
6667 old_len = vec_safe_length (cfg->x_label_to_block_map);
6668 if (old_len <= (unsigned) uid)
6669 {
6670 new_len = 3 * uid / 2 + 1;
6671 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6672 }
6673
6674 (*cfg->x_label_to_block_map)[uid] = bb;
6675 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6676
6677 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6678
6679 if (uid >= dest_cfun->cfg->last_label_uid)
6680 dest_cfun->cfg->last_label_uid = uid + 1;
6681 }
6682
6683 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6684 remove_stmt_from_eh_lp_fn (cfun, stmt);
6685
6686 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6687 gimple_remove_stmt_histograms (cfun, stmt);
6688
6689 /* We cannot leave any operands allocated from the operand caches of
6690 the current function. */
6691 free_stmt_operands (cfun, stmt);
6692 push_cfun (dest_cfun);
6693 update_stmt (stmt);
6694 pop_cfun ();
6695 }
6696
6697 FOR_EACH_EDGE (e, ei, bb->succs)
6698 if (e->goto_locus != UNKNOWN_LOCATION)
6699 {
6700 tree block = LOCATION_BLOCK (e->goto_locus);
6701 if (d->orig_block == NULL_TREE
6702 || block == d->orig_block)
6703 e->goto_locus = d->new_block ?
6704 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6705 LOCATION_LOCUS (e->goto_locus);
6706 }
6707 }
6708
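/* Editor's note: the statement loop above must not leave operands
   allocated from the source function's operand caches, hence the
   pattern of freeing them and rebuilding under the destination CFUN:

       free_stmt_operands (cfun, stmt);
       push_cfun (dest_cfun);
       update_stmt (stmt);
       pop_cfun ();
*/
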
6709 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6710 the outermost EH region. Use REGION as the incoming base EH region. */
6711
6712 static eh_region
6713 find_outermost_region_in_block (struct function *src_cfun,
6714 basic_block bb, eh_region region)
6715 {
6716 gimple_stmt_iterator si;
6717
6718 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6719 {
6720 gimple stmt = gsi_stmt (si);
6721 eh_region stmt_region;
6722 int lp_nr;
6723
6724 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6725 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6726 if (stmt_region)
6727 {
6728 if (region == NULL)
6729 region = stmt_region;
6730 else if (stmt_region != region)
6731 {
6732 region = eh_region_outermost (src_cfun, stmt_region, region);
6733 gcc_assert (region != NULL);
6734 }
6735 }
6736 }
6737
6738 return region;
6739 }
6740
6741 static tree
6742 new_label_mapper (tree decl, void *data)
6743 {
6744 htab_t hash = (htab_t) data;
6745 struct tree_map *m;
6746 void **slot;
6747
6748 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6749
6750 m = XNEW (struct tree_map);
6751 m->hash = DECL_UID (decl);
6752 m->base.from = decl;
6753 m->to = create_artificial_label (UNKNOWN_LOCATION);
6754 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6755 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6756 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6757
6758 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6759 gcc_assert (*slot == NULL);
6760
6761 *slot = m;
6762
6763 return m->to;
6764 }
6765
6766 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6767 subblocks. */
6768
6769 static void
6770 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6771 tree to_context)
6772 {
6773 tree *tp, t;
6774
6775 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6776 {
6777 t = *tp;
6778 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6779 continue;
6780 replace_by_duplicate_decl (&t, vars_map, to_context);
6781 if (t != *tp)
6782 {
6783 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6784 {
6785 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6786 DECL_HAS_VALUE_EXPR_P (t) = 1;
6787 }
6788 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6789 *tp = t;
6790 }
6791 }
6792
6793 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6794 replace_block_vars_by_duplicates (block, vars_map, to_context);
6795 }
6796
6797 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6798 from FN1 to FN2. */
6799
6800 static void
6801 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6802 struct loop *loop)
6803 {
6804 /* Discard it from the old loop array. */
6805 (*get_loops (fn1))[loop->num] = NULL;
6806
6807 /* Place it in the new loop array, assigning it a new number. */
6808 loop->num = number_of_loops (fn2);
6809 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6810
6811 /* Recurse to children. */
6812 for (loop = loop->inner; loop; loop = loop->next)
6813 fixup_loop_arrays_after_move (fn1, fn2, loop);
6814 }
6815
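/* Editor's note (assumption): this keeps the invariant that a loop's
   NUM is its index in the owning function's loop array, i.e.
   (*get_loops (fn))[loop->num] == loop, for both FN1 (slot cleared)
   and FN2 (loop appended at index number_of_loops (fn2)).  */
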
6816 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6817 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6818 single basic block in the original CFG and the new basic block is
6819 returned. DEST_CFUN must not have a CFG yet.
6820
6821 Note that the region need not be a pure SESE region. Blocks inside
6822 the region may contain calls to abort/exit. The only restriction
6823 is that ENTRY_BB should be the only entry point and it must
6824 dominate EXIT_BB.
6825
6826 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6827 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6828 to the new function.
6829
6830 All local variables referenced in the region are assumed to be in
6831 the corresponding BLOCK_VARS and unexpanded variable lists
6832 associated with DEST_CFUN. */
6833
6834 basic_block
6835 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6836 basic_block exit_bb, tree orig_block)
6837 {
6838 vec<basic_block> bbs, dom_bbs;
6839 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6840 basic_block after, bb, *entry_pred, *exit_succ, abb;
6841 struct function *saved_cfun = cfun;
6842 int *entry_flag, *exit_flag;
6843 unsigned *entry_prob, *exit_prob;
6844 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6845 edge e;
6846 edge_iterator ei;
6847 htab_t new_label_map;
6848 hash_map<void *, void *> *eh_map;
6849 struct loop *loop = entry_bb->loop_father;
6850 struct loop *loop0 = get_loop (saved_cfun, 0);
6851 struct move_stmt_d d;
6852
6853 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6854 region. */
6855 gcc_assert (entry_bb != exit_bb
6856 && (!exit_bb
6857 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6858
6859 /* Collect all the blocks in the region. Manually add ENTRY_BB
6860 because it won't be added by dfs_enumerate_from. */
6861 bbs.create (0);
6862 bbs.safe_push (entry_bb);
6863 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6864
6865 /* The blocks that used to be dominated by something in BBS will now be
6866 dominated by the new block. */
6867 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6868 bbs.address (),
6869 bbs.length ());
6870
6871 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6872 the predecessor edges to ENTRY_BB and the successor edges to
6873 EXIT_BB so that we can re-attach them to the new basic block that
6874 will replace the region. */
6875 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6876 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6877 entry_flag = XNEWVEC (int, num_entry_edges);
6878 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6879 i = 0;
6880 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6881 {
6882 entry_prob[i] = e->probability;
6883 entry_flag[i] = e->flags;
6884 entry_pred[i++] = e->src;
6885 remove_edge (e);
6886 }
6887
6888 if (exit_bb)
6889 {
6890 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6891 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6892 exit_flag = XNEWVEC (int, num_exit_edges);
6893 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6894 i = 0;
6895 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6896 {
6897 exit_prob[i] = e->probability;
6898 exit_flag[i] = e->flags;
6899 exit_succ[i++] = e->dest;
6900 remove_edge (e);
6901 }
6902 }
6903 else
6904 {
6905 num_exit_edges = 0;
6906 exit_succ = NULL;
6907 exit_flag = NULL;
6908 exit_prob = NULL;
6909 }
6910
6911 /* Switch context to the child function to initialize DEST_FN's CFG. */
6912 gcc_assert (dest_cfun->cfg == NULL);
6913 push_cfun (dest_cfun);
6914
6915 init_empty_tree_cfg ();
6916
6917 /* Initialize EH information for the new function. */
6918 eh_map = NULL;
6919 new_label_map = NULL;
6920 if (saved_cfun->eh)
6921 {
6922 eh_region region = NULL;
6923
6924 FOR_EACH_VEC_ELT (bbs, i, bb)
6925 region = find_outermost_region_in_block (saved_cfun, bb, region);
6926
6927 init_eh_for_function ();
6928 if (region != NULL)
6929 {
6930 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6931 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6932 new_label_mapper, new_label_map);
6933 }
6934 }
6935
6936 /* Initialize an empty loop tree. */
6937 struct loops *loops = ggc_cleared_alloc<struct loops> ();
6938 init_loops_structure (dest_cfun, loops, 1);
6939 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6940 set_loops_for_fn (dest_cfun, loops);
6941
6942 /* Move the outlined loop tree part. */
6943 num_nodes = bbs.length ();
6944 FOR_EACH_VEC_ELT (bbs, i, bb)
6945 {
6946 if (bb->loop_father->header == bb)
6947 {
6948 struct loop *this_loop = bb->loop_father;
6949 struct loop *outer = loop_outer (this_loop);
6950 if (outer == loop
6951 /* If the SESE region contains some bbs ending with
6952 a noreturn call, those are considered to belong
6953 to the outermost loop in saved_cfun, rather than
6954 the entry_bb's loop_father. */
6955 || outer == loop0)
6956 {
6957 if (outer != loop)
6958 num_nodes -= this_loop->num_nodes;
6959 flow_loop_tree_node_remove (bb->loop_father);
6960 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6961 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6962 }
6963 }
6964 else if (bb->loop_father == loop0 && loop0 != loop)
6965 num_nodes--;
6966
6967 /* Remove loop exits from the outlined region. */
6968 if (loops_for_fn (saved_cfun)->exits)
6969 FOR_EACH_EDGE (e, ei, bb->succs)
6970 {
6971 void **slot = htab_find_slot_with_hash
6972 (loops_for_fn (saved_cfun)->exits, e,
6973 htab_hash_pointer (e), NO_INSERT);
6974 if (slot)
6975 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6976 }
6977 }
6978
6979
6980 /* Adjust the number of blocks in the tree root of the outlined part. */
6981 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6982
6983 /* Setup a mapping to be used by move_block_to_fn. */
6984 loop->aux = current_loops->tree_root;
6985 loop0->aux = current_loops->tree_root;
6986
6987 pop_cfun ();
6988
6989 /* Move blocks from BBS into DEST_CFUN. */
6990 gcc_assert (bbs.length () >= 2);
6991 after = dest_cfun->cfg->x_entry_block_ptr;
6992 hash_map<tree, tree> vars_map;
6993
6994 memset (&d, 0, sizeof (d));
6995 d.orig_block = orig_block;
6996 d.new_block = DECL_INITIAL (dest_cfun->decl);
6997 d.from_context = cfun->decl;
6998 d.to_context = dest_cfun->decl;
6999 d.vars_map = &vars_map;
7000 d.new_label_map = new_label_map;
7001 d.eh_map = eh_map;
7002 d.remap_decls_p = true;
7003
7004 FOR_EACH_VEC_ELT (bbs, i, bb)
7005 {
7006 /* No need to update edge counts on the last block. It has
7007 already been updated earlier when we detached the region from
7008 the original CFG. */
7009 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7010 after = bb;
7011 }
7012
7013 loop->aux = NULL;
7014 loop0->aux = NULL;
7015 /* Loop sizes are no longer correct, fix them up. */
7016 loop->num_nodes -= num_nodes;
7017 for (struct loop *outer = loop_outer (loop);
7018 outer; outer = loop_outer (outer))
7019 outer->num_nodes -= num_nodes;
7020 loop0->num_nodes -= bbs.length () - num_nodes;
7021
7022 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7023 {
7024 struct loop *aloop;
7025 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7026 if (aloop != NULL)
7027 {
7028 if (aloop->simduid)
7029 {
7030 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7031 d.to_context);
7032 dest_cfun->has_simduid_loops = true;
7033 }
7034 if (aloop->force_vectorize)
7035 dest_cfun->has_force_vectorize_loops = true;
7036 }
7037 }
7038
7039 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7040 if (orig_block)
7041 {
7042 tree block;
7043 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7044 == NULL_TREE);
7045 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7046 = BLOCK_SUBBLOCKS (orig_block);
7047 for (block = BLOCK_SUBBLOCKS (orig_block);
7048 block; block = BLOCK_CHAIN (block))
7049 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7050 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7051 }
7052
7053 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7054 &vars_map, dest_cfun->decl);
7055
7056 if (new_label_map)
7057 htab_delete (new_label_map);
7058 if (eh_map)
7059 delete eh_map;
7060
7061 /* Rewire the entry and exit blocks. The successor to the entry
7062 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7063 the child function. Similarly, the predecessor of DEST_FN's
7064 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7065 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7066 various CFG manipulation functions get to the right CFG.
7067
7068 FIXME, this is silly. The CFG ought to become a parameter to
7069 these helpers. */
7070 push_cfun (dest_cfun);
7071 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7072 if (exit_bb)
7073 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7074 pop_cfun ();
7075
7076 /* Back in the original function, the SESE region has disappeared,
7077 create a new basic block in its place. */
7078 bb = create_empty_bb (entry_pred[0]);
7079 if (current_loops)
7080 add_bb_to_loop (bb, loop);
7081 for (i = 0; i < num_entry_edges; i++)
7082 {
7083 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7084 e->probability = entry_prob[i];
7085 }
7086
7087 for (i = 0; i < num_exit_edges; i++)
7088 {
7089 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7090 e->probability = exit_prob[i];
7091 }
7092
7093 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7094 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7095 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7096 dom_bbs.release ();
7097
7098 if (exit_bb)
7099 {
7100 free (exit_prob);
7101 free (exit_flag);
7102 free (exit_succ);
7103 }
7104 free (entry_prob);
7105 free (entry_flag);
7106 free (entry_pred);
7107 bbs.release ();
7108
7109 return bb;
7110 }
7111
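/* Editor's usage sketch (illustrative): the OMP expansion code outlines
   parallel and task bodies this way, roughly:

       struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
       basic_block new_bb
         = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return the region lives in CHILD_CFUN and NEW_BB replaces it in
   the original CFG.  */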
7112
7113 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7114 dumpfile.h).  */
7115
7116 void
7117 dump_function_to_file (tree fndecl, FILE *file, int flags)
7118 {
7119 tree arg, var, old_current_fndecl = current_function_decl;
7120 struct function *dsf;
7121 bool ignore_topmost_bind = false, any_var = false;
7122 basic_block bb;
7123 tree chain;
7124 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7125 && decl_is_tm_clone (fndecl));
7126 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7127
7128 current_function_decl = fndecl;
7129 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7130
7131 arg = DECL_ARGUMENTS (fndecl);
7132 while (arg)
7133 {
7134 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7135 fprintf (file, " ");
7136 print_generic_expr (file, arg, dump_flags);
7137 if (flags & TDF_VERBOSE)
7138 print_node (file, "", arg, 4);
7139 if (DECL_CHAIN (arg))
7140 fprintf (file, ", ");
7141 arg = DECL_CHAIN (arg);
7142 }
7143 fprintf (file, ")\n");
7144
7145 if (flags & TDF_VERBOSE)
7146 print_node (file, "", fndecl, 2);
7147
7148 dsf = DECL_STRUCT_FUNCTION (fndecl);
7149 if (dsf && (flags & TDF_EH))
7150 dump_eh_tree (file, dsf);
7151
7152 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7153 {
7154 dump_node (fndecl, TDF_SLIM | flags, file);
7155 current_function_decl = old_current_fndecl;
7156 return;
7157 }
7158
7159 /* When GIMPLE is lowered, the variables are no longer available in
7160 BIND_EXPRs, so display them separately. */
7161 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7162 {
7163 unsigned ix;
7164 ignore_topmost_bind = true;
7165
7166 fprintf (file, "{\n");
7167 if (!vec_safe_is_empty (fun->local_decls))
7168 FOR_EACH_LOCAL_DECL (fun, ix, var)
7169 {
7170 print_generic_decl (file, var, flags);
7171 if (flags & TDF_VERBOSE)
7172 print_node (file, "", var, 4);
7173 fprintf (file, "\n");
7174
7175 any_var = true;
7176 }
7177 if (gimple_in_ssa_p (cfun))
7178 for (ix = 1; ix < num_ssa_names; ++ix)
7179 {
7180 tree name = ssa_name (ix);
7181 if (name && !SSA_NAME_VAR (name))
7182 {
7183 fprintf (file, " ");
7184 print_generic_expr (file, TREE_TYPE (name), flags);
7185 fprintf (file, " ");
7186 print_generic_expr (file, name, flags);
7187 fprintf (file, ";\n");
7188
7189 any_var = true;
7190 }
7191 }
7192 }
7193
7194 if (fun && fun->decl == fndecl
7195 && fun->cfg
7196 && basic_block_info_for_fn (fun))
7197 {
7198 /* If the CFG has been built, emit a CFG-based dump. */
7199 if (!ignore_topmost_bind)
7200 fprintf (file, "{\n");
7201
7202 if (any_var && n_basic_blocks_for_fn (fun))
7203 fprintf (file, "\n");
7204
7205 FOR_EACH_BB_FN (bb, fun)
7206 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7207
7208 fprintf (file, "}\n");
7209 }
7210 else if (DECL_SAVED_TREE (fndecl) == NULL)
7211 {
7212 /* The function is now in GIMPLE form but the CFG has not been
7213 built yet. Emit the single sequence of GIMPLE statements
7214 that make up its body. */
7215 gimple_seq body = gimple_body (fndecl);
7216
7217 if (gimple_seq_first_stmt (body)
7218 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7219 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7220 print_gimple_seq (file, body, 0, flags);
7221 else
7222 {
7223 if (!ignore_topmost_bind)
7224 fprintf (file, "{\n");
7225
7226 if (any_var)
7227 fprintf (file, "\n");
7228
7229 print_gimple_seq (file, body, 2, flags);
7230 fprintf (file, "}\n");
7231 }
7232 }
7233 else
7234 {
7235 int indent;
7236
7237 /* Make a tree based dump. */
7238 chain = DECL_SAVED_TREE (fndecl);
7239 if (chain && TREE_CODE (chain) == BIND_EXPR)
7240 {
7241 if (ignore_topmost_bind)
7242 {
7243 chain = BIND_EXPR_BODY (chain);
7244 indent = 2;
7245 }
7246 else
7247 indent = 0;
7248 }
7249 else
7250 {
7251 if (!ignore_topmost_bind)
7252 fprintf (file, "{\n");
7253 indent = 2;
7254 }
7255
7256 if (any_var)
7257 fprintf (file, "\n");
7258
7259 print_generic_stmt_indented (file, chain, flags, indent);
7260 if (ignore_topmost_bind)
7261 fprintf (file, "}\n");
7262 }
7263
7264 if (flags & TDF_ENUMERATE_LOCALS)
7265 dump_enumerated_decls (file, flags);
7266 fprintf (file, "\n\n");
7267
7268 current_function_decl = old_current_fndecl;
7269 }
7270
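/* Editor's example (illustrative): a typical CFG-based dump of the
   current function would be

       dump_function_to_file (current_function_decl, stderr,
                              TDF_DETAILS | TDF_BLOCKS);

   with the TDF_* flags combined as usual from dumpfile.h.  */
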
7271 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7272
7273 DEBUG_FUNCTION void
7274 debug_function (tree fn, int flags)
7275 {
7276 dump_function_to_file (fn, stderr, flags);
7277 }
7278
7279
7280 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7281
7282 static void
7283 print_pred_bbs (FILE *file, basic_block bb)
7284 {
7285 edge e;
7286 edge_iterator ei;
7287
7288 FOR_EACH_EDGE (e, ei, bb->preds)
7289 fprintf (file, "bb_%d ", e->src->index);
7290 }
7291
7292
7293 /* Print on FILE the indexes for the successors of basic_block BB. */
7294
7295 static void
7296 print_succ_bbs (FILE *file, basic_block bb)
7297 {
7298 edge e;
7299 edge_iterator ei;
7300
7301 FOR_EACH_EDGE (e, ei, bb->succs)
7302 fprintf (file, "bb_%d ", e->dest->index);
7303 }
7304
7305 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7306
7307 void
7308 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7309 {
7310 char *s_indent = (char *) alloca ((size_t) indent + 1);
7311 memset ((void *) s_indent, ' ', (size_t) indent);
7312 s_indent[indent] = '\0';
7313
7314 /* Print basic_block's header. */
7315 if (verbosity >= 2)
7316 {
7317 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7318 print_pred_bbs (file, bb);
7319 fprintf (file, "}, succs = {");
7320 print_succ_bbs (file, bb);
7321 fprintf (file, "})\n");
7322 }
7323
7324 /* Print basic_block's body. */
7325 if (verbosity >= 3)
7326 {
7327 fprintf (file, "%s {\n", s_indent);
7328 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7329 fprintf (file, "%s }\n", s_indent);
7330 }
7331 }
7332
7333 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7334
7335 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7336 the VERBOSITY level, this outputs the contents of the loop or just
7337 its structure. */
7338
7339 static void
7340 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7341 {
7342 char *s_indent;
7343 basic_block bb;
7344
7345 if (loop == NULL)
7346 return;
7347
7348 s_indent = (char *) alloca ((size_t) indent + 1);
7349 memset ((void *) s_indent, ' ', (size_t) indent);
7350 s_indent[indent] = '\0';
7351
7352 /* Print loop's header. */
7353 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7354 if (loop->header)
7355 fprintf (file, "header = %d", loop->header->index);
7356 else
7357 {
7358 fprintf (file, "deleted)\n");
7359 return;
7360 }
7361 if (loop->latch)
7362 fprintf (file, ", latch = %d", loop->latch->index);
7363 else
7364 fprintf (file, ", multiple latches");
7365 fprintf (file, ", niter = ");
7366 print_generic_expr (file, loop->nb_iterations, 0);
7367
7368 if (loop->any_upper_bound)
7369 {
7370 fprintf (file, ", upper_bound = ");
7371 print_decu (loop->nb_iterations_upper_bound, file);
7372 }
7373
7374 if (loop->any_estimate)
7375 {
7376 fprintf (file, ", estimate = ");
7377 print_decu (loop->nb_iterations_estimate, file);
7378 }
7379 fprintf (file, ")\n");
7380
7381 /* Print loop's body. */
7382 if (verbosity >= 1)
7383 {
7384 fprintf (file, "%s{\n", s_indent);
7385 FOR_EACH_BB_FN (bb, cfun)
7386 if (bb->loop_father == loop)
7387 print_loops_bb (file, bb, indent, verbosity);
7388
7389 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7390 fprintf (file, "%s}\n", s_indent);
7391 }
7392 }
7393
7394 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7395 spaces. Depending on the VERBOSITY level, this outputs the contents
7396 of each loop or just its structure. */
7397
7398 static void
7399 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7400 int verbosity)
7401 {
7402 if (loop == NULL)
7403 return;
7404
7405 print_loop (file, loop, indent, verbosity);
7406 print_loop_and_siblings (file, loop->next, indent, verbosity);
7407 }
7408
7409 /* Follow a CFG edge from the entry point of the program, and on entry
7410 of a loop, pretty print the loop structure on FILE. */
7411
7412 void
7413 print_loops (FILE *file, int verbosity)
7414 {
7415 basic_block bb;
7416
7417 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7418 if (bb && bb->loop_father)
7419 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7420 }
7421
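/* Editor's note on the verbosity levels (derived from the printers
   above): 0 prints just the loop headers; >= 1 opens each loop's body
   and recurses into inner loops; >= 2 also lists each block's
   predecessors and successors; >= 3 dumps the block bodies.  So
   print_loops (stderr, 3) gives the full dump that debug_loops (3)
   below wraps.  */
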
7422 /* Dump a loop. */
7423
7424 DEBUG_FUNCTION void
7425 debug (struct loop &ref)
7426 {
7427 print_loop (stderr, &ref, 0, /*verbosity*/0);
7428 }
7429
7430 DEBUG_FUNCTION void
7431 debug (struct loop *ptr)
7432 {
7433 if (ptr)
7434 debug (*ptr);
7435 else
7436 fprintf (stderr, "<nil>\n");
7437 }
7438
7439 /* Dump a loop verbosely. */
7440
7441 DEBUG_FUNCTION void
7442 debug_verbose (struct loop &ref)
7443 {
7444 print_loop (stderr, &ref, 0, /*verbosity*/3);
7445 }
7446
7447 DEBUG_FUNCTION void
7448 debug_verbose (struct loop *ptr)
7449 {
7450 if (ptr)
7451 debug_verbose (*ptr);
7452 else
7453 fprintf (stderr, "<nil>\n");
7454 }
7455
7456
7457 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7458
7459 DEBUG_FUNCTION void
7460 debug_loops (int verbosity)
7461 {
7462 print_loops (stderr, verbosity);
7463 }
7464
7465 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7466
7467 DEBUG_FUNCTION void
7468 debug_loop (struct loop *loop, int verbosity)
7469 {
7470 print_loop (stderr, loop, 0, verbosity);
7471 }
7472
7473 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7474 level. */
7475
7476 DEBUG_FUNCTION void
7477 debug_loop_num (unsigned num, int verbosity)
7478 {
7479 debug_loop (get_loop (cfun, num), verbosity);
7480 }
7481
7482 /* Return true if BB ends with a call, possibly followed by some
7483 instructions that must stay with the call. Return false
7484 otherwise. */
7485
7486 static bool
7487 gimple_block_ends_with_call_p (basic_block bb)
7488 {
7489 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7490 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7491 }
7492
7493
7494 /* Return true if BB ends with a conditional branch. Return false
7495 otherwise. */
7496
7497 static bool
7498 gimple_block_ends_with_condjump_p (const_basic_block bb)
7499 {
7500 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7501 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7502 }
7503
7504
7505 /* Return true if we need to add a fake edge to exit at statement T.
7506 Helper function for gimple_flow_call_edges_add. */
7507
7508 static bool
7509 need_fake_edge_p (gimple t)
7510 {
7511 tree fndecl = NULL_TREE;
7512 int call_flags = 0;
7513
7514 /* NORETURN and LONGJMP calls already have an edge to exit.
7515 CONST and PURE calls do not need one.
7516 We don't currently check for CONST and PURE here, although
7517 it would be a good idea, because those attributes are
7518 figured out from the RTL in mark_constant_function, and
7519 the counter incrementation code from -fprofile-arcs
7520 leads to different results from -fbranch-probabilities. */
7521 if (is_gimple_call (t))
7522 {
7523 fndecl = gimple_call_fndecl (t);
7524 call_flags = gimple_call_flags (t);
7525 }
7526
7527 if (is_gimple_call (t)
7528 && fndecl
7529 && DECL_BUILT_IN (fndecl)
7530 && (call_flags & ECF_NOTHROW)
7531 && !(call_flags & ECF_RETURNS_TWICE)
7532 /* fork() doesn't really return twice, but the effect of
7533 wrapping it in __gcov_fork() which calls __gcov_flush()
7534 and clears the counters before forking has the same
7535 effect as returning twice. Force a fake edge. */
7536 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7537 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7538 return false;
7539
7540 if (is_gimple_call (t))
7541 {
7542 edge_iterator ei;
7543 edge e;
7544 basic_block bb;
7545
7546 if (!(call_flags & ECF_NORETURN))
7547 return true;
7548
7549 bb = gimple_bb (t);
7550 FOR_EACH_EDGE (e, ei, bb->succs)
7551 if ((e->flags & EDGE_FAKE) == 0)
7552 return true;
7553 }
7554
7555 if (gimple_code (t) == GIMPLE_ASM
7556 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7557 return true;
7558
7559 return false;
7560 }
7561
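/* Editor's summary of the cases above (illustrative): an ordinary call
   gets a fake edge because it might never return normally (it could
   call exit, for instance); a noreturn call gets one only when its
   block still has a non-fake successor such as an EH or abnormal edge;
   and volatile asms (or old-style asms without operands) get one
   because they may do anything.  */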
7562
7563 /* Add fake edges to the function exit for any non-constant and
7564 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7565 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7566 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7567 that were split.
7568
7569 The goal is to expose cases in which entering a basic block does
7570 not imply that all subsequent instructions must be executed. */
7571
7572 static int
7573 gimple_flow_call_edges_add (sbitmap blocks)
7574 {
7575 int i;
7576 int blocks_split = 0;
7577 int last_bb = last_basic_block_for_fn (cfun);
7578 bool check_last_block = false;
7579
7580 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7581 return 0;
7582
7583 if (! blocks)
7584 check_last_block = true;
7585 else
7586 check_last_block = bitmap_bit_p (blocks,
7587 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7588
7589 /* In the last basic block, before epilogue generation, there will be
7590 a fallthru edge to EXIT. Special care is required if the last insn
7591 of the last basic block is a call because make_edge folds duplicate
7592 edges, which would result in the fallthru edge also being marked
7593 fake, which would result in the fallthru edge being removed by
7594 remove_fake_edges, which would result in an invalid CFG.
7595
7596 Moreover, we can't elide the outgoing fake edge, since the block
7597 profiler needs to take this into account in order to solve the minimal
7598 spanning tree in the case that the call doesn't return.
7599
7600 Handle this by adding a dummy instruction in a new last basic block. */
7601 if (check_last_block)
7602 {
7603 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7604 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7605 gimple t = NULL;
7606
7607 if (!gsi_end_p (gsi))
7608 t = gsi_stmt (gsi);
7609
7610 if (t && need_fake_edge_p (t))
7611 {
7612 edge e;
7613
7614 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7615 if (e)
7616 {
7617 gsi_insert_on_edge (e, gimple_build_nop ());
7618 gsi_commit_edge_inserts ();
7619 }
7620 }
7621 }
7622
7623 /* Now add fake edges to the function exit for any non-constant
7624 calls since there is no way that we can determine if they will
7625 return or not... */
7626 for (i = 0; i < last_bb; i++)
7627 {
7628 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7629 gimple_stmt_iterator gsi;
7630 gimple stmt, last_stmt;
7631
7632 if (!bb)
7633 continue;
7634
7635 if (blocks && !bitmap_bit_p (blocks, i))
7636 continue;
7637
7638 gsi = gsi_last_nondebug_bb (bb);
7639 if (!gsi_end_p (gsi))
7640 {
7641 last_stmt = gsi_stmt (gsi);
7642 do
7643 {
7644 stmt = gsi_stmt (gsi);
7645 if (need_fake_edge_p (stmt))
7646 {
7647 edge e;
7648
7649 /* The handling above of the final block before the
7650 epilogue should be enough to verify that there is
7651 no edge to the exit block in CFG already.
7652 Calling make_edge in such case would cause us to
7653 mark that edge as fake and remove it later. */
7654 #ifdef ENABLE_CHECKING
7655 if (stmt == last_stmt)
7656 {
7657 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7658 gcc_assert (e == NULL);
7659 }
7660 #endif
7661
7662 /* Note that the following may create a new basic block
7663 and renumber the existing basic blocks. */
7664 if (stmt != last_stmt)
7665 {
7666 e = split_block (bb, stmt);
7667 if (e)
7668 blocks_split++;
7669 }
7670 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7671 }
7672 gsi_prev (&gsi);
7673 }
7674 while (!gsi_end_p (gsi));
7675 }
7676 }
7677
7678 if (blocks_split)
7679 verify_flow_info ();
7680
7681 return blocks_split;
7682 }
7683
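/* Editor's note (assumption): this is not meant to be called directly;
   it is exported through gimple_cfg_hooks at the end of this file and
   reached via the generic flow_call_edges_add cfg-hooks wrapper, e.g.
   by the profiling code that needs every block to have a well-defined
   exit path.  */
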
7684 /* Removes edge E and all the blocks dominated by it, and updates dominance
7685 information. The IL in E->src needs to be updated separately.
7686 If dominance info is not available, only the edge E is removed. */
7687
7688 void
7689 remove_edge_and_dominated_blocks (edge e)
7690 {
7691 vec<basic_block> bbs_to_remove = vNULL;
7692 vec<basic_block> bbs_to_fix_dom = vNULL;
7693 bitmap df, df_idom;
7694 edge f;
7695 edge_iterator ei;
7696 bool none_removed = false;
7697 unsigned i;
7698 basic_block bb, dbb;
7699 bitmap_iterator bi;
7700
7701 if (!dom_info_available_p (CDI_DOMINATORS))
7702 {
7703 remove_edge (e);
7704 return;
7705 }
7706
7707 /* No updating is needed for edges to exit. */
7708 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7709 {
7710 if (cfgcleanup_altered_bbs)
7711 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7712 remove_edge (e);
7713 return;
7714 }
7715
7716 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7717 that is not dominated by E->dest, then this set is empty. Otherwise,
7718 all the basic blocks dominated by E->dest are removed.
7719
7720 Also, to DF_IDOM we store the immediate dominators of the blocks in
7721 the dominance frontier of E (i.e., of the successors of the
7722 removed blocks, if there are any, and of E->dest otherwise). */
7723 FOR_EACH_EDGE (f, ei, e->dest->preds)
7724 {
7725 if (f == e)
7726 continue;
7727
7728 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7729 {
7730 none_removed = true;
7731 break;
7732 }
7733 }
7734
7735 df = BITMAP_ALLOC (NULL);
7736 df_idom = BITMAP_ALLOC (NULL);
7737
7738 if (none_removed)
7739 bitmap_set_bit (df_idom,
7740 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7741 else
7742 {
7743 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7744 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7745 {
7746 FOR_EACH_EDGE (f, ei, bb->succs)
7747 {
7748 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7749 bitmap_set_bit (df, f->dest->index);
7750 }
7751 }
7752 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7753 bitmap_clear_bit (df, bb->index);
7754
7755 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7756 {
7757 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7758 bitmap_set_bit (df_idom,
7759 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7760 }
7761 }
7762
7763 if (cfgcleanup_altered_bbs)
7764 {
7765 /* Record the set of the altered basic blocks. */
7766 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7767 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7768 }
7769
7770 /* Remove E and the cancelled blocks. */
7771 if (none_removed)
7772 remove_edge (e);
7773 else
7774 {
7775 /* Walk backwards so as to get a chance to substitute all
7776 released DEFs into debug stmts. See
7777 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7778 details. */
7779 for (i = bbs_to_remove.length (); i-- > 0; )
7780 delete_basic_block (bbs_to_remove[i]);
7781 }
7782
7783 /* Update the dominance information. The immediate dominator may change only
7784 for blocks whose immediate dominator belongs to DF_IDOM:
7785
7786 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7787 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7788 Z dominates X after the removal. Before removal, there exists a path P
7789 from Y to X that avoids Z. Let F be the last edge on P that is
7790 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7791 dominates W, and because of P, Z does not dominate W), and W belongs to
7792 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7793 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7794 {
7795 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7796 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7797 dbb;
7798 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7799 bbs_to_fix_dom.safe_push (dbb);
7800 }
7801
7802 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7803
7804 BITMAP_FREE (df);
7805 BITMAP_FREE (df_idom);
7806 bbs_to_remove.release ();
7807 bbs_to_fix_dom.release ();
7808 }
7809
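/* Editor's worked example of the argument above (illustrative): in the
   diamond A->B, A->C, B->D, C->D, removing edge A->B leaves B with no
   other predecessor, so exactly B is removed.  The dominance frontier
   of the removed set is {D}, idom(D) = A lands in DF_IDOM, and the
   fixup pass recomputes idom(D) = C.  */
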
7810 /* Purge dead EH edges from basic block BB. */
7811
7812 bool
7813 gimple_purge_dead_eh_edges (basic_block bb)
7814 {
7815 bool changed = false;
7816 edge e;
7817 edge_iterator ei;
7818 gimple stmt = last_stmt (bb);
7819
7820 if (stmt && stmt_can_throw_internal (stmt))
7821 return false;
7822
7823 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7824 {
7825 if (e->flags & EDGE_EH)
7826 {
7827 remove_edge_and_dominated_blocks (e);
7828 changed = true;
7829 }
7830 else
7831 ei_next (&ei);
7832 }
7833
7834 return changed;
7835 }
7836
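/* Editor's usage sketch (illustrative): callers typically purge EH
   edges after a transformation may have made the last statement of a
   block no longer throw, e.g.

       gimple old_stmt = gsi_stmt (gsi);
       if (fold_stmt (&gsi)
           && maybe_clean_or_replace_eh_stmt (old_stmt, gsi_stmt (gsi)))
         gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));
*/
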
7837 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7838
7839 bool
7840 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7841 {
7842 bool changed = false;
7843 unsigned i;
7844 bitmap_iterator bi;
7845
7846 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7847 {
7848 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7849
7850 /* Earlier gimple_purge_dead_eh_edges could have removed
7851 this basic block already. */
7852 gcc_assert (bb || changed);
7853 if (bb != NULL)
7854 changed |= gimple_purge_dead_eh_edges (bb);
7855 }
7856
7857 return changed;
7858 }
7859
7860 /* Purge dead abnormal call edges from basic block BB. */
7861
7862 bool
7863 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7864 {
7865 bool changed = false;
7866 edge e;
7867 edge_iterator ei;
7868 gimple stmt = last_stmt (bb);
7869
7870 if (!cfun->has_nonlocal_label
7871 && !cfun->calls_setjmp)
7872 return false;
7873
7874 if (stmt && stmt_can_make_abnormal_goto (stmt))
7875 return false;
7876
7877 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7878 {
7879 if (e->flags & EDGE_ABNORMAL)
7880 {
7881 if (e->flags & EDGE_FALLTHRU)
7882 e->flags &= ~EDGE_ABNORMAL;
7883 else
7884 remove_edge_and_dominated_blocks (e);
7885 changed = true;
7886 }
7887 else
7888 ei_next (&ei);
7889 }
7890
7891 return changed;
7892 }
7893
7894 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7895
7896 bool
7897 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7898 {
7899 bool changed = false;
7900 unsigned i;
7901 bitmap_iterator bi;
7902
7903 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7904 {
7905 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7906
7907 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7908 this basic block already. */
7909 gcc_assert (bb || changed);
7910 if (bb != NULL)
7911 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7912 }
7913
7914 return changed;
7915 }
7916
7917 /* This function is called whenever a new edge is created or
7918 redirected. */
7919
7920 static void
7921 gimple_execute_on_growing_pred (edge e)
7922 {
7923 basic_block bb = e->dest;
7924
7925 if (!gimple_seq_empty_p (phi_nodes (bb)))
7926 reserve_phi_args_for_new_edge (bb);
7927 }
7928
7929 /* This function is called immediately before edge E is removed from
7930 the edge vector E->dest->preds. */
7931
7932 static void
7933 gimple_execute_on_shrinking_pred (edge e)
7934 {
7935 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7936 remove_phi_args (e);
7937 }
7938
7939 /*---------------------------------------------------------------------------
7940 Helper functions for Loop versioning
7941 ---------------------------------------------------------------------------*/
7942
7943 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7944 of 'first'. Both of them are dominated by 'new_head' basic block. When
7945 'new_head' was created by splitting 'second's incoming edge, it received
7946 phi arguments on that edge via split_edge(). Later, an additional edge 'e'
7947 was created to connect 'new_head' and 'first'. Now this routine adds phi
7948 args on this additional edge 'e', matching the args that the edge from
7949 new_head to second received as part of the edge splitting. */
7950
7951 static void
7952 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7953 basic_block new_head, edge e)
7954 {
7955 gimple phi1, phi2;
7956 gimple_stmt_iterator psi1, psi2;
7957 tree def;
7958 edge e2 = find_edge (new_head, second);
7959
7960 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7961 edge, we should always have an edge from NEW_HEAD to SECOND. */
7962 gcc_assert (e2 != NULL);
7963
7964 /* Walk all phi nodes of basic block 'second' and add their args on
7965 edge 'e' for head 'first'. PHI args are always in the correct order. */
7966
7967 for (psi2 = gsi_start_phis (second),
7968 psi1 = gsi_start_phis (first);
7969 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7970 gsi_next (&psi2), gsi_next (&psi1))
7971 {
7972 phi1 = gsi_stmt (psi1);
7973 phi2 = gsi_stmt (psi2);
7974 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7975 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7976 }
7977 }
7978
7979
7980 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7981 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7982 the destination of the ELSE part. */
7983
7984 static void
7985 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7986 basic_block second_head ATTRIBUTE_UNUSED,
7987 basic_block cond_bb, void *cond_e)
7988 {
7989 gimple_stmt_iterator gsi;
7990 gimple new_cond_expr;
7991 tree cond_expr = (tree) cond_e;
7992 edge e0;
7993
7994 /* Build the new conditional expression. */
7995 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7996 NULL_TREE, NULL_TREE);
7997
7998 /* Add new cond in cond_bb. */
7999 gsi = gsi_last_bb (cond_bb);
8000 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8001
8002 /* Adjust edges appropriately to connect new head with first head
8003 as well as second head. */
8004 e0 = single_succ_edge (cond_bb);
8005 e0->flags &= ~EDGE_FALLTHRU;
8006 e0->flags |= EDGE_FALSE_VALUE;
8007 }
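
/* Illustrative shape of the versioned CFG the two helpers above are
   used for (a sketch; the true edge itself is added by the caller):

               cond_bb
           if (cond_expr)
            /true     \false
           v           v
     second_head    first_head
       (copy)       (original)

   gimple_lv_add_condition_to_bb only emits the GIMPLE_COND and turns
   the existing fallthru successor of COND_BB into the false edge.  */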
8008
8009
8010 /* Do book-keeping of basic block BB for the profile consistency checker.
8011 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8012 do post-pass accounting. Store the counts in RECORD. */
8013 static void
8014 gimple_account_profile_record (basic_block bb, int after_pass,
8015 struct profile_record *record)
8016 {
8017 gimple_stmt_iterator i;
8018 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8019 {
8020 record->size[after_pass]
8021 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8022 if (profile_status_for_fn (cfun) == PROFILE_READ)
8023 record->time[after_pass]
8024 += estimate_num_insns (gsi_stmt (i),
8025 &eni_time_weights) * bb->count;
8026 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8027 record->time[after_pass]
8028 += estimate_num_insns (gsi_stmt (i),
8029 &eni_time_weights) * bb->frequency;
8030 }
8031 }
8032
8033 struct cfg_hooks gimple_cfg_hooks = {
8034 "gimple",
8035 gimple_verify_flow_info,
8036 gimple_dump_bb, /* dump_bb */
8037 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8038 create_bb, /* create_basic_block */
8039 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8040 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8041 gimple_can_remove_branch_p, /* can_remove_branch_p */
8042 remove_bb, /* delete_basic_block */
8043 gimple_split_block, /* split_block */
8044 gimple_move_block_after, /* move_block_after */
8045 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8046 gimple_merge_blocks, /* merge_blocks */
8047 gimple_predict_edge, /* predict_edge */
8048 gimple_predicted_by_p, /* predicted_by_p */
8049 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8050 gimple_duplicate_bb, /* duplicate_block */
8051 gimple_split_edge, /* split_edge */
8052 gimple_make_forwarder_block, /* make_forwarder_block */
8053 NULL, /* tidy_fallthru_edge */
8054 NULL, /* force_nonfallthru */
8055 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8056 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8057 gimple_flow_call_edges_add, /* flow_call_edges_add */
8058 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8059 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8060 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8061 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8062 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8063 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8064 flush_pending_stmts, /* flush_pending_stmts */
8065 gimple_empty_block_p, /* block_empty_p */
8066 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8067 gimple_account_profile_record,
8068 };
8069
8070
8071 /* Split all critical edges. */
8072
8073 unsigned int
8074 split_critical_edges (void)
8075 {
8076 basic_block bb;
8077 edge e;
8078 edge_iterator ei;
8079
8080 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8081 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8082 mappings around the calls to split_edge. */
8083 start_recording_case_labels ();
8084 FOR_ALL_BB_FN (bb, cfun)
8085 {
8086 FOR_EACH_EDGE (e, ei, bb->succs)
8087 {
8088 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8089 split_edge (e);
8090 /* PRE inserts statements on edges and expects that,
8091 since split_critical_edges was run beforehand, committing edge
8092 insertions will not split more edges. In addition to critical
8093 edges we must split edges out of blocks that end in control
8094 flow statements such as RESX, when inserting on them would
8095 otherwise require a split. This matches the logic in
8096 gimple_find_edge_insert_loc. */
8097 else if ((!single_pred_p (e->dest)
8098 || !gimple_seq_empty_p (phi_nodes (e->dest))
8099 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8100 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8101 && !(e->flags & EDGE_ABNORMAL))
8102 {
8103 gimple_stmt_iterator gsi;
8104
8105 gsi = gsi_last_bb (e->src);
8106 if (!gsi_end_p (gsi)
8107 && stmt_ends_bb_p (gsi_stmt (gsi))
8108 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8109 && !gimple_call_builtin_p (gsi_stmt (gsi),
8110 BUILT_IN_RETURN)))
8111 split_edge (e);
8112 }
8113 }
8114 }
8115 end_recording_case_labels ();
8116 return 0;
8117 }
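
/* Illustrative example (hypothetical blocks): in

        bb1                 bb1
       /   \               /   \
     bb2    |    =>      bb2   bb4
       \    |              \    |
        v   v               v   v
         bb3                 bb3

   the edge bb1 -> bb3 is critical: its source has two successors and
   its destination has two predecessors.  split_edge replaces it by
   bb1 -> bb4 -> bb3, so statements can later be committed on that
   path without disturbing the bb2 -> bb3 path.  */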
8118
8119 namespace {
8120
8121 const pass_data pass_data_split_crit_edges =
8122 {
8123 GIMPLE_PASS, /* type */
8124 "crited", /* name */
8125 OPTGROUP_NONE, /* optinfo_flags */
8126 TV_TREE_SPLIT_EDGES, /* tv_id */
8127 PROP_cfg, /* properties_required */
8128 PROP_no_crit_edges, /* properties_provided */
8129 0, /* properties_destroyed */
8130 0, /* todo_flags_start */
8131 0, /* todo_flags_finish */
8132 };
8133
8134 class pass_split_crit_edges : public gimple_opt_pass
8135 {
8136 public:
8137 pass_split_crit_edges (gcc::context *ctxt)
8138 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8139 {}
8140
8141 /* opt_pass methods: */
8142 virtual unsigned int execute (function *) { return split_critical_edges (); }
8143
8144 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8145 }; // class pass_split_crit_edges
8146
8147 } // anon namespace
8148
8149 gimple_opt_pass *
8150 make_pass_split_crit_edges (gcc::context *ctxt)
8151 {
8152 return new pass_split_crit_edges (ctxt);
8153 }
8154
8155
8156 /* Build a ternary operation and gimplify it. Emit code before GSI.
8157 Return the gimple_val holding the result. */
8158
8159 tree
8160 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8161 tree type, tree a, tree b, tree c)
8162 {
8163 tree ret;
8164 location_t loc = gimple_location (gsi_stmt (*gsi));
8165
8166 ret = fold_build3_loc (loc, code, type, a, b, c);
8167 STRIP_NOPS (ret);
8168
8169 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8170 GSI_SAME_STMT);
8171 }
8172
8173 /* Build a binary operation and gimplify it. Emit code before GSI.
8174 Return the gimple_val holding the result. */
8175
8176 tree
8177 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8178 tree type, tree a, tree b)
8179 {
8180 tree ret;
8181
8182 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8183 STRIP_NOPS (ret);
8184
8185 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8186 GSI_SAME_STMT);
8187 }
8188
8189 /* Build a unary operation and gimplify it. Emit code before GSI.
8190 Return the gimple_val holding the result. */
8191
8192 tree
8193 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8194 tree a)
8195 {
8196 tree ret;
8197
8198 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8199 STRIP_NOPS (ret);
8200
8201 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8202 GSI_SAME_STMT);
8203 }
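
/* Usage sketch (hypothetical operands A, B, C of type TYPE): the
   helpers above fold first and emit statements only for whatever
   does not simplify, so

     tree t = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     t = gimplify_build2 (gsi, MULT_EXPR, type, t, c);

   materializes (a + b) * c before the statement at *GSI and returns
   a gimple_val holding the result.  */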
8204
8205
8206 \f
8207 /* Given a basic block B which ends with a conditional and has
8208 precisely two successors, determine which of the edges is taken if
8209 the conditional is true and which is taken if the conditional is
8210 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8211
8212 void
8213 extract_true_false_edges_from_block (basic_block b,
8214 edge *true_edge,
8215 edge *false_edge)
8216 {
8217 edge e = EDGE_SUCC (b, 0);
8218
8219 if (e->flags & EDGE_TRUE_VALUE)
8220 {
8221 *true_edge = e;
8222 *false_edge = EDGE_SUCC (b, 1);
8223 }
8224 else
8225 {
8226 *false_edge = e;
8227 *true_edge = EDGE_SUCC (b, 1);
8228 }
8229 }
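
/* Usage sketch (hypothetical COND_BB ending in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   afterwards true_edge->dest is the block reached when the condition
   evaluates to true.  */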
8230
8231 /* Emit return warnings. */
8232
8233 namespace {
8234
8235 const pass_data pass_data_warn_function_return =
8236 {
8237 GIMPLE_PASS, /* type */
8238 "*warn_function_return", /* name */
8239 OPTGROUP_NONE, /* optinfo_flags */
8240 TV_NONE, /* tv_id */
8241 PROP_cfg, /* properties_required */
8242 0, /* properties_provided */
8243 0, /* properties_destroyed */
8244 0, /* todo_flags_start */
8245 0, /* todo_flags_finish */
8246 };
8247
8248 class pass_warn_function_return : public gimple_opt_pass
8249 {
8250 public:
8251 pass_warn_function_return (gcc::context *ctxt)
8252 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8253 {}
8254
8255 /* opt_pass methods: */
8256 virtual unsigned int execute (function *);
8257
8258 }; // class pass_warn_function_return
8259
8260 unsigned int
8261 pass_warn_function_return::execute (function *fun)
8262 {
8263 source_location location;
8264 gimple last;
8265 edge e;
8266 edge_iterator ei;
8267
8268 if (!targetm.warn_func_return (fun->decl))
8269 return 0;
8270
8271 /* If we have a path to EXIT, then we do return; TREE_THIS_VOLATILE on FUN->decl means it was declared noreturn. */
8272 if (TREE_THIS_VOLATILE (fun->decl)
8273 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8274 {
8275 location = UNKNOWN_LOCATION;
8276 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8277 {
8278 last = last_stmt (e->src);
8279 if ((gimple_code (last) == GIMPLE_RETURN
8280 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8281 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8282 break;
8283 }
8284 if (location == UNKNOWN_LOCATION)
8285 location = cfun->function_end_locus;
8286 warning_at (location, 0, "%<noreturn%> function does return");
8287 }
8288
8289 /* If we see "return;" in some basic block, then we do reach the end
8290 without returning a value. */
8291 else if (warn_return_type
8292 && !TREE_NO_WARNING (fun->decl)
8293 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8294 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8295 {
8296 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8297 {
8298 gimple last = last_stmt (e->src);
8299 if (gimple_code (last) == GIMPLE_RETURN
8300 && gimple_return_retval (last) == NULL
8301 && !gimple_no_warning_p (last))
8302 {
8303 location = gimple_location (last);
8304 if (location == UNKNOWN_LOCATION)
8305 location = fun->function_end_locus;
8306 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8307 TREE_NO_WARNING (fun->decl) = 1;
8308 break;
8309 }
8310 }
8311 }
8312 return 0;
8313 }
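
/* Illustrative inputs for the two warnings above (sketches with
   hypothetical functions):

     __attribute__ ((noreturn)) void f (int x)
     { if (x) abort (); }
         -> a path to EXIT exists: "'noreturn' function does return"

     int g (int x)
     { if (x) return 1; }
         -> falls off the end: "control reaches end of non-void
            function"  */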
8314
8315 } // anon namespace
8316
8317 gimple_opt_pass *
8318 make_pass_warn_function_return (gcc::context *ctxt)
8319 {
8320 return new pass_warn_function_return (ctxt);
8321 }
8322
8323 /* Walk a gimplified function and warn for calls whose return value is
8324 ignored although the callee is declared with attribute ((warn_unused_result)).
8325 This is done before inlining, so we don't have to worry about that. */
8326
8327 static void
8328 do_warn_unused_result (gimple_seq seq)
8329 {
8330 tree fdecl, ftype;
8331 gimple_stmt_iterator i;
8332
8333 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8334 {
8335 gimple g = gsi_stmt (i);
8336
8337 switch (gimple_code (g))
8338 {
8339 case GIMPLE_BIND:
8340 do_warn_unused_result (gimple_bind_body (g));
8341 break;
8342 case GIMPLE_TRY:
8343 do_warn_unused_result (gimple_try_eval (g));
8344 do_warn_unused_result (gimple_try_cleanup (g));
8345 break;
8346 case GIMPLE_CATCH:
8347 do_warn_unused_result (gimple_catch_handler (g));
8348 break;
8349 case GIMPLE_EH_FILTER:
8350 do_warn_unused_result (gimple_eh_filter_failure (g));
8351 break;
8352
8353 case GIMPLE_CALL:
8354 if (gimple_call_lhs (g))
8355 break;
8356 if (gimple_call_internal_p (g))
8357 break;
8358
8359 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8360 LHS. All calls whose value is ignored should be
8361 represented like this. Look for the attribute. */
8362 fdecl = gimple_call_fndecl (g);
8363 ftype = gimple_call_fntype (g);
8364
8365 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8366 {
8367 location_t loc = gimple_location (g);
8368
8369 if (fdecl)
8370 warning_at (loc, OPT_Wunused_result,
8371 "ignoring return value of %qD, "
8372 "declared with attribute warn_unused_result",
8373 fdecl);
8374 else
8375 warning_at (loc, OPT_Wunused_result,
8376 "ignoring return value of function "
8377 "declared with attribute warn_unused_result");
8378 }
8379 break;
8380
8381 default:
8382 /* Not a container, not a call, or a call whose value is used. */
8383 break;
8384 }
8385 }
8386 }
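
/* Illustrative input (a sketch with hypothetical declarations):

     __attribute__ ((warn_unused_result)) int must_check (void);
     void use (void) { must_check (); }

   The bare call gimplifies to a GIMPLE_CALL without a LHS, which the
   walk above diagnoses as ignoring the return value of 'must_check'.  */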
8387
8388 namespace {
8389
8390 const pass_data pass_data_warn_unused_result =
8391 {
8392 GIMPLE_PASS, /* type */
8393 "*warn_unused_result", /* name */
8394 OPTGROUP_NONE, /* optinfo_flags */
8395 TV_NONE, /* tv_id */
8396 PROP_gimple_any, /* properties_required */
8397 0, /* properties_provided */
8398 0, /* properties_destroyed */
8399 0, /* todo_flags_start */
8400 0, /* todo_flags_finish */
8401 };
8402
8403 class pass_warn_unused_result : public gimple_opt_pass
8404 {
8405 public:
8406 pass_warn_unused_result (gcc::context *ctxt)
8407 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8408 {}
8409
8410 /* opt_pass methods: */
8411 virtual bool gate (function *) { return flag_warn_unused_result; }
8412 virtual unsigned int execute (function *)
8413 {
8414 do_warn_unused_result (gimple_body (current_function_decl));
8415 return 0;
8416 }
8417
8418 }; // class pass_warn_unused_result
8419
8420 } // anon namespace
8421
8422 gimple_opt_pass *
8423 make_pass_warn_unused_result (gcc::context *ctxt)
8424 {
8425 return new pass_warn_unused_result (ctxt);
8426 }
8427
8428 /* IPA passes, compilation of earlier functions or inlining
8429 might have changed some properties, such as having marked functions
8430 nothrow, pure, const or noreturn.
8431 Remove redundant edges and basic blocks, and create new ones if necessary.
8432 
8433 This pass can't be executed as a standalone pass from the pass manager,
8434 because in between inlining and this fixup verify_flow_info would fail. */
8435
8436 unsigned int
8437 execute_fixup_cfg (void)
8438 {
8439 basic_block bb;
8440 gimple_stmt_iterator gsi;
8441 int todo = 0;
8442 gcov_type count_scale;
8443 edge e;
8444 edge_iterator ei;
8445
8446 count_scale
8447 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8448 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8449
8450 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8451 cgraph_node::get (current_function_decl)->count;
8452 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8453 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8454 count_scale);
8455
8456 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8457 e->count = apply_scale (e->count, count_scale);
8458
8459 FOR_EACH_BB_FN (bb, cfun)
8460 {
8461 bb->count = apply_scale (bb->count, count_scale);
8462 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8463 {
8464 gimple stmt = gsi_stmt (gsi);
8465 tree decl = is_gimple_call (stmt)
8466 ? gimple_call_fndecl (stmt)
8467 : NULL;
8468 if (decl)
8469 {
8470 int flags = gimple_call_flags (stmt);
8471 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8472 {
8473 if (gimple_purge_dead_abnormal_call_edges (bb))
8474 todo |= TODO_cleanup_cfg;
8475
8476 if (gimple_in_ssa_p (cfun))
8477 {
8478 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8479 update_stmt (stmt);
8480 }
8481 }
8482
8483 if (flags & ECF_NORETURN
8484 && fixup_noreturn_call (stmt))
8485 todo |= TODO_cleanup_cfg;
8486 }
8487
8488 /* Remove stores to variables we marked write-only.
8489 Keep the access when the store has a side effect, i.e. when the
8490 source is volatile. */
8491 if (gimple_store_p (stmt)
8492 && !gimple_has_side_effects (stmt))
8493 {
8494 tree lhs = get_base_address (gimple_get_lhs (stmt));
8495
8496 if (TREE_CODE (lhs) == VAR_DECL
8497 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8498 && varpool_node::get (lhs)->writeonly)
8499 {
8500 unlink_stmt_vdef (stmt);
8501 gsi_remove (&gsi, true);
8502 release_defs (stmt);
8503 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8504 continue;
8505 }
8506 }
8507 /* For calls we can simply remove the LHS when it is known
8508 to be write-only. */
8509 if (is_gimple_call (stmt)
8510 && gimple_get_lhs (stmt))
8511 {
8512 tree lhs = get_base_address (gimple_get_lhs (stmt));
8513
8514 if (TREE_CODE (lhs) == VAR_DECL
8515 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8516 && varpool_node::get (lhs)->writeonly)
8517 {
8518 gimple_call_set_lhs (stmt, NULL);
8519 update_stmt (stmt);
8520 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8521 }
8522 }
8523
8524 if (maybe_clean_eh_stmt (stmt)
8525 && gimple_purge_dead_eh_edges (bb))
8526 todo |= TODO_cleanup_cfg;
8527 gsi_next (&gsi);
8528 }
8529
8530 FOR_EACH_EDGE (e, ei, bb->succs)
8531 e->count = apply_scale (e->count, count_scale);
8532
8533 /* If we have a basic block with no successors that does not
8534 end with a control statement or a noreturn call, end it with
8535 a call to __builtin_unreachable. This situation can occur
8536 when inlining a noreturn call that does in fact return. */
8537 if (EDGE_COUNT (bb->succs) == 0)
8538 {
8539 gimple stmt = last_stmt (bb);
8540 if (!stmt
8541 || (!is_ctrl_stmt (stmt)
8542 && (!is_gimple_call (stmt)
8543 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8544 {
8545 if (stmt && is_gimple_call (stmt))
8546 gimple_call_set_ctrl_altering (stmt, false);
8547 stmt = gimple_build_call
8548 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8549 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8550 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8551 }
8552 }
8553 }
8554 if (count_scale != REG_BR_PROB_BASE)
8555 compute_function_frequency ();
8556
8557 /* Dump a textual representation of the flowgraph. */
8558 if (dump_file)
8559 gimple_dump_cfg (dump_file, dump_flags);
8560
8561 if (current_loops
8562 && (todo & TODO_cleanup_cfg))
8563 loops_state_set (LOOPS_NEED_FIXUP);
8564
8565 return todo;
8566 }
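
/* Illustrative case for the __builtin_unreachable insertion above (a
   sketch with hypothetical functions): when a call to

     __attribute__ ((noreturn)) void dies (void);

   is inlined and the inlined body can in fact return, the resulting
   block has no successors yet ends in neither a control statement nor
   a noreturn call; the loop above then terminates it with a call to
   __builtin_unreachable.  */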
8567
8568 namespace {
8569
8570 const pass_data pass_data_fixup_cfg =
8571 {
8572 GIMPLE_PASS, /* type */
8573 "*free_cfg_annotations", /* name */
8574 OPTGROUP_NONE, /* optinfo_flags */
8575 TV_NONE, /* tv_id */
8576 PROP_cfg, /* properties_required */
8577 0, /* properties_provided */
8578 0, /* properties_destroyed */
8579 0, /* todo_flags_start */
8580 0, /* todo_flags_finish */
8581 };
8582
8583 class pass_fixup_cfg : public gimple_opt_pass
8584 {
8585 public:
8586 pass_fixup_cfg (gcc::context *ctxt)
8587 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8588 {}
8589
8590 /* opt_pass methods: */
8591 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8592 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8593
8594 }; // class pass_fixup_cfg
8595
8596 } // anon namespace
8597
8598 gimple_opt_pass *
8599 make_pass_fixup_cfg (gcc::context *ctxt)
8600 {
8601 return new pass_fixup_cfg (ctxt);
8602 }
8603
8604 /* Garbage collection support for edge_def. */
8605
8606 extern void gt_ggc_mx (tree&);
8607 extern void gt_ggc_mx (gimple&);
8608 extern void gt_ggc_mx (rtx&);
8609 extern void gt_ggc_mx (basic_block&);
8610
8611 static void
8612 gt_ggc_mx (rtx_insn *& x)
8613 {
8614 if (x)
8615 gt_ggc_mx_rtx_def ((void *) x);
8616 }
8617
8618 void
8619 gt_ggc_mx (edge_def *e)
8620 {
8621 tree block = LOCATION_BLOCK (e->goto_locus);
8622 gt_ggc_mx (e->src);
8623 gt_ggc_mx (e->dest);
8624 if (current_ir_type () == IR_GIMPLE)
8625 gt_ggc_mx (e->insns.g);
8626 else
8627 gt_ggc_mx (e->insns.r);
8628 gt_ggc_mx (block);
8629 }
8630
8631 /* PCH support for edge_def. */
8632
8633 extern void gt_pch_nx (tree&);
8634 extern void gt_pch_nx (gimple&);
8635 extern void gt_pch_nx (rtx&);
8636 extern void gt_pch_nx (basic_block&);
8637
8638 static void
8639 gt_pch_nx (rtx_insn *& x)
8640 {
8641 if (x)
8642 gt_pch_nx_rtx_def ((void *) x);
8643 }
8644
8645 void
8646 gt_pch_nx (edge_def *e)
8647 {
8648 tree block = LOCATION_BLOCK (e->goto_locus);
8649 gt_pch_nx (e->src);
8650 gt_pch_nx (e->dest);
8651 if (current_ir_type () == IR_GIMPLE)
8652 gt_pch_nx (e->insns.g);
8653 else
8654 gt_pch_nx (e->insns.r);
8655 gt_pch_nx (block);
8656 }
8657
8658 void
8659 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8660 {
8661 tree block = LOCATION_BLOCK (e->goto_locus);
8662 op (&(e->src), cookie);
8663 op (&(e->dest), cookie);
8664 if (current_ir_type () == IR_GIMPLE)
8665 op (&(e->insns.g), cookie);
8666 else
8667 op (&(e->insns.r), cookie);
8668 op (&(block), cookie);
8669 }