gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "cfganal.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
#include "gimplify.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

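/* Illustrative sketch, not part of the original file: for a C loop such as

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   the gimplifier emits an internal call of the form

     D.1234 = .ANNOTATE (cond, annot_expr_ivdep_kind);

   just before the loop condition.  The function above rewrites that call
   into a plain copy "D.1234 = cond" and records the annotation by setting
   loop->safelen = INT_MAX.  The variable names here are invented for the
   example.  */
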
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

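/* Illustrative sketch, not part of the original file: a computed goto is
   GNU C's indirect jump, e.g.

     void *targets[] = { &&l0, &&l1 };
     goto *targets[i];

   which gimplifies to a GIMPLE_GOTO whose destination is a pointer
   expression rather than a LABEL_DECL, so computed_goto_p above returns
   true for it.  */
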
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

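/* Illustrative sketch, not part of the original file: after this
   initialization, a call such as "abort ()", which carries the
   ECF_NORETURN flag, has its ctrl-altering bit set, so make_blocks_1
   below will end the current basic block right after it instead of
   letting later statements fall into the same block.  */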

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

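/* Illustrative sketch, not part of the original file: for a
   returns-twice call whose result is used, e.g.

     x = setjmp (env);

   make_blocks_1 rewrites the statement pair to

     tmp = setjmp (env);
     x = tmp;

   so that the abnormal edge out of the call still sees the old value of
   x and the live ranges of abnormal SSA names do not overlap.  "tmp"
   stands for the temporary created by create_tmp_var above.  */
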
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing an
   IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's basic
   block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create abnormal
   edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO is false;
   otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

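/* Illustrative sketch, not part of the original file: with two computed
   gotos "goto *p;" and "goto *q;" in one function, the factoring above
   produces a single dispatcher block of roughly this shape

     <bb D>:
     __factored_label:
       goto *gotovar;

   and each original goto is replaced by "gotovar = p;" followed by a
   fallthru edge to <bb D>.  In the non-computed-goto case the dispatcher
   instead holds an .ABNORMAL_DISPATCHER internal call, with abnormal
   edges in from the setjmp-like calls and out to all receivers.  The
   block and label names are invented for the example.  */
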
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return to
   this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	tree abort_label
	  = gimple_transaction_label (as_a <gtransaction *> (last));
	if (abort_label)
	  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
	fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos, and for abnormal edges from calls to calls
     that return twice or to forced labels, factor the abnormal edges
     too, by having all abnormal edges from the calls go to a common
     artificial basic block with an ABNORMAL_DISPATCHER internal call
     and abnormal edges from that basic block to all forced labels and
     calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not valid to enter or exit such regions abnormally this
     way; thus all computed gotos, non-local gotos and setjmp/longjmp
     calls must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as
   described in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

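/* Illustrative sketch, not part of the original file: discriminators let
   sample-based profilers tell apart basic blocks that share one source
   line.  For

     if (p) x = 1; else x = 2;

   all three blocks carry the same line number, so assign_discriminators
   below hands the then- and else-blocks distinct discriminator values
   (e.g. 1 and 2) obtained from this per-line counter.  */
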
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block; this will hopefully make even the dataflow
     and undefined-variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

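/* Illustrative sketch, not part of the original file: an asm with label
   operands, e.g.

     asm goto ("jc %l0" : : : : failed);

   can transfer control to any of its labels, so make_gimple_asm_edges
   above adds an edge from BB to the block of each label operand, in
   addition to the fallthru edge added by its caller make_edges_bb.  */
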
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it again, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

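/* Illustrative sketch, not part of the original file: given

     switch (x)
       {
       case 1: case 2: case 3: foo (); break;
       case 4: break;
       default: break;
       }

   where case 4 jumps to the same block as the empty default,
   group_case_labels_stmt above collapses the first three labels into the
   single range "case 1 ... 3:" and drops case 4 entirely, shrinking the
   label vector from five entries to two.  */
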
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

 1943           /* Now that we can thread computed gotos, we might have
 1944              a situation where we have a forced label in block B.
 1945              However, the label at the start of block B might still be
 1946              used in other ways (think about the runtime checking for
 1947              Fortran assigned gotos).  So we cannot just delete the
 1948              label.  Instead we move the label to the start of block A.  */
1949 if (FORCED_LABEL (label))
1950 {
1951 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1952 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1953 }
 1954           /* Other user labels are kept around in the form of a debug stmt.  */
1955 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1956 {
1957 gimple *dbg = gimple_build_debug_bind (label,
1958 integer_zero_node,
1959 stmt);
1960 gimple_debug_bind_reset_value (dbg);
1961 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1962 }
1963
1964 lp_nr = EH_LANDING_PAD_NR (label);
1965 if (lp_nr)
1966 {
1967 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1968 lp->post_landing_pad = NULL;
1969 }
1970 }
1971 else
1972 {
1973 gimple_set_bb (stmt, a);
1974 gsi_next (&gsi);
1975 }
1976 }
1977
1978 /* When merging two BBs, if their counts are different, the larger count
1979 is selected as the new bb count. This is to handle inconsistent
1980 profiles. */
1981 if (a->loop_father == b->loop_father)
1982 {
1983 a->count = MAX (a->count, b->count);
1984 a->frequency = MAX (a->frequency, b->frequency);
1985 }
1986
1987 /* Merge the sequences. */
1988 last = gsi_last_bb (a);
1989 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1990 set_bb_seq (b, NULL);
1991
1992 if (cfgcleanup_altered_bbs)
1993 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1994 }
1995
1996
 1997 /* Return the one of BB's two successors that is not reached via a
 1998    complex edge, if there is one.  Else, return BB.  We use
 1999    this in optimizations that use post-dominators for their heuristics,
 2000    to catch the cases in C++ where function calls are involved.  */
2001
2002 basic_block
2003 single_noncomplex_succ (basic_block bb)
2004 {
2005 edge e0, e1;
2006 if (EDGE_COUNT (bb->succs) != 2)
2007 return bb;
2008
2009 e0 = EDGE_SUCC (bb, 0);
2010 e1 = EDGE_SUCC (bb, 1);
2011 if (e0->flags & EDGE_COMPLEX)
2012 return e1->dest;
2013 if (e1->flags & EDGE_COMPLEX)
2014 return e0->dest;
2015
2016 return bb;
2017 }
2018
 2019 /* CALL is a call statement.  Set the current_function_calls_* flags.  */
2020
2021 void
2022 notice_special_calls (gcall *call)
2023 {
2024 int flags = gimple_call_flags (call);
2025
2026 if (flags & ECF_MAY_BE_ALLOCA)
2027 cfun->calls_alloca = true;
2028 if (flags & ECF_RETURNS_TWICE)
2029 cfun->calls_setjmp = true;
2030 }
2031
2032
2033 /* Clear flags set by notice_special_calls. Used by dead code removal
2034 to update the flags. */
2035
2036 void
2037 clear_special_calls (void)
2038 {
2039 cfun->calls_alloca = false;
2040 cfun->calls_setjmp = false;
2041 }
2042
2043 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2044
2045 static void
2046 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2047 {
2048 /* Since this block is no longer reachable, we can just delete all
2049 of its PHI nodes. */
2050 remove_phi_nodes (bb);
2051
2052 /* Remove edges to BB's successors. */
2053 while (EDGE_COUNT (bb->succs) > 0)
2054 remove_edge (EDGE_SUCC (bb, 0));
2055 }
2056
2057
2058 /* Remove statements of basic block BB. */
2059
2060 static void
2061 remove_bb (basic_block bb)
2062 {
2063 gimple_stmt_iterator i;
2064
2065 if (dump_file)
2066 {
2067 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2068 if (dump_flags & TDF_DETAILS)
2069 {
2070 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2071 fprintf (dump_file, "\n");
2072 }
2073 }
2074
2075 if (current_loops)
2076 {
2077 struct loop *loop = bb->loop_father;
2078
2079 /* If a loop gets removed, clean up the information associated
2080 with it. */
2081 if (loop->latch == bb
2082 || loop->header == bb)
2083 free_numbers_of_iterations_estimates_loop (loop);
2084 }
2085
2086 /* Remove all the instructions in the block. */
2087 if (bb_seq (bb) != NULL)
2088 {
2089 /* Walk backwards so as to get a chance to substitute all
2090 released DEFs into debug stmts. See
2091 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2092 details. */
2093 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2094 {
2095 gimple *stmt = gsi_stmt (i);
2096 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2097 if (label_stmt
2098 && (FORCED_LABEL (gimple_label_label (label_stmt))
2099 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2100 {
2101 basic_block new_bb;
2102 gimple_stmt_iterator new_gsi;
2103
2104 /* A non-reachable non-local label may still be referenced.
2105 But it no longer needs to carry the extra semantics of
2106 non-locality. */
2107 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2108 {
2109 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2110 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2111 }
2112
2113 new_bb = bb->prev_bb;
2114 new_gsi = gsi_start_bb (new_bb);
2115 gsi_remove (&i, false);
2116 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2117 }
2118 else
2119 {
2120 /* Release SSA definitions if we are in SSA. Note that we
2121 may be called when not in SSA. For example,
2122 final_cleanup calls this function via
2123 cleanup_tree_cfg. */
2124 if (gimple_in_ssa_p (cfun))
2125 release_defs (stmt);
2126
2127 gsi_remove (&i, true);
2128 }
2129
2130 if (gsi_end_p (i))
2131 i = gsi_last_bb (bb);
2132 else
2133 gsi_prev (&i);
2134 }
2135 }
2136
2137 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2138 bb->il.gimple.seq = NULL;
2139 bb->il.gimple.phi_nodes = NULL;
2140 }
2141
2142
 2143 /* Given a basic block BB ending with a conditional, switch, or computed
 2144    goto statement, and a predicate VAL, return the edge that will be taken
 2145    out of the block.  If VAL does not match a unique edge, NULL is returned.  */
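/* For example (hypothetical GIMPLE), if BB ends in

       if (a_1 > 4) goto <bb 3>; else goto <bb 4>;

   then find_taken_edge (bb, boolean_true_node) returns the edge to
   <bb 3>, while a non-constant VAL makes the function return NULL.  */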
2146
2147 edge
2148 find_taken_edge (basic_block bb, tree val)
2149 {
2150 gimple *stmt;
2151
2152 stmt = last_stmt (bb);
2153
2154 gcc_assert (stmt);
2155 gcc_assert (is_ctrl_stmt (stmt));
2156
2157 if (val == NULL)
2158 return NULL;
2159
2160 if (!is_gimple_min_invariant (val))
2161 return NULL;
2162
2163 if (gimple_code (stmt) == GIMPLE_COND)
2164 return find_taken_edge_cond_expr (bb, val);
2165
2166 if (gimple_code (stmt) == GIMPLE_SWITCH)
2167 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2168
2169 if (computed_goto_p (stmt))
2170 {
 2171       /* Only optimize if the argument is a label; if the argument is
 2172          not a label, then we cannot construct a proper CFG.
2173
2174 It may be the case that we only need to allow the LABEL_REF to
2175 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2176 appear inside a LABEL_EXPR just to be safe. */
2177 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2178 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2179 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2180 return NULL;
2181 }
2182
2183 gcc_unreachable ();
2184 }
2185
 2186 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
 2187    statement, determine which of the outgoing edges will be taken out of the
 2188    block.  Return NULL if any edge may be taken.  */
2189
2190 static edge
2191 find_taken_edge_computed_goto (basic_block bb, tree val)
2192 {
2193 basic_block dest;
2194 edge e = NULL;
2195
2196 dest = label_to_block (val);
2197 if (dest)
2198 {
2199 e = find_edge (bb, dest);
2200 gcc_assert (e != NULL);
2201 }
2202
2203 return e;
2204 }
2205
2206 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2207 statement, determine which of the two edges will be taken out of the
2208 block. Return NULL if either edge may be taken. */
2209
2210 static edge
2211 find_taken_edge_cond_expr (basic_block bb, tree val)
2212 {
2213 edge true_edge, false_edge;
2214
2215 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2216
2217 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2218 return (integer_zerop (val) ? false_edge : true_edge);
2219 }
2220
2221 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2222 statement, determine which edge will be taken out of the block. Return
2223 NULL if any edge may be taken. */
2224
2225 static edge
2226 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2227 tree val)
2228 {
2229 basic_block dest_bb;
2230 edge e;
2231 tree taken_case;
2232
2233 taken_case = find_case_label_for_value (switch_stmt, val);
2234 dest_bb = label_to_block (CASE_LABEL (taken_case));
2235
2236 e = find_edge (bb, dest_bb);
2237 gcc_assert (e);
2238 return e;
2239 }
2240
2241
2242 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2243 We can make optimal use here of the fact that the case labels are
2244 sorted: We can do a binary search for a case matching VAL. */
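/* For instance, with four labels in the sorted vector (index 0 holds the
   default label, which the binary search never probes):

       [default, case 1, case 5 ... 7, case 9]

   looking up VAL == 6 first probes index 2: CASE_LOW is 5 <= 6 and
   CASE_HIGH is 7 >= 6, so the range label "case 5 ... 7" is returned.
   A VAL such as 3 that matches no label yields the default label.  */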
2245
2246 static tree
2247 find_case_label_for_value (gswitch *switch_stmt, tree val)
2248 {
2249 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2250 tree default_case = gimple_switch_default_label (switch_stmt);
2251
2252 for (low = 0, high = n; high - low > 1; )
2253 {
2254 size_t i = (high + low) / 2;
2255 tree t = gimple_switch_label (switch_stmt, i);
2256 int cmp;
2257
2258 /* Cache the result of comparing CASE_LOW and val. */
2259 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2260
2261 if (cmp > 0)
2262 high = i;
2263 else
2264 low = i;
2265
2266 if (CASE_HIGH (t) == NULL)
2267 {
 2268           /* A single-valued case label.  */
2269 if (cmp == 0)
2270 return t;
2271 }
2272 else
2273 {
2274 /* A case range. We can only handle integer ranges. */
2275 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2276 return t;
2277 }
2278 }
2279
2280 return default_case;
2281 }
2282
2283
2284 /* Dump a basic block on stderr. */
2285
2286 void
2287 gimple_debug_bb (basic_block bb)
2288 {
2289 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2290 }
2291
2292
2293 /* Dump basic block with index N on stderr. */
2294
2295 basic_block
2296 gimple_debug_bb_n (int n)
2297 {
2298 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2299 return BASIC_BLOCK_FOR_FN (cfun, n);
2300 }
2301
2302
2303 /* Dump the CFG on stderr.
2304
2305 FLAGS are the same used by the tree dumping functions
2306 (see TDF_* in dumpfile.h). */
2307
2308 void
2309 gimple_debug_cfg (int flags)
2310 {
2311 gimple_dump_cfg (stderr, flags);
2312 }
2313
2314
2315 /* Dump the program showing basic block boundaries on the given FILE.
2316
2317 FLAGS are the same used by the tree dumping functions (see TDF_* in
2318 tree.h). */
2319
2320 void
2321 gimple_dump_cfg (FILE *file, int flags)
2322 {
2323 if (flags & TDF_DETAILS)
2324 {
2325 dump_function_header (file, current_function_decl, flags);
2326 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2327 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2328 last_basic_block_for_fn (cfun));
2329
2330 brief_dump_cfg (file, flags | TDF_COMMENT);
2331 fprintf (file, "\n");
2332 }
2333
2334 if (flags & TDF_STATS)
2335 dump_cfg_stats (file);
2336
2337 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2338 }
2339
2340
2341 /* Dump CFG statistics on FILE. */
2342
2343 void
2344 dump_cfg_stats (FILE *file)
2345 {
2346 static long max_num_merged_labels = 0;
2347 unsigned long size, total = 0;
2348 long num_edges;
2349 basic_block bb;
2350 const char * const fmt_str = "%-30s%-13s%12s\n";
2351 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2352 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2353 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2354 const char *funcname = current_function_name ();
2355
2356 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2357
2358 fprintf (file, "---------------------------------------------------------\n");
2359 fprintf (file, fmt_str, "", " Number of ", "Memory");
2360 fprintf (file, fmt_str, "", " instances ", "used ");
2361 fprintf (file, "---------------------------------------------------------\n");
2362
2363 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2364 total += size;
2365 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2366 SCALE (size), LABEL (size));
2367
2368 num_edges = 0;
2369 FOR_EACH_BB_FN (bb, cfun)
2370 num_edges += EDGE_COUNT (bb->succs);
2371 size = num_edges * sizeof (struct edge_def);
2372 total += size;
2373 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2374
2375 fprintf (file, "---------------------------------------------------------\n");
2376 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2377 LABEL (total));
2378 fprintf (file, "---------------------------------------------------------\n");
2379 fprintf (file, "\n");
2380
2381 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2382 max_num_merged_labels = cfg_stats.num_merged_labels;
2383
2384 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2385 cfg_stats.num_merged_labels, max_num_merged_labels);
2386
2387 fprintf (file, "\n");
2388 }
2389
2390
2391 /* Dump CFG statistics on stderr. Keep extern so that it's always
2392 linked in the final executable. */
2393
2394 DEBUG_FUNCTION void
2395 debug_cfg_stats (void)
2396 {
2397 dump_cfg_stats (stderr);
2398 }
2399
2400 /*---------------------------------------------------------------------------
2401 Miscellaneous helpers
2402 ---------------------------------------------------------------------------*/
2403
2404 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2405 flow. Transfers of control flow associated with EH are excluded. */
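/* The intent, as a sketch: in a function that calls setjmp, a subsequent
   call such as g () may effectively return a second time via longjmp, and
   with non-local labels a callee may jump straight to a label in this
   function; either way the call needs abnormal edges.  Calls without side
   effects and calls to ECF_LEAF functions are exempted below.  */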
2406
2407 static bool
2408 call_can_make_abnormal_goto (gimple *t)
2409 {
 2410   /* If the function has no non-local labels and does not call setjmp,
 2411      then a call cannot make an abnormal transfer of control.  */
2412 if (!cfun->has_nonlocal_label
2413 && !cfun->calls_setjmp)
2414 return false;
2415
2416 /* Likewise if the call has no side effects. */
2417 if (!gimple_has_side_effects (t))
2418 return false;
2419
2420 /* Likewise if the called function is leaf. */
2421 if (gimple_call_flags (t) & ECF_LEAF)
2422 return false;
2423
2424 return true;
2425 }
2426
2427
2428 /* Return true if T can make an abnormal transfer of control flow.
2429 Transfers of control flow associated with EH are excluded. */
2430
2431 bool
2432 stmt_can_make_abnormal_goto (gimple *t)
2433 {
2434 if (computed_goto_p (t))
2435 return true;
2436 if (is_gimple_call (t))
2437 return call_can_make_abnormal_goto (t);
2438 return false;
2439 }
2440
2441
2442 /* Return true if T represents a stmt that always transfers control. */
2443
2444 bool
2445 is_ctrl_stmt (gimple *t)
2446 {
2447 switch (gimple_code (t))
2448 {
2449 case GIMPLE_COND:
2450 case GIMPLE_SWITCH:
2451 case GIMPLE_GOTO:
2452 case GIMPLE_RETURN:
2453 case GIMPLE_RESX:
2454 return true;
2455 default:
2456 return false;
2457 }
2458 }
2459
2460
2461 /* Return true if T is a statement that may alter the flow of control
2462 (e.g., a call to a non-returning function). */
2463
2464 bool
2465 is_ctrl_altering_stmt (gimple *t)
2466 {
2467 gcc_assert (t);
2468
2469 switch (gimple_code (t))
2470 {
2471 case GIMPLE_CALL:
 2472       /* The per-stmt call flag indicates whether the call could alter
 2473          control flow.  */
2474 if (gimple_call_ctrl_altering_p (t))
2475 return true;
2476 break;
2477
2478 case GIMPLE_EH_DISPATCH:
2479 /* EH_DISPATCH branches to the individual catch handlers at
2480 this level of a try or allowed-exceptions region. It can
2481 fallthru to the next statement as well. */
2482 return true;
2483
2484 case GIMPLE_ASM:
2485 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2486 return true;
2487 break;
2488
2489 CASE_GIMPLE_OMP:
2490 /* OpenMP directives alter control flow. */
2491 return true;
2492
2493 case GIMPLE_TRANSACTION:
2494 /* A transaction start alters control flow. */
2495 return true;
2496
2497 default:
2498 break;
2499 }
2500
2501 /* If a statement can throw, it alters control flow. */
2502 return stmt_can_throw_internal (t);
2503 }
2504
2505
2506 /* Return true if T is a simple local goto. */
2507
2508 bool
2509 simple_goto_p (gimple *t)
2510 {
2511 return (gimple_code (t) == GIMPLE_GOTO
2512 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2513 }
2514
2515
2516 /* Return true if STMT should start a new basic block. PREV_STMT is
2517 the statement preceding STMT. It is used when STMT is a label or a
2518 case label. Labels should only start a new basic block if their
 2519    previous statement wasn't a label.  Otherwise, sequences of labels
 2520    would generate unnecessary basic blocks that only contain a single
 2521    label.  */
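/* For example, in the sequence

       L1:
       L2:
         x_1 = ...;

   only L1 starts a new basic block; L2 is coalesced into it (and counted
   in cfg_stats.num_merged_labels), unless one of the labels is non-local
   or forced.  */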
2522
2523 static inline bool
2524 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2525 {
2526 if (stmt == NULL)
2527 return false;
2528
2529 /* Labels start a new basic block only if the preceding statement
2530 wasn't a label of the same type. This prevents the creation of
2531 consecutive blocks that have nothing but a single label. */
2532 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2533 {
2534 /* Nonlocal and computed GOTO targets always start a new block. */
2535 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2536 || FORCED_LABEL (gimple_label_label (label_stmt)))
2537 return true;
2538
2539 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2540 {
2541 if (DECL_NONLOCAL (gimple_label_label (
2542 as_a <glabel *> (prev_stmt))))
2543 return true;
2544
2545 cfg_stats.num_merged_labels++;
2546 return false;
2547 }
2548 else
2549 return true;
2550 }
2551 else if (gimple_code (stmt) == GIMPLE_CALL
2552 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
 2553     /* setjmp acts similarly to a nonlocal GOTO target and thus should
 2554        start a new block.  */
2555 return true;
2556
2557 return false;
2558 }
2559
2560
2561 /* Return true if T should end a basic block. */
2562
2563 bool
2564 stmt_ends_bb_p (gimple *t)
2565 {
2566 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2567 }
2568
2569 /* Remove block annotations and other data structures. */
2570
2571 void
2572 delete_tree_cfg_annotations (void)
2573 {
2574 vec_free (label_to_block_map_for_fn (cfun));
2575 }
2576
 2577 /* Return the virtual PHI in BB, or NULL if BB has none.  */
2578
2579 gphi *
2580 get_virtual_phi (basic_block bb)
2581 {
2582 for (gphi_iterator gsi = gsi_start_phis (bb);
2583 !gsi_end_p (gsi);
2584 gsi_next (&gsi))
2585 {
2586 gphi *phi = gsi.phi ();
2587
2588 if (virtual_operand_p (PHI_RESULT (phi)))
2589 return phi;
2590 }
2591
2592 return NULL;
2593 }
2594
 2595 /* Return the first non-debug statement in basic block BB, or NULL.  */
2596
2597 gimple *
2598 first_stmt (basic_block bb)
2599 {
2600 gimple_stmt_iterator i = gsi_start_bb (bb);
2601 gimple *stmt = NULL;
2602
2603 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2604 {
2605 gsi_next (&i);
2606 stmt = NULL;
2607 }
2608 return stmt;
2609 }
2610
2611 /* Return the first non-label statement in basic block BB. */
2612
2613 static gimple *
2614 first_non_label_stmt (basic_block bb)
2615 {
2616 gimple_stmt_iterator i = gsi_start_bb (bb);
2617 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2618 gsi_next (&i);
2619 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2620 }
2621
 2622 /* Return the last non-debug statement in basic block BB, or NULL.  */
2623
2624 gimple *
2625 last_stmt (basic_block bb)
2626 {
2627 gimple_stmt_iterator i = gsi_last_bb (bb);
2628 gimple *stmt = NULL;
2629
2630 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2631 {
2632 gsi_prev (&i);
2633 stmt = NULL;
2634 }
2635 return stmt;
2636 }
2637
2638 /* Return the last statement of an otherwise empty block. Return NULL
2639 if the block is totally empty, or if it contains more than one
2640 statement. */
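/* Thus, for a block containing only

       L1:
         return x_1;

   this returns the return statement (the label and any debug stmts are
   disregarded), whereas a block with two executable statements yields
   NULL.  */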
2641
2642 gimple *
2643 last_and_only_stmt (basic_block bb)
2644 {
2645 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2646 gimple *last, *prev;
2647
2648 if (gsi_end_p (i))
2649 return NULL;
2650
2651 last = gsi_stmt (i);
2652 gsi_prev_nondebug (&i);
2653 if (gsi_end_p (i))
2654 return last;
2655
2656 /* Empty statements should no longer appear in the instruction stream.
2657 Everything that might have appeared before should be deleted by
2658 remove_useless_stmts, and the optimizers should just gsi_remove
2659 instead of smashing with build_empty_stmt.
2660
2661 Thus the only thing that should appear here in a block containing
2662 one executable statement is a label. */
2663 prev = gsi_stmt (i);
2664 if (gimple_code (prev) == GIMPLE_LABEL)
2665 return last;
2666 else
2667 return NULL;
2668 }
2669
2670 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2671
2672 static void
2673 reinstall_phi_args (edge new_edge, edge old_edge)
2674 {
2675 edge_var_map *vm;
2676 int i;
2677 gphi_iterator phis;
2678
2679 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2680 if (!v)
2681 return;
2682
2683 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2684 v->iterate (i, &vm) && !gsi_end_p (phis);
2685 i++, gsi_next (&phis))
2686 {
2687 gphi *phi = phis.phi ();
2688 tree result = redirect_edge_var_map_result (vm);
2689 tree arg = redirect_edge_var_map_def (vm);
2690
2691 gcc_assert (result == gimple_phi_result (phi));
2692
2693 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2694 }
2695
2696 redirect_edge_var_map_clear (old_edge);
2697 }
2698
2699 /* Returns the basic block after which the new basic block created
2700 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2701 near its "logical" location. This is of most help to humans looking
2702 at debugging dumps. */
2703
2704 basic_block
2705 split_edge_bb_loc (edge edge_in)
2706 {
2707 basic_block dest = edge_in->dest;
2708 basic_block dest_prev = dest->prev_bb;
2709
2710 if (dest_prev)
2711 {
2712 edge e = find_edge (dest_prev, dest);
2713 if (e && !(e->flags & EDGE_COMPLEX))
2714 return edge_in->src;
2715 }
2716 return dest_prev;
2717 }
2718
2719 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2720 Abort on abnormal edges. */
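/* Roughly (hypothetical block numbers), splitting the edge 2->4 in

       <bb 2> --> <bb 4> <-- <bb 3>

   creates an empty <bb 5>, redirects block 2's branch to it, and adds a
   fallthru edge <bb 5> -> <bb 4>; PHI arguments that were queued when the
   original edge was redirected are reinstalled on the new edge.  */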
2721
2722 static basic_block
2723 gimple_split_edge (edge edge_in)
2724 {
2725 basic_block new_bb, after_bb, dest;
2726 edge new_edge, e;
2727
2728 /* Abnormal edges cannot be split. */
2729 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2730
2731 dest = edge_in->dest;
2732
2733 after_bb = split_edge_bb_loc (edge_in);
2734
2735 new_bb = create_empty_bb (after_bb);
2736 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2737 new_bb->count = edge_in->count;
2738 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2739 new_edge->probability = REG_BR_PROB_BASE;
2740 new_edge->count = edge_in->count;
2741
2742 e = redirect_edge_and_branch (edge_in, new_bb);
2743 gcc_assert (e == edge_in);
2744 reinstall_phi_args (new_edge, e);
2745
2746 return new_bb;
2747 }
2748
2749
2750 /* Verify properties of the address expression T with base object BASE. */
2751
2752 static tree
2753 verify_address (tree t, tree base)
2754 {
2755 bool old_constant;
2756 bool old_side_effects;
2757 bool new_constant;
2758 bool new_side_effects;
2759
2760 old_constant = TREE_CONSTANT (t);
2761 old_side_effects = TREE_SIDE_EFFECTS (t);
2762
2763 recompute_tree_invariant_for_addr_expr (t);
2764 new_side_effects = TREE_SIDE_EFFECTS (t);
2765 new_constant = TREE_CONSTANT (t);
2766
2767 if (old_constant != new_constant)
2768 {
2769 error ("constant not recomputed when ADDR_EXPR changed");
2770 return t;
2771 }
2772 if (old_side_effects != new_side_effects)
2773 {
2774 error ("side effects not recomputed when ADDR_EXPR changed");
2775 return t;
2776 }
2777
2778 if (!(TREE_CODE (base) == VAR_DECL
2779 || TREE_CODE (base) == PARM_DECL
2780 || TREE_CODE (base) == RESULT_DECL))
2781 return NULL_TREE;
2782
2783 if (DECL_GIMPLE_REG_P (base))
2784 {
2785 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2786 return base;
2787 }
2788
2789 return NULL_TREE;
2790 }
2791
 2792 /* Callback for walk_tree; check that all elements with address taken
 2793    are properly noticed as such.  DATA is unused by this callback
 2794    (the signature is dictated by walk_tree).  */
2795
2796 static tree
2797 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2798 {
2799 tree t = *tp, x;
2800
2801 if (TYPE_P (t))
2802 *walk_subtrees = 0;
2803
2804 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2805 #define CHECK_OP(N, MSG) \
2806 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2807 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2808
2809 switch (TREE_CODE (t))
2810 {
2811 case SSA_NAME:
2812 if (SSA_NAME_IN_FREE_LIST (t))
2813 {
2814 error ("SSA name in freelist but still referenced");
2815 return *tp;
2816 }
2817 break;
2818
2819 case INDIRECT_REF:
2820 error ("INDIRECT_REF in gimple IL");
2821 return t;
2822
2823 case MEM_REF:
2824 x = TREE_OPERAND (t, 0);
2825 if (!POINTER_TYPE_P (TREE_TYPE (x))
2826 || !is_gimple_mem_ref_addr (x))
2827 {
2828 error ("invalid first operand of MEM_REF");
2829 return x;
2830 }
2831 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2832 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2833 {
2834 error ("invalid offset operand of MEM_REF");
2835 return TREE_OPERAND (t, 1);
2836 }
2837 if (TREE_CODE (x) == ADDR_EXPR
2838 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2839 return x;
2840 *walk_subtrees = 0;
2841 break;
2842
2843 case ASSERT_EXPR:
2844 x = fold (ASSERT_EXPR_COND (t));
2845 if (x == boolean_false_node)
2846 {
2847 error ("ASSERT_EXPR with an always-false condition");
2848 return *tp;
2849 }
2850 break;
2851
2852 case MODIFY_EXPR:
2853 error ("MODIFY_EXPR not expected while having tuples");
2854 return *tp;
2855
2856 case ADDR_EXPR:
2857 {
2858 tree tem;
2859
2860 gcc_assert (is_gimple_address (t));
2861
2862 /* Skip any references (they will be checked when we recurse down the
2863 tree) and ensure that any variable used as a prefix is marked
2864 addressable. */
2865 for (x = TREE_OPERAND (t, 0);
2866 handled_component_p (x);
2867 x = TREE_OPERAND (x, 0))
2868 ;
2869
2870 if ((tem = verify_address (t, x)))
2871 return tem;
2872
2873 if (!(TREE_CODE (x) == VAR_DECL
2874 || TREE_CODE (x) == PARM_DECL
2875 || TREE_CODE (x) == RESULT_DECL))
2876 return NULL;
2877
2878 if (!TREE_ADDRESSABLE (x))
2879 {
2880 error ("address taken, but ADDRESSABLE bit not set");
2881 return x;
2882 }
2883
2884 break;
2885 }
2886
2887 case COND_EXPR:
2888 x = COND_EXPR_COND (t);
2889 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2890 {
2891 error ("non-integral used in condition");
2892 return x;
2893 }
2894 if (!is_gimple_condexpr (x))
2895 {
2896 error ("invalid conditional operand");
2897 return x;
2898 }
2899 break;
2900
2901 case NON_LVALUE_EXPR:
2902 case TRUTH_NOT_EXPR:
2903 gcc_unreachable ();
2904
2905 CASE_CONVERT:
2906 case FIX_TRUNC_EXPR:
2907 case FLOAT_EXPR:
2908 case NEGATE_EXPR:
2909 case ABS_EXPR:
2910 case BIT_NOT_EXPR:
2911 CHECK_OP (0, "invalid operand to unary operator");
2912 break;
2913
2914 case REALPART_EXPR:
2915 case IMAGPART_EXPR:
2916 case BIT_FIELD_REF:
2917 if (!is_gimple_reg_type (TREE_TYPE (t)))
2918 {
2919 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2920 return t;
2921 }
2922
2923 if (TREE_CODE (t) == BIT_FIELD_REF)
2924 {
2925 tree t0 = TREE_OPERAND (t, 0);
2926 tree t1 = TREE_OPERAND (t, 1);
2927 tree t2 = TREE_OPERAND (t, 2);
2928 if (!tree_fits_uhwi_p (t1)
2929 || !tree_fits_uhwi_p (t2))
2930 {
2931 error ("invalid position or size operand to BIT_FIELD_REF");
2932 return t;
2933 }
2934 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2935 && (TYPE_PRECISION (TREE_TYPE (t))
2936 != tree_to_uhwi (t1)))
2937 {
2938 error ("integral result type precision does not match "
2939 "field size of BIT_FIELD_REF");
2940 return t;
2941 }
2942 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2943 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2944 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2945 != tree_to_uhwi (t1)))
2946 {
2947 error ("mode precision of non-integral result does not "
2948 "match field size of BIT_FIELD_REF");
2949 return t;
2950 }
2951 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2952 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2953 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2954 {
2955 error ("position plus size exceeds size of referenced object in "
2956 "BIT_FIELD_REF");
2957 return t;
2958 }
2959 }
2960 t = TREE_OPERAND (t, 0);
2961
2962 /* Fall-through. */
2963 case COMPONENT_REF:
2964 case ARRAY_REF:
2965 case ARRAY_RANGE_REF:
2966 case VIEW_CONVERT_EXPR:
2967 /* We have a nest of references. Verify that each of the operands
2968 that determine where to reference is either a constant or a variable,
2969 verify that the base is valid, and then show we've already checked
2970 the subtrees. */
2971 while (handled_component_p (t))
2972 {
2973 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2974 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2975 else if (TREE_CODE (t) == ARRAY_REF
2976 || TREE_CODE (t) == ARRAY_RANGE_REF)
2977 {
2978 CHECK_OP (1, "invalid array index");
2979 if (TREE_OPERAND (t, 2))
2980 CHECK_OP (2, "invalid array lower bound");
2981 if (TREE_OPERAND (t, 3))
2982 CHECK_OP (3, "invalid array stride");
2983 }
2984 else if (TREE_CODE (t) == BIT_FIELD_REF
2985 || TREE_CODE (t) == REALPART_EXPR
2986 || TREE_CODE (t) == IMAGPART_EXPR)
2987 {
2988 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2989 "REALPART_EXPR");
2990 return t;
2991 }
2992
2993 t = TREE_OPERAND (t, 0);
2994 }
2995
2996 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2997 {
2998 error ("invalid reference prefix");
2999 return t;
3000 }
3001 *walk_subtrees = 0;
3002 break;
3003 case PLUS_EXPR:
3004 case MINUS_EXPR:
 3005       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
 3006          should be done using POINTER_PLUS_EXPR.  */
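      /* E.g. "q = p + 4" for a pointer p must be expressed as a
	 POINTER_PLUS_EXPR whose second operand is of ptrofftype (an
	 integer type of pointer-offset width, as the check below in the
	 POINTER_PLUS_EXPR case demands), never as a PLUS_EXPR with a
	 pointer-typed operand.  */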
3007 if (POINTER_TYPE_P (TREE_TYPE (t)))
3008 {
3009 error ("invalid operand to plus/minus, type is a pointer");
3010 return t;
3011 }
3012 CHECK_OP (0, "invalid operand to binary operator");
3013 CHECK_OP (1, "invalid operand to binary operator");
3014 break;
3015
3016 case POINTER_PLUS_EXPR:
3017 /* Check to make sure the first operand is a pointer or reference type. */
3018 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3019 {
3020 error ("invalid operand to pointer plus, first operand is not a pointer");
3021 return t;
3022 }
3023 /* Check to make sure the second operand is a ptrofftype. */
3024 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3025 {
3026 error ("invalid operand to pointer plus, second operand is not an "
3027 "integer type of appropriate width");
3028 return t;
3029 }
3030 /* FALLTHROUGH */
3031 case LT_EXPR:
3032 case LE_EXPR:
3033 case GT_EXPR:
3034 case GE_EXPR:
3035 case EQ_EXPR:
3036 case NE_EXPR:
3037 case UNORDERED_EXPR:
3038 case ORDERED_EXPR:
3039 case UNLT_EXPR:
3040 case UNLE_EXPR:
3041 case UNGT_EXPR:
3042 case UNGE_EXPR:
3043 case UNEQ_EXPR:
3044 case LTGT_EXPR:
3045 case MULT_EXPR:
3046 case TRUNC_DIV_EXPR:
3047 case CEIL_DIV_EXPR:
3048 case FLOOR_DIV_EXPR:
3049 case ROUND_DIV_EXPR:
3050 case TRUNC_MOD_EXPR:
3051 case CEIL_MOD_EXPR:
3052 case FLOOR_MOD_EXPR:
3053 case ROUND_MOD_EXPR:
3054 case RDIV_EXPR:
3055 case EXACT_DIV_EXPR:
3056 case MIN_EXPR:
3057 case MAX_EXPR:
3058 case LSHIFT_EXPR:
3059 case RSHIFT_EXPR:
3060 case LROTATE_EXPR:
3061 case RROTATE_EXPR:
3062 case BIT_IOR_EXPR:
3063 case BIT_XOR_EXPR:
3064 case BIT_AND_EXPR:
3065 CHECK_OP (0, "invalid operand to binary operator");
3066 CHECK_OP (1, "invalid operand to binary operator");
3067 break;
3068
3069 case CONSTRUCTOR:
3070 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3071 *walk_subtrees = 0;
3072 break;
3073
3074 case CASE_LABEL_EXPR:
3075 if (CASE_CHAIN (t))
3076 {
3077 error ("invalid CASE_CHAIN");
3078 return t;
3079 }
3080 break;
3081
3082 default:
3083 break;
3084 }
3085 return NULL;
3086
3087 #undef CHECK_OP
3088 }
3089
3090
3091 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3092 Returns true if there is an error, otherwise false. */
3093
3094 static bool
3095 verify_types_in_gimple_min_lval (tree expr)
3096 {
3097 tree op;
3098
3099 if (is_gimple_id (expr))
3100 return false;
3101
3102 if (TREE_CODE (expr) != TARGET_MEM_REF
3103 && TREE_CODE (expr) != MEM_REF)
3104 {
3105 error ("invalid expression for min lvalue");
3106 return true;
3107 }
3108
3109 /* TARGET_MEM_REFs are strange beasts. */
3110 if (TREE_CODE (expr) == TARGET_MEM_REF)
3111 return false;
3112
3113 op = TREE_OPERAND (expr, 0);
3114 if (!is_gimple_val (op))
3115 {
3116 error ("invalid operand in indirect reference");
3117 debug_generic_stmt (op);
3118 return true;
3119 }
3120 /* Memory references now generally can involve a value conversion. */
3121
3122 return false;
3123 }
3124
3125 /* Verify if EXPR is a valid GIMPLE reference expression. If
3126 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3127 if there is an error, otherwise false. */
3128
3129 static bool
3130 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3131 {
3132 while (handled_component_p (expr))
3133 {
3134 tree op = TREE_OPERAND (expr, 0);
3135
3136 if (TREE_CODE (expr) == ARRAY_REF
3137 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3138 {
3139 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3140 || (TREE_OPERAND (expr, 2)
3141 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3142 || (TREE_OPERAND (expr, 3)
3143 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3144 {
3145 error ("invalid operands to array reference");
3146 debug_generic_stmt (expr);
3147 return true;
3148 }
3149 }
3150
3151 /* Verify if the reference array element types are compatible. */
3152 if (TREE_CODE (expr) == ARRAY_REF
3153 && !useless_type_conversion_p (TREE_TYPE (expr),
3154 TREE_TYPE (TREE_TYPE (op))))
3155 {
3156 error ("type mismatch in array reference");
3157 debug_generic_stmt (TREE_TYPE (expr));
3158 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3159 return true;
3160 }
3161 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3162 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3163 TREE_TYPE (TREE_TYPE (op))))
3164 {
3165 error ("type mismatch in array range reference");
3166 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3167 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3168 return true;
3169 }
3170
3171 if ((TREE_CODE (expr) == REALPART_EXPR
3172 || TREE_CODE (expr) == IMAGPART_EXPR)
3173 && !useless_type_conversion_p (TREE_TYPE (expr),
3174 TREE_TYPE (TREE_TYPE (op))))
3175 {
3176 error ("type mismatch in real/imagpart reference");
3177 debug_generic_stmt (TREE_TYPE (expr));
3178 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3179 return true;
3180 }
3181
3182 if (TREE_CODE (expr) == COMPONENT_REF
3183 && !useless_type_conversion_p (TREE_TYPE (expr),
3184 TREE_TYPE (TREE_OPERAND (expr, 1))))
3185 {
3186 error ("type mismatch in component reference");
3187 debug_generic_stmt (TREE_TYPE (expr));
3188 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3189 return true;
3190 }
3191
3192 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3193 {
 3194           /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
 3195              that their operand is not an SSA name or an invariant when
 3196              requiring an lvalue (this usually means there is a SRA or IPA-SRA
 3197              bug).  Otherwise there is nothing to verify; gross mismatches at
 3198              most invoke undefined behavior.  */
3199 if (require_lvalue
3200 && (TREE_CODE (op) == SSA_NAME
3201 || is_gimple_min_invariant (op)))
3202 {
3203 error ("conversion of an SSA_NAME on the left hand side");
3204 debug_generic_stmt (expr);
3205 return true;
3206 }
3207 else if (TREE_CODE (op) == SSA_NAME
3208 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3209 {
3210 error ("conversion of register to a different size");
3211 debug_generic_stmt (expr);
3212 return true;
3213 }
3214 else if (!handled_component_p (op))
3215 return false;
3216 }
3217
3218 expr = op;
3219 }
3220
3221 if (TREE_CODE (expr) == MEM_REF)
3222 {
3223 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3224 {
3225 error ("invalid address operand in MEM_REF");
3226 debug_generic_stmt (expr);
3227 return true;
3228 }
3229 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3230 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3231 {
3232 error ("invalid offset operand in MEM_REF");
3233 debug_generic_stmt (expr);
3234 return true;
3235 }
3236 }
3237 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3238 {
3239 if (!TMR_BASE (expr)
3240 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3241 {
3242 error ("invalid address operand in TARGET_MEM_REF");
3243 return true;
3244 }
3245 if (!TMR_OFFSET (expr)
3246 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3247 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3248 {
3249 error ("invalid offset operand in TARGET_MEM_REF");
3250 debug_generic_stmt (expr);
3251 return true;
3252 }
3253 }
3254
3255 return ((require_lvalue || !is_gimple_min_invariant (expr))
3256 && verify_types_in_gimple_min_lval (expr));
3257 }
3258
 3259 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
 3260    list of pointer-to types that is trivially convertible to DEST.  */
3261
3262 static bool
3263 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3264 {
3265 tree src;
3266
3267 if (!TYPE_POINTER_TO (src_obj))
3268 return true;
3269
3270 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3271 if (useless_type_conversion_p (dest, src))
3272 return true;
3273
3274 return false;
3275 }
3276
3277 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3278 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3279
3280 static bool
3281 valid_fixed_convert_types_p (tree type1, tree type2)
3282 {
3283 return (FIXED_POINT_TYPE_P (type1)
3284 && (INTEGRAL_TYPE_P (type2)
3285 || SCALAR_FLOAT_TYPE_P (type2)
3286 || FIXED_POINT_TYPE_P (type2)));
3287 }
3288
3289 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3290 is a problem, otherwise false. */
3291
3292 static bool
3293 verify_gimple_call (gcall *stmt)
3294 {
3295 tree fn = gimple_call_fn (stmt);
3296 tree fntype, fndecl;
3297 unsigned i;
3298
3299 if (gimple_call_internal_p (stmt))
3300 {
3301 if (fn)
3302 {
3303 error ("gimple call has two targets");
3304 debug_generic_stmt (fn);
3305 return true;
3306 }
3307 }
3308 else
3309 {
3310 if (!fn)
3311 {
3312 error ("gimple call has no target");
3313 return true;
3314 }
3315 }
3316
3317 if (fn && !is_gimple_call_addr (fn))
3318 {
3319 error ("invalid function in gimple call");
3320 debug_generic_stmt (fn);
3321 return true;
3322 }
3323
3324 if (fn
3325 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3326 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3327 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3328 {
3329 error ("non-function in gimple call");
3330 return true;
3331 }
3332
3333 fndecl = gimple_call_fndecl (stmt);
3334 if (fndecl
3335 && TREE_CODE (fndecl) == FUNCTION_DECL
3336 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3337 && !DECL_PURE_P (fndecl)
3338 && !TREE_READONLY (fndecl))
3339 {
3340 error ("invalid pure const state for function");
3341 return true;
3342 }
3343
3344 tree lhs = gimple_call_lhs (stmt);
3345 if (lhs
3346 && (!is_gimple_lvalue (lhs)
3347 || verify_types_in_gimple_reference (lhs, true)))
3348 {
3349 error ("invalid LHS in gimple call");
3350 return true;
3351 }
3352
3353 if (lhs
3354 && gimple_call_ctrl_altering_p (stmt)
3355 && gimple_call_noreturn_p (stmt)
3356 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3357 {
3358 error ("LHS in noreturn call");
3359 return true;
3360 }
3361
3362 fntype = gimple_call_fntype (stmt);
3363 if (fntype
3364 && lhs
3365 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3366 /* ??? At least C++ misses conversions at assignments from
3367 void * call results.
3368 ??? Java is completely off. Especially with functions
3369 returning java.lang.Object.
3370 For now simply allow arbitrary pointer type conversions. */
3371 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3372 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3373 {
3374 error ("invalid conversion in gimple call");
3375 debug_generic_stmt (TREE_TYPE (lhs));
3376 debug_generic_stmt (TREE_TYPE (fntype));
3377 return true;
3378 }
3379
3380 if (gimple_call_chain (stmt)
3381 && !is_gimple_val (gimple_call_chain (stmt)))
3382 {
3383 error ("invalid static chain in gimple call");
3384 debug_generic_stmt (gimple_call_chain (stmt));
3385 return true;
3386 }
3387
3388 /* If there is a static chain argument, the call should either be
3389 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3390 if (gimple_call_chain (stmt)
3391 && fndecl
3392 && !DECL_STATIC_CHAIN (fndecl))
3393 {
3394 error ("static chain with function that doesn%'t use one");
3395 return true;
3396 }
3397
3398 /* ??? The C frontend passes unpromoted arguments in case it
3399 didn't see a function declaration before the call. So for now
3400 leave the call arguments mostly unverified. Once we gimplify
3401 unit-at-a-time we have a chance to fix this. */
3402
3403 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3404 {
3405 tree arg = gimple_call_arg (stmt, i);
3406 if ((is_gimple_reg_type (TREE_TYPE (arg))
3407 && !is_gimple_val (arg))
3408 || (!is_gimple_reg_type (TREE_TYPE (arg))
3409 && !is_gimple_lvalue (arg)))
3410 {
3411 error ("invalid argument to gimple call");
3412 debug_generic_expr (arg);
3413 return true;
3414 }
3415 }
3416
3417 return false;
3418 }
3419
3420 /* Verifies the gimple comparison with the result type TYPE and
3421 the operands OP0 and OP1. */
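/* For example (hypothetical names), _1 = a_2 < b_3 with int a_2 and
   unsigned int b_3 trips the "mismatching comparison operand types" check
   below, while an int/int comparison with a boolean (or single-bit
   integral) result type is accepted.  */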
3422
3423 static bool
3424 verify_gimple_comparison (tree type, tree op0, tree op1)
3425 {
3426 tree op0_type = TREE_TYPE (op0);
3427 tree op1_type = TREE_TYPE (op1);
3428
3429 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3430 {
3431 error ("invalid operands in gimple comparison");
3432 return true;
3433 }
3434
 3435   /* For comparisons we do not record an effective type that the
 3436      comparison is carried out in.  Instead we require that either
 3437      the first operand is trivially convertible into the second,
 3438      or the other way around.
 3439      Because we special-case pointers to void we allow
 3440      comparisons of pointers with the same mode as well.  */
3441 if (!useless_type_conversion_p (op0_type, op1_type)
3442 && !useless_type_conversion_p (op1_type, op0_type)
3443 && (!POINTER_TYPE_P (op0_type)
3444 || !POINTER_TYPE_P (op1_type)
3445 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3446 {
3447 error ("mismatching comparison operand types");
3448 debug_generic_expr (op0_type);
3449 debug_generic_expr (op1_type);
3450 return true;
3451 }
3452
3453 /* The resulting type of a comparison may be an effective boolean type. */
3454 if (INTEGRAL_TYPE_P (type)
3455 && (TREE_CODE (type) == BOOLEAN_TYPE
3456 || TYPE_PRECISION (type) == 1))
3457 {
3458 if (TREE_CODE (op0_type) == VECTOR_TYPE
3459 || TREE_CODE (op1_type) == VECTOR_TYPE)
3460 {
3461 error ("vector comparison returning a boolean");
3462 debug_generic_expr (op0_type);
3463 debug_generic_expr (op1_type);
3464 return true;
3465 }
3466 }
3467 /* Or an integer vector type with the same size and element count
3468 as the comparison operand types. */
3469 else if (TREE_CODE (type) == VECTOR_TYPE
3470 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3471 {
3472 if (TREE_CODE (op0_type) != VECTOR_TYPE
3473 || TREE_CODE (op1_type) != VECTOR_TYPE)
3474 {
3475 error ("non-vector operands in vector comparison");
3476 debug_generic_expr (op0_type);
3477 debug_generic_expr (op1_type);
3478 return true;
3479 }
3480
3481 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3482 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3483 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3484 /* The result of a vector comparison is of signed
3485 integral type. */
3486 || TYPE_UNSIGNED (TREE_TYPE (type)))
3487 {
3488 error ("invalid vector comparison resulting type");
3489 debug_generic_expr (type);
3490 return true;
3491 }
3492 }
3493 else
3494 {
3495 error ("bogus comparison result type");
3496 debug_generic_expr (type);
3497 return true;
3498 }
3499
3500 return false;
3501 }
3502
 3503 /* Verify a gimple assignment statement STMT with a unary rhs.
3504 Returns true if anything is wrong. */
3505
3506 static bool
3507 verify_gimple_assign_unary (gassign *stmt)
3508 {
3509 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3510 tree lhs = gimple_assign_lhs (stmt);
3511 tree lhs_type = TREE_TYPE (lhs);
3512 tree rhs1 = gimple_assign_rhs1 (stmt);
3513 tree rhs1_type = TREE_TYPE (rhs1);
3514
3515 if (!is_gimple_reg (lhs))
3516 {
3517 error ("non-register as LHS of unary operation");
3518 return true;
3519 }
3520
3521 if (!is_gimple_val (rhs1))
3522 {
3523 error ("invalid operand in unary operation");
3524 return true;
3525 }
3526
3527 /* First handle conversions. */
3528 switch (rhs_code)
3529 {
3530 CASE_CONVERT:
3531 {
3532 /* Allow conversions from pointer type to integral type only if
3533 there is no sign or zero extension involved.
 3534            For targets where the precision of ptrofftype doesn't match that
3535 of pointers we need to allow arbitrary conversions to ptrofftype. */
3536 if ((POINTER_TYPE_P (lhs_type)
3537 && INTEGRAL_TYPE_P (rhs1_type))
3538 || (POINTER_TYPE_P (rhs1_type)
3539 && INTEGRAL_TYPE_P (lhs_type)
3540 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3541 || ptrofftype_p (sizetype))))
3542 return false;
3543
3544 /* Allow conversion from integral to offset type and vice versa. */
3545 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3546 && INTEGRAL_TYPE_P (rhs1_type))
3547 || (INTEGRAL_TYPE_P (lhs_type)
3548 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3549 return false;
3550
3551 /* Otherwise assert we are converting between types of the
3552 same kind. */
3553 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3554 {
3555 error ("invalid types in nop conversion");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 return true;
3559 }
3560
3561 return false;
3562 }
3563
3564 case ADDR_SPACE_CONVERT_EXPR:
3565 {
3566 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3567 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3568 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3569 {
3570 error ("invalid types in address space conversion");
3571 debug_generic_expr (lhs_type);
3572 debug_generic_expr (rhs1_type);
3573 return true;
3574 }
3575
3576 return false;
3577 }
3578
3579 case FIXED_CONVERT_EXPR:
3580 {
3581 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3582 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3583 {
3584 error ("invalid types in fixed-point conversion");
3585 debug_generic_expr (lhs_type);
3586 debug_generic_expr (rhs1_type);
3587 return true;
3588 }
3589
3590 return false;
3591 }
3592
3593 case FLOAT_EXPR:
3594 {
3595 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3596 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3597 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3598 {
3599 error ("invalid types in conversion to floating point");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 return true;
3603 }
3604
3605 return false;
3606 }
3607
3608 case FIX_TRUNC_EXPR:
3609 {
3610 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3611 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3612 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3613 {
3614 error ("invalid types in conversion to integer");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3618 }
3619
3620 return false;
3621 }
3622 case REDUC_MAX_EXPR:
3623 case REDUC_MIN_EXPR:
3624 case REDUC_PLUS_EXPR:
3625 if (!VECTOR_TYPE_P (rhs1_type)
3626 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3627 {
3628 error ("reduction should convert from vector to element type");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3632 }
3633 return false;
3634
3635 case VEC_UNPACK_HI_EXPR:
3636 case VEC_UNPACK_LO_EXPR:
3637 case VEC_UNPACK_FLOAT_HI_EXPR:
3638 case VEC_UNPACK_FLOAT_LO_EXPR:
3639 /* FIXME. */
3640 return false;
3641
3642 case NEGATE_EXPR:
3643 case ABS_EXPR:
3644 case BIT_NOT_EXPR:
3645 case PAREN_EXPR:
3646 case CONJ_EXPR:
3647 break;
3648
3649 default:
3650 gcc_unreachable ();
3651 }
3652
3653 /* For the remaining codes assert there is no conversion involved. */
3654 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3655 {
3656 error ("non-trivial conversion in unary operation");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 return true;
3660 }
3661
3662 return false;
3663 }
3664
3665 /* Verify a gimple assignment statement STMT with a binary rhs.
3666 Returns true if anything is wrong. */
3667
3668 static bool
3669 verify_gimple_assign_binary (gassign *stmt)
3670 {
3671 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3672 tree lhs = gimple_assign_lhs (stmt);
3673 tree lhs_type = TREE_TYPE (lhs);
3674 tree rhs1 = gimple_assign_rhs1 (stmt);
3675 tree rhs1_type = TREE_TYPE (rhs1);
3676 tree rhs2 = gimple_assign_rhs2 (stmt);
3677 tree rhs2_type = TREE_TYPE (rhs2);
3678
3679 if (!is_gimple_reg (lhs))
3680 {
3681 error ("non-register as LHS of binary operation");
3682 return true;
3683 }
3684
3685 if (!is_gimple_val (rhs1)
3686 || !is_gimple_val (rhs2))
3687 {
3688 error ("invalid operands in binary operation");
3689 return true;
3690 }
3691
3692 /* First handle operations that involve different types. */
3693 switch (rhs_code)
3694 {
3695 case COMPLEX_EXPR:
3696 {
3697 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3698 || !(INTEGRAL_TYPE_P (rhs1_type)
3699 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3700 || !(INTEGRAL_TYPE_P (rhs2_type)
3701 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3702 {
3703 error ("type mismatch in complex expression");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 debug_generic_expr (rhs2_type);
3707 return true;
3708 }
3709
3710 return false;
3711 }
3712
3713 case LSHIFT_EXPR:
3714 case RSHIFT_EXPR:
3715 case LROTATE_EXPR:
3716 case RROTATE_EXPR:
3717 {
 3718         /* Shifts and rotates are ok on integral types, fixed-point
3719 types and integer vector types. */
3720 if ((!INTEGRAL_TYPE_P (rhs1_type)
3721 && !FIXED_POINT_TYPE_P (rhs1_type)
3722 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3723 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3724 || (!INTEGRAL_TYPE_P (rhs2_type)
3725 /* Vector shifts of vectors are also ok. */
3726 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3727 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3728 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3729 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3730 || !useless_type_conversion_p (lhs_type, rhs1_type))
3731 {
3732 error ("type mismatch in shift expression");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 debug_generic_expr (rhs2_type);
3736 return true;
3737 }
3738
3739 return false;
3740 }
3741
3742 case WIDEN_LSHIFT_EXPR:
3743 {
3744 if (!INTEGRAL_TYPE_P (lhs_type)
3745 || !INTEGRAL_TYPE_P (rhs1_type)
3746 || TREE_CODE (rhs2) != INTEGER_CST
3747 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3748 {
3749 error ("type mismatch in widening vector shift expression");
3750 debug_generic_expr (lhs_type);
3751 debug_generic_expr (rhs1_type);
3752 debug_generic_expr (rhs2_type);
3753 return true;
3754 }
3755
3756 return false;
3757 }
3758
3759 case VEC_WIDEN_LSHIFT_HI_EXPR:
3760 case VEC_WIDEN_LSHIFT_LO_EXPR:
3761 {
3762 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3763 || TREE_CODE (lhs_type) != VECTOR_TYPE
3764 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3765 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3766 || TREE_CODE (rhs2) != INTEGER_CST
3767 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3768 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3769 {
3770 error ("type mismatch in widening vector shift expression");
3771 debug_generic_expr (lhs_type);
3772 debug_generic_expr (rhs1_type);
3773 debug_generic_expr (rhs2_type);
3774 return true;
3775 }
3776
3777 return false;
3778 }
3779
3780 case PLUS_EXPR:
3781 case MINUS_EXPR:
3782 {
3783 tree lhs_etype = lhs_type;
3784 tree rhs1_etype = rhs1_type;
3785 tree rhs2_etype = rhs2_type;
3786 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3787 {
3788 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3789 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3790 {
3791 error ("invalid non-vector operands to vector valued plus");
3792 return true;
3793 }
3794 lhs_etype = TREE_TYPE (lhs_type);
3795 rhs1_etype = TREE_TYPE (rhs1_type);
3796 rhs2_etype = TREE_TYPE (rhs2_type);
3797 }
3798 if (POINTER_TYPE_P (lhs_etype)
3799 || POINTER_TYPE_P (rhs1_etype)
3800 || POINTER_TYPE_P (rhs2_etype))
3801 {
3802 error ("invalid (pointer) operands to plus/minus");
3803 return true;
3804 }
3805
3806 /* Continue with generic binary expression handling. */
3807 break;
3808 }
3809
3810 case POINTER_PLUS_EXPR:
3811 {
3812 if (!POINTER_TYPE_P (rhs1_type)
3813 || !useless_type_conversion_p (lhs_type, rhs1_type)
3814 || !ptrofftype_p (rhs2_type))
3815 {
3816 error ("type mismatch in pointer plus expression");
3817 debug_generic_stmt (lhs_type);
3818 debug_generic_stmt (rhs1_type);
3819 debug_generic_stmt (rhs2_type);
3820 return true;
3821 }
3822
3823 return false;
3824 }
3825
3826 case TRUTH_ANDIF_EXPR:
3827 case TRUTH_ORIF_EXPR:
3828 case TRUTH_AND_EXPR:
3829 case TRUTH_OR_EXPR:
3830 case TRUTH_XOR_EXPR:
3831
3832 gcc_unreachable ();
3833
3834 case LT_EXPR:
3835 case LE_EXPR:
3836 case GT_EXPR:
3837 case GE_EXPR:
3838 case EQ_EXPR:
3839 case NE_EXPR:
3840 case UNORDERED_EXPR:
3841 case ORDERED_EXPR:
3842 case UNLT_EXPR:
3843 case UNLE_EXPR:
3844 case UNGT_EXPR:
3845 case UNGE_EXPR:
3846 case UNEQ_EXPR:
3847 case LTGT_EXPR:
3848 /* Comparisons are also binary, but the result type is not
3849 connected to the operand types. */
3850 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3851
3852 case WIDEN_MULT_EXPR:
3853 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3854 return true;
3855 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3856 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3857
3858 case WIDEN_SUM_EXPR:
3859 case VEC_WIDEN_MULT_HI_EXPR:
3860 case VEC_WIDEN_MULT_LO_EXPR:
3861 case VEC_WIDEN_MULT_EVEN_EXPR:
3862 case VEC_WIDEN_MULT_ODD_EXPR:
3863 case VEC_PACK_TRUNC_EXPR:
3864 case VEC_PACK_SAT_EXPR:
3865 case VEC_PACK_FIX_TRUNC_EXPR:
3866 /* FIXME. */
3867 return false;
3868
3869 case MULT_EXPR:
3870 case MULT_HIGHPART_EXPR:
3871 case TRUNC_DIV_EXPR:
3872 case CEIL_DIV_EXPR:
3873 case FLOOR_DIV_EXPR:
3874 case ROUND_DIV_EXPR:
3875 case TRUNC_MOD_EXPR:
3876 case CEIL_MOD_EXPR:
3877 case FLOOR_MOD_EXPR:
3878 case ROUND_MOD_EXPR:
3879 case RDIV_EXPR:
3880 case EXACT_DIV_EXPR:
3881 case MIN_EXPR:
3882 case MAX_EXPR:
3883 case BIT_IOR_EXPR:
3884 case BIT_XOR_EXPR:
3885 case BIT_AND_EXPR:
3886 /* Continue with generic binary expression handling. */
3887 break;
3888
3889 default:
3890 gcc_unreachable ();
3891 }
3892
3893 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3894 || !useless_type_conversion_p (lhs_type, rhs2_type))
3895 {
3896 error ("type mismatch in binary expression");
3897 debug_generic_stmt (lhs_type);
3898 debug_generic_stmt (rhs1_type);
3899 debug_generic_stmt (rhs2_type);
3900 return true;
3901 }
3902
3903 return false;
3904 }
3905
3906 /* Verify a gimple assignment statement STMT with a ternary rhs.
3907 Returns true if anything is wrong. */
3908
3909 static bool
3910 verify_gimple_assign_ternary (gassign *stmt)
3911 {
3912 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3913 tree lhs = gimple_assign_lhs (stmt);
3914 tree lhs_type = TREE_TYPE (lhs);
3915 tree rhs1 = gimple_assign_rhs1 (stmt);
3916 tree rhs1_type = TREE_TYPE (rhs1);
3917 tree rhs2 = gimple_assign_rhs2 (stmt);
3918 tree rhs2_type = TREE_TYPE (rhs2);
3919 tree rhs3 = gimple_assign_rhs3 (stmt);
3920 tree rhs3_type = TREE_TYPE (rhs3);
3921
3922 if (!is_gimple_reg (lhs))
3923 {
3924 error ("non-register as LHS of ternary operation");
3925 return true;
3926 }
3927
3928 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3929 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3930 || !is_gimple_val (rhs2)
3931 || !is_gimple_val (rhs3))
3932 {
3933 error ("invalid operands in ternary operation");
3934 return true;
3935 }
3936
3937 /* First handle operations that involve different types. */
3938 switch (rhs_code)
3939 {
3940 case WIDEN_MULT_PLUS_EXPR:
3941 case WIDEN_MULT_MINUS_EXPR:
3942 if ((!INTEGRAL_TYPE_P (rhs1_type)
3943 && !FIXED_POINT_TYPE_P (rhs1_type))
3944 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3945 || !useless_type_conversion_p (lhs_type, rhs3_type)
3946 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3947 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3948 {
3949 error ("type mismatch in widening multiply-accumulate expression");
3950 debug_generic_expr (lhs_type);
3951 debug_generic_expr (rhs1_type);
3952 debug_generic_expr (rhs2_type);
3953 debug_generic_expr (rhs3_type);
3954 return true;
3955 }
3956 break;
3957
3958 case FMA_EXPR:
3959 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3960 || !useless_type_conversion_p (lhs_type, rhs2_type)
3961 || !useless_type_conversion_p (lhs_type, rhs3_type))
3962 {
3963 error ("type mismatch in fused multiply-add expression");
3964 debug_generic_expr (lhs_type);
3965 debug_generic_expr (rhs1_type);
3966 debug_generic_expr (rhs2_type);
3967 debug_generic_expr (rhs3_type);
3968 return true;
3969 }
3970 break;
3971
3972 case VEC_COND_EXPR:
3973 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3974 || TYPE_SIGN (rhs1_type) != SIGNED
3975 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
3976 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3977 != TYPE_VECTOR_SUBPARTS (lhs_type))
3978 {
3979 error ("the first argument of a VEC_COND_EXPR must be of a signed "
3980 "integral vector type of the same size and number of "
3981 "elements as the result");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 return true;
3985 }
3986 /* Fallthrough. */
3987 case COND_EXPR:
3988 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3989 || !useless_type_conversion_p (lhs_type, rhs3_type))
3990 {
3991 error ("type mismatch in conditional expression");
3992 debug_generic_expr (lhs_type);
3993 debug_generic_expr (rhs2_type);
3994 debug_generic_expr (rhs3_type);
3995 return true;
3996 }
3997 break;
3998
3999 case VEC_PERM_EXPR:
4000 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4001 || !useless_type_conversion_p (lhs_type, rhs2_type))
4002 {
4003 error ("type mismatch in vector permute expression");
4004 debug_generic_expr (lhs_type);
4005 debug_generic_expr (rhs1_type);
4006 debug_generic_expr (rhs2_type);
4007 debug_generic_expr (rhs3_type);
4008 return true;
4009 }
4010
4011 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4012 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4013 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4014 {
4015 error ("vector types expected in vector permute expression");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs1_type);
4018 debug_generic_expr (rhs2_type);
4019 debug_generic_expr (rhs3_type);
4020 return true;
4021 }
4022
4023 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4024 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4025 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4026 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4027 != TYPE_VECTOR_SUBPARTS (lhs_type))
4028 {
4029 	  error ("vectors with differing numbers of elements found "
4030 		 "in vector permute expression");
4031 debug_generic_expr (lhs_type);
4032 debug_generic_expr (rhs1_type);
4033 debug_generic_expr (rhs2_type);
4034 debug_generic_expr (rhs3_type);
4035 return true;
4036 }
4037
4038 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4039 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4040 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4041 {
4042 error ("invalid mask type in vector permute expression");
4043 debug_generic_expr (lhs_type);
4044 debug_generic_expr (rhs1_type);
4045 debug_generic_expr (rhs2_type);
4046 debug_generic_expr (rhs3_type);
4047 return true;
4048 }
4049
4050 return false;
4051
4052 case SAD_EXPR:
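      /* SAD_EXPR is a sum of absolute differences: the vector element
	 precision of the result must be at least twice that of the
	 operands, and the accumulator RHS3 must match the result type.  */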
4053 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4054 || !useless_type_conversion_p (lhs_type, rhs3_type)
4055 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4056 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4057 {
4058 error ("type mismatch in sad expression");
4059 debug_generic_expr (lhs_type);
4060 debug_generic_expr (rhs1_type);
4061 debug_generic_expr (rhs2_type);
4062 debug_generic_expr (rhs3_type);
4063 return true;
4064 }
4065
4066 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4067 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4068 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4069 {
4070 error ("vector types expected in sad expression");
4071 debug_generic_expr (lhs_type);
4072 debug_generic_expr (rhs1_type);
4073 debug_generic_expr (rhs2_type);
4074 debug_generic_expr (rhs3_type);
4075 return true;
4076 }
4077
4078 return false;
4079
4080 case DOT_PROD_EXPR:
4081 case REALIGN_LOAD_EXPR:
4082 /* FIXME. */
4083 return false;
4084
4085 default:
4086 gcc_unreachable ();
4087 }
4088 return false;
4089 }
4090
4091 /* Verify a gimple assignment statement STMT with a single rhs.
4092 Returns true if anything is wrong. */
4093
4094 static bool
4095 verify_gimple_assign_single (gassign *stmt)
4096 {
4097 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4098 tree lhs = gimple_assign_lhs (stmt);
4099 tree lhs_type = TREE_TYPE (lhs);
4100 tree rhs1 = gimple_assign_rhs1 (stmt);
4101 tree rhs1_type = TREE_TYPE (rhs1);
4102 bool res = false;
4103
4104 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4105 {
4106 error ("non-trivial conversion at assignment");
4107 debug_generic_expr (lhs_type);
4108 debug_generic_expr (rhs1_type);
4109 return true;
4110 }
4111
4112 if (gimple_clobber_p (stmt)
4113 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4114 {
4115 error ("non-decl/MEM_REF LHS in clobber statement");
4116 debug_generic_expr (lhs);
4117 return true;
4118 }
4119
4120 if (handled_component_p (lhs)
4121 || TREE_CODE (lhs) == MEM_REF
4122 || TREE_CODE (lhs) == TARGET_MEM_REF)
4123 res |= verify_types_in_gimple_reference (lhs, true);
4124
4125 /* Special codes we cannot handle via their class. */
4126 switch (rhs_code)
4127 {
4128 case ADDR_EXPR:
4129 {
4130 tree op = TREE_OPERAND (rhs1, 0);
4131 if (!is_gimple_addressable (op))
4132 {
4133 error ("invalid operand in unary expression");
4134 return true;
4135 }
4136
4137 	/* Technically there is no longer a need for matching types, but
4138 	   gimple hygiene asks for this check.  With LTO we can end up
4139 	   combining incompatible units and thus with addresses of globals
4140 	   whose type was changed to a common one.  */
4141 if (!in_lto_p
4142 && !types_compatible_p (TREE_TYPE (op),
4143 TREE_TYPE (TREE_TYPE (rhs1)))
4144 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4145 TREE_TYPE (op)))
4146 {
4147 error ("type mismatch in address expression");
4148 debug_generic_stmt (TREE_TYPE (rhs1));
4149 debug_generic_stmt (TREE_TYPE (op));
4150 return true;
4151 }
4152
4153 return verify_types_in_gimple_reference (op, true);
4154 }
4155
4156 /* tcc_reference */
4157 case INDIRECT_REF:
4158 error ("INDIRECT_REF in gimple IL");
4159 return true;
4160
4161 case COMPONENT_REF:
4162 case BIT_FIELD_REF:
4163 case ARRAY_REF:
4164 case ARRAY_RANGE_REF:
4165 case VIEW_CONVERT_EXPR:
4166 case REALPART_EXPR:
4167 case IMAGPART_EXPR:
4168 case TARGET_MEM_REF:
4169 case MEM_REF:
4170 if (!is_gimple_reg (lhs)
4171 && is_gimple_reg_type (TREE_TYPE (lhs)))
4172 {
4173 error ("invalid rhs for gimple memory store");
4174 debug_generic_stmt (lhs);
4175 debug_generic_stmt (rhs1);
4176 return true;
4177 }
4178 return res || verify_types_in_gimple_reference (rhs1, false);
4179
4180 /* tcc_constant */
4181 case SSA_NAME:
4182 case INTEGER_CST:
4183 case REAL_CST:
4184 case FIXED_CST:
4185 case COMPLEX_CST:
4186 case VECTOR_CST:
4187 case STRING_CST:
4188 return res;
4189
4190 /* tcc_declaration */
4191 case CONST_DECL:
4192 return res;
4193 case VAR_DECL:
4194 case PARM_DECL:
4195 if (!is_gimple_reg (lhs)
4196 && !is_gimple_reg (rhs1)
4197 && is_gimple_reg_type (TREE_TYPE (lhs)))
4198 {
4199 error ("invalid rhs for gimple memory store");
4200 debug_generic_stmt (lhs);
4201 debug_generic_stmt (rhs1);
4202 return true;
4203 }
4204 return res;
4205
4206 case CONSTRUCTOR:
4207 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4208 {
4209 unsigned int i;
4210 tree elt_i, elt_v, elt_t = NULL_TREE;
4211
4212 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4213 return res;
4214 	  /* For vector CONSTRUCTORs we require that either it is an empty
4215 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4216 	     (then the element count must be correct to cover the whole
4217 	     outer vector and the index must be NULL on all elements), or
4218 	     it is a CONSTRUCTOR of scalar elements, where we as an
4219 	     exception allow a smaller number of elements (assuming zero
4220 	     filling) and consecutive indexes as compared to NULL indexes
4221 	     (such CONSTRUCTORs can appear in the IL from FEs).  */
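	  /* For instance, a four-element vector may be written either as
	     { v2_1, v2_2 } with two-element vector elements and NULL
	     indexes, or as { i_1, i_2 } with scalar elements, the
	     remaining lanes being implicitly zero.  */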
4222 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4223 {
4224 if (elt_t == NULL_TREE)
4225 {
4226 elt_t = TREE_TYPE (elt_v);
4227 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4228 {
4230 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4231 TREE_TYPE (elt_t)))
4232 {
4233 error ("incorrect type of vector CONSTRUCTOR"
4234 " elements");
4235 debug_generic_stmt (rhs1);
4236 return true;
4237 }
4238 else if (CONSTRUCTOR_NELTS (rhs1)
4239 * TYPE_VECTOR_SUBPARTS (elt_t)
4240 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4241 {
4242 error ("incorrect number of vector CONSTRUCTOR"
4243 " elements");
4244 debug_generic_stmt (rhs1);
4245 return true;
4246 }
4247 }
4248 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4249 elt_t))
4250 {
4251 error ("incorrect type of vector CONSTRUCTOR elements");
4252 debug_generic_stmt (rhs1);
4253 return true;
4254 }
4255 else if (CONSTRUCTOR_NELTS (rhs1)
4256 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4257 {
4258 error ("incorrect number of vector CONSTRUCTOR elements");
4259 debug_generic_stmt (rhs1);
4260 return true;
4261 }
4262 }
4263 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4264 {
4265 error ("incorrect type of vector CONSTRUCTOR elements");
4266 debug_generic_stmt (rhs1);
4267 return true;
4268 }
4269 if (elt_i != NULL_TREE
4270 && (TREE_CODE (elt_t) == VECTOR_TYPE
4271 || TREE_CODE (elt_i) != INTEGER_CST
4272 || compare_tree_int (elt_i, i) != 0))
4273 {
4274 error ("vector CONSTRUCTOR with non-NULL element index");
4275 debug_generic_stmt (rhs1);
4276 return true;
4277 }
4278 if (!is_gimple_val (elt_v))
4279 {
4280 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4281 debug_generic_stmt (rhs1);
4282 return true;
4283 }
4284 }
4285 }
4286 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4287 {
4288 error ("non-vector CONSTRUCTOR with elements");
4289 debug_generic_stmt (rhs1);
4290 return true;
4291 }
4292 return res;
4293 case OBJ_TYPE_REF:
4294 case ASSERT_EXPR:
4295 case WITH_SIZE_EXPR:
4296 /* FIXME. */
4297 return res;
4298
4299 default:;
4300 }
4301
4302 return res;
4303 }
4304
4305 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4306 is a problem, otherwise false. */
4307
4308 static bool
4309 verify_gimple_assign (gassign *stmt)
4310 {
4311 switch (gimple_assign_rhs_class (stmt))
4312 {
4313 case GIMPLE_SINGLE_RHS:
4314 return verify_gimple_assign_single (stmt);
4315
4316 case GIMPLE_UNARY_RHS:
4317 return verify_gimple_assign_unary (stmt);
4318
4319 case GIMPLE_BINARY_RHS:
4320 return verify_gimple_assign_binary (stmt);
4321
4322 case GIMPLE_TERNARY_RHS:
4323 return verify_gimple_assign_ternary (stmt);
4324
4325 default:
4326 gcc_unreachable ();
4327 }
4328 }
4329
4330 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4331 is a problem, otherwise false. */
4332
4333 static bool
4334 verify_gimple_return (greturn *stmt)
4335 {
4336 tree op = gimple_return_retval (stmt);
4337 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4338
4339 /* We cannot test for present return values as we do not fix up missing
4340 return values from the original source. */
4341 if (op == NULL)
4342 return false;
4343
4344 if (!is_gimple_val (op)
4345 && TREE_CODE (op) != RESULT_DECL)
4346 {
4347 error ("invalid operand in return statement");
4348 debug_generic_stmt (op);
4349 return true;
4350 }
4351
4352 if ((TREE_CODE (op) == RESULT_DECL
4353 && DECL_BY_REFERENCE (op))
4354 || (TREE_CODE (op) == SSA_NAME
4355 && SSA_NAME_VAR (op)
4356 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4357 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4358 op = TREE_TYPE (op);
4359
4360 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4361 {
4362 error ("invalid conversion in return statement");
4363 debug_generic_stmt (restype);
4364 debug_generic_stmt (TREE_TYPE (op));
4365 return true;
4366 }
4367
4368 return false;
4369 }
4370
4371
4372 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4373 is a problem, otherwise false. */
4374
4375 static bool
4376 verify_gimple_goto (ggoto *stmt)
4377 {
4378 tree dest = gimple_goto_dest (stmt);
4379
4380 /* ??? We have two canonical forms of direct goto destinations, a
4381 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4382 if (TREE_CODE (dest) != LABEL_DECL
4383 && (!is_gimple_val (dest)
4384 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4385 {
4386 error ("goto destination is neither a label nor a pointer");
4387 return true;
4388 }
4389
4390 return false;
4391 }
4392
4393 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4394 is a problem, otherwise false. */
4395
4396 static bool
4397 verify_gimple_switch (gswitch *stmt)
4398 {
4399 unsigned int i, n;
4400 tree elt, prev_upper_bound = NULL_TREE;
4401 tree index_type, elt_type = NULL_TREE;
4402
4403 if (!is_gimple_val (gimple_switch_index (stmt)))
4404 {
4405 error ("invalid operand to switch statement");
4406 debug_generic_stmt (gimple_switch_index (stmt));
4407 return true;
4408 }
4409
4410 index_type = TREE_TYPE (gimple_switch_index (stmt));
4411 if (! INTEGRAL_TYPE_P (index_type))
4412 {
4413 error ("non-integral type switch statement");
4414 debug_generic_expr (index_type);
4415 return true;
4416 }
4417
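  /* Label 0 is the default case: it must have neither CASE_LOW nor
     CASE_HIGH.  The remaining labels must have CASE_LOW set, use a
     common type, and appear sorted in ascending order.  */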
4418 elt = gimple_switch_label (stmt, 0);
4419 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4420 {
4421 error ("invalid default case label in switch statement");
4422 debug_generic_expr (elt);
4423 return true;
4424 }
4425
4426 n = gimple_switch_num_labels (stmt);
4427 for (i = 1; i < n; i++)
4428 {
4429 elt = gimple_switch_label (stmt, i);
4430
4431 if (! CASE_LOW (elt))
4432 {
4433 error ("invalid case label in switch statement");
4434 debug_generic_expr (elt);
4435 return true;
4436 }
4437 if (CASE_HIGH (elt)
4438 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4439 {
4440 error ("invalid case range in switch statement");
4441 debug_generic_expr (elt);
4442 return true;
4443 }
4444
4445 if (elt_type)
4446 {
4447 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4448 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4449 {
4450 error ("type mismatch for case label in switch statement");
4451 debug_generic_expr (elt);
4452 return true;
4453 }
4454 }
4455 else
4456 {
4457 elt_type = TREE_TYPE (CASE_LOW (elt));
4458 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4459 {
4460 error ("type precision mismatch in switch statement");
4461 return true;
4462 }
4463 }
4464
4465 if (prev_upper_bound)
4466 {
4467 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4468 {
4469 error ("case labels not sorted in switch statement");
4470 return true;
4471 }
4472 }
4473
4474 prev_upper_bound = CASE_HIGH (elt);
4475 if (! prev_upper_bound)
4476 prev_upper_bound = CASE_LOW (elt);
4477 }
4478
4479 return false;
4480 }
4481
4482 /* Verify a gimple debug statement STMT.
4483 Returns true if anything is wrong. */
4484
4485 static bool
4486 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4487 {
4488   /* There isn't much that could be wrong in a gimple debug stmt.  A
4489      gimple debug bind stmt, for example, maps a tree (usually a
4490      VAR_DECL or a PARM_DECL, but possibly some scalarized component
4491      or member of an aggregate type) to another tree that can be an
4492      arbitrary expression.  These stmts expand into debug insns, and
4493      are converted to debug notes by var-tracking.c.  */
4494 return false;
4495 }
4496
4497 /* Verify a gimple label statement STMT.
4498 Returns true if anything is wrong. */
4499
4500 static bool
4501 verify_gimple_label (glabel *stmt)
4502 {
4503 tree decl = gimple_label_label (stmt);
4504 int uid;
4505 bool err = false;
4506
4507 if (TREE_CODE (decl) != LABEL_DECL)
4508 return true;
4509 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4510 && DECL_CONTEXT (decl) != current_function_decl)
4511 {
4512 error ("label's context is not the current function decl");
4513 err |= true;
4514 }
4515
4516 uid = LABEL_DECL_UID (decl);
4517 if (cfun->cfg
4518 && (uid == -1
4519 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4520 {
4521 error ("incorrect entry in label_to_block_map");
4522 err |= true;
4523 }
4524
4525 uid = EH_LANDING_PAD_NR (decl);
4526 if (uid)
4527 {
4528 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4529 if (decl != lp->post_landing_pad)
4530 {
4531 error ("incorrect setting of landing pad number");
4532 err |= true;
4533 }
4534 }
4535
4536 return err;
4537 }
4538
4539 /* Verify a gimple cond statement STMT.
4540 Returns true if anything is wrong. */
4541
4542 static bool
4543 verify_gimple_cond (gcond *stmt)
4544 {
4545 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4546 {
4547 error ("invalid comparison code in gimple cond");
4548 return true;
4549 }
4550 if (!(!gimple_cond_true_label (stmt)
4551 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4552 || !(!gimple_cond_false_label (stmt)
4553 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4554 {
4555 error ("invalid labels in gimple cond");
4556 return true;
4557 }
4558
4559 return verify_gimple_comparison (boolean_type_node,
4560 gimple_cond_lhs (stmt),
4561 gimple_cond_rhs (stmt));
4562 }
4563
4564 /* Verify the GIMPLE statement STMT. Returns true if there is an
4565 error, otherwise false. */
4566
4567 static bool
4568 verify_gimple_stmt (gimple *stmt)
4569 {
4570 switch (gimple_code (stmt))
4571 {
4572 case GIMPLE_ASSIGN:
4573 return verify_gimple_assign (as_a <gassign *> (stmt));
4574
4575 case GIMPLE_LABEL:
4576 return verify_gimple_label (as_a <glabel *> (stmt));
4577
4578 case GIMPLE_CALL:
4579 return verify_gimple_call (as_a <gcall *> (stmt));
4580
4581 case GIMPLE_COND:
4582 return verify_gimple_cond (as_a <gcond *> (stmt));
4583
4584 case GIMPLE_GOTO:
4585 return verify_gimple_goto (as_a <ggoto *> (stmt));
4586
4587 case GIMPLE_SWITCH:
4588 return verify_gimple_switch (as_a <gswitch *> (stmt));
4589
4590 case GIMPLE_RETURN:
4591 return verify_gimple_return (as_a <greturn *> (stmt));
4592
4593 case GIMPLE_ASM:
4594 return false;
4595
4596 case GIMPLE_TRANSACTION:
4597 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4598
4599 /* Tuples that do not have tree operands. */
4600 case GIMPLE_NOP:
4601 case GIMPLE_PREDICT:
4602 case GIMPLE_RESX:
4603 case GIMPLE_EH_DISPATCH:
4604 case GIMPLE_EH_MUST_NOT_THROW:
4605 return false;
4606
4607 CASE_GIMPLE_OMP:
4608       /* OpenMP directives are validated by the FE and never operated
4609 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4610 	 non-gimple expressions when the main index variable has had
4611 	 its address taken.  This does not affect the loop itself
4612 	 because the header of a GIMPLE_OMP_FOR is merely used to
4613 	 determine how to set up the parallel iteration.  */
4614 return false;
4615
4616 case GIMPLE_DEBUG:
4617 return verify_gimple_debug (stmt);
4618
4619 default:
4620 gcc_unreachable ();
4621 }
4622 }
4623
4624 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4625 and false otherwise. */
4626
4627 static bool
4628 verify_gimple_phi (gimple *phi)
4629 {
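  /* A well-formed PHI has an SSA_NAME result and one GIMPLE value
     argument per incoming edge, e.g.  x_3 = PHI <x_1(2), x_2(4)>;
     a virtual PHI must use the single virtual operand variable
     throughout.  */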
4630 bool err = false;
4631 unsigned i;
4632 tree phi_result = gimple_phi_result (phi);
4633 bool virtual_p;
4634
4635 if (!phi_result)
4636 {
4637 error ("invalid PHI result");
4638 return true;
4639 }
4640
4641 virtual_p = virtual_operand_p (phi_result);
4642 if (TREE_CODE (phi_result) != SSA_NAME
4643 || (virtual_p
4644 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4645 {
4646 error ("invalid PHI result");
4647 err = true;
4648 }
4649
4650 for (i = 0; i < gimple_phi_num_args (phi); i++)
4651 {
4652 tree t = gimple_phi_arg_def (phi, i);
4653
4654 if (!t)
4655 {
4656 error ("missing PHI def");
4657 err |= true;
4658 continue;
4659 }
4660 /* Addressable variables do have SSA_NAMEs but they
4661 are not considered gimple values. */
4662 else if ((TREE_CODE (t) == SSA_NAME
4663 && virtual_p != virtual_operand_p (t))
4664 || (virtual_p
4665 && (TREE_CODE (t) != SSA_NAME
4666 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4667 || (!virtual_p
4668 && !is_gimple_val (t)))
4669 {
4670 error ("invalid PHI argument");
4671 debug_generic_expr (t);
4672 err |= true;
4673 }
4674 #ifdef ENABLE_TYPES_CHECKING
4675 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4676 {
4677 error ("incompatible types in PHI argument %u", i);
4678 debug_generic_stmt (TREE_TYPE (phi_result));
4679 debug_generic_stmt (TREE_TYPE (t));
4680 err |= true;
4681 }
4682 #endif
4683 }
4684
4685 return err;
4686 }
4687
4688 /* Verify the GIMPLE statements inside the sequence STMTS. */
4689
4690 static bool
4691 verify_gimple_in_seq_2 (gimple_seq stmts)
4692 {
4693 gimple_stmt_iterator ittr;
4694 bool err = false;
4695
4696 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4697 {
4698 gimple *stmt = gsi_stmt (ittr);
4699
4700 switch (gimple_code (stmt))
4701 {
4702 case GIMPLE_BIND:
4703 err |= verify_gimple_in_seq_2 (
4704 gimple_bind_body (as_a <gbind *> (stmt)));
4705 break;
4706
4707 case GIMPLE_TRY:
4708 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4709 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4710 break;
4711
4712 case GIMPLE_EH_FILTER:
4713 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4714 break;
4715
4716 case GIMPLE_EH_ELSE:
4717 {
4718 geh_else *eh_else = as_a <geh_else *> (stmt);
4719 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4720 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4721 }
4722 break;
4723
4724 case GIMPLE_CATCH:
4725 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4726 as_a <gcatch *> (stmt)));
4727 break;
4728
4729 case GIMPLE_TRANSACTION:
4730 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4731 break;
4732
4733 default:
4734 {
4735 bool err2 = verify_gimple_stmt (stmt);
4736 if (err2)
4737 debug_gimple_stmt (stmt);
4738 err |= err2;
4739 }
4740 }
4741 }
4742
4743 return err;
4744 }
4745
4746 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4747 is a problem, otherwise false. */
4748
4749 static bool
4750 verify_gimple_transaction (gtransaction *stmt)
4751 {
4752 tree lab = gimple_transaction_label (stmt);
4753 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4754 return true;
4755 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4756 }
4757
4758
4759 /* Verify the GIMPLE statements inside the statement list STMTS. */
4760
4761 DEBUG_FUNCTION void
4762 verify_gimple_in_seq (gimple_seq stmts)
4763 {
4764 timevar_push (TV_TREE_STMT_VERIFY);
4765 if (verify_gimple_in_seq_2 (stmts))
4766 internal_error ("verify_gimple failed");
4767 timevar_pop (TV_TREE_STMT_VERIFY);
4768 }
4769
4770 /* Return true when the tree node T can be shared.  */
4771
4772 static bool
4773 tree_node_can_be_shared (tree t)
4774 {
4775 if (IS_TYPE_OR_DECL_P (t)
4776 || is_gimple_min_invariant (t)
4777 || TREE_CODE (t) == SSA_NAME
4778 || t == error_mark_node
4779 || TREE_CODE (t) == IDENTIFIER_NODE)
4780 return true;
4781
4782 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4783 return true;
4784
4785 if (DECL_P (t))
4786 return true;
4787
4788 return false;
4789 }
4790
4791 /* Called via walk_tree. Verify tree sharing. */
4792
4793 static tree
4794 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4795 {
4796 hash_set<void *> *visited = (hash_set<void *> *) data;
4797
4798 if (tree_node_can_be_shared (*tp))
4799 {
4800 *walk_subtrees = false;
4801 return NULL;
4802 }
4803
4804 if (visited->add (*tp))
4805 return *tp;
4806
4807 return NULL;
4808 }
4809
4810 /* Called via walk_gimple_stmt. Verify tree sharing. */
4811
4812 static tree
4813 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4814 {
4815 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4816 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4817 }
4818
4819 static bool eh_error_found;
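
/* Traversal callback for the EH throw statement table.  Complain about
   statements recorded in the table that no longer appear in the IL;
   always return true so that the traversal visits every entry.  */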
4820 bool
4821 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4822 hash_set<gimple *> *visited)
4823 {
4824 if (!visited->contains (stmt))
4825 {
4826 error ("dead STMT in EH table");
4827 debug_gimple_stmt (stmt);
4828 eh_error_found = true;
4829 }
4830 return true;
4831 }
4832
4833 /* Verify that the block of location LOC is in BLOCKS.  */
4834
4835 static bool
4836 verify_location (hash_set<tree> *blocks, location_t loc)
4837 {
4838 tree block = LOCATION_BLOCK (loc);
4839 if (block != NULL_TREE
4840 && !blocks->contains (block))
4841 {
4842 error ("location references block not in block tree");
4843 return true;
4844 }
4845 if (block != NULL_TREE)
4846 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4847 return false;
4848 }
4849
4850 /* Called via walk_tree. Verify that expressions have no blocks. */
4851
4852 static tree
4853 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4854 {
4855 if (!EXPR_P (*tp))
4856 {
4857 *walk_subtrees = false;
4858 return NULL;
4859 }
4860
4861 location_t loc = EXPR_LOCATION (*tp);
4862 if (LOCATION_BLOCK (loc) != NULL)
4863 return *tp;
4864
4865 return NULL;
4866 }
4867
4868 /* Called via walk_tree. Verify locations of expressions. */
4869
4870 static tree
4871 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4872 {
4873 hash_set<tree> *blocks = (hash_set<tree> *) data;
4874
4875 if (TREE_CODE (*tp) == VAR_DECL
4876 && DECL_HAS_DEBUG_EXPR_P (*tp))
4877 {
4878 tree t = DECL_DEBUG_EXPR (*tp);
4879 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4880 if (addr)
4881 return addr;
4882 }
4883 if ((TREE_CODE (*tp) == VAR_DECL
4884 || TREE_CODE (*tp) == PARM_DECL
4885 || TREE_CODE (*tp) == RESULT_DECL)
4886 && DECL_HAS_VALUE_EXPR_P (*tp))
4887 {
4888 tree t = DECL_VALUE_EXPR (*tp);
4889 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4890 if (addr)
4891 return addr;
4892 }
4893
4894 if (!EXPR_P (*tp))
4895 {
4896 *walk_subtrees = false;
4897 return NULL;
4898 }
4899
4900 location_t loc = EXPR_LOCATION (*tp);
4901 if (verify_location (blocks, loc))
4902 return *tp;
4903
4904 return NULL;
4905 }
4906
4907 /* Called via walk_gimple_op. Verify locations of expressions. */
4908
4909 static tree
4910 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4911 {
4912 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4913 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4914 }
4915
4916 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4917
4918 static void
4919 collect_subblocks (hash_set<tree> *blocks, tree block)
4920 {
4921 tree t;
4922 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4923 {
4924 blocks->add (t);
4925 collect_subblocks (blocks, t);
4926 }
4927 }
4928
4929 /* Verify the GIMPLE statements in the CFG of FN. */
4930
4931 DEBUG_FUNCTION void
4932 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4933 {
4934 basic_block bb;
4935 bool err = false;
4936
4937 timevar_push (TV_TREE_STMT_VERIFY);
4938 hash_set<void *> visited;
4939 hash_set<gimple *> visited_stmts;
4940
4941 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4942 hash_set<tree> blocks;
4943 if (DECL_INITIAL (fn->decl))
4944 {
4945 blocks.add (DECL_INITIAL (fn->decl));
4946 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4947 }
4948
4949 FOR_EACH_BB_FN (bb, fn)
4950 {
4951 gimple_stmt_iterator gsi;
4952
4953 for (gphi_iterator gpi = gsi_start_phis (bb);
4954 !gsi_end_p (gpi);
4955 gsi_next (&gpi))
4956 {
4957 gphi *phi = gpi.phi ();
4958 bool err2 = false;
4959 unsigned i;
4960
4961 visited_stmts.add (phi);
4962
4963 if (gimple_bb (phi) != bb)
4964 {
4965 error ("gimple_bb (phi) is set to a wrong basic block");
4966 err2 = true;
4967 }
4968
4969 err2 |= verify_gimple_phi (phi);
4970
4971 /* Only PHI arguments have locations. */
4972 if (gimple_location (phi) != UNKNOWN_LOCATION)
4973 {
4974 error ("PHI node with location");
4975 err2 = true;
4976 }
4977
4978 for (i = 0; i < gimple_phi_num_args (phi); i++)
4979 {
4980 tree arg = gimple_phi_arg_def (phi, i);
4981 tree addr = walk_tree (&arg, verify_node_sharing_1,
4982 &visited, NULL);
4983 if (addr)
4984 {
4985 error ("incorrect sharing of tree nodes");
4986 debug_generic_expr (addr);
4987 err2 |= true;
4988 }
4989 location_t loc = gimple_phi_arg_location (phi, i);
4990 if (virtual_operand_p (gimple_phi_result (phi))
4991 && loc != UNKNOWN_LOCATION)
4992 {
4993 error ("virtual PHI with argument locations");
4994 err2 = true;
4995 }
4996 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4997 if (addr)
4998 {
4999 debug_generic_expr (addr);
5000 err2 = true;
5001 }
5002 err2 |= verify_location (&blocks, loc);
5003 }
5004
5005 if (err2)
5006 debug_gimple_stmt (phi);
5007 err |= err2;
5008 }
5009
5010 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5011 {
5012 gimple *stmt = gsi_stmt (gsi);
5013 bool err2 = false;
5014 struct walk_stmt_info wi;
5015 tree addr;
5016 int lp_nr;
5017
5018 visited_stmts.add (stmt);
5019
5020 if (gimple_bb (stmt) != bb)
5021 {
5022 error ("gimple_bb (stmt) is set to a wrong basic block");
5023 err2 = true;
5024 }
5025
5026 err2 |= verify_gimple_stmt (stmt);
5027 err2 |= verify_location (&blocks, gimple_location (stmt));
5028
5029 memset (&wi, 0, sizeof (wi));
5030 wi.info = (void *) &visited;
5031 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5032 if (addr)
5033 {
5034 error ("incorrect sharing of tree nodes");
5035 debug_generic_expr (addr);
5036 err2 |= true;
5037 }
5038
5039 memset (&wi, 0, sizeof (wi));
5040 wi.info = (void *) &blocks;
5041 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5042 if (addr)
5043 {
5044 debug_generic_expr (addr);
5045 err2 |= true;
5046 }
5047
5048 	  /* ??? Instead of not checking these stmts at all, the walker
5049 	     should know its context via wi.  */
5050 if (!is_gimple_debug (stmt)
5051 && !is_gimple_omp (stmt))
5052 {
5053 memset (&wi, 0, sizeof (wi));
5054 addr = walk_gimple_op (stmt, verify_expr, &wi);
5055 if (addr)
5056 {
5057 debug_generic_expr (addr);
5058 inform (gimple_location (stmt), "in statement");
5059 err2 |= true;
5060 }
5061 }
5062
5063 	  /* If the statement is marked as part of an EH region, then it is
5064 	     expected that the statement could throw.  Verify that when an
5065 	     optimization simplifies a statement such that we can prove it
5066 	     cannot throw, the other data structures are updated to
5067 	     match.  */
5068 lp_nr = lookup_stmt_eh_lp (stmt);
5069 if (lp_nr > 0)
5070 {
5071 if (!stmt_could_throw_p (stmt))
5072 {
5073 if (verify_nothrow)
5074 {
5075 error ("statement marked for throw, but doesn%'t");
5076 err2 |= true;
5077 }
5078 }
5079 else if (!gsi_one_before_end_p (gsi))
5080 {
5081 error ("statement marked for throw in middle of block");
5082 err2 |= true;
5083 }
5084 }
5085
5086 if (err2)
5087 debug_gimple_stmt (stmt);
5088 err |= err2;
5089 }
5090 }
5091
5092 eh_error_found = false;
5093 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5094 if (eh_table)
5095 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5096 (&visited_stmts);
5097
5098 if (err || eh_error_found)
5099 internal_error ("verify_gimple failed");
5100
5101 verify_histograms ();
5102 timevar_pop (TV_TREE_STMT_VERIFY);
5103 }
5104
5105
5106 /* Verifies that the flow information is OK. */
5107
5108 static int
5109 gimple_verify_flow_info (void)
5110 {
5111 int err = 0;
5112 basic_block bb;
5113 gimple_stmt_iterator gsi;
5114 gimple *stmt;
5115 edge e;
5116 edge_iterator ei;
5117
5118 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5119 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5120 {
5121 error ("ENTRY_BLOCK has IL associated with it");
5122 err = 1;
5123 }
5124
5125 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5126 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5127 {
5128 error ("EXIT_BLOCK has IL associated with it");
5129 err = 1;
5130 }
5131
5132 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5133 if (e->flags & EDGE_FALLTHRU)
5134 {
5135 error ("fallthru to exit from bb %d", e->src->index);
5136 err = 1;
5137 }
5138
5139 FOR_EACH_BB_FN (bb, cfun)
5140 {
5141 bool found_ctrl_stmt = false;
5142
5143 stmt = NULL;
5144
5145 /* Skip labels on the start of basic block. */
5146 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5147 {
5148 tree label;
5149 gimple *prev_stmt = stmt;
5150
5151 stmt = gsi_stmt (gsi);
5152
5153 if (gimple_code (stmt) != GIMPLE_LABEL)
5154 break;
5155
5156 label = gimple_label_label (as_a <glabel *> (stmt));
5157 if (prev_stmt && DECL_NONLOCAL (label))
5158 {
5159 error ("nonlocal label ");
5160 print_generic_expr (stderr, label, 0);
5161 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5162 bb->index);
5163 err = 1;
5164 }
5165
5166 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5167 {
5168 error ("EH landing pad label ");
5169 print_generic_expr (stderr, label, 0);
5170 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5171 bb->index);
5172 err = 1;
5173 }
5174
5175 if (label_to_block (label) != bb)
5176 {
5177 error ("label ");
5178 print_generic_expr (stderr, label, 0);
5179 fprintf (stderr, " to block does not match in bb %d",
5180 bb->index);
5181 err = 1;
5182 }
5183
5184 if (decl_function_context (label) != current_function_decl)
5185 {
5186 error ("label ");
5187 print_generic_expr (stderr, label, 0);
5188 fprintf (stderr, " has incorrect context in bb %d",
5189 bb->index);
5190 err = 1;
5191 }
5192 }
5193
5194 /* Verify that body of basic block BB is free of control flow. */
5195 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5196 {
5197 gimple *stmt = gsi_stmt (gsi);
5198
5199 if (found_ctrl_stmt)
5200 {
5201 error ("control flow in the middle of basic block %d",
5202 bb->index);
5203 err = 1;
5204 }
5205
5206 if (stmt_ends_bb_p (stmt))
5207 found_ctrl_stmt = true;
5208
5209 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5210 {
5211 error ("label ");
5212 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5213 fprintf (stderr, " in the middle of basic block %d", bb->index);
5214 err = 1;
5215 }
5216 }
5217
5218 gsi = gsi_last_bb (bb);
5219 if (gsi_end_p (gsi))
5220 continue;
5221
5222 stmt = gsi_stmt (gsi);
5223
5224 if (gimple_code (stmt) == GIMPLE_LABEL)
5225 continue;
5226
5227 err |= verify_eh_edges (stmt);
5228
5229 if (is_ctrl_stmt (stmt))
5230 {
5231 FOR_EACH_EDGE (e, ei, bb->succs)
5232 if (e->flags & EDGE_FALLTHRU)
5233 {
5234 error ("fallthru edge after a control statement in bb %d",
5235 bb->index);
5236 err = 1;
5237 }
5238 }
5239
5240 if (gimple_code (stmt) != GIMPLE_COND)
5241 {
5242 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5243 	     after anything other than a GIMPLE_COND.  */
5244 FOR_EACH_EDGE (e, ei, bb->succs)
5245 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5246 {
5247 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5248 bb->index);
5249 err = 1;
5250 }
5251 }
5252
5253 switch (gimple_code (stmt))
5254 {
5255 case GIMPLE_COND:
5256 {
5257 edge true_edge;
5258 edge false_edge;
5259
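	    /* A GIMPLE_COND must end its block with exactly two outgoing
	       edges, one EDGE_TRUE_VALUE and one EDGE_FALSE_VALUE,
	       neither of them fallthru or abnormal.  */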
5260 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5261
5262 if (!true_edge
5263 || !false_edge
5264 || !(true_edge->flags & EDGE_TRUE_VALUE)
5265 || !(false_edge->flags & EDGE_FALSE_VALUE)
5266 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5267 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5268 || EDGE_COUNT (bb->succs) >= 3)
5269 {
5270 error ("wrong outgoing edge flags at end of bb %d",
5271 bb->index);
5272 err = 1;
5273 }
5274 }
5275 break;
5276
5277 case GIMPLE_GOTO:
5278 if (simple_goto_p (stmt))
5279 {
5280 error ("explicit goto at end of bb %d", bb->index);
5281 err = 1;
5282 }
5283 else
5284 {
5285 /* FIXME. We should double check that the labels in the
5286 destination blocks have their address taken. */
5287 FOR_EACH_EDGE (e, ei, bb->succs)
5288 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5289 | EDGE_FALSE_VALUE))
5290 || !(e->flags & EDGE_ABNORMAL))
5291 {
5292 error ("wrong outgoing edge flags at end of bb %d",
5293 bb->index);
5294 err = 1;
5295 }
5296 }
5297 break;
5298
5299 case GIMPLE_CALL:
5300 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5301 break;
5302 /* ... fallthru ... */
5303 case GIMPLE_RETURN:
5304 if (!single_succ_p (bb)
5305 || (single_succ_edge (bb)->flags
5306 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5307 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5308 {
5309 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5310 err = 1;
5311 }
5312 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5313 {
5314 error ("return edge does not point to exit in bb %d",
5315 bb->index);
5316 err = 1;
5317 }
5318 break;
5319
5320 case GIMPLE_SWITCH:
5321 {
5322 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5323 tree prev;
5324 edge e;
5325 size_t i, n;
5326
5327 n = gimple_switch_num_labels (switch_stmt);
5328
5329 /* Mark all the destination basic blocks. */
5330 for (i = 0; i < n; ++i)
5331 {
5332 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5333 basic_block label_bb = label_to_block (lab);
5334 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5335 label_bb->aux = (void *)1;
5336 }
5337
5338 /* Verify that the case labels are sorted. */
5339 prev = gimple_switch_label (switch_stmt, 0);
5340 for (i = 1; i < n; ++i)
5341 {
5342 tree c = gimple_switch_label (switch_stmt, i);
5343 if (!CASE_LOW (c))
5344 {
5345 error ("found default case not at the start of "
5346 "case vector");
5347 err = 1;
5348 continue;
5349 }
5350 if (CASE_LOW (prev)
5351 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5352 {
5353 error ("case labels not sorted: ");
5354 print_generic_expr (stderr, prev, 0);
5355 		    fprintf (stderr, " is greater than ");
5356 		    print_generic_expr (stderr, c, 0);
5357 		    fprintf (stderr, " but comes before it.\n");
5358 err = 1;
5359 }
5360 prev = c;
5361 }
5362 /* VRP will remove the default case if it can prove it will
5363 never be executed. So do not verify there always exists
5364 a default case here. */
5365
5366 FOR_EACH_EDGE (e, ei, bb->succs)
5367 {
5368 if (!e->dest->aux)
5369 {
5370 error ("extra outgoing edge %d->%d",
5371 bb->index, e->dest->index);
5372 err = 1;
5373 }
5374
5375 e->dest->aux = (void *)2;
5376 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5377 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5378 {
5379 error ("wrong outgoing edge flags at end of bb %d",
5380 bb->index);
5381 err = 1;
5382 }
5383 }
5384
5385 /* Check that we have all of them. */
5386 for (i = 0; i < n; ++i)
5387 {
5388 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5389 basic_block label_bb = label_to_block (lab);
5390
5391 if (label_bb->aux != (void *)2)
5392 {
5393 error ("missing edge %i->%i", bb->index, label_bb->index);
5394 err = 1;
5395 }
5396 }
5397
5398 FOR_EACH_EDGE (e, ei, bb->succs)
5399 e->dest->aux = (void *)0;
5400 }
5401 break;
5402
5403 case GIMPLE_EH_DISPATCH:
5404 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5405 break;
5406
5407 default:
5408 break;
5409 }
5410 }
5411
5412 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5413 verify_dominators (CDI_DOMINATORS);
5414
5415 return err;
5416 }
5417
5418
5419 /* Updates phi nodes after creating a forwarder block joined
5420 by edge FALLTHRU. */
5421
5422 static void
5423 gimple_make_forwarder_block (edge fallthru)
5424 {
5425 edge e;
5426 edge_iterator ei;
5427 basic_block dummy, bb;
5428 tree var;
5429 gphi_iterator gsi;
5430
5431 dummy = fallthru->src;
5432 bb = fallthru->dest;
5433
5434 if (single_pred_p (bb))
5435 return;
5436
5437 /* If we redirected a branch we must create new PHI nodes at the
5438 start of BB. */
5439 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5440 {
5441 gphi *phi, *new_phi;
5442
5443 phi = gsi.phi ();
5444 var = gimple_phi_result (phi);
5445 new_phi = create_phi_node (var, bb);
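      /* VAR is now defined by the new PHI in BB; give the PHI in DUMMY
	 a fresh result and feed that name into the new PHI along the
	 FALLTHRU edge.  */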
5446 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5447 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5448 UNKNOWN_LOCATION);
5449 }
5450
5451 /* Add the arguments we have stored on edges. */
5452 FOR_EACH_EDGE (e, ei, bb->preds)
5453 {
5454 if (e == fallthru)
5455 continue;
5456
5457 flush_pending_stmts (e);
5458 }
5459 }
5460
5461
5462 /* Return a non-special label at the head of basic block BB.
5463    Create one if it doesn't exist.  */
5464
5465 tree
5466 gimple_block_label (basic_block bb)
5467 {
5468 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5469 bool first = true;
5470 tree label;
5471 glabel *stmt;
5472
5473 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5474 {
5475 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5476 if (!stmt)
5477 break;
5478 label = gimple_label_label (stmt);
5479 if (!DECL_NONLOCAL (label))
5480 {
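	  /* Hoist the label to the start of the block so that it stays
	     first among the labels and is found immediately next time.  */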
5481 if (!first)
5482 gsi_move_before (&i, &s);
5483 return label;
5484 }
5485 }
5486
5487 label = create_artificial_label (UNKNOWN_LOCATION);
5488 stmt = gimple_build_label (label);
5489 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5490 return label;
5491 }
5492
5493
5494 /* Attempt to perform edge redirection by replacing a possibly complex
5495 jump instruction by a goto or by removing the jump completely.
5496 This can apply only if all edges now point to the same block. The
5497 parameters and return values are equivalent to
5498 redirect_edge_and_branch. */
5499
5500 static edge
5501 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5502 {
5503 basic_block src = e->src;
5504 gimple_stmt_iterator i;
5505 gimple *stmt;
5506
5507 /* We can replace or remove a complex jump only when we have exactly
5508 two edges. */
5509 if (EDGE_COUNT (src->succs) != 2
5510 /* Verify that all targets will be TARGET. Specifically, the
5511 edge that is not E must also go to TARGET. */
5512 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5513 return NULL;
5514
5515 i = gsi_last_bb (src);
5516 if (gsi_end_p (i))
5517 return NULL;
5518
5519 stmt = gsi_stmt (i);
5520
5521 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5522 {
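      /* With the control statement removed, SRC has a single
	 destination, so the surviving edge becomes a simple fallthru.  */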
5523 gsi_remove (&i, true);
5524 e = ssa_redirect_edge (e, target);
5525 e->flags = EDGE_FALLTHRU;
5526 return e;
5527 }
5528
5529 return NULL;
5530 }
5531
5532
5533 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5534 edge representing the redirected branch. */
5535
5536 static edge
5537 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5538 {
5539 basic_block bb = e->src;
5540 gimple_stmt_iterator gsi;
5541 edge ret;
5542 gimple *stmt;
5543
5544 if (e->flags & EDGE_ABNORMAL)
5545 return NULL;
5546
5547 if (e->dest == dest)
5548 return NULL;
5549
5550 if (e->flags & EDGE_EH)
5551 return redirect_eh_edge (e, dest);
5552
5553 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5554 {
5555 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5556 if (ret)
5557 return ret;
5558 }
5559
5560 gsi = gsi_last_bb (bb);
5561 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5562
5563 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5564 {
5565 case GIMPLE_COND:
5566 /* For COND_EXPR, we only need to redirect the edge. */
5567 break;
5568
5569 case GIMPLE_GOTO:
5570 /* No non-abnormal edges should lead from a non-simple goto, and
5571 simple ones should be represented implicitly. */
5572 gcc_unreachable ();
5573
5574 case GIMPLE_SWITCH:
5575 {
5576 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5577 tree label = gimple_block_label (dest);
5578 tree cases = get_cases_for_edge (e, switch_stmt);
5579
5580 /* If we have a list of cases associated with E, then use it
5581 as it's a lot faster than walking the entire case vector. */
5582 if (cases)
5583 {
5584 edge e2 = find_edge (e->src, dest);
5585 tree last, first;
5586
5587 first = cases;
5588 while (cases)
5589 {
5590 last = cases;
5591 CASE_LABEL (cases) = label;
5592 cases = CASE_CHAIN (cases);
5593 }
5594
5595 /* If there was already an edge in the CFG, then we need
5596 to move all the cases associated with E to E2. */
5597 if (e2)
5598 {
5599 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5600
5601 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5602 CASE_CHAIN (cases2) = first;
5603 }
5604 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5605 }
5606 else
5607 {
5608 size_t i, n = gimple_switch_num_labels (switch_stmt);
5609
5610 for (i = 0; i < n; i++)
5611 {
5612 tree elt = gimple_switch_label (switch_stmt, i);
5613 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5614 CASE_LABEL (elt) = label;
5615 }
5616 }
5617 }
5618 break;
5619
5620 case GIMPLE_ASM:
5621 {
5622 gasm *asm_stmt = as_a <gasm *> (stmt);
5623 int i, n = gimple_asm_nlabels (asm_stmt);
5624 tree label = NULL;
5625
5626 for (i = 0; i < n; ++i)
5627 {
5628 tree cons = gimple_asm_label_op (asm_stmt, i);
5629 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5630 {
5631 if (!label)
5632 label = gimple_block_label (dest);
5633 TREE_VALUE (cons) = label;
5634 }
5635 }
5636
5637 /* If we didn't find any label matching the former edge in the
5638 asm labels, we must be redirecting the fallthrough
5639 edge. */
5640 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5641 }
5642 break;
5643
5644 case GIMPLE_RETURN:
5645 gsi_remove (&gsi, true);
5646 e->flags |= EDGE_FALLTHRU;
5647 break;
5648
5649 case GIMPLE_OMP_RETURN:
5650 case GIMPLE_OMP_CONTINUE:
5651 case GIMPLE_OMP_SECTIONS_SWITCH:
5652 case GIMPLE_OMP_FOR:
5653 /* The edges from OMP constructs can be simply redirected. */
5654 break;
5655
5656 case GIMPLE_EH_DISPATCH:
5657 if (!(e->flags & EDGE_FALLTHRU))
5658 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5659 break;
5660
5661 case GIMPLE_TRANSACTION:
5662 /* The ABORT edge has a stored label associated with it, otherwise
5663 the edges are simply redirectable. */
5664 if (e->flags == 0)
5665 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5666 gimple_block_label (dest));
5667 break;
5668
5669 default:
5670 /* Otherwise it must be a fallthru edge, and we don't need to
5671 do anything besides redirecting it. */
5672 gcc_assert (e->flags & EDGE_FALLTHRU);
5673 break;
5674 }
5675
5678   /* Now update the edges in the CFG.  */
5679 e = ssa_redirect_edge (e, dest);
5680
5681 return e;
5682 }
5683
5684 /* Returns true if it is possible to remove edge E by redirecting
5685 it to the destination of the other edge from E->src. */
5686
5687 static bool
5688 gimple_can_remove_branch_p (const_edge e)
5689 {
5690 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5691 return false;
5692
5693 return true;
5694 }
5695
5696 /* Simple wrapper, as we can always redirect fallthru edges. */
5697
5698 static basic_block
5699 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5700 {
5701 e = gimple_redirect_edge_and_branch (e, dest);
5702 gcc_assert (e);
5703
5704 return NULL;
5705 }
5706
5707
5708 /* Splits basic block BB after statement STMT (but at least after the
5709 labels). If STMT is NULL, BB is split just after the labels. */
5710
5711 static basic_block
5712 gimple_split_block (basic_block bb, void *stmt)
5713 {
5714 gimple_stmt_iterator gsi;
5715 gimple_stmt_iterator gsi_tgt;
5716 gimple_seq list;
5717 basic_block new_bb;
5718 edge e;
5719 edge_iterator ei;
5720
5721 new_bb = create_empty_bb (bb);
5722
5723 /* Redirect the outgoing edges. */
5724 new_bb->succs = bb->succs;
5725 bb->succs = NULL;
5726 FOR_EACH_EDGE (e, ei, new_bb->succs)
5727 e->src = new_bb;
5728
5729 /* Get a stmt iterator pointing to the first stmt to move. */
5730 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5731 gsi = gsi_after_labels (bb);
5732 else
5733 {
5734 gsi = gsi_for_stmt ((gimple *) stmt);
5735 gsi_next (&gsi);
5736 }
5737
5738 /* Move everything from GSI to the new basic block. */
5739 if (gsi_end_p (gsi))
5740 return new_bb;
5741
5742   /* Split the statement list; avoid re-creating containers, as that
5743      brings ugly quadratic memory consumption in the inliner.
5744      (We are still quadratic since we need to update stmt BB pointers,
5745      sadly.)  */
5746 gsi_split_seq_before (&gsi, &list);
5747 set_bb_seq (new_bb, list);
5748 for (gsi_tgt = gsi_start (list);
5749 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5750 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5751
5752 return new_bb;
5753 }
5754
5755
5756 /* Moves basic block BB after block AFTER. */
5757
5758 static bool
5759 gimple_move_block_after (basic_block bb, basic_block after)
5760 {
5761 if (bb->prev_bb == after)
5762 return true;
5763
5764 unlink_block (bb);
5765 link_block (bb, after);
5766
5767 return true;
5768 }
5769
5770
5771 /* Return TRUE if block BB has no executable statements, otherwise return
5772 FALSE. */
5773
5774 static bool
5775 gimple_empty_block_p (basic_block bb)
5776 {
5777 /* BB must have no executable statements. */
5778 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5779 if (phi_nodes (bb))
5780 return false;
5781 if (gsi_end_p (gsi))
5782 return true;
5783 if (is_gimple_debug (gsi_stmt (gsi)))
5784 gsi_next_nondebug (&gsi);
5785 return gsi_end_p (gsi);
5786 }
5787
5788
5789 /* Split a basic block if it ends with a conditional branch and if the
5790 other part of the block is not empty. */
5791
5792 static basic_block
5793 gimple_split_block_before_cond_jump (basic_block bb)
5794 {
5795 gimple *last, *split_point;
5796 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5797 if (gsi_end_p (gsi))
5798 return NULL;
5799 last = gsi_stmt (gsi);
5800 if (gimple_code (last) != GIMPLE_COND
5801 && gimple_code (last) != GIMPLE_SWITCH)
5802 return NULL;
5803 gsi_prev_nondebug (&gsi);
5804 split_point = gsi_stmt (gsi);
5805 return split_block (bb, split_point)->dest;
5806 }
5807
5808
5809 /* Return true if basic_block can be duplicated. */
5810
5811 static bool
5812 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5813 {
5814 return true;
5815 }
5816
5817 /* Create a duplicate of the basic block BB. NOTE: This does not
5818 preserve SSA form. */
5819
5820 static basic_block
5821 gimple_duplicate_bb (basic_block bb)
5822 {
5823 basic_block new_bb;
5824 gimple_stmt_iterator gsi_tgt;
5825
5826 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5827
5828   /* Copy the PHI nodes.  We ignore PHI node arguments here because
5829      the incoming edges have not been set up yet.  */
5830 for (gphi_iterator gpi = gsi_start_phis (bb);
5831 !gsi_end_p (gpi);
5832 gsi_next (&gpi))
5833 {
5834 gphi *phi, *copy;
5835 phi = gpi.phi ();
5836 copy = create_phi_node (NULL_TREE, new_bb);
5837 create_new_def_for (gimple_phi_result (phi), copy,
5838 gimple_phi_result_ptr (copy));
5839 gimple_set_uid (copy, gimple_uid (phi));
5840 }
5841
5842 gsi_tgt = gsi_start_bb (new_bb);
5843 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5844 !gsi_end_p (gsi);
5845 gsi_next (&gsi))
5846 {
5847 def_operand_p def_p;
5848 ssa_op_iter op_iter;
5849 tree lhs;
5850 gimple *stmt, *copy;
5851
5852 stmt = gsi_stmt (gsi);
5853 if (gimple_code (stmt) == GIMPLE_LABEL)
5854 continue;
5855
5856 /* Don't duplicate label debug stmts. */
5857 if (gimple_debug_bind_p (stmt)
5858 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5859 == LABEL_DECL)
5860 continue;
5861
5862 /* Create a new copy of STMT and duplicate STMT's virtual
5863 operands. */
5864 copy = gimple_copy (stmt);
5865 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5866
5867 maybe_duplicate_eh_stmt (copy, stmt);
5868 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5869
5870 /* When copying around a stmt writing into a local non-user
5871 aggregate, make sure it won't share stack slot with other
5872 vars. */
5873 lhs = gimple_get_lhs (stmt);
5874 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5875 {
5876 tree base = get_base_address (lhs);
5877 if (base
5878 && (TREE_CODE (base) == VAR_DECL
5879 || TREE_CODE (base) == RESULT_DECL)
5880 && DECL_IGNORED_P (base)
5881 && !TREE_STATIC (base)
5882 && !DECL_EXTERNAL (base)
5883 && (TREE_CODE (base) != VAR_DECL
5884 || !DECL_HAS_VALUE_EXPR_P (base)))
5885 DECL_NONSHAREABLE (base) = 1;
5886 }
5887
5888 /* Create new names for all the definitions created by COPY and
5889 add replacement mappings for each new name. */
5890 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5891 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5892 }
5893
5894 return new_bb;
5895 }
5896
5897 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5898
5899 static void
5900 add_phi_args_after_copy_edge (edge e_copy)
5901 {
5902 basic_block bb, bb_copy = e_copy->src, dest;
5903 edge e;
5904 edge_iterator ei;
5905 gphi *phi, *phi_copy;
5906 tree def;
5907 gphi_iterator psi, psi_copy;
5908
5909 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5910 return;
5911
5912 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5913
5914 if (e_copy->dest->flags & BB_DUPLICATED)
5915 dest = get_bb_original (e_copy->dest);
5916 else
5917 dest = e_copy->dest;
5918
5919 e = find_edge (bb, dest);
5920 if (!e)
5921 {
5922       /* During loop unrolling the target of the latch edge is copied.
5923 	 In this case we are not looking for the edge to DEST, but for
5924 	 the edge to the duplicated block whose original was DEST.  */
5925 FOR_EACH_EDGE (e, ei, bb->succs)
5926 {
5927 if ((e->dest->flags & BB_DUPLICATED)
5928 && get_bb_original (e->dest) == dest)
5929 break;
5930 }
5931
5932 gcc_assert (e != NULL);
5933 }
5934
5935 for (psi = gsi_start_phis (e->dest),
5936 psi_copy = gsi_start_phis (e_copy->dest);
5937 !gsi_end_p (psi);
5938 gsi_next (&psi), gsi_next (&psi_copy))
5939 {
5940 phi = psi.phi ();
5941 phi_copy = psi_copy.phi ();
5942 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5943 add_phi_arg (phi_copy, def, e_copy,
5944 gimple_phi_arg_location_from_edge (phi, e));
5945 }
5946 }
5947
5948
5949 /* Basic block BB_COPY was created by code duplication. Add phi node
5950 arguments for edges going out of BB_COPY. The blocks that were
5951 duplicated have BB_DUPLICATED set. */
5952
5953 void
5954 add_phi_args_after_copy_bb (basic_block bb_copy)
5955 {
5956 edge e_copy;
5957 edge_iterator ei;
5958
5959 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5960 {
5961 add_phi_args_after_copy_edge (e_copy);
5962 }
5963 }
5964
5965 /* Blocks in REGION_COPY array of length N_REGION were created by
5966 duplication of basic blocks. Add phi node arguments for edges
5967 going from these blocks. If E_COPY is not NULL, also add
5968 phi node arguments for its destination.*/
5969
5970 void
5971 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5972 edge e_copy)
5973 {
5974 unsigned i;
5975
5976 for (i = 0; i < n_region; i++)
5977 region_copy[i]->flags |= BB_DUPLICATED;
5978
5979 for (i = 0; i < n_region; i++)
5980 add_phi_args_after_copy_bb (region_copy[i]);
5981 if (e_copy)
5982 add_phi_args_after_copy_edge (e_copy);
5983
5984 for (i = 0; i < n_region; i++)
5985 region_copy[i]->flags &= ~BB_DUPLICATED;
5986 }
5987
5988 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5989 important exit edge EXIT. By important we mean that no SSA name defined
5990    inside the region is live over the other exit edges of the region.  All entry
5991 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5992 to the duplicate of the region. Dominance and loop information is
5993 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5994 UPDATE_DOMINANCE is false then we assume that the caller will update the
5995 dominance information after calling this function. The new basic
5996    blocks are stored to REGION_COPY in the same order as their originals in REGION,
5997 provided that REGION_COPY is not NULL.
5998 The function returns false if it is unable to copy the region,
5999 true otherwise. */
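/* For the primary use, loop header copying, the net effect is roughly
   the following (an illustrative sketch, not literal GIMPLE):

	while (cond)		if (cond)
	  body;		  -->	  do
				    body;
				  while (cond);
*/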
6000
6001 bool
6002 gimple_duplicate_sese_region (edge entry, edge exit,
6003 basic_block *region, unsigned n_region,
6004 basic_block *region_copy,
6005 bool update_dominance)
6006 {
6007 unsigned i;
6008 bool free_region_copy = false, copying_header = false;
6009 struct loop *loop = entry->dest->loop_father;
6010 edge exit_copy;
6011 vec<basic_block> doms;
6012 edge redirected;
6013 int total_freq = 0, entry_freq = 0;
6014 gcov_type total_count = 0, entry_count = 0;
6015
6016 if (!can_copy_bbs_p (region, n_region))
6017 return false;
6018
6019 /* Some sanity checking. Note that we do not check for all possible
6020      misuses of the functions.  I.e. if you ask to copy something weird,
6021 it will work, but the state of structures probably will not be
6022 correct. */
6023 for (i = 0; i < n_region; i++)
6024 {
6025 /* We do not handle subloops, i.e. all the blocks must belong to the
6026 same loop. */
6027 if (region[i]->loop_father != loop)
6028 return false;
6029
6030 if (region[i] != entry->dest
6031 && region[i] == loop->header)
6032 return false;
6033 }
6034
6035 /* In case the function is used for loop header copying (which is the primary
6036 use), ensure that EXIT and its copy will be new latch and entry edges. */
6037 if (loop->header == entry->dest)
6038 {
6039 copying_header = true;
6040
6041 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6042 return false;
6043
6044 for (i = 0; i < n_region; i++)
6045 if (region[i] != exit->src
6046 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6047 return false;
6048 }
6049
6050 initialize_original_copy_tables ();
6051
6052 if (copying_header)
6053 set_loop_copy (loop, loop_outer (loop));
6054 else
6055 set_loop_copy (loop, loop);
6056
6057 if (!region_copy)
6058 {
6059 region_copy = XNEWVEC (basic_block, n_region);
6060 free_region_copy = true;
6061 }
6062
6063 /* Record blocks outside the region that are dominated by something
6064 inside. */
6065 if (update_dominance)
6066 {
6067 doms.create (0);
6068 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6069 }
6070
6071 if (entry->dest->count)
6072 {
6073 total_count = entry->dest->count;
6074 entry_count = entry->count;
6075 /* Fix up corner cases, to avoid division by zero or creation of negative
6076 frequencies. */
6077 if (entry_count > total_count)
6078 entry_count = total_count;
6079 }
6080 else
6081 {
6082 total_freq = entry->dest->frequency;
6083 entry_freq = EDGE_FREQUENCY (entry);
6084 /* Fix up corner cases, to avoid division by zero or creation of negative
6085 frequencies. */
6086 if (total_freq == 0)
6087 total_freq = 1;
6088 else if (entry_freq > total_freq)
6089 entry_freq = total_freq;
6090 }
6091
6092 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6093 split_edge_bb_loc (entry), update_dominance);
6094 if (total_count)
6095 {
6096 scale_bbs_frequencies_gcov_type (region, n_region,
6097 total_count - entry_count,
6098 total_count);
6099 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6100 total_count);
6101 }
6102 else
6103 {
6104 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6105 total_freq);
6106 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6107 }
6108
6109 if (copying_header)
6110 {
6111 loop->header = exit->dest;
6112 loop->latch = exit->src;
6113 }
6114
6115 /* Redirect the entry and add the phi node arguments. */
6116 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6117 gcc_assert (redirected != NULL);
6118 flush_pending_stmts (entry);
6119
6120 /* Concerning updating of dominators: We must recount dominators
6121 for entry block and its copy. Anything that is outside of the
6122 region, but was dominated by something inside needs recounting as
6123 well. */
6124 if (update_dominance)
6125 {
6126 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6127 doms.safe_push (get_bb_original (entry->dest));
6128 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6129 doms.release ();
6130 }
6131
6132 /* Add the other PHI node arguments. */
6133 add_phi_args_after_copy (region_copy, n_region, NULL);
6134
6135 if (free_region_copy)
6136 free (region_copy);
6137
6138 free_original_copy_tables ();
6139 return true;
6140 }
6141
6142 /* Checks if BB is part of the region defined by N_REGION BBS. */
6143 static bool
6144 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6145 {
6146 unsigned int n;
6147
6148 for (n = 0; n < n_region; n++)
6149 {
6150 if (bb == bbs[n])
6151 return true;
6152 }
6153 return false;
6154 }
6155
6156 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6157    are stored to REGION_COPY in the same order in which they appear
6158 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6159 the region, EXIT an exit from it. The condition guarding EXIT
6160 is moved to ENTRY. Returns true if duplication succeeds, false
6161 otherwise.
6162
6163 For example,
6164
6165 some_code;
6166 if (cond)
6167 A;
6168 else
6169 B;
6170
6171 is transformed to
6172
6173 if (cond)
6174 {
6175 some_code;
6176 A;
6177 }
6178 else
6179 {
6180 some_code;
6181 B;
6182 }
6183 */
6184
6185 bool
6186 gimple_duplicate_sese_tail (edge entry, edge exit,
6187 			    basic_block *region, unsigned n_region,
6188 			    basic_block *region_copy)
6189 {
6190 unsigned i;
6191 bool free_region_copy = false;
6192 struct loop *loop = exit->dest->loop_father;
6193 struct loop *orig_loop = entry->dest->loop_father;
6194 basic_block switch_bb, entry_bb, nentry_bb;
6195 vec<basic_block> doms;
6196 int total_freq = 0, exit_freq = 0;
6197 gcov_type total_count = 0, exit_count = 0;
6198 edge exits[2], nexits[2], e;
6199 gimple_stmt_iterator gsi;
6200 gimple *cond_stmt;
6201 edge sorig, snew;
6202 basic_block exit_bb;
6203 gphi_iterator psi;
6204 gphi *phi;
6205 tree def;
6206 struct loop *target, *aloop, *cloop;
6207
6208 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6209 exits[0] = exit;
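  /* EXITS[1] is the successor edge of EXIT->src other than EXIT itself:
     the comparison yields index 1 when EXIT is successor 0, and index 0
     otherwise.  */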
6210 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6211
6212 if (!can_copy_bbs_p (region, n_region))
6213 return false;
6214
6215 initialize_original_copy_tables ();
6216 set_loop_copy (orig_loop, loop);
6217
6218   target = loop;
6219 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6220 {
6221 if (bb_part_of_region_p (aloop->header, region, n_region))
6222 {
6223 cloop = duplicate_loop (aloop, target);
6224 duplicate_subloops (aloop, cloop);
6225 }
6226 }
6227
6228 if (!region_copy)
6229 {
6230 region_copy = XNEWVEC (basic_block, n_region);
6231 free_region_copy = true;
6232 }
6233
6234 gcc_assert (!need_ssa_update_p (cfun));
6235
6236 /* Record blocks outside the region that are dominated by something
6237 inside. */
6238 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6239
6240 if (exit->src->count)
6241 {
6242 total_count = exit->src->count;
6243 exit_count = exit->count;
6244 /* Fix up corner cases, to avoid division by zero or creation of negative
6245 frequencies. */
6246 if (exit_count > total_count)
6247 exit_count = total_count;
6248 }
6249 else
6250 {
6251 total_freq = exit->src->frequency;
6252 exit_freq = EDGE_FREQUENCY (exit);
6253 /* Fix up corner cases, to avoid division by zero or creation of negative
6254 frequencies. */
6255 if (total_freq == 0)
6256 total_freq = 1;
6257 if (exit_freq > total_freq)
6258 exit_freq = total_freq;
6259 }
6260
6261 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6262 split_edge_bb_loc (exit), true);
6263 if (total_count)
6264 {
6265 scale_bbs_frequencies_gcov_type (region, n_region,
6266 total_count - exit_count,
6267 total_count);
6268 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6269 total_count);
6270 }
6271 else
6272 {
6273 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6274 total_freq);
6275 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6276 }
6277
6278 /* Create the switch block, and put the exit condition to it. */
6279 entry_bb = entry->dest;
6280 nentry_bb = get_bb_copy (entry_bb);
6281 if (!last_stmt (entry->src)
6282 || !stmt_ends_bb_p (last_stmt (entry->src)))
6283 switch_bb = entry->src;
6284 else
6285 switch_bb = split_edge (entry);
6286 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6287
6288 gsi = gsi_last_bb (switch_bb);
6289 cond_stmt = last_stmt (exit->src);
6290 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6291 cond_stmt = gimple_copy (cond_stmt);
6292
6293 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6294
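  /* The original successor of SWITCH_BB keeps the flags of the non-exit
     edge, while the new edge into the copied region takes the flags of
     the exit edge, mirroring the copied condition.  */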
6295 sorig = single_succ_edge (switch_bb);
6296 sorig->flags = exits[1]->flags;
6297 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6298
6299 /* Register the new edge from SWITCH_BB in loop exit lists. */
6300 rescan_loop_exit (snew, true, false);
6301
6302 /* Add the PHI node arguments. */
6303 add_phi_args_after_copy (region_copy, n_region, snew);
6304
6305 /* Get rid of now superfluous conditions and associated edges (and phi node
6306 arguments). */
6307 exit_bb = exit->dest;
6308
6309 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6310 PENDING_STMT (e) = NULL;
6311
6312 /* The latch of ORIG_LOOP was copied, and so was the backedge
6313 to the original header. We redirect this backedge to EXIT_BB. */
6314 for (i = 0; i < n_region; i++)
6315 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6316 {
6317 gcc_assert (single_succ_edge (region_copy[i]));
6318 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6319 PENDING_STMT (e) = NULL;
6320 for (psi = gsi_start_phis (exit_bb);
6321 !gsi_end_p (psi);
6322 gsi_next (&psi))
6323 {
6324 phi = psi.phi ();
6325 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6326 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6327 }
6328 }
6329 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6330 PENDING_STMT (e) = NULL;
6331
6332 /* Anything that is outside of the region, but was dominated by something
6333 inside needs to update dominance info. */
6334 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6335 doms.release ();
6336 /* Update the SSA web. */
6337 update_ssa (TODO_update_ssa);
6338
6339 if (free_region_copy)
6340 free (region_copy);
6341
6342 free_original_copy_tables ();
6343 return true;
6344 }
6345
6346 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6347 adding blocks when the dominator traversal reaches EXIT. This
6348 function silently assumes that ENTRY strictly dominates EXIT. */
6349
6350 void
6351 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6352 vec<basic_block> *bbs_p)
6353 {
6354 basic_block son;
6355
6356 for (son = first_dom_son (CDI_DOMINATORS, entry);
6357 son;
6358 son = next_dom_son (CDI_DOMINATORS, son))
6359 {
6360 bbs_p->safe_push (son);
6361 if (son != exit)
6362 gather_blocks_in_sese_region (son, exit, bbs_p);
6363 }
6364 }
6365
6366 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6367 The duplicates are recorded in VARS_MAP. */
6368
6369 static void
6370 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6371 tree to_context)
6372 {
6373 tree t = *tp, new_t;
6374 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6375
6376 if (DECL_CONTEXT (t) == to_context)
6377 return;
6378
6379 bool existed;
6380 tree &loc = vars_map->get_or_insert (t, &existed);
6381
6382 if (!existed)
6383 {
6384 if (SSA_VAR_P (t))
6385 {
6386 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6387 add_local_decl (f, new_t);
6388 }
6389 else
6390 {
6391 gcc_assert (TREE_CODE (t) == CONST_DECL);
6392 new_t = copy_node (t);
6393 }
6394 DECL_CONTEXT (new_t) = to_context;
6395
6396 loc = new_t;
6397 }
6398 else
6399 new_t = loc;
6400
6401 *tp = new_t;
6402 }
6403
6404
6405 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6406 VARS_MAP maps old ssa names and var_decls to the new ones. */
6407
6408 static tree
6409 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6410 tree to_context)
6411 {
6412 tree new_name;
6413
6414 gcc_assert (!virtual_operand_p (name));
6415
6416 tree *loc = vars_map->get (name);
6417
6418 if (!loc)
6419 {
6420 tree decl = SSA_NAME_VAR (name);
6421 if (decl)
6422 {
6423 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6424 replace_by_duplicate_decl (&decl, vars_map, to_context);
6425 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6426 decl, SSA_NAME_DEF_STMT (name));
6427 }
6428 else
6429 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6430 name, SSA_NAME_DEF_STMT (name));
6431
6432 /* Now that we've used the def stmt to define new_name, make sure it
6433 doesn't define name anymore. */
6434 SSA_NAME_DEF_STMT (name) = NULL;
6435
6436 vars_map->put (name, new_name);
6437 }
6438 else
6439 new_name = *loc;
6440
6441 return new_name;
6442 }
6443
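/* Context for the move_stmt_r / move_stmt_op walkers below, describing
   how statements and operands are relocated from FROM_CONTEXT to
   TO_CONTEXT.  */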
6444 struct move_stmt_d
6445 {
6446 tree orig_block;
6447 tree new_block;
6448 tree from_context;
6449 tree to_context;
6450 hash_map<tree, tree> *vars_map;
6451 htab_t new_label_map;
6452 hash_map<void *, void *> *eh_map;
6453 bool remap_decls_p;
6454 };
6455
6456 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6457    contained in *TP if it was ORIG_BLOCK previously, and change the
6458 DECL_CONTEXT of every local variable referenced in *TP. */
6459
6460 static tree
6461 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6462 {
6463 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6464 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6465 tree t = *tp;
6466
6467 if (EXPR_P (t))
6468 {
6469 tree block = TREE_BLOCK (t);
6470 if (block == p->orig_block
6471 || (p->orig_block == NULL_TREE
6472 && block != NULL_TREE))
6473 TREE_SET_BLOCK (t, p->new_block);
6474 #ifdef ENABLE_CHECKING
6475 else if (block != NULL_TREE)
6476 {
6477 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6478 block = BLOCK_SUPERCONTEXT (block);
6479 gcc_assert (block == p->orig_block);
6480 }
6481 #endif
6482 }
6483 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6484 {
6485 if (TREE_CODE (t) == SSA_NAME)
6486 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6487 else if (TREE_CODE (t) == PARM_DECL
6488 && gimple_in_ssa_p (cfun))
6489 *tp = *(p->vars_map->get (t));
6490 else if (TREE_CODE (t) == LABEL_DECL)
6491 {
6492 if (p->new_label_map)
6493 {
6494 struct tree_map in, *out;
6495 in.base.from = t;
6496 out = (struct tree_map *)
6497 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6498 if (out)
6499 *tp = t = out->to;
6500 }
6501
6502 DECL_CONTEXT (t) = p->to_context;
6503 }
6504 else if (p->remap_decls_p)
6505 {
6506 /* Replace T with its duplicate. T should no longer appear in the
6507 parent function, so this looks wasteful; however, it may appear
6508 in referenced_vars, and more importantly, as virtual operands of
6509 statements, and in alias lists of other variables. It would be
6510 quite difficult to expunge it from all those places. ??? It might
6511 suffice to do this for addressable variables. */
6512 if ((TREE_CODE (t) == VAR_DECL
6513 && !is_global_var (t))
6514 || TREE_CODE (t) == CONST_DECL)
6515 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6516 }
6517 *walk_subtrees = 0;
6518 }
6519 else if (TYPE_P (t))
6520 *walk_subtrees = 0;
6521
6522 return NULL_TREE;
6523 }
6524
6525 /* Helper for move_stmt_r. Given an EH region number for the source
6526    function, map that to the duplicate EH region number in the dest.  */
6527
6528 static int
6529 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6530 {
6531 eh_region old_r, new_r;
6532
6533 old_r = get_eh_region_from_number (old_nr);
6534 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6535
6536 return new_r->index;
6537 }
6538
6539 /* Similar, but operate on INTEGER_CSTs. */
6540
6541 static tree
6542 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6543 {
6544 int old_nr, new_nr;
6545
6546 old_nr = tree_to_shwi (old_t_nr);
6547 new_nr = move_stmt_eh_region_nr (old_nr, p);
6548
6549 return build_int_cst (integer_type_node, new_nr);
6550 }
6551
6552 /* Like move_stmt_op, but for gimple statements.
6553
6554 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6555 contained in the current statement in *GSI_P and change the
6556 DECL_CONTEXT of every local variable referenced in the current
6557 statement. */
6558
6559 static tree
6560 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6561 struct walk_stmt_info *wi)
6562 {
6563 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6564 gimple *stmt = gsi_stmt (*gsi_p);
6565 tree block = gimple_block (stmt);
6566
6567 if (block == p->orig_block
6568 || (p->orig_block == NULL_TREE
6569 && block != NULL_TREE))
6570 gimple_set_block (stmt, p->new_block);
6571
6572 switch (gimple_code (stmt))
6573 {
6574 case GIMPLE_CALL:
6575 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6576 {
6577 tree r, fndecl = gimple_call_fndecl (stmt);
6578 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6579 switch (DECL_FUNCTION_CODE (fndecl))
6580 {
6581 case BUILT_IN_EH_COPY_VALUES:
6582 r = gimple_call_arg (stmt, 1);
6583 r = move_stmt_eh_region_tree_nr (r, p);
6584 gimple_call_set_arg (stmt, 1, r);
6585 /* FALLTHRU */
6586
6587 case BUILT_IN_EH_POINTER:
6588 case BUILT_IN_EH_FILTER:
6589 r = gimple_call_arg (stmt, 0);
6590 r = move_stmt_eh_region_tree_nr (r, p);
6591 gimple_call_set_arg (stmt, 0, r);
6592 break;
6593
6594 default:
6595 break;
6596 }
6597 }
6598 break;
6599
6600 case GIMPLE_RESX:
6601 {
6602 gresx *resx_stmt = as_a <gresx *> (stmt);
6603 int r = gimple_resx_region (resx_stmt);
6604 r = move_stmt_eh_region_nr (r, p);
6605 gimple_resx_set_region (resx_stmt, r);
6606 }
6607 break;
6608
6609 case GIMPLE_EH_DISPATCH:
6610 {
6611 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6612 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6613 r = move_stmt_eh_region_nr (r, p);
6614 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6615 }
6616 break;
6617
6618 case GIMPLE_OMP_RETURN:
6619 case GIMPLE_OMP_CONTINUE:
6620 break;
6621 default:
6622 if (is_gimple_omp (stmt))
6623 {
6624 /* Do not remap variables inside OMP directives. Variables
6625 referenced in clauses and directive header belong to the
6626 parent function and should not be moved into the child
6627 function. */
6628 bool save_remap_decls_p = p->remap_decls_p;
6629 p->remap_decls_p = false;
6630 *handled_ops_p = true;
6631
6632 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6633 move_stmt_op, wi);
6634
6635 p->remap_decls_p = save_remap_decls_p;
6636 }
6637 break;
6638 }
6639
6640 return NULL_TREE;
6641 }
6642
6643 /* Move basic block BB from function CFUN to function DEST_FN. The
6644 block is moved out of the original linked list and placed after
6645 block AFTER in the new list. Also, the block is removed from the
6646 original array of blocks and placed in DEST_FN's array of blocks.
6647    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6648 updated to reflect the moved edges.
6649
6650    The local variables are remapped to new instances; VARS_MAP is used
6651 to record the mapping. */
6652
6653 static void
6654 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6655 basic_block after, bool update_edge_count_p,
6656 struct move_stmt_d *d)
6657 {
6658 struct control_flow_graph *cfg;
6659 edge_iterator ei;
6660 edge e;
6661 gimple_stmt_iterator si;
6662 unsigned old_len, new_len;
6663
6664 /* Remove BB from dominance structures. */
6665 delete_from_dominance_info (CDI_DOMINATORS, bb);
6666
6667 /* Move BB from its current loop to the copy in the new function. */
6668 if (current_loops)
6669 {
6670 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6671 if (new_loop)
6672 bb->loop_father = new_loop;
6673 }
6674
6675 /* Link BB to the new linked list. */
6676 move_block_after (bb, after);
6677
6678 /* Update the edge count in the corresponding flowgraphs. */
6679 if (update_edge_count_p)
6680 FOR_EACH_EDGE (e, ei, bb->succs)
6681 {
6682 cfun->cfg->x_n_edges--;
6683 dest_cfun->cfg->x_n_edges++;
6684 }
6685
6686 /* Remove BB from the original basic block array. */
6687 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6688 cfun->cfg->x_n_basic_blocks--;
6689
6690 /* Grow DEST_CFUN's basic block array if needed. */
6691 cfg = dest_cfun->cfg;
6692 cfg->x_n_basic_blocks++;
6693 if (bb->index >= cfg->x_last_basic_block)
6694 cfg->x_last_basic_block = bb->index + 1;
6695
6696 old_len = vec_safe_length (cfg->x_basic_block_info);
6697 if ((unsigned) cfg->x_last_basic_block >= old_len)
6698 {
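      /* Grow by about 25% more than currently required, to amortize
	 repeated moves into this function.  */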
6699 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6700 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6701 }
6702
6703 (*cfg->x_basic_block_info)[bb->index] = bb;
6704
6705 /* Remap the variables in phi nodes. */
6706 for (gphi_iterator psi = gsi_start_phis (bb);
6707 !gsi_end_p (psi); )
6708 {
6709 gphi *phi = psi.phi ();
6710 use_operand_p use;
6711 tree op = PHI_RESULT (phi);
6712 ssa_op_iter oi;
6713 unsigned i;
6714
6715 if (virtual_operand_p (op))
6716 {
6717 /* Remove the phi nodes for virtual operands (alias analysis will be
6718 run for the new function, anyway). */
6719 remove_phi_node (&psi, true);
6720 continue;
6721 }
6722
6723 SET_PHI_RESULT (phi,
6724 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6725 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6726 {
6727 op = USE_FROM_PTR (use);
6728 if (TREE_CODE (op) == SSA_NAME)
6729 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6730 }
6731
6732 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6733 {
6734 location_t locus = gimple_phi_arg_location (phi, i);
6735 tree block = LOCATION_BLOCK (locus);
6736
6737 if (locus == UNKNOWN_LOCATION)
6738 continue;
6739 if (d->orig_block == NULL_TREE || block == d->orig_block)
6740 {
6741 if (d->new_block == NULL_TREE)
6742 locus = LOCATION_LOCUS (locus);
6743 else
6744 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6745 gimple_phi_arg_set_location (phi, i, locus);
6746 }
6747 }
6748
6749 gsi_next (&psi);
6750 }
6751
6752 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6753 {
6754 gimple *stmt = gsi_stmt (si);
6755 struct walk_stmt_info wi;
6756
6757 memset (&wi, 0, sizeof (wi));
6758 wi.info = d;
6759 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6760
6761 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6762 {
6763 tree label = gimple_label_label (label_stmt);
6764 int uid = LABEL_DECL_UID (label);
6765
6766 gcc_assert (uid > -1);
6767
6768 old_len = vec_safe_length (cfg->x_label_to_block_map);
6769 if (old_len <= (unsigned) uid)
6770 {
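	  /* Grow to roughly 1.5 * UID to amortize future label moves.  */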
6771 new_len = 3 * uid / 2 + 1;
6772 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6773 }
6774
6775 (*cfg->x_label_to_block_map)[uid] = bb;
6776 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6777
6778 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6779
6780 if (uid >= dest_cfun->cfg->last_label_uid)
6781 dest_cfun->cfg->last_label_uid = uid + 1;
6782 }
6783
6784 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6785 remove_stmt_from_eh_lp_fn (cfun, stmt);
6786
6787 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6788 gimple_remove_stmt_histograms (cfun, stmt);
6789
6790 /* We cannot leave any operands allocated from the operand caches of
6791 the current function. */
6792 free_stmt_operands (cfun, stmt);
6793 push_cfun (dest_cfun);
6794 update_stmt (stmt);
6795 pop_cfun ();
6796 }
6797
6798 FOR_EACH_EDGE (e, ei, bb->succs)
6799 if (e->goto_locus != UNKNOWN_LOCATION)
6800 {
6801 tree block = LOCATION_BLOCK (e->goto_locus);
6802 if (d->orig_block == NULL_TREE
6803 || block == d->orig_block)
6804 e->goto_locus = d->new_block ?
6805 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6806 LOCATION_LOCUS (e->goto_locus);
6807 }
6808 }
6809
6810 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6811 the outermost EH region. Use REGION as the incoming base EH region. */
6812
6813 static eh_region
6814 find_outermost_region_in_block (struct function *src_cfun,
6815 basic_block bb, eh_region region)
6816 {
6817 gimple_stmt_iterator si;
6818
6819 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6820 {
6821 gimple *stmt = gsi_stmt (si);
6822 eh_region stmt_region;
6823 int lp_nr;
6824
6825 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6826 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6827 if (stmt_region)
6828 {
6829 if (region == NULL)
6830 region = stmt_region;
6831 else if (stmt_region != region)
6832 {
6833 region = eh_region_outermost (src_cfun, stmt_region, region);
6834 gcc_assert (region != NULL);
6835 }
6836 }
6837 }
6838
6839 return region;
6840 }
6841
6842 static tree
6843 new_label_mapper (tree decl, void *data)
6844 {
6845 htab_t hash = (htab_t) data;
6846 struct tree_map *m;
6847 void **slot;
6848
6849 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6850
6851 m = XNEW (struct tree_map);
6852 m->hash = DECL_UID (decl);
6853 m->base.from = decl;
6854 m->to = create_artificial_label (UNKNOWN_LOCATION);
6855 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6856 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6857 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6858
6859 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6860 gcc_assert (*slot == NULL);
6861
6862 *slot = m;
6863
6864 return m->to;
6865 }
6866
6867 /* Tree walker to replace the decls used inside value expressions by
6868 duplicates. */
6869
6870 static tree
6871 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6872 {
6873 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6874
6875 switch (TREE_CODE (*tp))
6876 {
6877 case VAR_DECL:
6878 case PARM_DECL:
6879 case RESULT_DECL:
6880 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6881 break;
6882 default:
6883 break;
6884 }
6885
6886 if (IS_TYPE_OR_DECL_P (*tp))
6887 *walk_subtrees = false;
6888
6889 return NULL;
6890 }
6891
6892 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6893 subblocks. */
6894
6895 static void
6896 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6897 tree to_context)
6898 {
6899 tree *tp, t;
6900
6901 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6902 {
6903 t = *tp;
6904 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6905 continue;
6906 replace_by_duplicate_decl (&t, vars_map, to_context);
6907 if (t != *tp)
6908 {
6909 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6910 {
6911 tree x = DECL_VALUE_EXPR (*tp);
6912 struct replace_decls_d rd = { vars_map, to_context };
6913 	      x = unshare_expr (x);
6914 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6915 SET_DECL_VALUE_EXPR (t, x);
6916 DECL_HAS_VALUE_EXPR_P (t) = 1;
6917 }
6918 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6919 *tp = t;
6920 }
6921 }
6922
6923 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6924 replace_block_vars_by_duplicates (block, vars_map, to_context);
6925 }
6926
6927 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6928 from FN1 to FN2. */
6929
6930 static void
6931 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6932 struct loop *loop)
6933 {
6934 /* Discard it from the old loop array. */
6935 (*get_loops (fn1))[loop->num] = NULL;
6936
6937 /* Place it in the new loop array, assigning it a new number. */
6938 loop->num = number_of_loops (fn2);
6939 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6940
6941 /* Recurse to children. */
6942 for (loop = loop->inner; loop; loop = loop->next)
6943 fixup_loop_arrays_after_move (fn1, fn2, loop);
6944 }
6945
6946 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6947    delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
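/* For example (illustrative block names), {E, A, B, X} below is a valid
   SESE region with entry E and exit X:

	  pred
	   |
	   E
	  / \
	 A   B
	  \ /
	   X
	   |
	  succ
*/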
6948
6949 DEBUG_FUNCTION void
6950 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6951 {
6952 basic_block bb;
6953 edge_iterator ei;
6954 edge e;
6955 bitmap bbs = BITMAP_ALLOC (NULL);
6956 int i;
6957
6958 gcc_assert (entry != NULL);
6959 gcc_assert (entry != exit);
6960 gcc_assert (bbs_p != NULL);
6961
6962 gcc_assert (bbs_p->length () > 0);
6963
6964 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6965 bitmap_set_bit (bbs, bb->index);
6966
6967 gcc_assert (bitmap_bit_p (bbs, entry->index));
6968 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6969
6970 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6971 {
6972 if (bb == entry)
6973 {
6974 gcc_assert (single_pred_p (entry));
6975 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6976 }
6977 else
6978 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6979 {
6980 e = ei_edge (ei);
6981 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6982 }
6983
6984 if (bb == exit)
6985 {
6986 gcc_assert (single_succ_p (exit));
6987 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6988 }
6989 else
6990 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6991 {
6992 e = ei_edge (ei);
6993 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6994 }
6995 }
6996
6997 BITMAP_FREE (bbs);
6998 }
6999
7000 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7001
7002 bool
7003 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7004 {
7005 bitmap release_names = (bitmap)data;
7006
7007 if (TREE_CODE (from) != SSA_NAME)
7008 return true;
7009
7010 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7011 return true;
7012 }
7013
7014 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7015 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7016 single basic block in the original CFG and the new basic block is
7017 returned. DEST_CFUN must not have a CFG yet.
7018
7019 Note that the region need not be a pure SESE region. Blocks inside
7020 the region may contain calls to abort/exit. The only restriction
7021 is that ENTRY_BB should be the only entry point and it must
7022 dominate EXIT_BB.
7023
7024 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7025    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7026 to the new function.
7027
7028 All local variables referenced in the region are assumed to be in
7029 the corresponding BLOCK_VARS and unexpanded variable lists
7030 associated with DEST_CFUN.
7031
7032 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7033 reimplement move_sese_region_to_fn by duplicating the region rather than
7034 moving it. */
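/* Conceptual usage (a sketch; the real caller is the OMP outlining
   code, and CHILD_FN and the block names here are hypothetical):

	child_cfun = DECL_STRUCT_FUNCTION (child_fn);
	new_bb = move_sese_region_to_fn (child_cfun, region_entry_bb,
					 region_exit_bb, orig_block);
*/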
7035
7036 basic_block
7037 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7038 basic_block exit_bb, tree orig_block)
7039 {
7040 vec<basic_block> bbs, dom_bbs;
7041 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7042 basic_block after, bb, *entry_pred, *exit_succ, abb;
7043 struct function *saved_cfun = cfun;
7044 int *entry_flag, *exit_flag;
7045 unsigned *entry_prob, *exit_prob;
7046 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7047 edge e;
7048 edge_iterator ei;
7049 htab_t new_label_map;
7050 hash_map<void *, void *> *eh_map;
7051 struct loop *loop = entry_bb->loop_father;
7052 struct loop *loop0 = get_loop (saved_cfun, 0);
7053 struct move_stmt_d d;
7054
7055 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7056 region. */
7057 gcc_assert (entry_bb != exit_bb
7058 && (!exit_bb
7059 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7060
7061 /* Collect all the blocks in the region. Manually add ENTRY_BB
7062      because it won't be added by gather_blocks_in_sese_region.  */
7063 bbs.create (0);
7064 bbs.safe_push (entry_bb);
7065 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7066 #ifdef ENABLE_CHECKING
7067 verify_sese (entry_bb, exit_bb, &bbs);
7068 #endif
7069
7070 /* The blocks that used to be dominated by something in BBS will now be
7071 dominated by the new block. */
7072 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7073 bbs.address (),
7074 bbs.length ());
7075
7076 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7077 the predecessor edges to ENTRY_BB and the successor edges to
7078 EXIT_BB so that we can re-attach them to the new basic block that
7079 will replace the region. */
7080 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7081 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7082 entry_flag = XNEWVEC (int, num_entry_edges);
7083 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7084 i = 0;
7085 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7086 {
7087 entry_prob[i] = e->probability;
7088 entry_flag[i] = e->flags;
7089 entry_pred[i++] = e->src;
7090 remove_edge (e);
7091 }
7092
7093 if (exit_bb)
7094 {
7095 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7096 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7097 exit_flag = XNEWVEC (int, num_exit_edges);
7098 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7099 i = 0;
7100 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7101 {
7102 exit_prob[i] = e->probability;
7103 exit_flag[i] = e->flags;
7104 exit_succ[i++] = e->dest;
7105 remove_edge (e);
7106 }
7107 }
7108 else
7109 {
7110 num_exit_edges = 0;
7111 exit_succ = NULL;
7112 exit_flag = NULL;
7113 exit_prob = NULL;
7114 }
7115
7116 /* Switch context to the child function to initialize DEST_FN's CFG. */
7117 gcc_assert (dest_cfun->cfg == NULL);
7118 push_cfun (dest_cfun);
7119
7120 init_empty_tree_cfg ();
7121
7122 /* Initialize EH information for the new function. */
7123 eh_map = NULL;
7124 new_label_map = NULL;
7125 if (saved_cfun->eh)
7126 {
7127 eh_region region = NULL;
7128
7129 FOR_EACH_VEC_ELT (bbs, i, bb)
7130 region = find_outermost_region_in_block (saved_cfun, bb, region);
7131
7132 init_eh_for_function ();
7133 if (region != NULL)
7134 {
7135 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7136 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7137 new_label_mapper, new_label_map);
7138 }
7139 }
7140
7141 /* Initialize an empty loop tree. */
7142 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7143 init_loops_structure (dest_cfun, loops, 1);
7144 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7145 set_loops_for_fn (dest_cfun, loops);
7146
7147 /* Move the outlined loop tree part. */
7148 num_nodes = bbs.length ();
7149 FOR_EACH_VEC_ELT (bbs, i, bb)
7150 {
7151 if (bb->loop_father->header == bb)
7152 {
7153 struct loop *this_loop = bb->loop_father;
7154 struct loop *outer = loop_outer (this_loop);
7155 if (outer == loop
7156 /* If the SESE region contains some bbs ending with
7157 a noreturn call, those are considered to belong
7158 to the outermost loop in saved_cfun, rather than
7159 the entry_bb's loop_father. */
7160 || outer == loop0)
7161 {
7162 if (outer != loop)
7163 num_nodes -= this_loop->num_nodes;
7164 flow_loop_tree_node_remove (bb->loop_father);
7165 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7166 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7167 }
7168 }
7169 else if (bb->loop_father == loop0 && loop0 != loop)
7170 num_nodes--;
7171
7172 /* Remove loop exits from the outlined region. */
7173 if (loops_for_fn (saved_cfun)->exits)
7174 FOR_EACH_EDGE (e, ei, bb->succs)
7175 {
7176 struct loops *l = loops_for_fn (saved_cfun);
7177 loop_exit **slot
7178 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7179 NO_INSERT);
7180 if (slot)
7181 l->exits->clear_slot (slot);
7182 }
7183 }
7184
7185
7186 /* Adjust the number of blocks in the tree root of the outlined part. */
7187 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7188
7189 /* Setup a mapping to be used by move_block_to_fn. */
7190 loop->aux = current_loops->tree_root;
7191 loop0->aux = current_loops->tree_root;
7192
7193 pop_cfun ();
7194
7195 /* Move blocks from BBS into DEST_CFUN. */
7196 gcc_assert (bbs.length () >= 2);
7197 after = dest_cfun->cfg->x_entry_block_ptr;
7198 hash_map<tree, tree> vars_map;
7199
7200 memset (&d, 0, sizeof (d));
7201 d.orig_block = orig_block;
7202 d.new_block = DECL_INITIAL (dest_cfun->decl);
7203 d.from_context = cfun->decl;
7204 d.to_context = dest_cfun->decl;
7205 d.vars_map = &vars_map;
7206 d.new_label_map = new_label_map;
7207 d.eh_map = eh_map;
7208 d.remap_decls_p = true;
7209
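  /* In SSA form, give every parameter of the destination function a
     fresh default definition and record it in VARS_MAP, so that uses in
     the moved blocks are rewritten to the new names.  */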
7210 if (gimple_in_ssa_p (cfun))
7211 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7212 {
7213 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7214 set_ssa_default_def (dest_cfun, arg, narg);
7215 vars_map.put (arg, narg);
7216 }
7217
7218 FOR_EACH_VEC_ELT (bbs, i, bb)
7219 {
7220 /* No need to update edge counts on the last block. It has
7221 already been updated earlier when we detached the region from
7222 the original CFG. */
7223 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7224 after = bb;
7225 }
7226
7227 loop->aux = NULL;
7228 loop0->aux = NULL;
7229 /* Loop sizes are no longer correct, fix them up. */
7230 loop->num_nodes -= num_nodes;
7231 for (struct loop *outer = loop_outer (loop);
7232 outer; outer = loop_outer (outer))
7233 outer->num_nodes -= num_nodes;
7234 loop0->num_nodes -= bbs.length () - num_nodes;
7235
7236 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7237 {
7238 struct loop *aloop;
7239 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7240 if (aloop != NULL)
7241 {
7242 if (aloop->simduid)
7243 {
7244 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7245 d.to_context);
7246 dest_cfun->has_simduid_loops = true;
7247 }
7248 if (aloop->force_vectorize)
7249 dest_cfun->has_force_vectorize_loops = true;
7250 }
7251 }
7252
7253 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7254 if (orig_block)
7255 {
7256 tree block;
7257 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7258 == NULL_TREE);
7259 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7260 = BLOCK_SUBBLOCKS (orig_block);
7261 for (block = BLOCK_SUBBLOCKS (orig_block);
7262 block; block = BLOCK_CHAIN (block))
7263 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7264 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7265 }
7266
7267 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7268 &vars_map, dest_cfun->decl);
7269
7270 if (new_label_map)
7271 htab_delete (new_label_map);
7272 if (eh_map)
7273 delete eh_map;
7274
7275 if (gimple_in_ssa_p (cfun))
7276 {
7277 /* We need to release ssa-names in a defined order, so first find them,
7278 and then iterate in ascending version order. */
7279 bitmap release_names = BITMAP_ALLOC (NULL);
7280 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7281 bitmap_iterator bi;
7282 unsigned i;
7283 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7284 release_ssa_name (ssa_name (i));
7285 BITMAP_FREE (release_names);
7286 }
7287
7288 /* Rewire the entry and exit blocks. The successor to the entry
7289 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7290 the child function. Similarly, the predecessor of DEST_FN's
7291 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7292 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7293      various CFG manipulation functions get to the right CFG.
7294
7295 FIXME, this is silly. The CFG ought to become a parameter to
7296 these helpers. */
7297 push_cfun (dest_cfun);
7298 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7299 if (exit_bb)
7300 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7301 pop_cfun ();
7302
7303 /* Back in the original function, the SESE region has disappeared,
7304 create a new basic block in its place. */
7305 bb = create_empty_bb (entry_pred[0]);
7306 if (current_loops)
7307 add_bb_to_loop (bb, loop);
7308 for (i = 0; i < num_entry_edges; i++)
7309 {
7310 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7311 e->probability = entry_prob[i];
7312 }
7313
7314 for (i = 0; i < num_exit_edges; i++)
7315 {
7316 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7317 e->probability = exit_prob[i];
7318 }
7319
7320 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7321 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7322 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7323 dom_bbs.release ();
7324
7325 if (exit_bb)
7326 {
7327 free (exit_prob);
7328 free (exit_flag);
7329 free (exit_succ);
7330 }
7331 free (entry_prob);
7332 free (entry_flag);
7333 free (entry_pred);
7334 bbs.release ();
7335
7336 return bb;
7337 }
7338
7339
7340 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7341    dumpfile.h).  */
7342
7343 void
7344 dump_function_to_file (tree fndecl, FILE *file, int flags)
7345 {
7346 tree arg, var, old_current_fndecl = current_function_decl;
7347 struct function *dsf;
7348 bool ignore_topmost_bind = false, any_var = false;
7349 basic_block bb;
7350 tree chain;
7351 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7352 && decl_is_tm_clone (fndecl));
7353 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7354
7355 current_function_decl = fndecl;
7356 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7357
7358 arg = DECL_ARGUMENTS (fndecl);
7359 while (arg)
7360 {
7361 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7362 fprintf (file, " ");
7363 print_generic_expr (file, arg, dump_flags);
7364 if (flags & TDF_VERBOSE)
7365 print_node (file, "", arg, 4);
7366 if (DECL_CHAIN (arg))
7367 fprintf (file, ", ");
7368 arg = DECL_CHAIN (arg);
7369 }
7370 fprintf (file, ")\n");
7371
7372 if (flags & TDF_VERBOSE)
7373 print_node (file, "", fndecl, 2);
7374
7375 dsf = DECL_STRUCT_FUNCTION (fndecl);
7376 if (dsf && (flags & TDF_EH))
7377 dump_eh_tree (file, dsf);
7378
7379 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7380 {
7381 dump_node (fndecl, TDF_SLIM | flags, file);
7382 current_function_decl = old_current_fndecl;
7383 return;
7384 }
7385
7386 /* When GIMPLE is lowered, the variables are no longer available in
7387 BIND_EXPRs, so display them separately. */
7388 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7389 {
7390 unsigned ix;
7391 ignore_topmost_bind = true;
7392
7393 fprintf (file, "{\n");
7394 if (!vec_safe_is_empty (fun->local_decls))
7395 FOR_EACH_LOCAL_DECL (fun, ix, var)
7396 {
7397 print_generic_decl (file, var, flags);
7398 if (flags & TDF_VERBOSE)
7399 print_node (file, "", var, 4);
7400 fprintf (file, "\n");
7401
7402 any_var = true;
7403 }
7404 if (gimple_in_ssa_p (cfun))
7405 for (ix = 1; ix < num_ssa_names; ++ix)
7406 {
7407 tree name = ssa_name (ix);
7408 if (name && !SSA_NAME_VAR (name))
7409 {
7410 fprintf (file, " ");
7411 print_generic_expr (file, TREE_TYPE (name), flags);
7412 fprintf (file, " ");
7413 print_generic_expr (file, name, flags);
7414 fprintf (file, ";\n");
7415
7416 any_var = true;
7417 }
7418 }
7419 }
7420
7421 if (fun && fun->decl == fndecl
7422 && fun->cfg
7423 && basic_block_info_for_fn (fun))
7424 {
7425 /* If the CFG has been built, emit a CFG-based dump. */
7426 if (!ignore_topmost_bind)
7427 fprintf (file, "{\n");
7428
7429 if (any_var && n_basic_blocks_for_fn (fun))
7430 fprintf (file, "\n");
7431
7432 FOR_EACH_BB_FN (bb, fun)
7433 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7434
7435 fprintf (file, "}\n");
7436 }
7437 else if (DECL_SAVED_TREE (fndecl) == NULL)
7438 {
7439 /* The function is now in GIMPLE form but the CFG has not been
7440 built yet. Emit the single sequence of GIMPLE statements
7441 that make up its body. */
7442 gimple_seq body = gimple_body (fndecl);
7443
7444 if (gimple_seq_first_stmt (body)
7445 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7446 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7447 print_gimple_seq (file, body, 0, flags);
7448 else
7449 {
7450 if (!ignore_topmost_bind)
7451 fprintf (file, "{\n");
7452
7453 if (any_var)
7454 fprintf (file, "\n");
7455
7456 print_gimple_seq (file, body, 2, flags);
7457 fprintf (file, "}\n");
7458 }
7459 }
7460 else
7461 {
7462 int indent;
7463
7464 /* Make a tree based dump. */
7465 chain = DECL_SAVED_TREE (fndecl);
7466 if (chain && TREE_CODE (chain) == BIND_EXPR)
7467 {
7468 if (ignore_topmost_bind)
7469 {
7470 chain = BIND_EXPR_BODY (chain);
7471 indent = 2;
7472 }
7473 else
7474 indent = 0;
7475 }
7476 else
7477 {
7478 if (!ignore_topmost_bind)
7479 {
7480 fprintf (file, "{\n");
7481 /* No topmost bind, pretend it's ignored for later. */
7482 ignore_topmost_bind = true;
7483 }
7484 indent = 2;
7485 }
7486
7487 if (any_var)
7488 fprintf (file, "\n");
7489
7490 print_generic_stmt_indented (file, chain, flags, indent);
7491 if (ignore_topmost_bind)
7492 fprintf (file, "}\n");
7493 }
7494
7495 if (flags & TDF_ENUMERATE_LOCALS)
7496 dump_enumerated_decls (file, flags);
7497 fprintf (file, "\n\n");
7498
7499 current_function_decl = old_current_fndecl;
7500 }
7501
7502 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7503
7504 DEBUG_FUNCTION void
7505 debug_function (tree fn, int flags)
7506 {
7507 dump_function_to_file (fn, stderr, flags);
7508 }
7509
7510
7511 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7512
7513 static void
7514 print_pred_bbs (FILE *file, basic_block bb)
7515 {
7516 edge e;
7517 edge_iterator ei;
7518
7519 FOR_EACH_EDGE (e, ei, bb->preds)
7520 fprintf (file, "bb_%d ", e->src->index);
7521 }
7522
7523
7524 /* Print on FILE the indexes for the successors of basic_block BB. */
7525
7526 static void
7527 print_succ_bbs (FILE *file, basic_block bb)
7528 {
7529 edge e;
7530 edge_iterator ei;
7531
7532 FOR_EACH_EDGE (e, ei, bb->succs)
7533 fprintf (file, "bb_%d ", e->dest->index);
7534 }
7535
7536 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
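/* At VERBOSITY >= 2 the header line produced below looks like
   (hypothetical indices):

	bb_4 (preds = {bb_2 bb_3 }, succs = {bb_5 })
*/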
7537
7538 void
7539 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7540 {
7541 char *s_indent = (char *) alloca ((size_t) indent + 1);
7542 memset ((void *) s_indent, ' ', (size_t) indent);
7543 s_indent[indent] = '\0';
7544
7545 /* Print basic_block's header. */
7546 if (verbosity >= 2)
7547 {
7548 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7549 print_pred_bbs (file, bb);
7550 fprintf (file, "}, succs = {");
7551 print_succ_bbs (file, bb);
7552 fprintf (file, "})\n");
7553 }
7554
7555 /* Print basic_block's body. */
7556 if (verbosity >= 3)
7557 {
7558 fprintf (file, "%s {\n", s_indent);
7559 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7560 fprintf (file, "%s }\n", s_indent);
7561 }
7562 }
7563
7564 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7565
7566 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
7567    VERBOSITY level, this outputs the contents of the loop, or just its
7568    structure.  */
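/* The loop header line printed below looks like, e.g. (hypothetical
   numbers):

	loop_1 (header = 3, latch = 4, niter = 16, upper_bound = 16)
*/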
7569
7570 static void
7571 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7572 {
7573 char *s_indent;
7574 basic_block bb;
7575
7576 if (loop == NULL)
7577 return;
7578
7579 s_indent = (char *) alloca ((size_t) indent + 1);
7580 memset ((void *) s_indent, ' ', (size_t) indent);
7581 s_indent[indent] = '\0';
7582
7583 /* Print loop's header. */
7584 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7585 if (loop->header)
7586 fprintf (file, "header = %d", loop->header->index);
7587 else
7588 {
7589 fprintf (file, "deleted)\n");
7590 return;
7591 }
7592 if (loop->latch)
7593 fprintf (file, ", latch = %d", loop->latch->index);
7594 else
7595 fprintf (file, ", multiple latches");
7596 fprintf (file, ", niter = ");
7597 print_generic_expr (file, loop->nb_iterations, 0);
7598
7599 if (loop->any_upper_bound)
7600 {
7601 fprintf (file, ", upper_bound = ");
7602 print_decu (loop->nb_iterations_upper_bound, file);
7603 }
7604
7605 if (loop->any_estimate)
7606 {
7607 fprintf (file, ", estimate = ");
7608 print_decu (loop->nb_iterations_estimate, file);
7609 }
7610 fprintf (file, ")\n");
7611
7612 /* Print loop's body. */
7613 if (verbosity >= 1)
7614 {
7615 fprintf (file, "%s{\n", s_indent);
7616 FOR_EACH_BB_FN (bb, cfun)
7617 if (bb->loop_father == loop)
7618 print_loops_bb (file, bb, indent, verbosity);
7619
7620 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7621 fprintf (file, "%s}\n", s_indent);
7622 }
7623 }
7624
7625 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7626    spaces.  Depending on the VERBOSITY level, this outputs the contents
7627    of each loop, or just its structure.  */
7628
7629 static void
7630 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7631 int verbosity)
7632 {
7633 if (loop == NULL)
7634 return;
7635
7636 print_loop (file, loop, indent, verbosity);
7637 print_loop_and_siblings (file, loop->next, indent, verbosity);
7638 }
7639
7640 /* Starting from the CFG entry point of the program, pretty print on
7641    FILE the structure of every loop in the current function.  */
7642
7643 void
7644 print_loops (FILE *file, int verbosity)
7645 {
7646 basic_block bb;
7647
7648 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7649 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7650 if (bb && bb->loop_father)
7651 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7652 }
7653
7654 /* Dump a loop. */
7655
7656 DEBUG_FUNCTION void
7657 debug (struct loop &ref)
7658 {
7659 print_loop (stderr, &ref, 0, /*verbosity*/0);
7660 }
7661
7662 DEBUG_FUNCTION void
7663 debug (struct loop *ptr)
7664 {
7665 if (ptr)
7666 debug (*ptr);
7667 else
7668 fprintf (stderr, "<nil>\n");
7669 }
7670
7671 /* Dump a loop verbosely. */
7672
7673 DEBUG_FUNCTION void
7674 debug_verbose (struct loop &ref)
7675 {
7676 print_loop (stderr, &ref, 0, /*verbosity*/3);
7677 }
7678
7679 DEBUG_FUNCTION void
7680 debug_verbose (struct loop *ptr)
7681 {
7682 if (ptr)
7683     debug_verbose (*ptr);
7684 else
7685 fprintf (stderr, "<nil>\n");
7686 }
7687
7688
7689 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7690
7691 DEBUG_FUNCTION void
7692 debug_loops (int verbosity)
7693 {
7694 print_loops (stderr, verbosity);
7695 }
7696
7697 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7698
7699 DEBUG_FUNCTION void
7700 debug_loop (struct loop *loop, int verbosity)
7701 {
7702 print_loop (stderr, loop, 0, verbosity);
7703 }
7704
7705 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7706 level. */
7707
7708 DEBUG_FUNCTION void
7709 debug_loop_num (unsigned num, int verbosity)
7710 {
7711 debug_loop (get_loop (cfun, num), verbosity);
7712 }
7713
7714 /* Return true if BB ends with a call, possibly followed by some
7715    instructions that must stay with the call.  Return false
7716    otherwise.  */
7717
7718 static bool
7719 gimple_block_ends_with_call_p (basic_block bb)
7720 {
7721 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7722 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7723 }
7724
7725
7726 /* Return true if BB ends with a conditional branch.  Return false
7727    otherwise.  */
7728
7729 static bool
7730 gimple_block_ends_with_condjump_p (const_basic_block bb)
7731 {
7732 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7733 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7734 }
7735
7736
7737 /* Return true if we need to add fake edge to exit at statement T.
7738 Helper function for gimple_flow_call_edges_add. */
7739
7740 static bool
7741 need_fake_edge_p (gimple *t)
7742 {
7743 tree fndecl = NULL_TREE;
7744 int call_flags = 0;
7745
7746 /* NORETURN and LONGJMP calls already have an edge to exit.
7747 CONST and PURE calls do not need one.
7748 We don't currently check for CONST and PURE here, although
7749 it would be a good idea, because those attributes are
7750 figured out from the RTL in mark_constant_function, and
7751 the counter incrementation code from -fprofile-arcs
7752 leads to different results from -fbranch-probabilities. */
7753 if (is_gimple_call (t))
7754 {
7755 fndecl = gimple_call_fndecl (t);
7756 call_flags = gimple_call_flags (t);
7757 }
7758
7759 if (is_gimple_call (t)
7760 && fndecl
7761 && DECL_BUILT_IN (fndecl)
7762 && (call_flags & ECF_NOTHROW)
7763 && !(call_flags & ECF_RETURNS_TWICE)
7764 /* fork() doesn't really return twice, but the effect of
7765 wrapping it in __gcov_fork() which calls __gcov_flush()
7766 and clears the counters before forking has the same
7767 effect as returning twice. Force a fake edge. */
7768 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7769 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7770 return false;
7771
7772 if (is_gimple_call (t))
7773 {
7774 edge_iterator ei;
7775 edge e;
7776 basic_block bb;
7777
7778 if (!(call_flags & ECF_NORETURN))
7779 return true;
7780
7781 bb = gimple_bb (t);
7782 FOR_EACH_EDGE (e, ei, bb->succs)
7783 if ((e->flags & EDGE_FAKE) == 0)
7784 return true;
7785 }
7786
7787 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7788 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7789 return true;
7790
7791 return false;
7792 }
7793
7794
7795 /* Add fake edges to the function exit for any non-constant and
7796    non-noreturn calls (or noreturn calls with EH/abnormal edges),
7797 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7798 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7799 that were split.
7800
7801 The goal is to expose cases in which entering a basic block does
7802 not imply that all subsequent instructions must be executed. */
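/* For instance (a sketch): if a block contains "foo (); x = 1;" and foo
   might call exit, the block is split after the call and a fake edge to
   EXIT is added, so instrumentation does not assume that "x = 1" always
   executes.  */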
7803
7804 static int
7805 gimple_flow_call_edges_add (sbitmap blocks)
7806 {
7807 int i;
7808 int blocks_split = 0;
7809 int last_bb = last_basic_block_for_fn (cfun);
7810 bool check_last_block = false;
7811
7812 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7813 return 0;
7814
7815 if (! blocks)
7816 check_last_block = true;
7817 else
7818 check_last_block = bitmap_bit_p (blocks,
7819 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7820
7821 /* In the last basic block, before epilogue generation, there will be
7822 a fallthru edge to EXIT. Special care is required if the last insn
7823 of the last basic block is a call because make_edge folds duplicate
7824 edges, which would result in the fallthru edge also being marked
7825 fake, which would result in the fallthru edge being removed by
7826 remove_fake_edges, which would result in an invalid CFG.
7827
7828 Moreover, we can't elide the outgoing fake edge, since the block
7829 profiler needs to take this into account in order to solve the minimal
7830 spanning tree in the case that the call doesn't return.
7831
7832 Handle this by adding a dummy instruction in a new last basic block. */
7833 if (check_last_block)
7834 {
7835 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7836 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7837 gimple *t = NULL;
7838
7839 if (!gsi_end_p (gsi))
7840 t = gsi_stmt (gsi);
7841
7842 if (t && need_fake_edge_p (t))
7843 {
7844 edge e;
7845
7846 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7847 if (e)
7848 {
7849 gsi_insert_on_edge (e, gimple_build_nop ());
7850 gsi_commit_edge_inserts ();
7851 }
7852 }
7853 }
7854
7855   /* Now add fake edges to the function exit for any non-constant
7856 calls since there is no way that we can determine if they will
7857 return or not... */
7858 for (i = 0; i < last_bb; i++)
7859 {
7860 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7861 gimple_stmt_iterator gsi;
7862 gimple *stmt, *last_stmt;
7863
7864 if (!bb)
7865 continue;
7866
7867 if (blocks && !bitmap_bit_p (blocks, i))
7868 continue;
7869
7870 gsi = gsi_last_nondebug_bb (bb);
7871 if (!gsi_end_p (gsi))
7872 {
7873 last_stmt = gsi_stmt (gsi);
7874 do
7875 {
7876 stmt = gsi_stmt (gsi);
7877 if (need_fake_edge_p (stmt))
7878 {
7879 edge e;
7880
7881 /* The handling above of the final block before the
7882 epilogue should be enough to verify that there is
7883 no edge to the exit block in CFG already.
7884 Calling make_edge in such case would cause us to
7885 mark that edge as fake and remove it later. */
7886 #ifdef ENABLE_CHECKING
7887 if (stmt == last_stmt)
7888 {
7889 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7890 gcc_assert (e == NULL);
7891 }
7892 #endif
7893
7894 /* Note that the following may create a new basic block
7895 and renumber the existing basic blocks. */
7896 if (stmt != last_stmt)
7897 {
7898 e = split_block (bb, stmt);
7899 if (e)
7900 blocks_split++;
7901 }
7902 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7903 }
7904 gsi_prev (&gsi);
7905 }
7906 while (!gsi_end_p (gsi));
7907 }
7908 }
7909
7910 if (blocks_split)
7911 verify_flow_info ();
7912
7913 return blocks_split;
7914 }
7915
7916 /* Removes edge E and all the blocks dominated by it, and updates dominance
7917 information. The IL in E->src needs to be updated separately.
7918 If dominance info is not available, only the edge E is removed. */
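
/* A minimal usage sketch (hypothetical caller; BB and DEAD_DEST are
   blocks the caller has already proven to be connected by an
   unreachable edge):

     edge dead = find_edge (bb, dead_dest);
     if (dead)
       remove_edge_and_dominated_blocks (dead);

   Blocks reachable only through DEAD are deleted and the dominator
   tree is repaired afterwards.  */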
7919
7920 void
7921 remove_edge_and_dominated_blocks (edge e)
7922 {
7923 vec<basic_block> bbs_to_remove = vNULL;
7924 vec<basic_block> bbs_to_fix_dom = vNULL;
7925 bitmap df, df_idom;
7926 edge f;
7927 edge_iterator ei;
7928 bool none_removed = false;
7929 unsigned i;
7930 basic_block bb, dbb;
7931 bitmap_iterator bi;
7932
7933 /* If we are removing a path inside a non-root loop, that may change the
7934 loop ownership of blocks or remove loops. Mark loops for fixup. */
7935 if (current_loops
7936 && loop_outer (e->src->loop_father) != NULL
7937 && e->src->loop_father == e->dest->loop_father)
7938 loops_state_set (LOOPS_NEED_FIXUP);
7939
7940 if (!dom_info_available_p (CDI_DOMINATORS))
7941 {
7942 remove_edge (e);
7943 return;
7944 }
7945
7946 /* No updating is needed for edges to exit. */
7947 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7948 {
7949 if (cfgcleanup_altered_bbs)
7950 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7951 remove_edge (e);
7952 return;
7953 }
7954
7955 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7956 that is not dominated by E->dest, then this set is empty. Otherwise,
7957 all the basic blocks dominated by E->dest are removed.
7958
7959 Also, to DF_IDOM we store the immediate dominators of the blocks in
7960 the dominance frontier of E (i.e., of the successors of the
7961 removed blocks, if there are any, and of E->dest otherwise). */
7962 FOR_EACH_EDGE (f, ei, e->dest->preds)
7963 {
7964 if (f == e)
7965 continue;
7966
7967 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7968 {
7969 none_removed = true;
7970 break;
7971 }
7972 }
7973
7974 df = BITMAP_ALLOC (NULL);
7975 df_idom = BITMAP_ALLOC (NULL);
7976
7977 if (none_removed)
7978 bitmap_set_bit (df_idom,
7979 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7980 else
7981 {
7982 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7983 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7984 {
7985 FOR_EACH_EDGE (f, ei, bb->succs)
7986 {
7987 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7988 bitmap_set_bit (df, f->dest->index);
7989 }
7990 }
7991 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7992 bitmap_clear_bit (df, bb->index);
7993
7994 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7995 {
7996 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7997 bitmap_set_bit (df_idom,
7998 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7999 }
8000 }
8001
8002 if (cfgcleanup_altered_bbs)
8003 {
8004 /* Record the set of the altered basic blocks. */
8005 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8006 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8007 }
8008
8009 /* Remove E and the cancelled blocks. */
8010 if (none_removed)
8011 remove_edge (e);
8012 else
8013 {
8014 /* Walk backwards so as to get a chance to substitute all
8015 released DEFs into debug stmts. See
8016 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8017 details. */
8018 for (i = bbs_to_remove.length (); i-- > 0; )
8019 delete_basic_block (bbs_to_remove[i]);
8020 }
8021
8022 /* Update the dominance information. The immediate dominator may change only
8023 for blocks whose immediate dominator belongs to DF_IDOM:
8024
8025 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8026 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8027 Z dominates X after the removal. Before removal, there exists a path P
8028 from Y to X that avoids Z. Let F be the last edge on P that is
8029 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8030 dominates W, and because of P, Z does not dominate W), and W belongs to
8031 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
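
/* As a concrete example (illustrative only): in the diamond
   A -> B, A -> C, B -> D, C -> D, removing edge A -> B deletes B.
   D lies in the dominance frontier of the removed block, so its old
   immediate dominator A ends up in DF_IDOM, and indeed idom(D)
   changes from A to C; D is picked up below as a dominator son of A.  */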
8032 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8033 {
8034 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8035 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8036 dbb;
8037 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8038 bbs_to_fix_dom.safe_push (dbb);
8039 }
8040
8041 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8042
8043 BITMAP_FREE (df);
8044 BITMAP_FREE (df_idom);
8045 bbs_to_remove.release ();
8046 bbs_to_fix_dom.release ();
8047 }
8048
8049 /* Purge dead EH edges from basic block BB. */
8050
8051 bool
8052 gimple_purge_dead_eh_edges (basic_block bb)
8053 {
8054 bool changed = false;
8055 edge e;
8056 edge_iterator ei;
8057 gimple *stmt = last_stmt (bb);
8058
8059 if (stmt && stmt_can_throw_internal (stmt))
8060 return false;
8061
8062 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8063 {
8064 if (e->flags & EDGE_EH)
8065 {
8066 remove_edge_and_dominated_blocks (e);
8067 changed = true;
8068 }
8069 else
8070 ei_next (&ei);
8071 }
8072
8073 return changed;
8074 }
8075
8076 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8077
8078 bool
8079 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8080 {
8081 bool changed = false;
8082 unsigned i;
8083 bitmap_iterator bi;
8084
8085 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8086 {
8087 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8088
8089 /* Earlier gimple_purge_dead_eh_edges could have removed
8090 this basic block already. */
8091 gcc_assert (bb || changed);
8092 if (bb != NULL)
8093 changed |= gimple_purge_dead_eh_edges (bb);
8094 }
8095
8096 return changed;
8097 }
8098
8099 /* Purge dead abnormal call edges from basic block BB. */
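
/* For background (an illustrative sketch): in

     #include <setjmp.h>

     static jmp_buf env;
     extern void work (void);

     void
     f (void)
     {
       if (setjmp (env) == 0)   // This call can be re-entered via
         work ();               // longjmp, hence the abnormal edges.
     }

   the block ending in the setjmp call has abnormal successor edges;
   once the block no longer ends in a statement that can make an
   abnormal goto, those edges are dead and are purged here.  */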
8100
8101 bool
8102 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8103 {
8104 bool changed = false;
8105 edge e;
8106 edge_iterator ei;
8107 gimple *stmt = last_stmt (bb);
8108
8109 if (!cfun->has_nonlocal_label
8110 && !cfun->calls_setjmp)
8111 return false;
8112
8113 if (stmt && stmt_can_make_abnormal_goto (stmt))
8114 return false;
8115
8116 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8117 {
8118 if (e->flags & EDGE_ABNORMAL)
8119 {
8120 if (e->flags & EDGE_FALLTHRU)
8121 e->flags &= ~EDGE_ABNORMAL;
8122 else
8123 remove_edge_and_dominated_blocks (e);
8124 changed = true;
8125 }
8126 else
8127 ei_next (&ei);
8128 }
8129
8130 return changed;
8131 }
8132
8133 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8134
8135 bool
8136 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8137 {
8138 bool changed = false;
8139 unsigned i;
8140 bitmap_iterator bi;
8141
8142 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8143 {
8144 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8145
8146 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8147 this basic block already. */
8148 gcc_assert (bb || changed);
8149 if (bb != NULL)
8150 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8151 }
8152
8153 return changed;
8154 }
8155
8156 /* This function is called whenever a new edge is created or
8157 redirected. */
8158
8159 static void
8160 gimple_execute_on_growing_pred (edge e)
8161 {
8162 basic_block bb = e->dest;
8163
8164 if (!gimple_seq_empty_p (phi_nodes (bb)))
8165 reserve_phi_args_for_new_edge (bb);
8166 }
8167
8168 /* This function is called immediately before edge E is removed from
8169 the edge vector E->dest->preds. */
8170
8171 static void
8172 gimple_execute_on_shrinking_pred (edge e)
8173 {
8174 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8175 remove_phi_args (e);
8176 }
8177
8178 /*---------------------------------------------------------------------------
8179 Helper functions for Loop versioning
8180 ---------------------------------------------------------------------------*/
8181
8182 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8183 of 'first'. Both of them are dominated by 'new_head' basic block. When
8184 'new_head' was created by splitting 'second's incoming edge, it received
8185 the phi arguments on that edge via split_edge(). Later, an additional
8186 edge 'e' was created to connect 'new_head' and 'first'. This routine now
8187 adds to edge 'e' the phi args that the edge from 'new_head' to 'second'
8188 received as part of the edge splitting. */
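
/* Schematically (an illustrative sketch of the situation described
   above):

                new_head
               /        \
              e          e2 (created by split_edge)
             /            \
          first          second

   the phi arguments carried by E2 are copied onto E.  */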
8189
8190 static void
8191 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8192 basic_block new_head, edge e)
8193 {
8194 gphi *phi1, *phi2;
8195 gphi_iterator psi1, psi2;
8196 tree def;
8197 edge e2 = find_edge (new_head, second);
8198
8199 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8200 edge, we should always have an edge from NEW_HEAD to SECOND. */
8201 gcc_assert (e2 != NULL);
8202
8203 /* Walk all phi nodes of basic block 'second' and add phi args to
8204 edge 'e' for the 'first' head. PHI args are always in the correct order. */
8205
8206 for (psi2 = gsi_start_phis (second),
8207 psi1 = gsi_start_phis (first);
8208 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8209 gsi_next (&psi2), gsi_next (&psi1))
8210 {
8211 phi1 = psi1.phi ();
8212 phi2 = psi2.phi ();
8213 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8214 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8215 }
8216 }
8217
8218
8219 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8220 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8221 the destination of the ELSE part. */
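
/* Schematically (an illustrative sketch; the TRUE edge to SECOND_HEAD
   is created by the caller, this routine only inserts the condition
   and retags the existing fallthru edge):

               cond_bb
              /       \
         (true)     (false)
            |           |
      second_head   first_head  */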
8222
8223 static void
8224 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8225 basic_block second_head ATTRIBUTE_UNUSED,
8226 basic_block cond_bb, void *cond_e)
8227 {
8228 gimple_stmt_iterator gsi;
8229 gimple *new_cond_expr;
8230 tree cond_expr = (tree) cond_e;
8231 edge e0;
8232
8233 /* Build the new conditional expression. */
8234 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8235 NULL_TREE, NULL_TREE);
8236
8237 /* Add new cond in cond_bb. */
8238 gsi = gsi_last_bb (cond_bb);
8239 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8240
8241 /* Adjust edges appropriately to connect new head with first head
8242 as well as second head. */
8243 e0 = single_succ_edge (cond_bb);
8244 e0->flags &= ~EDGE_FALLTHRU;
8245 e0->flags |= EDGE_FALSE_VALUE;
8246 }
8247
8248
8249 /* Do book-keeping of basic block BB for the profile consistency checker.
8250 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8251 do post-pass accounting. Store the counts in RECORD. */
8252 static void
8253 gimple_account_profile_record (basic_block bb, int after_pass,
8254 struct profile_record *record)
8255 {
8256 gimple_stmt_iterator i;
8257 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8258 {
8259 record->size[after_pass]
8260 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8261 if (profile_status_for_fn (cfun) == PROFILE_READ)
8262 record->time[after_pass]
8263 += estimate_num_insns (gsi_stmt (i),
8264 &eni_time_weights) * bb->count;
8265 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8266 record->time[after_pass]
8267 += estimate_num_insns (gsi_stmt (i),
8268 &eni_time_weights) * bb->frequency;
8269 }
8270 }
8271
8272 struct cfg_hooks gimple_cfg_hooks = {
8273 "gimple",
8274 gimple_verify_flow_info,
8275 gimple_dump_bb, /* dump_bb */
8276 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8277 create_bb, /* create_basic_block */
8278 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8279 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8280 gimple_can_remove_branch_p, /* can_remove_branch_p */
8281 remove_bb, /* delete_basic_block */
8282 gimple_split_block, /* split_block */
8283 gimple_move_block_after, /* move_block_after */
8284 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8285 gimple_merge_blocks, /* merge_blocks */
8286 gimple_predict_edge, /* predict_edge */
8287 gimple_predicted_by_p, /* predicted_by_p */
8288 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8289 gimple_duplicate_bb, /* duplicate_block */
8290 gimple_split_edge, /* split_edge */
8291 gimple_make_forwarder_block, /* make_forwarder_block */
8292 NULL, /* tidy_fallthru_edge */
8293 NULL, /* force_nonfallthru */
8294 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8295 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8296 gimple_flow_call_edges_add, /* flow_call_edges_add */
8297 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8298 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8299 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8300 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8301 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8302 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8303 flush_pending_stmts, /* flush_pending_stmts */
8304 gimple_empty_block_p, /* block_empty_p */
8305 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8306 gimple_account_profile_record,
8307 };
8308
8309
8310 /* Split all critical edges. */
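
/* For reference (illustrative): an edge is critical when its source
   block has multiple successors and its destination block has
   multiple predecessors, as A -> C in

     A -> B, A -> C, D -> C.

   Statements cannot be inserted on such an edge without splitting it,
   since they would also execute on the other paths.  */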
8311
8312 unsigned int
8313 split_critical_edges (void)
8314 {
8315 basic_block bb;
8316 edge e;
8317 edge_iterator ei;
8318
8319 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8320 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8321 mappings around the calls to split_edge. */
8322 start_recording_case_labels ();
8323 FOR_ALL_BB_FN (bb, cfun)
8324 {
8325 FOR_EACH_EDGE (e, ei, bb->succs)
8326 {
8327 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8328 split_edge (e);
8329 /* PRE inserts statements on edges and expects that, since
8330 split_critical_edges was done beforehand, committing edge
8331 insertions will not split more edges. In addition to critical
8332 edges we must therefore split edges into blocks that have multiple
8333 predecessors or phi nodes, or into the exit block, when the source
8334 block ends with a control flow statement such as RESX. Go ahead
8335 and split them too. This matches the logic in gimple_find_edge_insert_loc. */
8336 else if ((!single_pred_p (e->dest)
8337 || !gimple_seq_empty_p (phi_nodes (e->dest))
8338 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8339 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8340 && !(e->flags & EDGE_ABNORMAL))
8341 {
8342 gimple_stmt_iterator gsi;
8343
8344 gsi = gsi_last_bb (e->src);
8345 if (!gsi_end_p (gsi)
8346 && stmt_ends_bb_p (gsi_stmt (gsi))
8347 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8348 && !gimple_call_builtin_p (gsi_stmt (gsi),
8349 BUILT_IN_RETURN)))
8350 split_edge (e);
8351 }
8352 }
8353 }
8354 end_recording_case_labels ();
8355 return 0;
8356 }
8357
8358 namespace {
8359
8360 const pass_data pass_data_split_crit_edges =
8361 {
8362 GIMPLE_PASS, /* type */
8363 "crited", /* name */
8364 OPTGROUP_NONE, /* optinfo_flags */
8365 TV_TREE_SPLIT_EDGES, /* tv_id */
8366 PROP_cfg, /* properties_required */
8367 PROP_no_crit_edges, /* properties_provided */
8368 0, /* properties_destroyed */
8369 0, /* todo_flags_start */
8370 0, /* todo_flags_finish */
8371 };
8372
8373 class pass_split_crit_edges : public gimple_opt_pass
8374 {
8375 public:
8376 pass_split_crit_edges (gcc::context *ctxt)
8377 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8378 {}
8379
8380 /* opt_pass methods: */
8381 virtual unsigned int execute (function *) { return split_critical_edges (); }
8382
8383 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8384 }; // class pass_split_crit_edges
8385
8386 } // anon namespace
8387
8388 gimple_opt_pass *
8389 make_pass_split_crit_edges (gcc::context *ctxt)
8390 {
8391 return new pass_split_crit_edges (ctxt);
8392 }
8393
8394
8395 /* Insert COND, a GIMPLE_COND expression, after STMT in basic block BB,
8396 splitting the block as appropriate and creating a new conditionally
8397 executed basic block for the TRUE arm.
8398 Return the created basic block. */
8399 basic_block
8400 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8401 {
8402 edge fall = split_block (bb, stmt);
8403 gimple_stmt_iterator iter = gsi_last_bb (bb);
8404 basic_block new_bb;
8405
8406 /* Insert cond statement. */
8407 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8408 if (gsi_end_p (iter))
8409 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8410 else
8411 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8412
8413 /* Create conditionally executed block. */
8414 new_bb = create_empty_bb (bb);
8415 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8416 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8417
8418 /* Fix edge for split bb. */
8419 fall->flags = EDGE_FALSE_VALUE;
8420
8421 /* Update dominance info. */
8422 if (dom_info_available_p (CDI_DOMINATORS))
8423 {
8424 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8425 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8426 }
8427
8428 /* Update loop info. */
8429 if (current_loops)
8430 add_bb_to_loop (new_bb, bb->loop_father);
8431
8432 return new_bb;
8433 }
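
/* A minimal usage sketch (hypothetical; BB, STMT and FLAG are values
   the caller already has, and FLAG is a boolean tree):

     gcond *cond = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Statements later added to THEN_BB execute only when FLAG is true;
   otherwise control falls through to the code after STMT.  */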
8434
8435 /* Build a ternary operation and gimplify it. Emit code before GSI.
8436 Return the gimple_val holding the result. */
8437
8438 tree
8439 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8440 tree type, tree a, tree b, tree c)
8441 {
8442 tree ret;
8443 location_t loc = gimple_location (gsi_stmt (*gsi));
8444
8445 ret = fold_build3_loc (loc, code, type, a, b, c);
8446 STRIP_NOPS (ret);
8447
8448 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8449 GSI_SAME_STMT);
8450 }
8451
8452 /* Build a binary operation and gimplify it. Emit code before GSI.
8453 Return the gimple_val holding the result. */
8454
8455 tree
8456 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8457 tree type, tree a, tree b)
8458 {
8459 tree ret;
8460
8461 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8462 STRIP_NOPS (ret);
8463
8464 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8465 GSI_SAME_STMT);
8466 }
8467
8468 /* Build a unary operation and gimplify it. Emit code before GSI.
8469 Return the gimple_val holding the result. */
8470
8471 tree
8472 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8473 tree a)
8474 {
8475 tree ret;
8476
8477 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8478 STRIP_NOPS (ret);
8479
8480 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8481 GSI_SAME_STMT);
8482 }
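
/* A minimal usage sketch for the three helpers above (hypothetical;
   GSI, TYPE, A and B are values the caller already has):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Each call folds the expression, emits any statements needed to
   gimplify it before GSI, and returns a gimple value (an SSA name
   or constant) holding the result.  */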
8483
8484
8485 \f
8486 /* Given a basic block B which ends with a conditional and has
8487 precisely two successors, determine which of the edges is taken if
8488 the conditional is true and which is taken if the conditional is
8489 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8490
8491 void
8492 extract_true_false_edges_from_block (basic_block b,
8493 edge *true_edge,
8494 edge *false_edge)
8495 {
8496 edge e = EDGE_SUCC (b, 0);
8497
8498 if (e->flags & EDGE_TRUE_VALUE)
8499 {
8500 *true_edge = e;
8501 *false_edge = EDGE_SUCC (b, 1);
8502 }
8503 else
8504 {
8505 *false_edge = e;
8506 *true_edge = EDGE_SUCC (b, 1);
8507 }
8508 }
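
/* A minimal usage sketch (hypothetical caller with a conditional
   block COND_BB):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   after which TRUE_EDGE->dest is the block executed when the
   condition holds.  */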
8509
8510 /* Emit return warnings. */
8511
8512 namespace {
8513
8514 const pass_data pass_data_warn_function_return =
8515 {
8516 GIMPLE_PASS, /* type */
8517 "*warn_function_return", /* name */
8518 OPTGROUP_NONE, /* optinfo_flags */
8519 TV_NONE, /* tv_id */
8520 PROP_cfg, /* properties_required */
8521 0, /* properties_provided */
8522 0, /* properties_destroyed */
8523 0, /* todo_flags_start */
8524 0, /* todo_flags_finish */
8525 };
8526
8527 class pass_warn_function_return : public gimple_opt_pass
8528 {
8529 public:
8530 pass_warn_function_return (gcc::context *ctxt)
8531 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8532 {}
8533
8534 /* opt_pass methods: */
8535 virtual unsigned int execute (function *);
8536
8537 }; // class pass_warn_function_return
8538
8539 unsigned int
8540 pass_warn_function_return::execute (function *fun)
8541 {
8542 source_location location;
8543 gimple *last;
8544 edge e;
8545 edge_iterator ei;
8546
8547 if (!targetm.warn_func_return (fun->decl))
8548 return 0;
8549
8550 /* If we have a path to EXIT, then we do return. */
8551 if (TREE_THIS_VOLATILE (fun->decl)
8552 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8553 {
8554 location = UNKNOWN_LOCATION;
8555 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8556 {
8557 last = last_stmt (e->src);
8558 if ((gimple_code (last) == GIMPLE_RETURN
8559 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8560 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8561 break;
8562 }
8563 if (location == UNKNOWN_LOCATION)
8564 location = cfun->function_end_locus;
8565 warning_at (location, 0, "%<noreturn%> function does return");
8566 }
8567
8568 /* If we see "return;" in some basic block, then we do reach the end
8569 without returning a value. */
8570 else if (warn_return_type
8571 && !TREE_NO_WARNING (fun->decl)
8572 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8573 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8574 {
8575 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8576 {
8577 gimple *last = last_stmt (e->src);
8578 greturn *return_stmt = dyn_cast <greturn *> (last);
8579 if (return_stmt
8580 && gimple_return_retval (return_stmt) == NULL
8581 && !gimple_no_warning_p (last))
8582 {
8583 location = gimple_location (last);
8584 if (location == UNKNOWN_LOCATION)
8585 location = fun->function_end_locus;
8586 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8587 TREE_NO_WARNING (fun->decl) = 1;
8588 break;
8589 }
8590 }
8591 }
8592 return 0;
8593 }
8594
8595 } // anon namespace
8596
8597 gimple_opt_pass *
8598 make_pass_warn_function_return (gcc::context *ctxt)
8599 {
8600 return new pass_warn_function_return (ctxt);
8601 }
8602
8603 /* Walk a gimplified function and warn about calls whose return value
8604 is ignored when attribute((warn_unused_result)) is set on the called
8605 function's type. This is done before inlining, so we need not worry about that. */
8606
8607 static void
8608 do_warn_unused_result (gimple_seq seq)
8609 {
8610 tree fdecl, ftype;
8611 gimple_stmt_iterator i;
8612
8613 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8614 {
8615 gimple *g = gsi_stmt (i);
8616
8617 switch (gimple_code (g))
8618 {
8619 case GIMPLE_BIND:
8620 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8621 break;
8622 case GIMPLE_TRY:
8623 do_warn_unused_result (gimple_try_eval (g));
8624 do_warn_unused_result (gimple_try_cleanup (g));
8625 break;
8626 case GIMPLE_CATCH:
8627 do_warn_unused_result (gimple_catch_handler (
8628 as_a <gcatch *> (g)));
8629 break;
8630 case GIMPLE_EH_FILTER:
8631 do_warn_unused_result (gimple_eh_filter_failure (g));
8632 break;
8633
8634 case GIMPLE_CALL:
8635 if (gimple_call_lhs (g))
8636 break;
8637 if (gimple_call_internal_p (g))
8638 break;
8639
8640 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8641 LHS. All calls whose value is ignored should be
8642 represented like this. Look for the attribute. */
8643 fdecl = gimple_call_fndecl (g);
8644 ftype = gimple_call_fntype (g);
8645
8646 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8647 {
8648 location_t loc = gimple_location (g);
8649
8650 if (fdecl)
8651 warning_at (loc, OPT_Wunused_result,
8652 "ignoring return value of %qD, "
8653 "declared with attribute warn_unused_result",
8654 fdecl);
8655 else
8656 warning_at (loc, OPT_Wunused_result,
8657 "ignoring return value of function "
8658 "declared with attribute warn_unused_result");
8659 }
8660 break;
8661
8662 default:
8663 /* Not a container, not a call, or a call whose value is used. */
8664 break;
8665 }
8666 }
8667 }
8668
8669 namespace {
8670
8671 const pass_data pass_data_warn_unused_result =
8672 {
8673 GIMPLE_PASS, /* type */
8674 "*warn_unused_result", /* name */
8675 OPTGROUP_NONE, /* optinfo_flags */
8676 TV_NONE, /* tv_id */
8677 PROP_gimple_any, /* properties_required */
8678 0, /* properties_provided */
8679 0, /* properties_destroyed */
8680 0, /* todo_flags_start */
8681 0, /* todo_flags_finish */
8682 };
8683
8684 class pass_warn_unused_result : public gimple_opt_pass
8685 {
8686 public:
8687 pass_warn_unused_result (gcc::context *ctxt)
8688 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8689 {}
8690
8691 /* opt_pass methods: */
8692 virtual bool gate (function *) { return flag_warn_unused_result; }
8693 virtual unsigned int execute (function *)
8694 {
8695 do_warn_unused_result (gimple_body (current_function_decl));
8696 return 0;
8697 }
8698
8699 }; // class pass_warn_unused_result
8700
8701 } // anon namespace
8702
8703 gimple_opt_pass *
8704 make_pass_warn_unused_result (gcc::context *ctxt)
8705 {
8706 return new pass_warn_unused_result (ctxt);
8707 }
8708
8709 /* IPA passes, compilation of earlier functions or inlining
8710 might have changed some properties, such as marking functions nothrow,
8711 pure, const or noreturn.
8712 Remove redundant edges and basic blocks, and create new ones if necessary.
8713
8714 This pass can't be executed as a standalone pass from the pass manager,
8715 because between inlining and this fixup, verify_flow_info would fail. */
8716
8717 unsigned int
8718 execute_fixup_cfg (void)
8719 {
8720 basic_block bb;
8721 gimple_stmt_iterator gsi;
8722 int todo = 0;
8723 gcov_type count_scale;
8724 edge e;
8725 edge_iterator ei;
8726
8727 count_scale
8728 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8729 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8730
8731 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8732 cgraph_node::get (current_function_decl)->count;
8733 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8734 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8735 count_scale);
8736
8737 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8738 e->count = apply_scale (e->count, count_scale);
8739
8740 FOR_EACH_BB_FN (bb, cfun)
8741 {
8742 bb->count = apply_scale (bb->count, count_scale);
8743 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8744 {
8745 gimple *stmt = gsi_stmt (gsi);
8746 tree decl = is_gimple_call (stmt)
8747 ? gimple_call_fndecl (stmt)
8748 : NULL;
8749 if (decl)
8750 {
8751 int flags = gimple_call_flags (stmt);
8752 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8753 {
8754 if (gimple_purge_dead_abnormal_call_edges (bb))
8755 todo |= TODO_cleanup_cfg;
8756
8757 if (gimple_in_ssa_p (cfun))
8758 {
8759 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8760 update_stmt (stmt);
8761 }
8762 }
8763
8764 if (flags & ECF_NORETURN
8765 && fixup_noreturn_call (stmt))
8766 todo |= TODO_cleanup_cfg;
8767 }
8768
8769 /* Remove stores to variables we marked write-only.
8770 Keep the access when the store has a side effect, i.e. when the
8771 source is volatile. */
8772 if (gimple_store_p (stmt)
8773 && !gimple_has_side_effects (stmt))
8774 {
8775 tree lhs = get_base_address (gimple_get_lhs (stmt));
8776
8777 if (TREE_CODE (lhs) == VAR_DECL
8778 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8779 && varpool_node::get (lhs)->writeonly)
8780 {
8781 unlink_stmt_vdef (stmt);
8782 gsi_remove (&gsi, true);
8783 release_defs (stmt);
8784 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8785 continue;
8786 }
8787 }
8788 /* For calls we can simply remove the LHS when it is known
8789 to be write-only. */
8790 if (is_gimple_call (stmt)
8791 && gimple_get_lhs (stmt))
8792 {
8793 tree lhs = get_base_address (gimple_get_lhs (stmt));
8794
8795 if (TREE_CODE (lhs) == VAR_DECL
8796 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8797 && varpool_node::get (lhs)->writeonly)
8798 {
8799 gimple_call_set_lhs (stmt, NULL);
8800 update_stmt (stmt);
8801 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8802 }
8803 }
8804
8805 if (maybe_clean_eh_stmt (stmt)
8806 && gimple_purge_dead_eh_edges (bb))
8807 todo |= TODO_cleanup_cfg;
8808 gsi_next (&gsi);
8809 }
8810
8811 FOR_EACH_EDGE (e, ei, bb->succs)
8812 e->count = apply_scale (e->count, count_scale);
8813
8814 /* If we have a basic block with no successors that does not
8815 end with a control statement or a noreturn call, end it with
8816 a call to __builtin_unreachable. This situation can occur
8817 when inlining a noreturn call that does in fact return. */
8818 if (EDGE_COUNT (bb->succs) == 0)
8819 {
8820 gimple *stmt = last_stmt (bb);
8821 if (!stmt
8822 || (!is_ctrl_stmt (stmt)
8823 && (!is_gimple_call (stmt)
8824 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8825 {
8826 if (stmt && is_gimple_call (stmt))
8827 gimple_call_set_ctrl_altering (stmt, false);
8828 stmt = gimple_build_call
8829 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8830 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8831 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8832 }
8833 }
8834 }
8835 if (count_scale != REG_BR_PROB_BASE)
8836 compute_function_frequency ();
8837
8838 if (current_loops
8839 && (todo & TODO_cleanup_cfg))
8840 loops_state_set (LOOPS_NEED_FIXUP);
8841
8842 return todo;
8843 }
8844
8845 namespace {
8846
8847 const pass_data pass_data_fixup_cfg =
8848 {
8849 GIMPLE_PASS, /* type */
8850 "fixup_cfg", /* name */
8851 OPTGROUP_NONE, /* optinfo_flags */
8852 TV_NONE, /* tv_id */
8853 PROP_cfg, /* properties_required */
8854 0, /* properties_provided */
8855 0, /* properties_destroyed */
8856 0, /* todo_flags_start */
8857 0, /* todo_flags_finish */
8858 };
8859
8860 class pass_fixup_cfg : public gimple_opt_pass
8861 {
8862 public:
8863 pass_fixup_cfg (gcc::context *ctxt)
8864 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8865 {}
8866
8867 /* opt_pass methods: */
8868 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8869 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8870
8871 }; // class pass_fixup_cfg
8872
8873 } // anon namespace
8874
8875 gimple_opt_pass *
8876 make_pass_fixup_cfg (gcc::context *ctxt)
8877 {
8878 return new pass_fixup_cfg (ctxt);
8879 }
8880
8881 /* Garbage collection support for edge_def. */
8882
8883 extern void gt_ggc_mx (tree&);
8884 extern void gt_ggc_mx (gimple *&);
8885 extern void gt_ggc_mx (rtx&);
8886 extern void gt_ggc_mx (basic_block&);
8887
8888 static void
8889 gt_ggc_mx (rtx_insn *& x)
8890 {
8891 if (x)
8892 gt_ggc_mx_rtx_def ((void *) x);
8893 }
8894
8895 void
8896 gt_ggc_mx (edge_def *e)
8897 {
8898 tree block = LOCATION_BLOCK (e->goto_locus);
8899 gt_ggc_mx (e->src);
8900 gt_ggc_mx (e->dest);
8901 if (current_ir_type () == IR_GIMPLE)
8902 gt_ggc_mx (e->insns.g);
8903 else
8904 gt_ggc_mx (e->insns.r);
8905 gt_ggc_mx (block);
8906 }
8907
8908 /* PCH support for edge_def. */
8909
8910 extern void gt_pch_nx (tree&);
8911 extern void gt_pch_nx (gimple *&);
8912 extern void gt_pch_nx (rtx&);
8913 extern void gt_pch_nx (basic_block&);
8914
8915 static void
8916 gt_pch_nx (rtx_insn *& x)
8917 {
8918 if (x)
8919 gt_pch_nx_rtx_def ((void *) x);
8920 }
8921
8922 void
8923 gt_pch_nx (edge_def *e)
8924 {
8925 tree block = LOCATION_BLOCK (e->goto_locus);
8926 gt_pch_nx (e->src);
8927 gt_pch_nx (e->dest);
8928 if (current_ir_type () == IR_GIMPLE)
8929 gt_pch_nx (e->insns.g);
8930 else
8931 gt_pch_nx (e->insns.r);
8932 gt_pch_nx (block);
8933 }
8934
8935 void
8936 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8937 {
8938 tree block = LOCATION_BLOCK (e->goto_locus);
8939 op (&(e->src), cookie);
8940 op (&(e->dest), cookie);
8941 if (current_ir_type () == IR_GIMPLE)
8942 op (&(e->insns.g), cookie);
8943 else
8944 op (&(e->insns.r), cookie);
8945 op (&(block), cookie);
8946 }