/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
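
/* For instance, a loop preceded by "#pragma GCC ivdep" reaches this
   point with an internal annotation call just before the GIMPLE_COND,
   roughly (illustrative IL, SSA names made up):

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
     if (_2 != 0) goto <body>; else goto <exit>;

   The loop above rewrites the call into the plain copy "_2 = _1;" and
   records the hint on LOOP, here by setting loop->safelen to INT_MAX.  */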

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
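
/* For example, the GNU C computed goto

     void *p = &&lab;
     goto *p;

   is a GIMPLE_GOTO whose destination is the pointer value P rather
   than a LABEL_DECL, so this predicate returns true for it.
   (Illustrative example, names made up.)  */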

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the
   call could alter control flow other than via EH.  We initialize the
   flag at CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
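
/* For example, a call to a noreturn function such as abort () carries
   ECF_NORETURN and is therefore marked control-altering here, ending
   its basic block; a later pass may clear the flag again, but CFG
   construction never sets it anywhere else.  (Illustrative example.)  */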


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
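
/* As a sketch of the LHS splitting above: a returns-twice call such as

     x = setjmp (env);

   can transfer control back via an abnormal edge, so it is rewritten as

     tmp = setjmp (env);
     x = tmp;

   keeping the old value of X live on the abnormal edge instead of
   creating overlapping life-ranges for X's abnormal SSA names.
   (Illustrative example.)  */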

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
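
/* The growth policy above enlarges the array by roughly 25% each time:
   new_size = n + (n + 3) / 4.  E.g. a full array of 20 entries grows to
   20 + 5 = 25, then to 25 + 7 = 32, and so on.  (Worked example for
   illustration.)  */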


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO
   is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
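
/* Sketch of the factoring done above for computed gotos: each block
   ending in "goto *pN" stores its target in GOTOVAR and falls through
   to a single shared block

     <factored label>:
       goto gotovar;

   and only that block gets edges to the candidate labels, so the edge
   count stays proportional to the number of gotos plus labels rather
   than their product.  (Illustration of the scheme, not literal IL.)  */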

/* Create outgoing edges for BB.  Return 1 when it ends with a
   computed goto; return 2 when it ends with a statement that
   might return to this function via a nonlocal goto; otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and for abnormal edges from calls (to calls
     that return twice, or to forced labels), factor the abnormal edges
     too, by having all abnormal edges from the calls go to a common
     artificial basic block with an ABNORMAL_DISPATCHER internal call
     and abnormal edges from that basic block to all forced labels and
     calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as
   described in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
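
/* For example, if both arms of "if (c) x = 1; else x = 2;" sit on one
   source line, the two assignment blocks share that line's locus, and
   successive calls here return discriminators 1 and 2 so sample-based
   profiling can still tell the blocks apart.  (Illustrative example.)  */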

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
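
/* Note that only the file and line are compared, so two locations on
   the same line that differ only in column (e.g. the condition and the
   body of a one-line loop) are considered the same line here.  */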

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
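
/* For instance, the gimplified condition

     if (a_1 > b_2) goto <D.1234>; else goto <D.1235>;

   gets an EDGE_TRUE_VALUE edge to the block holding <D.1234> and an
   EDGE_FALSE_VALUE edge to the block holding <D.1235>, after which the
   two labels are dropped from the GIMPLE_COND itself.  (Illustrative
   IL, names made up.)  */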


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
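
/* For example, if a block starts with two artificial labels

     <D.2000>:
     <D.2001>:

   then <D.2000> becomes the block's leading label, every GIMPLE_COND,
   switch, goto, asm or transaction that referenced <D.2001> is
   redirected to <D.2000>, and <D.2001> is deleted in step 3.
   (Illustrative label names.)  */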

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user-forced labels or, at -O0, any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant, and
	     only when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

1961 /* Now that we can thread computed gotos, we might have
1962 a situation where we have a forced label in block B.
1963 However, the label at the start of block B might still be
1964 used in other ways (think about the runtime checking for
1965 Fortran assigned gotos). So we cannot just delete the
1966 label. Instead we move the label to the start of block A. */
1967 if (FORCED_LABEL (label))
1968 {
1969 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1970 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1971 }
1972 /* Other user labels are kept around in the form of a debug stmt. */
1973 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1974 {
1975 gimple *dbg = gimple_build_debug_bind (label,
1976 integer_zero_node,
1977 stmt);
1978 gimple_debug_bind_reset_value (dbg);
1979 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1980 }
1981
1982 lp_nr = EH_LANDING_PAD_NR (label);
1983 if (lp_nr)
1984 {
1985 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1986 lp->post_landing_pad = NULL;
1987 }
1988 }
1989 else
1990 {
1991 gimple_set_bb (stmt, a);
1992 gsi_next (&gsi);
1993 }
1994 }
1995
1996 /* When merging two BBs, if their counts are different, the larger count
1997 is selected as the new bb count. This is to handle inconsistent
1998 profiles. */
1999 if (a->loop_father == b->loop_father)
2000 {
2001 a->count = MAX (a->count, b->count);
2002 a->frequency = MAX (a->frequency, b->frequency);
2003 }
2004
2005 /* Merge the sequences. */
2006 last = gsi_last_bb (a);
2007 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2008 set_bb_seq (b, NULL);
2009
2010 if (cfgcleanup_altered_bbs)
2011 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2012 }
2013
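/* Worked example for the PHI handling above (a sketch, not verbatim
   compiler output). Before merging, with A the single predecessor of B:

     <bb A>:
       x_1 = ...;
       goto <bb B>;

     <bb B>:
       # x_2 = PHI <x_1(A)>
       use (x_2);

   Each PHI in B is necessarily single-argument. If the copy can be
   propagated, the uses of x_2 are rewritten to x_1 and the PHI is
   removed; otherwise a real copy "x_2 = x_1" is emitted at the end of
   A before the statement sequences are merged. */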
2014
2015 /* Return the one of the two successors of BB that is not reachable by a
2016 complex edge, if there is one. Else, return BB. We use
2017 this in optimizations that use post-dominators for their heuristics,
2018 to catch the cases in C++ where function calls are involved. */
2019
2020 basic_block
2021 single_noncomplex_succ (basic_block bb)
2022 {
2023 edge e0, e1;
2024 if (EDGE_COUNT (bb->succs) != 2)
2025 return bb;
2026
2027 e0 = EDGE_SUCC (bb, 0);
2028 e1 = EDGE_SUCC (bb, 1);
2029 if (e0->flags & EDGE_COMPLEX)
2030 return e1->dest;
2031 if (e1->flags & EDGE_COMPLEX)
2032 return e0->dest;
2033
2034 return bb;
2035 }
2036
2037 /* CALL is a GIMPLE call statement. Set current_function_calls_* flags. */
2038
2039 void
2040 notice_special_calls (gcall *call)
2041 {
2042 int flags = gimple_call_flags (call);
2043
2044 if (flags & ECF_MAY_BE_ALLOCA)
2045 cfun->calls_alloca = true;
2046 if (flags & ECF_RETURNS_TWICE)
2047 cfun->calls_setjmp = true;
2048 }
2049
2050
2051 /* Clear flags set by notice_special_calls. Used by dead code removal
2052 to update the flags. */
2053
2054 void
2055 clear_special_calls (void)
2056 {
2057 cfun->calls_alloca = false;
2058 cfun->calls_setjmp = false;
2059 }
2060
2061 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2062
2063 static void
2064 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2065 {
2066 /* Since this block is no longer reachable, we can just delete all
2067 of its PHI nodes. */
2068 remove_phi_nodes (bb);
2069
2070 /* Remove edges to BB's successors. */
2071 while (EDGE_COUNT (bb->succs) > 0)
2072 remove_edge (EDGE_SUCC (bb, 0));
2073 }
2074
2075
2076 /* Remove statements of basic block BB. */
2077
2078 static void
2079 remove_bb (basic_block bb)
2080 {
2081 gimple_stmt_iterator i;
2082
2083 if (dump_file)
2084 {
2085 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2086 if (dump_flags & TDF_DETAILS)
2087 {
2088 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2089 fprintf (dump_file, "\n");
2090 }
2091 }
2092
2093 if (current_loops)
2094 {
2095 struct loop *loop = bb->loop_father;
2096
2097 /* If a loop gets removed, clean up the information associated
2098 with it. */
2099 if (loop->latch == bb
2100 || loop->header == bb)
2101 free_numbers_of_iterations_estimates_loop (loop);
2102 }
2103
2104 /* Remove all the instructions in the block. */
2105 if (bb_seq (bb) != NULL)
2106 {
2107 /* Walk backwards so as to get a chance to substitute all
2108 released DEFs into debug stmts. See
2109 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2110 details. */
2111 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2112 {
2113 gimple *stmt = gsi_stmt (i);
2114 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2115 if (label_stmt
2116 && (FORCED_LABEL (gimple_label_label (label_stmt))
2117 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2118 {
2119 basic_block new_bb;
2120 gimple_stmt_iterator new_gsi;
2121
2122 /* A non-reachable non-local label may still be referenced.
2123 But it no longer needs to carry the extra semantics of
2124 non-locality. */
2125 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2126 {
2127 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2128 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2129 }
2130
2131 new_bb = bb->prev_bb;
2132 new_gsi = gsi_start_bb (new_bb);
2133 gsi_remove (&i, false);
2134 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2135 }
2136 else
2137 {
2138 /* Release SSA definitions. */
2139 release_defs (stmt);
2140 gsi_remove (&i, true);
2141 }
2142
2143 if (gsi_end_p (i))
2144 i = gsi_last_bb (bb);
2145 else
2146 gsi_prev (&i);
2147 }
2148 }
2149
2150 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2151 bb->il.gimple.seq = NULL;
2152 bb->il.gimple.phi_nodes = NULL;
2153 }
2154
2155
2156 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or a
2157 computed goto, and a predicate VAL, return the edge that will be
2158 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
2159
2160 edge
2161 find_taken_edge (basic_block bb, tree val)
2162 {
2163 gimple *stmt;
2164
2165 stmt = last_stmt (bb);
2166
2167 gcc_assert (stmt);
2168 gcc_assert (is_ctrl_stmt (stmt));
2169
2170 if (val == NULL)
2171 return NULL;
2172
2173 if (!is_gimple_min_invariant (val))
2174 return NULL;
2175
2176 if (gimple_code (stmt) == GIMPLE_COND)
2177 return find_taken_edge_cond_expr (bb, val);
2178
2179 if (gimple_code (stmt) == GIMPLE_SWITCH)
2180 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2181
2182 if (computed_goto_p (stmt))
2183 {
2184 /* Only optimize if the argument is a label; if the argument is
2185 not a label then we cannot construct a proper CFG.
2186
2187 It may be the case that we only need to allow the LABEL_REF to
2188 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2189 appear inside a LABEL_EXPR just to be safe. */
2190 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2191 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2192 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2193 return NULL;
2194 }
2195
2196 gcc_unreachable ();
2197 }
2198
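/* Usage sketch (hypothetical caller; CFG cleanup is the typical real
   consumer). After folding the control predicate of a block to a
   constant, a pass can prune the edges that can no longer be taken:

     gimple *stmt = last_stmt (bb);
     tree val = integer_zero_node;  // pretend the predicate folded to 0
     edge taken = find_taken_edge (bb, val);
     if (taken)
       {
         // Every other outgoing edge of BB is now dead and can be
         // removed; only TAKEN survives.
       }
*/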
2199 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2200 statement, determine which of the outgoing edges will be taken out of the
2201 block. Return NULL if any edge may be taken. */
2202
2203 static edge
2204 find_taken_edge_computed_goto (basic_block bb, tree val)
2205 {
2206 basic_block dest;
2207 edge e = NULL;
2208
2209 dest = label_to_block (val);
2210 if (dest)
2211 {
2212 e = find_edge (bb, dest);
2213 gcc_assert (e != NULL);
2214 }
2215
2216 return e;
2217 }
2218
2219 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2220 statement, determine which of the two edges will be taken out of the
2221 block. Return NULL if either edge may be taken. */
2222
2223 static edge
2224 find_taken_edge_cond_expr (basic_block bb, tree val)
2225 {
2226 edge true_edge, false_edge;
2227
2228 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2229
2230 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2231 return (integer_zerop (val) ? false_edge : true_edge);
2232 }
2233
2234 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2235 statement, determine which edge will be taken out of the block. Return
2236 NULL if any edge may be taken. */
2237
2238 static edge
2239 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2240 tree val)
2241 {
2242 basic_block dest_bb;
2243 edge e;
2244 tree taken_case;
2245
2246 taken_case = find_case_label_for_value (switch_stmt, val);
2247 dest_bb = label_to_block (CASE_LABEL (taken_case));
2248
2249 e = find_edge (bb, dest_bb);
2250 gcc_assert (e);
2251 return e;
2252 }
2253
2254
2255 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2256 We can make optimal use here of the fact that the case labels are
2257 sorted: We can do a binary search for a case matching VAL. */
2258
2259 static tree
2260 find_case_label_for_value (gswitch *switch_stmt, tree val)
2261 {
2262 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2263 tree default_case = gimple_switch_default_label (switch_stmt);
2264
2265 for (low = 0, high = n; high - low > 1; )
2266 {
2267 size_t i = (high + low) / 2;
2268 tree t = gimple_switch_label (switch_stmt, i);
2269 int cmp;
2270
2271 /* Cache the result of comparing CASE_LOW and val. */
2272 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2273
2274 if (cmp > 0)
2275 high = i;
2276 else
2277 low = i;
2278
2279 if (CASE_HIGH (t) == NULL)
2280 {
2281 /* A single-valued case label. */
2282 if (cmp == 0)
2283 return t;
2284 }
2285 else
2286 {
2287 /* A case range. We can only handle integer ranges. */
2288 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2289 return t;
2290 }
2291 }
2292
2293 return default_case;
2294 }
2295
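/* Worked example for the binary search above (sketch). For

     switch (i) { default: ...; case 1: ...; case 5 ... 10: ...; case 42: ...; }

   gimple_switch_label holds the default label at index 0 and the other
   labels sorted by CASE_LOW at indices 1..3. Looking up VAL = 7:
   [low, high) starts as [0, 4); the midpoint index 2 has CASE_LOW 5 <= 7,
   so low becomes 2, and since its CASE_HIGH 10 is >= 7 the range label
   "case 5 ... 10" is returned immediately. A VAL of, say, 3 never
   matches any label and falls through to the default case. */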
2296
2297 /* Dump a basic block on stderr. */
2298
2299 void
2300 gimple_debug_bb (basic_block bb)
2301 {
2302 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2303 }
2304
2305
2306 /* Dump basic block with index N on stderr. */
2307
2308 basic_block
2309 gimple_debug_bb_n (int n)
2310 {
2311 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2312 return BASIC_BLOCK_FOR_FN (cfun, n);
2313 }
2314
2315
2316 /* Dump the CFG on stderr.
2317
2318 FLAGS are the same used by the tree dumping functions
2319 (see TDF_* in dumpfile.h). */
2320
2321 void
2322 gimple_debug_cfg (int flags)
2323 {
2324 gimple_dump_cfg (stderr, flags);
2325 }
2326
2327
2328 /* Dump the program showing basic block boundaries on the given FILE.
2329
2330 FLAGS are the same used by the tree dumping functions (see TDF_* in
2331 tree.h). */
2332
2333 void
2334 gimple_dump_cfg (FILE *file, int flags)
2335 {
2336 if (flags & TDF_DETAILS)
2337 {
2338 dump_function_header (file, current_function_decl, flags);
2339 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2340 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2341 last_basic_block_for_fn (cfun));
2342
2343 brief_dump_cfg (file, flags | TDF_COMMENT);
2344 fprintf (file, "\n");
2345 }
2346
2347 if (flags & TDF_STATS)
2348 dump_cfg_stats (file);
2349
2350 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2351 }
2352
2353
2354 /* Dump CFG statistics on FILE. */
2355
2356 void
2357 dump_cfg_stats (FILE *file)
2358 {
2359 static long max_num_merged_labels = 0;
2360 unsigned long size, total = 0;
2361 long num_edges;
2362 basic_block bb;
2363 const char * const fmt_str = "%-30s%-13s%12s\n";
2364 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2365 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2366 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2367 const char *funcname = current_function_name ();
2368
2369 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2370
2371 fprintf (file, "---------------------------------------------------------\n");
2372 fprintf (file, fmt_str, "", " Number of ", "Memory");
2373 fprintf (file, fmt_str, "", " instances ", "used ");
2374 fprintf (file, "---------------------------------------------------------\n");
2375
2376 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2377 total += size;
2378 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2379 SCALE (size), LABEL (size));
2380
2381 num_edges = 0;
2382 FOR_EACH_BB_FN (bb, cfun)
2383 num_edges += EDGE_COUNT (bb->succs);
2384 size = num_edges * sizeof (struct edge_def);
2385 total += size;
2386 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2387
2388 fprintf (file, "---------------------------------------------------------\n");
2389 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2390 LABEL (total));
2391 fprintf (file, "---------------------------------------------------------\n");
2392 fprintf (file, "\n");
2393
2394 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2395 max_num_merged_labels = cfg_stats.num_merged_labels;
2396
2397 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2398 cfg_stats.num_merged_labels, max_num_merged_labels);
2399
2400 fprintf (file, "\n");
2401 }
2402
2403
2404 /* Dump CFG statistics on stderr. Keep extern so that it's always
2405 linked in the final executable. */
2406
2407 DEBUG_FUNCTION void
2408 debug_cfg_stats (void)
2409 {
2410 dump_cfg_stats (stderr);
2411 }
2412
2413 /*---------------------------------------------------------------------------
2414 Miscellaneous helpers
2415 ---------------------------------------------------------------------------*/
2416
2417 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2418 flow. Transfers of control flow associated with EH are excluded. */
2419
2420 static bool
2421 call_can_make_abnormal_goto (gimple *t)
2422 {
2423 /* If the function has no non-local labels and does not call setjmp,
2424 then a call cannot make an abnormal transfer of control. */
2425 if (!cfun->has_nonlocal_label
2426 && !cfun->calls_setjmp)
2427 return false;
2428
2429 /* Likewise if the call has no side effects. */
2430 if (!gimple_has_side_effects (t))
2431 return false;
2432
2433 /* Likewise if the called function is leaf. */
2434 if (gimple_call_flags (t) & ECF_LEAF)
2435 return false;
2436
2437 return true;
2438 }
2439
2440
2441 /* Return true if T can make an abnormal transfer of control flow.
2442 Transfers of control flow associated with EH are excluded. */
2443
2444 bool
2445 stmt_can_make_abnormal_goto (gimple *t)
2446 {
2447 if (computed_goto_p (t))
2448 return true;
2449 if (is_gimple_call (t))
2450 return call_can_make_abnormal_goto (t);
2451 return false;
2452 }
2453
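/* Illustrative source-level example (a sketch of when these predicates
   fire, not code from this file):

     jmp_buf buf;

     void f (void)
     {
       if (setjmp (buf) == 0)  // cfun->calls_setjmp is set for f
         g ();                 // g may longjmp back: this call gets an
                               // abnormal edge to the dispatcher block
     }

   Likewise a call in a function with non-local labels (targets of
   gotos from nested functions) is treated as a potential abnormal-goto
   source, unless the call has no side effects or the callee is
   declared leaf. */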
2454
2455 /* Return true if T represents a stmt that always transfers control. */
2456
2457 bool
2458 is_ctrl_stmt (gimple *t)
2459 {
2460 switch (gimple_code (t))
2461 {
2462 case GIMPLE_COND:
2463 case GIMPLE_SWITCH:
2464 case GIMPLE_GOTO:
2465 case GIMPLE_RETURN:
2466 case GIMPLE_RESX:
2467 return true;
2468 default:
2469 return false;
2470 }
2471 }
2472
2473
2474 /* Return true if T is a statement that may alter the flow of control
2475 (e.g., a call to a non-returning function). */
2476
2477 bool
2478 is_ctrl_altering_stmt (gimple *t)
2479 {
2480 gcc_assert (t);
2481
2482 switch (gimple_code (t))
2483 {
2484 case GIMPLE_CALL:
2485 /* The per-stmt call flag indicates whether the call could alter
2486 control flow. */
2487 if (gimple_call_ctrl_altering_p (t))
2488 return true;
2489 break;
2490
2491 case GIMPLE_EH_DISPATCH:
2492 /* EH_DISPATCH branches to the individual catch handlers at
2493 this level of a try or allowed-exceptions region. It can
2494 fallthru to the next statement as well. */
2495 return true;
2496
2497 case GIMPLE_ASM:
2498 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2499 return true;
2500 break;
2501
2502 CASE_GIMPLE_OMP:
2503 /* OpenMP directives alter control flow. */
2504 return true;
2505
2506 case GIMPLE_TRANSACTION:
2507 /* A transaction start alters control flow. */
2508 return true;
2509
2510 default:
2511 break;
2512 }
2513
2514 /* If a statement can throw, it alters control flow. */
2515 return stmt_can_throw_internal (t);
2516 }
2517
2518
2519 /* Return true if T is a simple local goto. */
2520
2521 bool
2522 simple_goto_p (gimple *t)
2523 {
2524 return (gimple_code (t) == GIMPLE_GOTO
2525 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2526 }
2527
2528
2529 /* Return true if STMT should start a new basic block. PREV_STMT is
2530 the statement preceding STMT. It is used when STMT is a label or a
2531 case label. Labels should only start a new basic block if their
2532 previous statement wasn't a label. Otherwise, a sequence of labels
2533 would generate unnecessary basic blocks that only contain a single
2534 label. */
2535
2536 static inline bool
2537 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2538 {
2539 if (stmt == NULL)
2540 return false;
2541
2542 /* Labels start a new basic block only if the preceding statement
2543 wasn't a label of the same type. This prevents the creation of
2544 consecutive blocks that have nothing but a single label. */
2545 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2546 {
2547 /* Nonlocal and computed GOTO targets always start a new block. */
2548 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2549 || FORCED_LABEL (gimple_label_label (label_stmt)))
2550 return true;
2551
2552 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2553 {
2554 if (DECL_NONLOCAL (gimple_label_label (
2555 as_a <glabel *> (prev_stmt))))
2556 return true;
2557
2558 cfg_stats.num_merged_labels++;
2559 return false;
2560 }
2561 else
2562 return true;
2563 }
2564 else if (gimple_code (stmt) == GIMPLE_CALL
2565 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2566 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2567 start a new block. */
2568 return true;
2569
2570 return false;
2571 }
2572
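/* Example for the label coalescing above (sketch):

     L1:
     L2:
       x_1 = y_2 + z_3;

   Only L1 starts a new basic block; L2 is merged into the same block
   and counted in cfg_stats.num_merged_labels. A non-local or forced
   label in the same position would start a block of its own, as would
   any label directly following a DECL_NONLOCAL label. */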
2573
2574 /* Return true if T should end a basic block. */
2575
2576 bool
2577 stmt_ends_bb_p (gimple *t)
2578 {
2579 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2580 }
2581
2582 /* Remove block annotations and other data structures. */
2583
2584 void
2585 delete_tree_cfg_annotations (struct function *fn)
2586 {
2587 vec_free (label_to_block_map_for_fn (fn));
2588 }
2589
2590 /* Return the virtual PHI node in BB, or NULL if there is none. */
2591
2592 gphi *
2593 get_virtual_phi (basic_block bb)
2594 {
2595 for (gphi_iterator gsi = gsi_start_phis (bb);
2596 !gsi_end_p (gsi);
2597 gsi_next (&gsi))
2598 {
2599 gphi *phi = gsi.phi ();
2600
2601 if (virtual_operand_p (PHI_RESULT (phi)))
2602 return phi;
2603 }
2604
2605 return NULL;
2606 }
2607
2608 /* Return the first statement in basic block BB. */
2609
2610 gimple *
2611 first_stmt (basic_block bb)
2612 {
2613 gimple_stmt_iterator i = gsi_start_bb (bb);
2614 gimple *stmt = NULL;
2615
2616 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2617 {
2618 gsi_next (&i);
2619 stmt = NULL;
2620 }
2621 return stmt;
2622 }
2623
2624 /* Return the first non-label statement in basic block BB. */
2625
2626 static gimple *
2627 first_non_label_stmt (basic_block bb)
2628 {
2629 gimple_stmt_iterator i = gsi_start_bb (bb);
2630 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2631 gsi_next (&i);
2632 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2633 }
2634
2635 /* Return the last statement in basic block BB. */
2636
2637 gimple *
2638 last_stmt (basic_block bb)
2639 {
2640 gimple_stmt_iterator i = gsi_last_bb (bb);
2641 gimple *stmt = NULL;
2642
2643 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2644 {
2645 gsi_prev (&i);
2646 stmt = NULL;
2647 }
2648 return stmt;
2649 }
2650
2651 /* Return the last statement of an otherwise empty block. Return NULL
2652 if the block is totally empty, or if it contains more than one
2653 statement. */
2654
2655 gimple *
2656 last_and_only_stmt (basic_block bb)
2657 {
2658 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2659 gimple *last, *prev;
2660
2661 if (gsi_end_p (i))
2662 return NULL;
2663
2664 last = gsi_stmt (i);
2665 gsi_prev_nondebug (&i);
2666 if (gsi_end_p (i))
2667 return last;
2668
2669 /* Empty statements should no longer appear in the instruction stream.
2670 Everything that might have appeared before should be deleted by
2671 remove_useless_stmts, and the optimizers should just gsi_remove
2672 instead of smashing with build_empty_stmt.
2673
2674 Thus the only thing that should appear here in a block containing
2675 one executable statement is a label. */
2676 prev = gsi_stmt (i);
2677 if (gimple_code (prev) == GIMPLE_LABEL)
2678 return last;
2679 else
2680 return NULL;
2681 }
2682
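/* Example (sketch): for a block containing

     L7:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since the only earlier
   statement is a label. With a second executable statement present it
   returns NULL, and debug statements are skipped entirely by the
   nondebug iterators used above. */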
2683 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2684
2685 static void
2686 reinstall_phi_args (edge new_edge, edge old_edge)
2687 {
2688 edge_var_map *vm;
2689 int i;
2690 gphi_iterator phis;
2691
2692 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2693 if (!v)
2694 return;
2695
2696 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2697 v->iterate (i, &vm) && !gsi_end_p (phis);
2698 i++, gsi_next (&phis))
2699 {
2700 gphi *phi = phis.phi ();
2701 tree result = redirect_edge_var_map_result (vm);
2702 tree arg = redirect_edge_var_map_def (vm);
2703
2704 gcc_assert (result == gimple_phi_result (phi));
2705
2706 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2707 }
2708
2709 redirect_edge_var_map_clear (old_edge);
2710 }
2711
2712 /* Returns the basic block after which the new basic block created
2713 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2714 near its "logical" location. This is of most help to humans looking
2715 at debugging dumps. */
2716
2717 basic_block
2718 split_edge_bb_loc (edge edge_in)
2719 {
2720 basic_block dest = edge_in->dest;
2721 basic_block dest_prev = dest->prev_bb;
2722
2723 if (dest_prev)
2724 {
2725 edge e = find_edge (dest_prev, dest);
2726 if (e && !(e->flags & EDGE_COMPLEX))
2727 return edge_in->src;
2728 }
2729 return dest_prev;
2730 }
2731
2732 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2733 Abort on abnormal edges. */
2734
2735 static basic_block
2736 gimple_split_edge (edge edge_in)
2737 {
2738 basic_block new_bb, after_bb, dest;
2739 edge new_edge, e;
2740
2741 /* Abnormal edges cannot be split. */
2742 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2743
2744 dest = edge_in->dest;
2745
2746 after_bb = split_edge_bb_loc (edge_in);
2747
2748 new_bb = create_empty_bb (after_bb);
2749 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2750 new_bb->count = edge_in->count;
2751 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2752 new_edge->probability = REG_BR_PROB_BASE;
2753 new_edge->count = edge_in->count;
2754
2755 e = redirect_edge_and_branch (edge_in, new_bb);
2756 gcc_assert (e == edge_in);
2757 reinstall_phi_args (new_edge, e);
2758
2759 return new_bb;
2760 }
2761
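/* Usage sketch (hypothetical caller). Passes normally split edges via
   the generic cfghooks wrapper split_edge, which dispatches here for
   GIMPLE:

     // Given some critical, non-abnormal edge E and a statement STMT
     // that must execute exactly when E is taken:
     if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
       {
         basic_block mid = split_edge (e);   // wrapper over this hook
         gimple_stmt_iterator gsi = gsi_last_bb (mid);
         gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
       }

   The new block inherits E's count and frequency, and the PHI
   arguments queued by the redirection are reinstalled on the fallthru
   edge by reinstall_phi_args. */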
2762
2763 /* Verify properties of the address expression T with base object BASE. */
2764
2765 static tree
2766 verify_address (tree t, tree base)
2767 {
2768 bool old_constant;
2769 bool old_side_effects;
2770 bool new_constant;
2771 bool new_side_effects;
2772
2773 old_constant = TREE_CONSTANT (t);
2774 old_side_effects = TREE_SIDE_EFFECTS (t);
2775
2776 recompute_tree_invariant_for_addr_expr (t);
2777 new_side_effects = TREE_SIDE_EFFECTS (t);
2778 new_constant = TREE_CONSTANT (t);
2779
2780 if (old_constant != new_constant)
2781 {
2782 error ("constant not recomputed when ADDR_EXPR changed");
2783 return t;
2784 }
2785 if (old_side_effects != new_side_effects)
2786 {
2787 error ("side effects not recomputed when ADDR_EXPR changed");
2788 return t;
2789 }
2790
2791 if (!(TREE_CODE (base) == VAR_DECL
2792 || TREE_CODE (base) == PARM_DECL
2793 || TREE_CODE (base) == RESULT_DECL))
2794 return NULL_TREE;
2795
2796 if (DECL_GIMPLE_REG_P (base))
2797 {
2798 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2799 return base;
2800 }
2801
2802 return NULL_TREE;
2803 }
2804
2805 /* Callback for walk_tree, check that all elements with address taken are
2806 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2807 inside a PHI node. */
2808
2809 static tree
2810 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2811 {
2812 tree t = *tp, x;
2813
2814 if (TYPE_P (t))
2815 *walk_subtrees = 0;
2816
2817 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2818 #define CHECK_OP(N, MSG) \
2819 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2820 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2821
2822 switch (TREE_CODE (t))
2823 {
2824 case SSA_NAME:
2825 if (SSA_NAME_IN_FREE_LIST (t))
2826 {
2827 error ("SSA name in freelist but still referenced");
2828 return *tp;
2829 }
2830 break;
2831
2832 case PARM_DECL:
2833 case VAR_DECL:
2834 case RESULT_DECL:
2835 {
2836 tree context = decl_function_context (t);
2837 if (context != cfun->decl
2838 && !SCOPE_FILE_SCOPE_P (context)
2839 && !TREE_STATIC (t)
2840 && !DECL_EXTERNAL (t))
2841 {
2842 error ("Local declaration from a different function");
2843 return t;
2844 }
2845 }
2846 break;
2847
2848 case INDIRECT_REF:
2849 error ("INDIRECT_REF in gimple IL");
2850 return t;
2851
2852 case MEM_REF:
2853 x = TREE_OPERAND (t, 0);
2854 if (!POINTER_TYPE_P (TREE_TYPE (x))
2855 || !is_gimple_mem_ref_addr (x))
2856 {
2857 error ("invalid first operand of MEM_REF");
2858 return x;
2859 }
2860 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2861 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2862 {
2863 error ("invalid offset operand of MEM_REF");
2864 return TREE_OPERAND (t, 1);
2865 }
2866 if (TREE_CODE (x) == ADDR_EXPR)
2867 {
2868 tree va = verify_address (x, TREE_OPERAND (x, 0));
2869 if (va)
2870 return va;
2871 x = TREE_OPERAND (x, 0);
2872 }
2873 walk_tree (&x, verify_expr, data, NULL);
2874 *walk_subtrees = 0;
2875 break;
2876
2877 case ASSERT_EXPR:
2878 x = fold (ASSERT_EXPR_COND (t));
2879 if (x == boolean_false_node)
2880 {
2881 error ("ASSERT_EXPR with an always-false condition");
2882 return *tp;
2883 }
2884 break;
2885
2886 case MODIFY_EXPR:
2887 error ("MODIFY_EXPR not expected while having tuples");
2888 return *tp;
2889
2890 case ADDR_EXPR:
2891 {
2892 tree tem;
2893
2894 gcc_assert (is_gimple_address (t));
2895
2896 /* Skip any references (they will be checked when we recurse down the
2897 tree) and ensure that any variable used as a prefix is marked
2898 addressable. */
2899 for (x = TREE_OPERAND (t, 0);
2900 handled_component_p (x);
2901 x = TREE_OPERAND (x, 0))
2902 ;
2903
2904 if ((tem = verify_address (t, x)))
2905 return tem;
2906
2907 if (!(TREE_CODE (x) == VAR_DECL
2908 || TREE_CODE (x) == PARM_DECL
2909 || TREE_CODE (x) == RESULT_DECL))
2910 return NULL;
2911
2912 if (!TREE_ADDRESSABLE (x))
2913 {
2914 error ("address taken, but ADDRESSABLE bit not set");
2915 return x;
2916 }
2917
2918 break;
2919 }
2920
2921 case COND_EXPR:
2922 x = COND_EXPR_COND (t);
2923 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2924 {
2925 error ("non-integral used in condition");
2926 return x;
2927 }
2928 if (!is_gimple_condexpr (x))
2929 {
2930 error ("invalid conditional operand");
2931 return x;
2932 }
2933 break;
2934
2935 case NON_LVALUE_EXPR:
2936 case TRUTH_NOT_EXPR:
2937 gcc_unreachable ();
2938
2939 CASE_CONVERT:
2940 case FIX_TRUNC_EXPR:
2941 case FLOAT_EXPR:
2942 case NEGATE_EXPR:
2943 case ABS_EXPR:
2944 case BIT_NOT_EXPR:
2945 CHECK_OP (0, "invalid operand to unary operator");
2946 break;
2947
2948 case REALPART_EXPR:
2949 case IMAGPART_EXPR:
2950 case BIT_FIELD_REF:
2951 if (!is_gimple_reg_type (TREE_TYPE (t)))
2952 {
2953 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2954 return t;
2955 }
2956
2957 if (TREE_CODE (t) == BIT_FIELD_REF)
2958 {
2959 tree t0 = TREE_OPERAND (t, 0);
2960 tree t1 = TREE_OPERAND (t, 1);
2961 tree t2 = TREE_OPERAND (t, 2);
2962 if (!tree_fits_uhwi_p (t1)
2963 || !tree_fits_uhwi_p (t2))
2964 {
2965 error ("invalid position or size operand to BIT_FIELD_REF");
2966 return t;
2967 }
2968 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2969 && (TYPE_PRECISION (TREE_TYPE (t))
2970 != tree_to_uhwi (t1)))
2971 {
2972 error ("integral result type precision does not match "
2973 "field size of BIT_FIELD_REF");
2974 return t;
2975 }
2976 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2977 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2978 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2979 != tree_to_uhwi (t1)))
2980 {
2981 error ("mode size of non-integral result does not "
2982 "match field size of BIT_FIELD_REF");
2983 return t;
2984 }
2985 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2986 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2987 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2988 {
2989 error ("position plus size exceeds size of referenced object in "
2990 "BIT_FIELD_REF");
2991 return t;
2992 }
2993 }
2994 t = TREE_OPERAND (t, 0);
2995
2996 /* Fall-through. */
2997 case COMPONENT_REF:
2998 case ARRAY_REF:
2999 case ARRAY_RANGE_REF:
3000 case VIEW_CONVERT_EXPR:
3001 /* We have a nest of references. Verify that each of the operands
3002 that determine where to reference is either a constant or a variable,
3003 verify that the base is valid, and then show we've already checked
3004 the subtrees. */
3005 while (handled_component_p (t))
3006 {
3007 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3008 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3009 else if (TREE_CODE (t) == ARRAY_REF
3010 || TREE_CODE (t) == ARRAY_RANGE_REF)
3011 {
3012 CHECK_OP (1, "invalid array index");
3013 if (TREE_OPERAND (t, 2))
3014 CHECK_OP (2, "invalid array lower bound");
3015 if (TREE_OPERAND (t, 3))
3016 CHECK_OP (3, "invalid array stride");
3017 }
3018 else if (TREE_CODE (t) == BIT_FIELD_REF
3019 || TREE_CODE (t) == REALPART_EXPR
3020 || TREE_CODE (t) == IMAGPART_EXPR)
3021 {
3022 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3023 "REALPART_EXPR");
3024 return t;
3025 }
3026
3027 t = TREE_OPERAND (t, 0);
3028 }
3029
3030 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3031 {
3032 error ("invalid reference prefix");
3033 return t;
3034 }
3035 walk_tree (&t, verify_expr, data, NULL);
3036 *walk_subtrees = 0;
3037 break;
3038 case PLUS_EXPR:
3039 case MINUS_EXPR:
3040 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3041 arithmetic should be done using POINTER_PLUS_EXPR. */
3042 if (POINTER_TYPE_P (TREE_TYPE (t)))
3043 {
3044 error ("invalid operand to plus/minus, type is a pointer");
3045 return t;
3046 }
3047 CHECK_OP (0, "invalid operand to binary operator");
3048 CHECK_OP (1, "invalid operand to binary operator");
3049 break;
3050
3051 case POINTER_PLUS_EXPR:
3052 /* Check to make sure the first operand is a pointer or reference type. */
3053 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3054 {
3055 error ("invalid operand to pointer plus, first operand is not a pointer");
3056 return t;
3057 }
3058 /* Check to make sure the second operand is a ptrofftype. */
3059 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3060 {
3061 error ("invalid operand to pointer plus, second operand is not an "
3062 "integer type of appropriate width");
3063 return t;
3064 }
3065 /* FALLTHROUGH */
3066 case LT_EXPR:
3067 case LE_EXPR:
3068 case GT_EXPR:
3069 case GE_EXPR:
3070 case EQ_EXPR:
3071 case NE_EXPR:
3072 case UNORDERED_EXPR:
3073 case ORDERED_EXPR:
3074 case UNLT_EXPR:
3075 case UNLE_EXPR:
3076 case UNGT_EXPR:
3077 case UNGE_EXPR:
3078 case UNEQ_EXPR:
3079 case LTGT_EXPR:
3080 case MULT_EXPR:
3081 case TRUNC_DIV_EXPR:
3082 case CEIL_DIV_EXPR:
3083 case FLOOR_DIV_EXPR:
3084 case ROUND_DIV_EXPR:
3085 case TRUNC_MOD_EXPR:
3086 case CEIL_MOD_EXPR:
3087 case FLOOR_MOD_EXPR:
3088 case ROUND_MOD_EXPR:
3089 case RDIV_EXPR:
3090 case EXACT_DIV_EXPR:
3091 case MIN_EXPR:
3092 case MAX_EXPR:
3093 case LSHIFT_EXPR:
3094 case RSHIFT_EXPR:
3095 case LROTATE_EXPR:
3096 case RROTATE_EXPR:
3097 case BIT_IOR_EXPR:
3098 case BIT_XOR_EXPR:
3099 case BIT_AND_EXPR:
3100 CHECK_OP (0, "invalid operand to binary operator");
3101 CHECK_OP (1, "invalid operand to binary operator");
3102 break;
3103
3104 case CONSTRUCTOR:
3105 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3106 *walk_subtrees = 0;
3107 break;
3108
3109 case CASE_LABEL_EXPR:
3110 if (CASE_CHAIN (t))
3111 {
3112 error ("invalid CASE_CHAIN");
3113 return t;
3114 }
3115 break;
3116
3117 default:
3118 break;
3119 }
3120 return NULL;
3121
3122 #undef CHECK_OP
3123 }
3124
3125
3126 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3127 Returns true if there is an error, otherwise false. */
3128
3129 static bool
3130 verify_types_in_gimple_min_lval (tree expr)
3131 {
3132 tree op;
3133
3134 if (is_gimple_id (expr))
3135 return false;
3136
3137 if (TREE_CODE (expr) != TARGET_MEM_REF
3138 && TREE_CODE (expr) != MEM_REF)
3139 {
3140 error ("invalid expression for min lvalue");
3141 return true;
3142 }
3143
3144 /* TARGET_MEM_REFs are strange beasts. */
3145 if (TREE_CODE (expr) == TARGET_MEM_REF)
3146 return false;
3147
3148 op = TREE_OPERAND (expr, 0);
3149 if (!is_gimple_val (op))
3150 {
3151 error ("invalid operand in indirect reference");
3152 debug_generic_stmt (op);
3153 return true;
3154 }
3155 /* Memory references now generally can involve a value conversion. */
3156
3157 return false;
3158 }
3159
3160 /* Verify if EXPR is a valid GIMPLE reference expression. If
3161 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3162 if there is an error, otherwise false. */
3163
3164 static bool
3165 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3166 {
3167 while (handled_component_p (expr))
3168 {
3169 tree op = TREE_OPERAND (expr, 0);
3170
3171 if (TREE_CODE (expr) == ARRAY_REF
3172 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3173 {
3174 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3175 || (TREE_OPERAND (expr, 2)
3176 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3177 || (TREE_OPERAND (expr, 3)
3178 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3179 {
3180 error ("invalid operands to array reference");
3181 debug_generic_stmt (expr);
3182 return true;
3183 }
3184 }
3185
3186 /* Verify if the reference array element types are compatible. */
3187 if (TREE_CODE (expr) == ARRAY_REF
3188 && !useless_type_conversion_p (TREE_TYPE (expr),
3189 TREE_TYPE (TREE_TYPE (op))))
3190 {
3191 error ("type mismatch in array reference");
3192 debug_generic_stmt (TREE_TYPE (expr));
3193 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3194 return true;
3195 }
3196 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3197 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3198 TREE_TYPE (TREE_TYPE (op))))
3199 {
3200 error ("type mismatch in array range reference");
3201 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3202 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3203 return true;
3204 }
3205
3206 if ((TREE_CODE (expr) == REALPART_EXPR
3207 || TREE_CODE (expr) == IMAGPART_EXPR)
3208 && !useless_type_conversion_p (TREE_TYPE (expr),
3209 TREE_TYPE (TREE_TYPE (op))))
3210 {
3211 error ("type mismatch in real/imagpart reference");
3212 debug_generic_stmt (TREE_TYPE (expr));
3213 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3214 return true;
3215 }
3216
3217 if (TREE_CODE (expr) == COMPONENT_REF
3218 && !useless_type_conversion_p (TREE_TYPE (expr),
3219 TREE_TYPE (TREE_OPERAND (expr, 1))))
3220 {
3221 error ("type mismatch in component reference");
3222 debug_generic_stmt (TREE_TYPE (expr));
3223 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3224 return true;
3225 }
3226
3227 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3228 {
3229 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3230 that their operand is not an SSA name or an invariant when
3231 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3232 bug). Otherwise there is nothing to verify, gross mismatches at
3233 most invoke undefined behavior. */
3234 if (require_lvalue
3235 && (TREE_CODE (op) == SSA_NAME
3236 || is_gimple_min_invariant (op)))
3237 {
3238 error ("conversion of an SSA_NAME on the left hand side");
3239 debug_generic_stmt (expr);
3240 return true;
3241 }
3242 else if (TREE_CODE (op) == SSA_NAME
3243 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3244 {
3245 error ("conversion of register to a different size");
3246 debug_generic_stmt (expr);
3247 return true;
3248 }
3249 else if (!handled_component_p (op))
3250 return false;
3251 }
3252
3253 expr = op;
3254 }
3255
3256 if (TREE_CODE (expr) == MEM_REF)
3257 {
3258 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3259 {
3260 error ("invalid address operand in MEM_REF");
3261 debug_generic_stmt (expr);
3262 return true;
3263 }
3264 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3265 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3266 {
3267 error ("invalid offset operand in MEM_REF");
3268 debug_generic_stmt (expr);
3269 return true;
3270 }
3271 }
3272 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3273 {
3274 if (!TMR_BASE (expr)
3275 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3276 {
3277 error ("invalid address operand in TARGET_MEM_REF");
3278 return true;
3279 }
3280 if (!TMR_OFFSET (expr)
3281 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3282 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3283 {
3284 error ("invalid offset operand in TARGET_MEM_REF");
3285 debug_generic_stmt (expr);
3286 return true;
3287 }
3288 }
3289
3290 return ((require_lvalue || !is_gimple_min_invariant (expr))
3291 && verify_types_in_gimple_min_lval (expr));
3292 }
3293
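/* Example of the MEM_REF invariants checked above (sketch). A valid
   GIMPLE memory reference prints as

     MEM[(int *)p_1 + 4B]

   where operand 0 (p_1) satisfies is_gimple_mem_ref_addr and operand 1
   is an INTEGER_CST of pointer type: its value is the byte offset,
   while its type records the pointer type the access is performed
   with, which is what alias analysis consults. */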
3294 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3295 list of pointer-to types that is trivially convertible to DEST. */
3296
3297 static bool
3298 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3299 {
3300 tree src;
3301
3302 if (!TYPE_POINTER_TO (src_obj))
3303 return true;
3304
3305 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3306 if (useless_type_conversion_p (dest, src))
3307 return true;
3308
3309 return false;
3310 }
3311
3312 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3313 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3314
3315 static bool
3316 valid_fixed_convert_types_p (tree type1, tree type2)
3317 {
3318 return (FIXED_POINT_TYPE_P (type1)
3319 && (INTEGRAL_TYPE_P (type2)
3320 || SCALAR_FLOAT_TYPE_P (type2)
3321 || FIXED_POINT_TYPE_P (type2)));
3322 }
3323
3324 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3325 is a problem, otherwise false. */
3326
3327 static bool
3328 verify_gimple_call (gcall *stmt)
3329 {
3330 tree fn = gimple_call_fn (stmt);
3331 tree fntype, fndecl;
3332 unsigned i;
3333
3334 if (gimple_call_internal_p (stmt))
3335 {
3336 if (fn)
3337 {
3338 error ("gimple call has two targets");
3339 debug_generic_stmt (fn);
3340 return true;
3341 }
3342 }
3343 else
3344 {
3345 if (!fn)
3346 {
3347 error ("gimple call has no target");
3348 return true;
3349 }
3350 }
3351
3352 if (fn && !is_gimple_call_addr (fn))
3353 {
3354 error ("invalid function in gimple call");
3355 debug_generic_stmt (fn);
3356 return true;
3357 }
3358
3359 if (fn
3360 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3361 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3362 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3363 {
3364 error ("non-function in gimple call");
3365 return true;
3366 }
3367
3368 fndecl = gimple_call_fndecl (stmt);
3369 if (fndecl
3370 && TREE_CODE (fndecl) == FUNCTION_DECL
3371 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3372 && !DECL_PURE_P (fndecl)
3373 && !TREE_READONLY (fndecl))
3374 {
3375 error ("invalid pure const state for function");
3376 return true;
3377 }
3378
3379 tree lhs = gimple_call_lhs (stmt);
3380 if (lhs
3381 && (!is_gimple_lvalue (lhs)
3382 || verify_types_in_gimple_reference (lhs, true)))
3383 {
3384 error ("invalid LHS in gimple call");
3385 return true;
3386 }
3387
3388 if (lhs
3389 && gimple_call_ctrl_altering_p (stmt)
3390 && gimple_call_noreturn_p (stmt)
3391 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST
3392 && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))
3393 {
3394 error ("LHS in noreturn call");
3395 return true;
3396 }
3397
3398 fntype = gimple_call_fntype (stmt);
3399 if (fntype
3400 && lhs
3401 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3402 /* ??? At least C++ misses conversions at assignments from
3403 void * call results.
3404 ??? Java is completely off. Especially with functions
3405 returning java.lang.Object.
3406 For now simply allow arbitrary pointer type conversions. */
3407 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3408 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3409 {
3410 error ("invalid conversion in gimple call");
3411 debug_generic_stmt (TREE_TYPE (lhs));
3412 debug_generic_stmt (TREE_TYPE (fntype));
3413 return true;
3414 }
3415
3416 if (gimple_call_chain (stmt)
3417 && !is_gimple_val (gimple_call_chain (stmt)))
3418 {
3419 error ("invalid static chain in gimple call");
3420 debug_generic_stmt (gimple_call_chain (stmt));
3421 return true;
3422 }
3423
3424 /* If there is a static chain argument, the call should either be
3425 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3426 if (gimple_call_chain (stmt)
3427 && fndecl
3428 && !DECL_STATIC_CHAIN (fndecl))
3429 {
3430 error ("static chain with function that doesn%'t use one");
3431 return true;
3432 }
3433
3434 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3435 {
3436 switch (DECL_FUNCTION_CODE (fndecl))
3437 {
3438 case BUILT_IN_UNREACHABLE:
3439 case BUILT_IN_TRAP:
3440 if (gimple_call_num_args (stmt) > 0)
3441 {
3442 /* Built-in unreachable with parameters might not be caught by
3443 the undefined behavior sanitizer. Front ends do check that users do
3444 not call them that way, but we also produce calls to
3445 __builtin_unreachable internally, for example when IPA figures
3446 out a call cannot happen in a legal program. In such cases,
3447 we must make sure arguments are stripped off. */
3448 error ("__builtin_unreachable or __builtin_trap call with "
3449 "arguments");
3450 return true;
3451 }
3452 break;
3453 default:
3454 break;
3455 }
3456 }
3457
3458 /* ??? The C frontend passes unpromoted arguments in case it
3459 didn't see a function declaration before the call. So for now
3460 leave the call arguments mostly unverified. Once we gimplify
3461 unit-at-a-time we have a chance to fix this. */
3462
3463 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3464 {
3465 tree arg = gimple_call_arg (stmt, i);
3466 if ((is_gimple_reg_type (TREE_TYPE (arg))
3467 && !is_gimple_val (arg))
3468 || (!is_gimple_reg_type (TREE_TYPE (arg))
3469 && !is_gimple_lvalue (arg)))
3470 {
3471 error ("invalid argument to gimple call");
3472 debug_generic_expr (arg);
3473 return true;
3474 }
3475 }
3476
3477 return false;
3478 }
3479
3480 /* Verifies the gimple comparison with the result type TYPE, the
3481 operands OP0 and OP1, and the comparison code CODE. */
3482
3483 static bool
3484 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3485 {
3486 tree op0_type = TREE_TYPE (op0);
3487 tree op1_type = TREE_TYPE (op1);
3488
3489 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3490 {
3491 error ("invalid operands in gimple comparison");
3492 return true;
3493 }
3494
3495 /* For comparisons we do not have the operation's type as the
3496 effective type the comparison is carried out in. Instead
3497 we require that either the first operand is trivially
3498 convertible into the second, or the other way around.
3499 Because we special-case pointers to void we allow
3500 comparisons of pointers with the same mode as well. */
3501 if (!useless_type_conversion_p (op0_type, op1_type)
3502 && !useless_type_conversion_p (op1_type, op0_type)
3503 && (!POINTER_TYPE_P (op0_type)
3504 || !POINTER_TYPE_P (op1_type)
3505 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3506 {
3507 error ("mismatching comparison operand types");
3508 debug_generic_expr (op0_type);
3509 debug_generic_expr (op1_type);
3510 return true;
3511 }
3512
3513 /* The resulting type of a comparison may be an effective boolean type. */
3514 if (INTEGRAL_TYPE_P (type)
3515 && (TREE_CODE (type) == BOOLEAN_TYPE
3516 || TYPE_PRECISION (type) == 1))
3517 {
3518 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3519 || TREE_CODE (op1_type) == VECTOR_TYPE)
3520 && code != EQ_EXPR && code != NE_EXPR
3521 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3522 && !VECTOR_INTEGER_TYPE_P (op0_type))
3523 {
3524 error ("unsupported operation or type for vector comparison"
3525 " returning a boolean");
3526 debug_generic_expr (op0_type);
3527 debug_generic_expr (op1_type);
3528 return true;
3529 }
3530 }
3531 /* Or a boolean vector type with the same element count
3532 as the comparison operand types. */
3533 else if (TREE_CODE (type) == VECTOR_TYPE
3534 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3535 {
3536 if (TREE_CODE (op0_type) != VECTOR_TYPE
3537 || TREE_CODE (op1_type) != VECTOR_TYPE)
3538 {
3539 error ("non-vector operands in vector comparison");
3540 debug_generic_expr (op0_type);
3541 debug_generic_expr (op1_type);
3542 return true;
3543 }
3544
3545 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3546 {
3547 error ("invalid vector comparison resulting type");
3548 debug_generic_expr (type);
3549 return true;
3550 }
3551 }
3552 else
3553 {
3554 error ("bogus comparison result type");
3555 debug_generic_expr (type);
3556 return true;
3557 }
3558
3559 return false;
3560 }
3561
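/* Examples for the result-type rules above (sketch): a scalar
   comparison like "tem_1 = a_2 < b_3" must produce a BOOLEAN_TYPE or a
   one-bit-precision integral type, while a vector comparison like
   "mask_4 = v_5 == v_6" must produce a boolean vector with the same
   number of subparts as the operand vectors. A plain int result with
   precision greater than one is rejected as a bogus comparison result
   type. */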
3562 /* Verify a gimple assignment statement STMT with an unary rhs.
3563 Returns true if anything is wrong. */
3564
3565 static bool
3566 verify_gimple_assign_unary (gassign *stmt)
3567 {
3568 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3569 tree lhs = gimple_assign_lhs (stmt);
3570 tree lhs_type = TREE_TYPE (lhs);
3571 tree rhs1 = gimple_assign_rhs1 (stmt);
3572 tree rhs1_type = TREE_TYPE (rhs1);
3573
3574 if (!is_gimple_reg (lhs))
3575 {
3576 error ("non-register as LHS of unary operation");
3577 return true;
3578 }
3579
3580 if (!is_gimple_val (rhs1))
3581 {
3582 error ("invalid operand in unary operation");
3583 return true;
3584 }
3585
3586 /* First handle conversions. */
3587 switch (rhs_code)
3588 {
3589 CASE_CONVERT:
3590 {
3591 /* Allow conversions from pointer type to integral type only if
3592 there is no sign or zero extension involved.
3593 For targets where the precision of ptrofftype doesn't match that
3594 of pointers we need to allow arbitrary conversions to ptrofftype. */
3595 if ((POINTER_TYPE_P (lhs_type)
3596 && INTEGRAL_TYPE_P (rhs1_type))
3597 || (POINTER_TYPE_P (rhs1_type)
3598 && INTEGRAL_TYPE_P (lhs_type)
3599 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3600 || ptrofftype_p (sizetype))))
3601 return false;
3602
3603 /* Allow conversion from integral to offset type and vice versa. */
3604 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3605 && INTEGRAL_TYPE_P (rhs1_type))
3606 || (INTEGRAL_TYPE_P (lhs_type)
3607 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3608 return false;
3609
3610 /* Otherwise assert we are converting between types of the
3611 same kind. */
3612 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3613 {
3614 error ("invalid types in nop conversion");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3618 }
3619
3620 return false;
3621 }
3622
3623 case ADDR_SPACE_CONVERT_EXPR:
3624 {
3625 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3626 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3627 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3628 {
3629 error ("invalid types in address space conversion");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 return true;
3633 }
3634
3635 return false;
3636 }
3637
3638 case FIXED_CONVERT_EXPR:
3639 {
3640 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3641 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3642 {
3643 error ("invalid types in fixed-point conversion");
3644 debug_generic_expr (lhs_type);
3645 debug_generic_expr (rhs1_type);
3646 return true;
3647 }
3648
3649 return false;
3650 }
3651
3652 case FLOAT_EXPR:
3653 {
3654 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3655 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3656 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3657 {
3658 error ("invalid types in conversion to floating point");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 return true;
3662 }
3663
3664 return false;
3665 }
3666
3667 case FIX_TRUNC_EXPR:
3668 {
3669 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3670 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3671 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3672 {
3673 error ("invalid types in conversion to integer");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 return true;
3677 }
3678
3679 return false;
3680 }
3681 case REDUC_MAX_EXPR:
3682 case REDUC_MIN_EXPR:
3683 case REDUC_PLUS_EXPR:
3684 if (!VECTOR_TYPE_P (rhs1_type)
3685 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3686 {
3687 error ("reduction should convert from vector to element type");
3688 debug_generic_expr (lhs_type);
3689 debug_generic_expr (rhs1_type);
3690 return true;
3691 }
3692 return false;
3693
3694 case VEC_UNPACK_HI_EXPR:
3695 case VEC_UNPACK_LO_EXPR:
3696 case VEC_UNPACK_FLOAT_HI_EXPR:
3697 case VEC_UNPACK_FLOAT_LO_EXPR:
3698 /* FIXME. */
3699 return false;
3700
3701 case NEGATE_EXPR:
3702 case ABS_EXPR:
3703 case BIT_NOT_EXPR:
3704 case PAREN_EXPR:
3705 case CONJ_EXPR:
3706 break;
3707
3708 default:
3709 gcc_unreachable ();
3710 }
3711
3712 /* For the remaining codes assert there is no conversion involved. */
3713 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3714 {
3715 error ("non-trivial conversion in unary operation");
3716 debug_generic_expr (lhs_type);
3717 debug_generic_expr (rhs1_type);
3718 return true;
3719 }
3720
3721 return false;
3722 }
3723
3724 /* Verify a gimple assignment statement STMT with a binary rhs.
3725 Returns true if anything is wrong. */
3726
3727 static bool
3728 verify_gimple_assign_binary (gassign *stmt)
3729 {
3730 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3731 tree lhs = gimple_assign_lhs (stmt);
3732 tree lhs_type = TREE_TYPE (lhs);
3733 tree rhs1 = gimple_assign_rhs1 (stmt);
3734 tree rhs1_type = TREE_TYPE (rhs1);
3735 tree rhs2 = gimple_assign_rhs2 (stmt);
3736 tree rhs2_type = TREE_TYPE (rhs2);
3737
3738 if (!is_gimple_reg (lhs))
3739 {
3740 error ("non-register as LHS of binary operation");
3741 return true;
3742 }
3743
3744 if (!is_gimple_val (rhs1)
3745 || !is_gimple_val (rhs2))
3746 {
3747 error ("invalid operands in binary operation");
3748 return true;
3749 }
3750
3751 /* First handle operations that involve different types. */
3752 switch (rhs_code)
3753 {
3754 case COMPLEX_EXPR:
3755 {
3756 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3757 || !(INTEGRAL_TYPE_P (rhs1_type)
3758 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3759 || !(INTEGRAL_TYPE_P (rhs2_type)
3760 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3761 {
3762 error ("type mismatch in complex expression");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 debug_generic_expr (rhs2_type);
3766 return true;
3767 }
3768
3769 return false;
3770 }
3771
3772 case LSHIFT_EXPR:
3773 case RSHIFT_EXPR:
3774 case LROTATE_EXPR:
3775 case RROTATE_EXPR:
3776 {
3777 /* Shifts and rotates are ok on integral types, fixed point
3778 types and integer vector types. */
3779 if ((!INTEGRAL_TYPE_P (rhs1_type)
3780 && !FIXED_POINT_TYPE_P (rhs1_type)
3781 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3782 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3783 || (!INTEGRAL_TYPE_P (rhs2_type)
3784 /* Vector shifts of vectors are also ok. */
3785 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3786 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3787 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3788 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3789 || !useless_type_conversion_p (lhs_type, rhs1_type))
3790 {
3791 error ("type mismatch in shift expression");
3792 debug_generic_expr (lhs_type);
3793 debug_generic_expr (rhs1_type);
3794 debug_generic_expr (rhs2_type);
3795 return true;
3796 }
3797
3798 return false;
3799 }
3800
3801 case WIDEN_LSHIFT_EXPR:
3802 {
3803 if (!INTEGRAL_TYPE_P (lhs_type)
3804 || !INTEGRAL_TYPE_P (rhs1_type)
3805 || TREE_CODE (rhs2) != INTEGER_CST
3806 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3807 {
3808 error ("type mismatch in widening vector shift expression");
3809 debug_generic_expr (lhs_type);
3810 debug_generic_expr (rhs1_type);
3811 debug_generic_expr (rhs2_type);
3812 return true;
3813 }
3814
3815 return false;
3816 }
3817
3818 case VEC_WIDEN_LSHIFT_HI_EXPR:
3819 case VEC_WIDEN_LSHIFT_LO_EXPR:
3820 {
3821 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3822 || TREE_CODE (lhs_type) != VECTOR_TYPE
3823 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3824 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3825 || TREE_CODE (rhs2) != INTEGER_CST
3826 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3827 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3828 {
3829 error ("type mismatch in widening vector shift expression");
3830 debug_generic_expr (lhs_type);
3831 debug_generic_expr (rhs1_type);
3832 debug_generic_expr (rhs2_type);
3833 return true;
3834 }
3835
3836 return false;
3837 }
3838
3839 case PLUS_EXPR:
3840 case MINUS_EXPR:
3841 {
3842 tree lhs_etype = lhs_type;
3843 tree rhs1_etype = rhs1_type;
3844 tree rhs2_etype = rhs2_type;
3845 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3846 {
3847 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3848 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3849 {
3850 error ("invalid non-vector operands to vector valued plus");
3851 return true;
3852 }
3853 lhs_etype = TREE_TYPE (lhs_type);
3854 rhs1_etype = TREE_TYPE (rhs1_type);
3855 rhs2_etype = TREE_TYPE (rhs2_type);
3856 }
3857 if (POINTER_TYPE_P (lhs_etype)
3858 || POINTER_TYPE_P (rhs1_etype)
3859 || POINTER_TYPE_P (rhs2_etype))
3860 {
3861 error ("invalid (pointer) operands to plus/minus");
3862 return true;
3863 }
3864
3865 /* Continue with generic binary expression handling. */
3866 break;
3867 }
3868
3869 case POINTER_PLUS_EXPR:
3870 {
3871 if (!POINTER_TYPE_P (rhs1_type)
3872 || !useless_type_conversion_p (lhs_type, rhs1_type)
3873 || !ptrofftype_p (rhs2_type))
3874 {
3875 error ("type mismatch in pointer plus expression");
3876 debug_generic_stmt (lhs_type);
3877 debug_generic_stmt (rhs1_type);
3878 debug_generic_stmt (rhs2_type);
3879 return true;
3880 }
3881
3882 return false;
3883 }
3884
3885 case TRUTH_ANDIF_EXPR:
3886 case TRUTH_ORIF_EXPR:
3887 case TRUTH_AND_EXPR:
3888 case TRUTH_OR_EXPR:
3889 case TRUTH_XOR_EXPR:
3890
3891 gcc_unreachable ();
3892
3893 case LT_EXPR:
3894 case LE_EXPR:
3895 case GT_EXPR:
3896 case GE_EXPR:
3897 case EQ_EXPR:
3898 case NE_EXPR:
3899 case UNORDERED_EXPR:
3900 case ORDERED_EXPR:
3901 case UNLT_EXPR:
3902 case UNLE_EXPR:
3903 case UNGT_EXPR:
3904 case UNGE_EXPR:
3905 case UNEQ_EXPR:
3906 case LTGT_EXPR:
3907 /* Comparisons are also binary, but the result type is not
3908 connected to the operand types. */
3909 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3910
3911 case WIDEN_MULT_EXPR:
3912 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3913 return true;
3914 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3915 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3916
3917 case WIDEN_SUM_EXPR:
3918 case VEC_WIDEN_MULT_HI_EXPR:
3919 case VEC_WIDEN_MULT_LO_EXPR:
3920 case VEC_WIDEN_MULT_EVEN_EXPR:
3921 case VEC_WIDEN_MULT_ODD_EXPR:
3922 case VEC_PACK_TRUNC_EXPR:
3923 case VEC_PACK_SAT_EXPR:
3924 case VEC_PACK_FIX_TRUNC_EXPR:
3925 /* FIXME. */
3926 return false;
3927
3928 case MULT_EXPR:
3929 case MULT_HIGHPART_EXPR:
3930 case TRUNC_DIV_EXPR:
3931 case CEIL_DIV_EXPR:
3932 case FLOOR_DIV_EXPR:
3933 case ROUND_DIV_EXPR:
3934 case TRUNC_MOD_EXPR:
3935 case CEIL_MOD_EXPR:
3936 case FLOOR_MOD_EXPR:
3937 case ROUND_MOD_EXPR:
3938 case RDIV_EXPR:
3939 case EXACT_DIV_EXPR:
3940 case MIN_EXPR:
3941 case MAX_EXPR:
3942 case BIT_IOR_EXPR:
3943 case BIT_XOR_EXPR:
3944 case BIT_AND_EXPR:
3945 /* Continue with generic binary expression handling. */
3946 break;
3947
3948 default:
3949 gcc_unreachable ();
3950 }
3951
3952 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3953 || !useless_type_conversion_p (lhs_type, rhs2_type))
3954 {
3955 error ("type mismatch in binary expression");
3956 debug_generic_stmt (lhs_type);
3957 debug_generic_stmt (rhs1_type);
3958 debug_generic_stmt (rhs2_type);
3959 return true;
3960 }
3961
3962 return false;
3963 }
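
/* For example, given a well-formed binary assignment such as

     a_2 = b_1 + c_4;

   the generic check above requires all three types to agree up to
   useless conversions. Shifts and rotates, handled earlier, are the
   notable exception: there RHS2 only needs to be integral (or a
   matching integer vector), not of the same type as RHS1. */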
3964
3965 /* Verify a gimple assignment statement STMT with a ternary rhs.
3966 Returns true if anything is wrong. */
3967
3968 static bool
3969 verify_gimple_assign_ternary (gassign *stmt)
3970 {
3971 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3972 tree lhs = gimple_assign_lhs (stmt);
3973 tree lhs_type = TREE_TYPE (lhs);
3974 tree rhs1 = gimple_assign_rhs1 (stmt);
3975 tree rhs1_type = TREE_TYPE (rhs1);
3976 tree rhs2 = gimple_assign_rhs2 (stmt);
3977 tree rhs2_type = TREE_TYPE (rhs2);
3978 tree rhs3 = gimple_assign_rhs3 (stmt);
3979 tree rhs3_type = TREE_TYPE (rhs3);
3980
3981 if (!is_gimple_reg (lhs))
3982 {
3983 error ("non-register as LHS of ternary operation");
3984 return true;
3985 }
3986
3987 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3988 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3989 || !is_gimple_val (rhs2)
3990 || !is_gimple_val (rhs3))
3991 {
3992 error ("invalid operands in ternary operation");
3993 return true;
3994 }
3995
3996 /* First handle operations that involve different types. */
3997 switch (rhs_code)
3998 {
3999 case WIDEN_MULT_PLUS_EXPR:
4000 case WIDEN_MULT_MINUS_EXPR:
4001 if ((!INTEGRAL_TYPE_P (rhs1_type)
4002 && !FIXED_POINT_TYPE_P (rhs1_type))
4003 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4004 || !useless_type_conversion_p (lhs_type, rhs3_type)
4005 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4006 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4007 {
4008 error ("type mismatch in widening multiply-accumulate expression");
4009 debug_generic_expr (lhs_type);
4010 debug_generic_expr (rhs1_type);
4011 debug_generic_expr (rhs2_type);
4012 debug_generic_expr (rhs3_type);
4013 return true;
4014 }
4015 break;
4016
4017 case FMA_EXPR:
4018 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4019 || !useless_type_conversion_p (lhs_type, rhs2_type)
4020 || !useless_type_conversion_p (lhs_type, rhs3_type))
4021 {
4022 error ("type mismatch in fused multiply-add expression");
4023 debug_generic_expr (lhs_type);
4024 debug_generic_expr (rhs1_type);
4025 debug_generic_expr (rhs2_type);
4026 debug_generic_expr (rhs3_type);
4027 return true;
4028 }
4029 break;
4030
4031 case VEC_COND_EXPR:
4032 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4033 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4034 != TYPE_VECTOR_SUBPARTS (lhs_type))
4035 {
4036 error ("the first argument of a VEC_COND_EXPR must be of a "
4037 "boolean vector type of the same number of elements "
4038 "as the result");
4039 debug_generic_expr (lhs_type);
4040 debug_generic_expr (rhs1_type);
4041 return true;
4042 }
4043 /* Fallthrough. */
4044 case COND_EXPR:
4045 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4046 || !useless_type_conversion_p (lhs_type, rhs3_type))
4047 {
4048 error ("type mismatch in conditional expression");
4049 debug_generic_expr (lhs_type);
4050 debug_generic_expr (rhs2_type);
4051 debug_generic_expr (rhs3_type);
4052 return true;
4053 }
4054 break;
4055
4056 case VEC_PERM_EXPR:
4057 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4058 || !useless_type_conversion_p (lhs_type, rhs2_type))
4059 {
4060 error ("type mismatch in vector permute expression");
4061 debug_generic_expr (lhs_type);
4062 debug_generic_expr (rhs1_type);
4063 debug_generic_expr (rhs2_type);
4064 debug_generic_expr (rhs3_type);
4065 return true;
4066 }
4067
4068 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4069 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4070 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4071 {
4072 error ("vector types expected in vector permute expression");
4073 debug_generic_expr (lhs_type);
4074 debug_generic_expr (rhs1_type);
4075 debug_generic_expr (rhs2_type);
4076 debug_generic_expr (rhs3_type);
4077 return true;
4078 }
4079
4080 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4081 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4082 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4083 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4084 != TYPE_VECTOR_SUBPARTS (lhs_type))
4085 {
4086 error ("vectors with different element number found "
4087 "in vector permute expression");
4088 debug_generic_expr (lhs_type);
4089 debug_generic_expr (rhs1_type);
4090 debug_generic_expr (rhs2_type);
4091 debug_generic_expr (rhs3_type);
4092 return true;
4093 }
4094
4095 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4096 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4097 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4098 {
4099 error ("invalid mask type in vector permute expression");
4100 debug_generic_expr (lhs_type);
4101 debug_generic_expr (rhs1_type);
4102 debug_generic_expr (rhs2_type);
4103 debug_generic_expr (rhs3_type);
4104 return true;
4105 }
4106
4107 return false;
4108
4109 case SAD_EXPR:
4110 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4111 || !useless_type_conversion_p (lhs_type, rhs3_type)
4112 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4113 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4114 {
4115 error ("type mismatch in sad expression");
4116 debug_generic_expr (lhs_type);
4117 debug_generic_expr (rhs1_type);
4118 debug_generic_expr (rhs2_type);
4119 debug_generic_expr (rhs3_type);
4120 return true;
4121 }
4122
4123 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4124 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4125 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4126 {
4127 error ("vector types expected in sad expression");
4128 debug_generic_expr (lhs_type);
4129 debug_generic_expr (rhs1_type);
4130 debug_generic_expr (rhs2_type);
4131 debug_generic_expr (rhs3_type);
4132 return true;
4133 }
4134
4135 return false;
4136
4137 case DOT_PROD_EXPR:
4138 case REALIGN_LOAD_EXPR:
4139 /* FIXME. */
4140 return false;
4141
4142 default:
4143 gcc_unreachable ();
4144 }
4145 return false;
4146 }
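
/* As a rough illustration, a COND_EXPR right-hand side is printed as

     x_5 = a_1 > 0 ? b_2 : c_3;

   where both arms must convert trivially to the type of x_5, while
   FMA_EXPR computes RHS1 * RHS2 + RHS3 with all three operand types
   matching the lhs type, as checked above. */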
4147
4148 /* Verify a gimple assignment statement STMT with a single rhs.
4149 Returns true if anything is wrong. */
4150
4151 static bool
4152 verify_gimple_assign_single (gassign *stmt)
4153 {
4154 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4155 tree lhs = gimple_assign_lhs (stmt);
4156 tree lhs_type = TREE_TYPE (lhs);
4157 tree rhs1 = gimple_assign_rhs1 (stmt);
4158 tree rhs1_type = TREE_TYPE (rhs1);
4159 bool res = false;
4160
4161 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4162 {
4163 error ("non-trivial conversion at assignment");
4164 debug_generic_expr (lhs_type);
4165 debug_generic_expr (rhs1_type);
4166 return true;
4167 }
4168
4169 if (gimple_clobber_p (stmt)
4170 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4171 {
4172 error ("non-decl/MEM_REF LHS in clobber statement");
4173 debug_generic_expr (lhs);
4174 return true;
4175 }
4176
4177 if (handled_component_p (lhs)
4178 || TREE_CODE (lhs) == MEM_REF
4179 || TREE_CODE (lhs) == TARGET_MEM_REF)
4180 res |= verify_types_in_gimple_reference (lhs, true);
4181
4182 /* Special codes we cannot handle via their class. */
4183 switch (rhs_code)
4184 {
4185 case ADDR_EXPR:
4186 {
4187 tree op = TREE_OPERAND (rhs1, 0);
4188 if (!is_gimple_addressable (op))
4189 {
4190 error ("invalid operand in unary expression");
4191 return true;
4192 }
4193
4194 /* Technically there is no longer a need for matching types, but
4195 gimple hygiene asks for this check. In LTO we can end up
4196 combining incompatible units and thus end up with addresses
4197 of globals that change their type to a common one. */
4198 if (!in_lto_p
4199 && !types_compatible_p (TREE_TYPE (op),
4200 TREE_TYPE (TREE_TYPE (rhs1)))
4201 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4202 TREE_TYPE (op)))
4203 {
4204 error ("type mismatch in address expression");
4205 debug_generic_stmt (TREE_TYPE (rhs1));
4206 debug_generic_stmt (TREE_TYPE (op));
4207 return true;
4208 }
4209
4210 return verify_types_in_gimple_reference (op, true);
4211 }
4212
4213 /* tcc_reference */
4214 case INDIRECT_REF:
4215 error ("INDIRECT_REF in gimple IL");
4216 return true;
4217
4218 case COMPONENT_REF:
4219 case BIT_FIELD_REF:
4220 case ARRAY_REF:
4221 case ARRAY_RANGE_REF:
4222 case VIEW_CONVERT_EXPR:
4223 case REALPART_EXPR:
4224 case IMAGPART_EXPR:
4225 case TARGET_MEM_REF:
4226 case MEM_REF:
4227 if (!is_gimple_reg (lhs)
4228 && is_gimple_reg_type (TREE_TYPE (lhs)))
4229 {
4230 error ("invalid rhs for gimple memory store");
4231 debug_generic_stmt (lhs);
4232 debug_generic_stmt (rhs1);
4233 return true;
4234 }
4235 return res || verify_types_in_gimple_reference (rhs1, false);
4236
4237 /* tcc_constant */
4238 case SSA_NAME:
4239 case INTEGER_CST:
4240 case REAL_CST:
4241 case FIXED_CST:
4242 case COMPLEX_CST:
4243 case VECTOR_CST:
4244 case STRING_CST:
4245 return res;
4246
4247 /* tcc_declaration */
4248 case CONST_DECL:
4249 return res;
4250 case VAR_DECL:
4251 case PARM_DECL:
4252 if (!is_gimple_reg (lhs)
4253 && !is_gimple_reg (rhs1)
4254 && is_gimple_reg_type (TREE_TYPE (lhs)))
4255 {
4256 error ("invalid rhs for gimple memory store");
4257 debug_generic_stmt (lhs);
4258 debug_generic_stmt (rhs1);
4259 return true;
4260 }
4261 return res;
4262
4263 case CONSTRUCTOR:
4264 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4265 {
4266 unsigned int i;
4267 tree elt_i, elt_v, elt_t = NULL_TREE;
4268
4269 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4270 return res;
4271 /* For vector CONSTRUCTORs we require that either it is an empty
4272 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4273 (then the element count must be correct to cover the whole
4274 outer vector and the index must be NULL on all elements), or it
4275 is a CONSTRUCTOR of scalar elements, where as an exception we
4276 allow a smaller number of elements (assuming zero filling) and
4277 consecutive indexes as compared to NULL indexes (such
4278 CONSTRUCTORs can appear in the IL from FEs). */
4279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4280 {
4281 if (elt_t == NULL_TREE)
4282 {
4283 elt_t = TREE_TYPE (elt_v);
4284 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4285 {
4286 tree elt_t = TREE_TYPE (elt_v);
4287 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4288 TREE_TYPE (elt_t)))
4289 {
4290 error ("incorrect type of vector CONSTRUCTOR"
4291 " elements");
4292 debug_generic_stmt (rhs1);
4293 return true;
4294 }
4295 else if (CONSTRUCTOR_NELTS (rhs1)
4296 * TYPE_VECTOR_SUBPARTS (elt_t)
4297 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4298 {
4299 error ("incorrect number of vector CONSTRUCTOR"
4300 " elements");
4301 debug_generic_stmt (rhs1);
4302 return true;
4303 }
4304 }
4305 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4306 elt_t))
4307 {
4308 error ("incorrect type of vector CONSTRUCTOR elements");
4309 debug_generic_stmt (rhs1);
4310 return true;
4311 }
4312 else if (CONSTRUCTOR_NELTS (rhs1)
4313 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4314 {
4315 error ("incorrect number of vector CONSTRUCTOR elements");
4316 debug_generic_stmt (rhs1);
4317 return true;
4318 }
4319 }
4320 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4321 {
4322 error ("incorrect type of vector CONSTRUCTOR elements");
4323 debug_generic_stmt (rhs1);
4324 return true;
4325 }
4326 if (elt_i != NULL_TREE
4327 && (TREE_CODE (elt_t) == VECTOR_TYPE
4328 || TREE_CODE (elt_i) != INTEGER_CST
4329 || compare_tree_int (elt_i, i) != 0))
4330 {
4331 error ("vector CONSTRUCTOR with non-NULL element index");
4332 debug_generic_stmt (rhs1);
4333 return true;
4334 }
4335 if (!is_gimple_val (elt_v))
4336 {
4337 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4338 debug_generic_stmt (rhs1);
4339 return true;
4340 }
4341 }
4342 }
4343 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4344 {
4345 error ("non-vector CONSTRUCTOR with elements");
4346 debug_generic_stmt (rhs1);
4347 return true;
4348 }
4349 return res;
4350 case OBJ_TYPE_REF:
4351 case ASSERT_EXPR:
4352 case WITH_SIZE_EXPR:
4353 /* FIXME. */
4354 return res;
4355
4356 default:;
4357 }
4358
4359 return res;
4360 }
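
/* Single right-hand sides cover plain copies, loads and stores, e.g.

     x_1 = y_2;              <- register copy
     x_1 = MEM[(int *)p_3];  <- load, rhs is a memory reference
     a.f = x_1;              <- store, lhs is a memory reference

   As checked above, a store whose lhs has register type must take a
   register rhs, so memory-to-memory moves of such types are not
   valid GIMPLE. */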
4361
4362 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4363 is a problem, otherwise false. */
4364
4365 static bool
4366 verify_gimple_assign (gassign *stmt)
4367 {
4368 switch (gimple_assign_rhs_class (stmt))
4369 {
4370 case GIMPLE_SINGLE_RHS:
4371 return verify_gimple_assign_single (stmt);
4372
4373 case GIMPLE_UNARY_RHS:
4374 return verify_gimple_assign_unary (stmt);
4375
4376 case GIMPLE_BINARY_RHS:
4377 return verify_gimple_assign_binary (stmt);
4378
4379 case GIMPLE_TERNARY_RHS:
4380 return verify_gimple_assign_ternary (stmt);
4381
4382 default:
4383 gcc_unreachable ();
4384 }
4385 }
4386
4387 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4388 is a problem, otherwise false. */
4389
4390 static bool
4391 verify_gimple_return (greturn *stmt)
4392 {
4393 tree op = gimple_return_retval (stmt);
4394 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4395
4396 /* We cannot test for present return values as we do not fix up missing
4397 return values from the original source. */
4398 if (op == NULL)
4399 return false;
4400
4401 if (!is_gimple_val (op)
4402 && TREE_CODE (op) != RESULT_DECL)
4403 {
4404 error ("invalid operand in return statement");
4405 debug_generic_stmt (op);
4406 return true;
4407 }
4408
4409 if ((TREE_CODE (op) == RESULT_DECL
4410 && DECL_BY_REFERENCE (op))
4411 || (TREE_CODE (op) == SSA_NAME
4412 && SSA_NAME_VAR (op)
4413 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4414 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4415 op = TREE_TYPE (op);
4416
4417 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4418 {
4419 error ("invalid conversion in return statement");
4420 debug_generic_stmt (restype);
4421 debug_generic_stmt (TREE_TYPE (op));
4422 return true;
4423 }
4424
4425 return false;
4426 }
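
/* E.g. for "return x_1;" the type of x_1 must convert trivially to
   the declared return type; for a DECL_BY_REFERENCE result the
   pointed-to type is what is compared instead. */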
4427
4428
4429 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4430 is a problem, otherwise false. */
4431
4432 static bool
4433 verify_gimple_goto (ggoto *stmt)
4434 {
4435 tree dest = gimple_goto_dest (stmt);
4436
4437 /* ??? We have two canonical forms of direct goto destinations, a
4438 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4439 if (TREE_CODE (dest) != LABEL_DECL
4440 && (!is_gimple_val (dest)
4441 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4442 {
4443 error ("goto destination is neither a label nor a pointer");
4444 return true;
4445 }
4446
4447 return false;
4448 }
4449
4450 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4451 is a problem, otherwise false. */
4452
4453 static bool
4454 verify_gimple_switch (gswitch *stmt)
4455 {
4456 unsigned int i, n;
4457 tree elt, prev_upper_bound = NULL_TREE;
4458 tree index_type, elt_type = NULL_TREE;
4459
4460 if (!is_gimple_val (gimple_switch_index (stmt)))
4461 {
4462 error ("invalid operand to switch statement");
4463 debug_generic_stmt (gimple_switch_index (stmt));
4464 return true;
4465 }
4466
4467 index_type = TREE_TYPE (gimple_switch_index (stmt));
4468 if (! INTEGRAL_TYPE_P (index_type))
4469 {
4470 error ("non-integral type switch statement");
4471 debug_generic_expr (index_type);
4472 return true;
4473 }
4474
4475 elt = gimple_switch_label (stmt, 0);
4476 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4477 {
4478 error ("invalid default case label in switch statement");
4479 debug_generic_expr (elt);
4480 return true;
4481 }
4482
4483 n = gimple_switch_num_labels (stmt);
4484 for (i = 1; i < n; i++)
4485 {
4486 elt = gimple_switch_label (stmt, i);
4487
4488 if (! CASE_LOW (elt))
4489 {
4490 error ("invalid case label in switch statement");
4491 debug_generic_expr (elt);
4492 return true;
4493 }
4494 if (CASE_HIGH (elt)
4495 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4496 {
4497 error ("invalid case range in switch statement");
4498 debug_generic_expr (elt);
4499 return true;
4500 }
4501
4502 if (elt_type)
4503 {
4504 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4505 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4506 {
4507 error ("type mismatch for case label in switch statement");
4508 debug_generic_expr (elt);
4509 return true;
4510 }
4511 }
4512 else
4513 {
4514 elt_type = TREE_TYPE (CASE_LOW (elt));
4515 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4516 {
4517 error ("type precision mismatch in switch statement");
4518 return true;
4519 }
4520 }
4521
4522 if (prev_upper_bound)
4523 {
4524 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4525 {
4526 error ("case labels not sorted in switch statement");
4527 return true;
4528 }
4529 }
4530
4531 prev_upper_bound = CASE_HIGH (elt);
4532 if (! prev_upper_bound)
4533 prev_upper_bound = CASE_LOW (elt);
4534 }
4535
4536 return false;
4537 }
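
/* A well-formed switch thus looks roughly like

     switch (i_1) <default: <L0>, case 1: <L1>, case 4 ... 6: <L2>>

   with the default label first, every other label carrying a
   CASE_LOW, ranges satisfying low < high, and all labels sorted in
   increasing order without overlap. */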
4538
4539 /* Verify a gimple debug statement STMT.
4540 Returns true if anything is wrong. */
4541
4542 static bool
4543 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4544 {
4545 /* There isn't much that could be wrong in a gimple debug stmt. A
4546 gimple debug bind stmt, for example, maps a tree, that's usually
4547 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4548 component or member of an aggregate type, to another tree, that
4549 can be an arbitrary expression. These stmts expand into debug
4550 insns, and are converted to debug notes by var-tracking.c. */
4551 return false;
4552 }
4553
4554 /* Verify a gimple label statement STMT.
4555 Returns true if anything is wrong. */
4556
4557 static bool
4558 verify_gimple_label (glabel *stmt)
4559 {
4560 tree decl = gimple_label_label (stmt);
4561 int uid;
4562 bool err = false;
4563
4564 if (TREE_CODE (decl) != LABEL_DECL)
4565 return true;
4566 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4567 && DECL_CONTEXT (decl) != current_function_decl)
4568 {
4569 error ("label's context is not the current function decl");
4570 err |= true;
4571 }
4572
4573 uid = LABEL_DECL_UID (decl);
4574 if (cfun->cfg
4575 && (uid == -1
4576 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4577 {
4578 error ("incorrect entry in label_to_block_map");
4579 err |= true;
4580 }
4581
4582 uid = EH_LANDING_PAD_NR (decl);
4583 if (uid)
4584 {
4585 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4586 if (decl != lp->post_landing_pad)
4587 {
4588 error ("incorrect setting of landing pad number");
4589 err |= true;
4590 }
4591 }
4592
4593 return err;
4594 }
4595
4596 /* Verify a gimple cond statement STMT.
4597 Returns true if anything is wrong. */
4598
4599 static bool
4600 verify_gimple_cond (gcond *stmt)
4601 {
4602 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4603 {
4604 error ("invalid comparison code in gimple cond");
4605 return true;
4606 }
4607 if (!(!gimple_cond_true_label (stmt)
4608 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4609 || !(!gimple_cond_false_label (stmt)
4610 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4611 {
4612 error ("invalid labels in gimple cond");
4613 return true;
4614 }
4615
4616 return verify_gimple_comparison (boolean_type_node,
4617 gimple_cond_lhs (stmt),
4618 gimple_cond_rhs (stmt),
4619 gimple_cond_code (stmt));
4620 }
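
/* E.g. "if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;"; the
   comparison itself is verified as if it computed a value of
   boolean_type_node. */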
4621
4622 /* Verify the GIMPLE statement STMT. Returns true if there is an
4623 error, otherwise false. */
4624
4625 static bool
4626 verify_gimple_stmt (gimple *stmt)
4627 {
4628 switch (gimple_code (stmt))
4629 {
4630 case GIMPLE_ASSIGN:
4631 return verify_gimple_assign (as_a <gassign *> (stmt));
4632
4633 case GIMPLE_LABEL:
4634 return verify_gimple_label (as_a <glabel *> (stmt));
4635
4636 case GIMPLE_CALL:
4637 return verify_gimple_call (as_a <gcall *> (stmt));
4638
4639 case GIMPLE_COND:
4640 return verify_gimple_cond (as_a <gcond *> (stmt));
4641
4642 case GIMPLE_GOTO:
4643 return verify_gimple_goto (as_a <ggoto *> (stmt));
4644
4645 case GIMPLE_SWITCH:
4646 return verify_gimple_switch (as_a <gswitch *> (stmt));
4647
4648 case GIMPLE_RETURN:
4649 return verify_gimple_return (as_a <greturn *> (stmt));
4650
4651 case GIMPLE_ASM:
4652 return false;
4653
4654 case GIMPLE_TRANSACTION:
4655 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4656
4657 /* Tuples that do not have tree operands. */
4658 case GIMPLE_NOP:
4659 case GIMPLE_PREDICT:
4660 case GIMPLE_RESX:
4661 case GIMPLE_EH_DISPATCH:
4662 case GIMPLE_EH_MUST_NOT_THROW:
4663 return false;
4664
4665 CASE_GIMPLE_OMP:
4666 /* OpenMP directives are validated by the FE and never operated
4667 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4668 non-gimple expressions when the main index variable has had
4669 its address taken. This does not affect the loop itself
4670 because the header of a GIMPLE_OMP_FOR is merely used to determine
4671 how to set up the parallel iteration. */
4672 return false;
4673
4674 case GIMPLE_DEBUG:
4675 return verify_gimple_debug (stmt);
4676
4677 default:
4678 gcc_unreachable ();
4679 }
4680 }
4681
4682 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4683 and false otherwise. */
4684
4685 static bool
4686 verify_gimple_phi (gimple *phi)
4687 {
4688 bool err = false;
4689 unsigned i;
4690 tree phi_result = gimple_phi_result (phi);
4691 bool virtual_p;
4692
4693 if (!phi_result)
4694 {
4695 error ("invalid PHI result");
4696 return true;
4697 }
4698
4699 virtual_p = virtual_operand_p (phi_result);
4700 if (TREE_CODE (phi_result) != SSA_NAME
4701 || (virtual_p
4702 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4703 {
4704 error ("invalid PHI result");
4705 err = true;
4706 }
4707
4708 for (i = 0; i < gimple_phi_num_args (phi); i++)
4709 {
4710 tree t = gimple_phi_arg_def (phi, i);
4711
4712 if (!t)
4713 {
4714 error ("missing PHI def");
4715 err |= true;
4716 continue;
4717 }
4718 /* Addressable variables do have SSA_NAMEs but they
4719 are not considered gimple values. */
4720 else if ((TREE_CODE (t) == SSA_NAME
4721 && virtual_p != virtual_operand_p (t))
4722 || (virtual_p
4723 && (TREE_CODE (t) != SSA_NAME
4724 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4725 || (!virtual_p
4726 && !is_gimple_val (t)))
4727 {
4728 error ("invalid PHI argument");
4729 debug_generic_expr (t);
4730 err |= true;
4731 }
4732 #ifdef ENABLE_TYPES_CHECKING
4733 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4734 {
4735 error ("incompatible types in PHI argument %u", i);
4736 debug_generic_stmt (TREE_TYPE (phi_result));
4737 debug_generic_stmt (TREE_TYPE (t));
4738 err |= true;
4739 }
4740 #endif
4741 }
4742
4743 return err;
4744 }
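
/* For instance, a valid PHI merging two definitions of X is

     x_3 = PHI <x_1(2), x_2(4)>

   where each argument must be an SSA name or GIMPLE value of the
   same virtual/non-virtual kind as the result, and, with
   ENABLE_TYPES_CHECKING, of a trivially convertible type. */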
4745
4746 /* Verify the GIMPLE statements inside the sequence STMTS. */
4747
4748 static bool
4749 verify_gimple_in_seq_2 (gimple_seq stmts)
4750 {
4751 gimple_stmt_iterator ittr;
4752 bool err = false;
4753
4754 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4755 {
4756 gimple *stmt = gsi_stmt (ittr);
4757
4758 switch (gimple_code (stmt))
4759 {
4760 case GIMPLE_BIND:
4761 err |= verify_gimple_in_seq_2 (
4762 gimple_bind_body (as_a <gbind *> (stmt)));
4763 break;
4764
4765 case GIMPLE_TRY:
4766 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4767 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4768 break;
4769
4770 case GIMPLE_EH_FILTER:
4771 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4772 break;
4773
4774 case GIMPLE_EH_ELSE:
4775 {
4776 geh_else *eh_else = as_a <geh_else *> (stmt);
4777 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4778 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4779 }
4780 break;
4781
4782 case GIMPLE_CATCH:
4783 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4784 as_a <gcatch *> (stmt)));
4785 break;
4786
4787 case GIMPLE_TRANSACTION:
4788 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4789 break;
4790
4791 default:
4792 {
4793 bool err2 = verify_gimple_stmt (stmt);
4794 if (err2)
4795 debug_gimple_stmt (stmt);
4796 err |= err2;
4797 }
4798 }
4799 }
4800
4801 return err;
4802 }
4803
4804 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4805 is a problem, otherwise false. */
4806
4807 static bool
4808 verify_gimple_transaction (gtransaction *stmt)
4809 {
4810 tree lab;
4811
4812 lab = gimple_transaction_label_norm (stmt);
4813 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4814 return true;
4815 lab = gimple_transaction_label_uninst (stmt);
4816 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4817 return true;
4818 lab = gimple_transaction_label_over (stmt);
4819 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4820 return true;
4821
4822 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4823 }
4824
4825
4826 /* Verify the GIMPLE statements inside the statement list STMTS. */
4827
4828 DEBUG_FUNCTION void
4829 verify_gimple_in_seq (gimple_seq stmts)
4830 {
4831 timevar_push (TV_TREE_STMT_VERIFY);
4832 if (verify_gimple_in_seq_2 (stmts))
4833 internal_error ("verify_gimple failed");
4834 timevar_pop (TV_TREE_STMT_VERIFY);
4835 }
4836
4837 /* Return true when T can be shared. */
4838
4839 static bool
4840 tree_node_can_be_shared (tree t)
4841 {
4842 if (IS_TYPE_OR_DECL_P (t)
4843 || is_gimple_min_invariant (t)
4844 || TREE_CODE (t) == SSA_NAME
4845 || t == error_mark_node
4846 || TREE_CODE (t) == IDENTIFIER_NODE)
4847 return true;
4848
4849 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4850 return true;
4851
4852 if (DECL_P (t))
4853 return true;
4854
4855 return false;
4856 }
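
/* E.g. the INTEGER_CST for zero or a given VAR_DECL may be
   referenced from many statements, whereas a tree such as a MEM_REF
   operand must be unshared so that every statement owns its own
   copy. */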
4857
4858 /* Called via walk_tree. Verify tree sharing. */
4859
4860 static tree
4861 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4862 {
4863 hash_set<void *> *visited = (hash_set<void *> *) data;
4864
4865 if (tree_node_can_be_shared (*tp))
4866 {
4867 *walk_subtrees = false;
4868 return NULL;
4869 }
4870
4871 if (visited->add (*tp))
4872 return *tp;
4873
4874 return NULL;
4875 }
4876
4877 /* Called via walk_gimple_stmt. Verify tree sharing. */
4878
4879 static tree
4880 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4881 {
4882 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4883 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4884 }
4885
4886 static bool eh_error_found;
4887 bool
4888 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4889 hash_set<gimple *> *visited)
4890 {
4891 if (!visited->contains (stmt))
4892 {
4893 error ("dead STMT in EH table");
4894 debug_gimple_stmt (stmt);
4895 eh_error_found = true;
4896 }
4897 return true;
4898 }
4899
4900 /* Verify that the block of location LOC is in BLOCKS. */
4901
4902 static bool
4903 verify_location (hash_set<tree> *blocks, location_t loc)
4904 {
4905 tree block = LOCATION_BLOCK (loc);
4906 if (block != NULL_TREE
4907 && !blocks->contains (block))
4908 {
4909 error ("location references block not in block tree");
4910 return true;
4911 }
4912 if (block != NULL_TREE)
4913 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4914 return false;
4915 }
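
/* Note the recursion above: a block's own BLOCK_SOURCE_LOCATION may
   again carry a block, so the whole chain is checked against
   BLOCKS. */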
4916
4917 /* Called via walk_tree. Verify that expressions have no blocks. */
4918
4919 static tree
4920 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4921 {
4922 if (!EXPR_P (*tp))
4923 {
4924 *walk_subtrees = false;
4925 return NULL;
4926 }
4927
4928 location_t loc = EXPR_LOCATION (*tp);
4929 if (LOCATION_BLOCK (loc) != NULL)
4930 return *tp;
4931
4932 return NULL;
4933 }
4934
4935 /* Called via walk_tree. Verify locations of expressions. */
4936
4937 static tree
4938 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4939 {
4940 hash_set<tree> *blocks = (hash_set<tree> *) data;
4941
4942 if (TREE_CODE (*tp) == VAR_DECL
4943 && DECL_HAS_DEBUG_EXPR_P (*tp))
4944 {
4945 tree t = DECL_DEBUG_EXPR (*tp);
4946 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4947 if (addr)
4948 return addr;
4949 }
4950 if ((TREE_CODE (*tp) == VAR_DECL
4951 || TREE_CODE (*tp) == PARM_DECL
4952 || TREE_CODE (*tp) == RESULT_DECL)
4953 && DECL_HAS_VALUE_EXPR_P (*tp))
4954 {
4955 tree t = DECL_VALUE_EXPR (*tp);
4956 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4957 if (addr)
4958 return addr;
4959 }
4960
4961 if (!EXPR_P (*tp))
4962 {
4963 *walk_subtrees = false;
4964 return NULL;
4965 }
4966
4967 location_t loc = EXPR_LOCATION (*tp);
4968 if (verify_location (blocks, loc))
4969 return *tp;
4970
4971 return NULL;
4972 }
4973
4974 /* Called via walk_gimple_op. Verify locations of expressions. */
4975
4976 static tree
4977 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4978 {
4979 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4980 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4981 }
4982
4983 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4984
4985 static void
4986 collect_subblocks (hash_set<tree> *blocks, tree block)
4987 {
4988 tree t;
4989 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4990 {
4991 blocks->add (t);
4992 collect_subblocks (blocks, t);
4993 }
4994 }
4995
4996 /* Verify the GIMPLE statements in the CFG of FN. */
4997
4998 DEBUG_FUNCTION void
4999 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5000 {
5001 basic_block bb;
5002 bool err = false;
5003
5004 timevar_push (TV_TREE_STMT_VERIFY);
5005 hash_set<void *> visited;
5006 hash_set<gimple *> visited_stmts;
5007
5008 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5009 hash_set<tree> blocks;
5010 if (DECL_INITIAL (fn->decl))
5011 {
5012 blocks.add (DECL_INITIAL (fn->decl));
5013 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5014 }
5015
5016 FOR_EACH_BB_FN (bb, fn)
5017 {
5018 gimple_stmt_iterator gsi;
5019
5020 for (gphi_iterator gpi = gsi_start_phis (bb);
5021 !gsi_end_p (gpi);
5022 gsi_next (&gpi))
5023 {
5024 gphi *phi = gpi.phi ();
5025 bool err2 = false;
5026 unsigned i;
5027
5028 visited_stmts.add (phi);
5029
5030 if (gimple_bb (phi) != bb)
5031 {
5032 error ("gimple_bb (phi) is set to a wrong basic block");
5033 err2 = true;
5034 }
5035
5036 err2 |= verify_gimple_phi (phi);
5037
5038 /* Only PHI arguments have locations. */
5039 if (gimple_location (phi) != UNKNOWN_LOCATION)
5040 {
5041 error ("PHI node with location");
5042 err2 = true;
5043 }
5044
5045 for (i = 0; i < gimple_phi_num_args (phi); i++)
5046 {
5047 tree arg = gimple_phi_arg_def (phi, i);
5048 tree addr = walk_tree (&arg, verify_node_sharing_1,
5049 &visited, NULL);
5050 if (addr)
5051 {
5052 error ("incorrect sharing of tree nodes");
5053 debug_generic_expr (addr);
5054 err2 |= true;
5055 }
5056 location_t loc = gimple_phi_arg_location (phi, i);
5057 if (virtual_operand_p (gimple_phi_result (phi))
5058 && loc != UNKNOWN_LOCATION)
5059 {
5060 error ("virtual PHI with argument locations");
5061 err2 = true;
5062 }
5063 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5064 if (addr)
5065 {
5066 debug_generic_expr (addr);
5067 err2 = true;
5068 }
5069 err2 |= verify_location (&blocks, loc);
5070 }
5071
5072 if (err2)
5073 debug_gimple_stmt (phi);
5074 err |= err2;
5075 }
5076
5077 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5078 {
5079 gimple *stmt = gsi_stmt (gsi);
5080 bool err2 = false;
5081 struct walk_stmt_info wi;
5082 tree addr;
5083 int lp_nr;
5084
5085 visited_stmts.add (stmt);
5086
5087 if (gimple_bb (stmt) != bb)
5088 {
5089 error ("gimple_bb (stmt) is set to a wrong basic block");
5090 err2 = true;
5091 }
5092
5093 err2 |= verify_gimple_stmt (stmt);
5094 err2 |= verify_location (&blocks, gimple_location (stmt));
5095
5096 memset (&wi, 0, sizeof (wi));
5097 wi.info = (void *) &visited;
5098 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5099 if (addr)
5100 {
5101 error ("incorrect sharing of tree nodes");
5102 debug_generic_expr (addr);
5103 err2 |= true;
5104 }
5105
5106 memset (&wi, 0, sizeof (wi));
5107 wi.info = (void *) &blocks;
5108 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5109 if (addr)
5110 {
5111 debug_generic_expr (addr);
5112 err2 |= true;
5113 }
5114
5115 /* ??? Instead of not checking these stmts at all, the walker
5116 should know its context via wi. */
5117 if (!is_gimple_debug (stmt)
5118 && !is_gimple_omp (stmt))
5119 {
5120 memset (&wi, 0, sizeof (wi));
5121 addr = walk_gimple_op (stmt, verify_expr, &wi);
5122 if (addr)
5123 {
5124 debug_generic_expr (addr);
5125 inform (gimple_location (stmt), "in statement");
5126 err2 |= true;
5127 }
5128 }
5129
5130 /* If the statement is marked as part of an EH region, then it is
5131 expected that the statement could throw. Verify that when
5132 optimizations simplify statements such that we can prove they
5133 cannot throw, we also update the other data structures to
5134 match. */
5135 lp_nr = lookup_stmt_eh_lp (stmt);
5136 if (lp_nr > 0)
5137 {
5138 if (!stmt_could_throw_p (stmt))
5139 {
5140 if (verify_nothrow)
5141 {
5142 error ("statement marked for throw, but doesn%'t");
5143 err2 |= true;
5144 }
5145 }
5146 else if (!gsi_one_before_end_p (gsi))
5147 {
5148 error ("statement marked for throw in middle of block");
5149 err2 |= true;
5150 }
5151 }
5152
5153 if (err2)
5154 debug_gimple_stmt (stmt);
5155 err |= err2;
5156 }
5157 }
5158
5159 eh_error_found = false;
5160 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5161 if (eh_table)
5162 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5163 (&visited_stmts);
5164
5165 if (err || eh_error_found)
5166 internal_error ("verify_gimple failed");
5167
5168 verify_histograms ();
5169 timevar_pop (TV_TREE_STMT_VERIFY);
5170 }
5171
5172
5173 /* Verifies that the flow information is OK. */
5174
5175 static int
5176 gimple_verify_flow_info (void)
5177 {
5178 int err = 0;
5179 basic_block bb;
5180 gimple_stmt_iterator gsi;
5181 gimple *stmt;
5182 edge e;
5183 edge_iterator ei;
5184
5185 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5186 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5187 {
5188 error ("ENTRY_BLOCK has IL associated with it");
5189 err = 1;
5190 }
5191
5192 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5193 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5194 {
5195 error ("EXIT_BLOCK has IL associated with it");
5196 err = 1;
5197 }
5198
5199 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5200 if (e->flags & EDGE_FALLTHRU)
5201 {
5202 error ("fallthru to exit from bb %d", e->src->index);
5203 err = 1;
5204 }
5205
5206 FOR_EACH_BB_FN (bb, cfun)
5207 {
5208 bool found_ctrl_stmt = false;
5209
5210 stmt = NULL;
5211
5212 /* Skip labels on the start of basic block. */
5213 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5214 {
5215 tree label;
5216 gimple *prev_stmt = stmt;
5217
5218 stmt = gsi_stmt (gsi);
5219
5220 if (gimple_code (stmt) != GIMPLE_LABEL)
5221 break;
5222
5223 label = gimple_label_label (as_a <glabel *> (stmt));
5224 if (prev_stmt && DECL_NONLOCAL (label))
5225 {
5226 error ("nonlocal label ");
5227 print_generic_expr (stderr, label, 0);
5228 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5229 bb->index);
5230 err = 1;
5231 }
5232
5233 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5234 {
5235 error ("EH landing pad label ");
5236 print_generic_expr (stderr, label, 0);
5237 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5238 bb->index);
5239 err = 1;
5240 }
5241
5242 if (label_to_block (label) != bb)
5243 {
5244 error ("label ");
5245 print_generic_expr (stderr, label, 0);
5246 fprintf (stderr, " to block does not match in bb %d",
5247 bb->index);
5248 err = 1;
5249 }
5250
5251 if (decl_function_context (label) != current_function_decl)
5252 {
5253 error ("label ");
5254 print_generic_expr (stderr, label, 0);
5255 fprintf (stderr, " has incorrect context in bb %d",
5256 bb->index);
5257 err = 1;
5258 }
5259 }
5260
5261 /* Verify that the body of basic block BB is free of control flow. */
5262 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5263 {
5264 gimple *stmt = gsi_stmt (gsi);
5265
5266 if (found_ctrl_stmt)
5267 {
5268 error ("control flow in the middle of basic block %d",
5269 bb->index);
5270 err = 1;
5271 }
5272
5273 if (stmt_ends_bb_p (stmt))
5274 found_ctrl_stmt = true;
5275
5276 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5277 {
5278 error ("label ");
5279 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5280 fprintf (stderr, " in the middle of basic block %d", bb->index);
5281 err = 1;
5282 }
5283 }
5284
5285 gsi = gsi_last_bb (bb);
5286 if (gsi_end_p (gsi))
5287 continue;
5288
5289 stmt = gsi_stmt (gsi);
5290
5291 if (gimple_code (stmt) == GIMPLE_LABEL)
5292 continue;
5293
5294 err |= verify_eh_edges (stmt);
5295
5296 if (is_ctrl_stmt (stmt))
5297 {
5298 FOR_EACH_EDGE (e, ei, bb->succs)
5299 if (e->flags & EDGE_FALLTHRU)
5300 {
5301 error ("fallthru edge after a control statement in bb %d",
5302 bb->index);
5303 err = 1;
5304 }
5305 }
5306
5307 if (gimple_code (stmt) != GIMPLE_COND)
5308 {
5309 /* Verify that no edges have EDGE_TRUE_VALUE or EDGE_FALSE_VALUE
5310 set after anything other than a GIMPLE_COND. */
5311 FOR_EACH_EDGE (e, ei, bb->succs)
5312 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5313 {
5314 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5315 bb->index);
5316 err = 1;
5317 }
5318 }
5319
5320 switch (gimple_code (stmt))
5321 {
5322 case GIMPLE_COND:
5323 {
5324 edge true_edge;
5325 edge false_edge;
5326
5327 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5328
5329 if (!true_edge
5330 || !false_edge
5331 || !(true_edge->flags & EDGE_TRUE_VALUE)
5332 || !(false_edge->flags & EDGE_FALSE_VALUE)
5333 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5334 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5335 || EDGE_COUNT (bb->succs) >= 3)
5336 {
5337 error ("wrong outgoing edge flags at end of bb %d",
5338 bb->index);
5339 err = 1;
5340 }
5341 }
5342 break;
5343
5344 case GIMPLE_GOTO:
5345 if (simple_goto_p (stmt))
5346 {
5347 error ("explicit goto at end of bb %d", bb->index);
5348 err = 1;
5349 }
5350 else
5351 {
5352 /* FIXME. We should double check that the labels in the
5353 destination blocks have their address taken. */
5354 FOR_EACH_EDGE (e, ei, bb->succs)
5355 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5356 | EDGE_FALSE_VALUE))
5357 || !(e->flags & EDGE_ABNORMAL))
5358 {
5359 error ("wrong outgoing edge flags at end of bb %d",
5360 bb->index);
5361 err = 1;
5362 }
5363 }
5364 break;
5365
5366 case GIMPLE_CALL:
5367 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5368 break;
5369 /* ... fallthru ... */
5370 case GIMPLE_RETURN:
5371 if (!single_succ_p (bb)
5372 || (single_succ_edge (bb)->flags
5373 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5374 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5375 {
5376 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5377 err = 1;
5378 }
5379 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5380 {
5381 error ("return edge does not point to exit in bb %d",
5382 bb->index);
5383 err = 1;
5384 }
5385 break;
5386
5387 case GIMPLE_SWITCH:
5388 {
5389 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5390 tree prev;
5391 edge e;
5392 size_t i, n;
5393
5394 n = gimple_switch_num_labels (switch_stmt);
5395
5396 /* Mark all the destination basic blocks. */
5397 for (i = 0; i < n; ++i)
5398 {
5399 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5400 basic_block label_bb = label_to_block (lab);
5401 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5402 label_bb->aux = (void *)1;
5403 }
5404
5405 /* Verify that the case labels are sorted. */
5406 prev = gimple_switch_label (switch_stmt, 0);
5407 for (i = 1; i < n; ++i)
5408 {
5409 tree c = gimple_switch_label (switch_stmt, i);
5410 if (!CASE_LOW (c))
5411 {
5412 error ("found default case not at the start of "
5413 "case vector");
5414 err = 1;
5415 continue;
5416 }
5417 if (CASE_LOW (prev)
5418 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5419 {
5420 error ("case labels not sorted: ");
5421 print_generic_expr (stderr, prev, 0);
5422 fprintf (stderr," is greater than ");
5423 print_generic_expr (stderr, c, 0);
5424 fprintf (stderr," but comes before it.\n");
5425 err = 1;
5426 }
5427 prev = c;
5428 }
5429 /* VRP will remove the default case if it can prove it will
5430 never be executed. So do not verify there always exists
5431 a default case here. */
5432
5433 FOR_EACH_EDGE (e, ei, bb->succs)
5434 {
5435 if (!e->dest->aux)
5436 {
5437 error ("extra outgoing edge %d->%d",
5438 bb->index, e->dest->index);
5439 err = 1;
5440 }
5441
5442 e->dest->aux = (void *)2;
5443 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5444 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5445 {
5446 error ("wrong outgoing edge flags at end of bb %d",
5447 bb->index);
5448 err = 1;
5449 }
5450 }
5451
5452 /* Check that we have all of them. */
5453 for (i = 0; i < n; ++i)
5454 {
5455 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5456 basic_block label_bb = label_to_block (lab);
5457
5458 if (label_bb->aux != (void *)2)
5459 {
5460 error ("missing edge %i->%i", bb->index, label_bb->index);
5461 err = 1;
5462 }
5463 }
5464
5465 FOR_EACH_EDGE (e, ei, bb->succs)
5466 e->dest->aux = (void *)0;
5467 }
5468 break;
5469
5470 case GIMPLE_EH_DISPATCH:
5471 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5472 break;
5473
5474 default:
5475 break;
5476 }
5477 }
5478
5479 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5480 verify_dominators (CDI_DOMINATORS);
5481
5482 return err;
5483 }
5484
5485
5486 /* Updates phi nodes after creating a forwarder block joined
5487 by edge FALLTHRU. */
5488
5489 static void
5490 gimple_make_forwarder_block (edge fallthru)
5491 {
5492 edge e;
5493 edge_iterator ei;
5494 basic_block dummy, bb;
5495 tree var;
5496 gphi_iterator gsi;
5497
5498 dummy = fallthru->src;
5499 bb = fallthru->dest;
5500
5501 if (single_pred_p (bb))
5502 return;
5503
5504 /* If we redirected a branch we must create new PHI nodes at the
5505 start of BB. */
5506 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5507 {
5508 gphi *phi, *new_phi;
5509
5510 phi = gsi.phi ();
5511 var = gimple_phi_result (phi);
5512 new_phi = create_phi_node (var, bb);
5513 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5514 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5515 UNKNOWN_LOCATION);
5516 }
5517
5518 /* Add the arguments we have stored on edges. */
5519 FOR_EACH_EDGE (e, ei, bb->preds)
5520 {
5521 if (e == fallthru)
5522 continue;
5523
5524 flush_pending_stmts (e);
5525 }
5526 }
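
/* For example, if the forwarder DUMMY inherited

     x_3 = PHI <x_1(e1), x_2(e2)>

   then BB receives a new "x_3 = PHI <x_4(fallthru), ...>", the
   result of DUMMY's PHI is renamed to the fresh x_4, and the
   arguments queued on the remaining predecessor edges of BB are
   flushed into the new PHI. */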
5527
5528
5529 /* Return a non-special label in the head of basic block BB.
5530 Create one if it doesn't exist. */
5531
5532 tree
5533 gimple_block_label (basic_block bb)
5534 {
5535 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5536 bool first = true;
5537 tree label;
5538 glabel *stmt;
5539
5540 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5541 {
5542 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5543 if (!stmt)
5544 break;
5545 label = gimple_label_label (stmt);
5546 if (!DECL_NONLOCAL (label))
5547 {
5548 if (!first)
5549 gsi_move_before (&i, &s);
5550 return label;
5551 }
5552 }
5553
5554 label = create_artificial_label (UNKNOWN_LOCATION);
5555 stmt = gimple_build_label (label);
5556 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5557 return label;
5558 }
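
/* The edge redirection code below relies on this, e.g.

     tree label = gimple_block_label (dest);

   to obtain a target for case labels and asm goto operands, reusing
   an existing normal label where possible instead of always creating
   a fresh one. */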
5559
5560
5561 /* Attempt to perform edge redirection by replacing a possibly complex
5562 jump instruction by a goto or by removing the jump completely.
5563 This can apply only if all edges now point to the same block. The
5564 parameters and return values are equivalent to
5565 redirect_edge_and_branch. */
5566
5567 static edge
5568 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5569 {
5570 basic_block src = e->src;
5571 gimple_stmt_iterator i;
5572 gimple *stmt;
5573
5574 /* We can replace or remove a complex jump only when we have exactly
5575 two edges. */
5576 if (EDGE_COUNT (src->succs) != 2
5577 /* Verify that all targets will be TARGET. Specifically, the
5578 edge that is not E must also go to TARGET. */
5579 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5580 return NULL;
5581
5582 i = gsi_last_bb (src);
5583 if (gsi_end_p (i))
5584 return NULL;
5585
5586 stmt = gsi_stmt (i);
5587
5588 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5589 {
5590 gsi_remove (&i, true);
5591 e = ssa_redirect_edge (e, target);
5592 e->flags = EDGE_FALLTHRU;
5593 return e;
5594 }
5595
5596 return NULL;
5597 }
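
/* E.g. once both arms of "if (x_1 != 0) goto <L1>; else goto <L2>;"
   have come to point at the same block, the GIMPLE_COND is removed
   and the single surviving edge becomes a fallthru. */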
5598
5599
5600 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5601 edge representing the redirected branch. */
5602
5603 static edge
5604 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5605 {
5606 basic_block bb = e->src;
5607 gimple_stmt_iterator gsi;
5608 edge ret;
5609 gimple *stmt;
5610
5611 if (e->flags & EDGE_ABNORMAL)
5612 return NULL;
5613
5614 if (e->dest == dest)
5615 return NULL;
5616
5617 if (e->flags & EDGE_EH)
5618 return redirect_eh_edge (e, dest);
5619
5620 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5621 {
5622 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5623 if (ret)
5624 return ret;
5625 }
5626
5627 gsi = gsi_last_bb (bb);
5628 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5629
5630 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5631 {
5632 case GIMPLE_COND:
5633 /* For COND_EXPR, we only need to redirect the edge. */
5634 break;
5635
5636 case GIMPLE_GOTO:
5637 /* No non-abnormal edges should lead from a non-simple goto, and
5638 simple ones should be represented implicitly. */
5639 gcc_unreachable ();
5640
5641 case GIMPLE_SWITCH:
5642 {
5643 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5644 tree label = gimple_block_label (dest);
5645 tree cases = get_cases_for_edge (e, switch_stmt);
5646
5647 /* If we have a list of cases associated with E, then use it
5648 as it's a lot faster than walking the entire case vector. */
5649 if (cases)
5650 {
5651 edge e2 = find_edge (e->src, dest);
5652 tree last, first;
5653
5654 first = cases;
5655 while (cases)
5656 {
5657 last = cases;
5658 CASE_LABEL (cases) = label;
5659 cases = CASE_CHAIN (cases);
5660 }
5661
5662 /* If there was already an edge in the CFG, then we need
5663 to move all the cases associated with E to E2. */
5664 if (e2)
5665 {
5666 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5667
5668 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5669 CASE_CHAIN (cases2) = first;
5670 }
5671 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5672 }
5673 else
5674 {
5675 size_t i, n = gimple_switch_num_labels (switch_stmt);
5676
5677 for (i = 0; i < n; i++)
5678 {
5679 tree elt = gimple_switch_label (switch_stmt, i);
5680 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5681 CASE_LABEL (elt) = label;
5682 }
5683 }
5684 }
5685 break;
5686
5687 case GIMPLE_ASM:
5688 {
5689 gasm *asm_stmt = as_a <gasm *> (stmt);
5690 int i, n = gimple_asm_nlabels (asm_stmt);
5691 tree label = NULL;
5692
5693 for (i = 0; i < n; ++i)
5694 {
5695 tree cons = gimple_asm_label_op (asm_stmt, i);
5696 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5697 {
5698 if (!label)
5699 label = gimple_block_label (dest);
5700 TREE_VALUE (cons) = label;
5701 }
5702 }
5703
5704 /* If we didn't find any label matching the former edge in the
5705 asm labels, we must be redirecting the fallthrough
5706 edge. */
5707 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5708 }
5709 break;
5710
5711 case GIMPLE_RETURN:
5712 gsi_remove (&gsi, true);
5713 e->flags |= EDGE_FALLTHRU;
5714 break;
5715
5716 case GIMPLE_OMP_RETURN:
5717 case GIMPLE_OMP_CONTINUE:
5718 case GIMPLE_OMP_SECTIONS_SWITCH:
5719 case GIMPLE_OMP_FOR:
5720 /* The edges from OMP constructs can be simply redirected. */
5721 break;
5722
5723 case GIMPLE_EH_DISPATCH:
5724 if (!(e->flags & EDGE_FALLTHRU))
5725 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5726 break;
5727
5728 case GIMPLE_TRANSACTION:
5729 if (e->flags & EDGE_TM_ABORT)
5730 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5731 gimple_block_label (dest));
5732 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5733 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5734 gimple_block_label (dest));
5735 else
5736 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5737 gimple_block_label (dest));
5738 break;
5739
5740 default:
5741 /* Otherwise it must be a fallthru edge, and we don't need to
5742 do anything besides redirecting it. */
5743 gcc_assert (e->flags & EDGE_FALLTHRU);
5744 break;
5745 }
5746
5747 /* Update/insert PHI nodes as necessary. */
5748
5749 /* Now update the edges in the CFG. */
5750 e = ssa_redirect_edge (e, dest);
5751
5752 return e;
5753 }
5754
5755 /* Returns true if it is possible to remove edge E by redirecting
5756 it to the destination of the other edge from E->src. */
5757
5758 static bool
5759 gimple_can_remove_branch_p (const_edge e)
5760 {
5761 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5762 return false;
5763
5764 return true;
5765 }
5766
5767 /* Simple wrapper, as we can always redirect fallthru edges. */
5768
5769 static basic_block
5770 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5771 {
5772 e = gimple_redirect_edge_and_branch (e, dest);
5773 gcc_assert (e);
5774
5775 return NULL;
5776 }
5777
5778
5779 /* Splits basic block BB after statement STMT (but at least after the
5780 labels). If STMT is NULL, BB is split just after the labels. */
5781
5782 static basic_block
5783 gimple_split_block (basic_block bb, void *stmt)
5784 {
5785 gimple_stmt_iterator gsi;
5786 gimple_stmt_iterator gsi_tgt;
5787 gimple_seq list;
5788 basic_block new_bb;
5789 edge e;
5790 edge_iterator ei;
5791
5792 new_bb = create_empty_bb (bb);
5793
5794 /* Redirect the outgoing edges. */
5795 new_bb->succs = bb->succs;
5796 bb->succs = NULL;
5797 FOR_EACH_EDGE (e, ei, new_bb->succs)
5798 e->src = new_bb;
5799
5800 /* Get a stmt iterator pointing to the first stmt to move. */
5801 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5802 gsi = gsi_after_labels (bb);
5803 else
5804 {
5805 gsi = gsi_for_stmt ((gimple *) stmt);
5806 gsi_next (&gsi);
5807 }
5808
5809 /* Move everything from GSI to the new basic block. */
5810 if (gsi_end_p (gsi))
5811 return new_bb;
5812
5813 /* Split the statement list - avoid re-creating new containers as this
5814 brings ugly quadratic memory consumption in the inliner.
5815 (We are still quadratic since we need to update stmt BB pointers,
5816 sadly.) */
5817 gsi_split_seq_before (&gsi, &list);
5818 set_bb_seq (new_bb, list);
5819 for (gsi_tgt = gsi_start (list);
5820 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5821 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5822
5823 return new_bb;
5824 }
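
/* E.g. splitting "a_1 = b_2; c_3 = d_4;" after the first statement
   leaves "a_1 = b_2;" (and any leading labels) in BB and moves
   "c_3 = d_4;" together with all of BB's outgoing edges to the
   returned block. */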
5825
5826
5827 /* Moves basic block BB after block AFTER. */
5828
5829 static bool
5830 gimple_move_block_after (basic_block bb, basic_block after)
5831 {
5832 if (bb->prev_bb == after)
5833 return true;
5834
5835 unlink_block (bb);
5836 link_block (bb, after);
5837
5838 return true;
5839 }
5840
5841
5842 /* Return TRUE if block BB has no executable statements, otherwise return
5843 FALSE. */
5844
5845 static bool
5846 gimple_empty_block_p (basic_block bb)
5847 {
5848 /* BB must have no executable statements. */
5849 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5850 if (phi_nodes (bb))
5851 return false;
5852 if (gsi_end_p (gsi))
5853 return true;
5854 if (is_gimple_debug (gsi_stmt (gsi)))
5855 gsi_next_nondebug (&gsi);
5856 return gsi_end_p (gsi);
5857 }
5858
5859
5860 /* Split a basic block if it ends with a conditional branch and if the
5861 other part of the block is not empty. */
5862
5863 static basic_block
5864 gimple_split_block_before_cond_jump (basic_block bb)
5865 {
5866 gimple *last, *split_point;
5867 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5868 if (gsi_end_p (gsi))
5869 return NULL;
5870 last = gsi_stmt (gsi);
5871 if (gimple_code (last) != GIMPLE_COND
5872 && gimple_code (last) != GIMPLE_SWITCH)
5873 return NULL;
5874 gsi_prev (&gsi);
5875 split_point = gsi_stmt (gsi);
5876 return split_block (bb, split_point)->dest;
5877 }
5878
5879
5880 /* Return true if basic_block can be duplicated. */
5881
5882 static bool
5883 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5884 {
5885 return true;
5886 }
5887
5888 /* Create a duplicate of the basic block BB. NOTE: This does not
5889 preserve SSA form. */
5890
5891 static basic_block
5892 gimple_duplicate_bb (basic_block bb)
5893 {
5894 basic_block new_bb;
5895 gimple_stmt_iterator gsi_tgt;
5896
5897 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5898
5899 /* Copy the PHI nodes. We ignore PHI node arguments here because
5900 the incoming edges have not been set up yet. */
5901 for (gphi_iterator gpi = gsi_start_phis (bb);
5902 !gsi_end_p (gpi);
5903 gsi_next (&gpi))
5904 {
5905 gphi *phi, *copy;
5906 phi = gpi.phi ();
5907 copy = create_phi_node (NULL_TREE, new_bb);
5908 create_new_def_for (gimple_phi_result (phi), copy,
5909 gimple_phi_result_ptr (copy));
5910 gimple_set_uid (copy, gimple_uid (phi));
5911 }
5912
5913 gsi_tgt = gsi_start_bb (new_bb);
5914 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5915 !gsi_end_p (gsi);
5916 gsi_next (&gsi))
5917 {
5918 def_operand_p def_p;
5919 ssa_op_iter op_iter;
5920 tree lhs;
5921 gimple *stmt, *copy;
5922
5923 stmt = gsi_stmt (gsi);
5924 if (gimple_code (stmt) == GIMPLE_LABEL)
5925 continue;
5926
5927 /* Don't duplicate label debug stmts. */
5928 if (gimple_debug_bind_p (stmt)
5929 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5930 == LABEL_DECL)
5931 continue;
5932
5933 /* Create a new copy of STMT and duplicate STMT's virtual
5934 operands. */
5935 copy = gimple_copy (stmt);
5936 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5937
5938 maybe_duplicate_eh_stmt (copy, stmt);
5939 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5940
5941 /* When copying around a stmt writing into a local non-user
5942 aggregate, make sure it won't share stack slot with other
5943 vars. */
5944 lhs = gimple_get_lhs (stmt);
5945 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5946 {
5947 tree base = get_base_address (lhs);
5948 if (base
5949 && (TREE_CODE (base) == VAR_DECL
5950 || TREE_CODE (base) == RESULT_DECL)
5951 && DECL_IGNORED_P (base)
5952 && !TREE_STATIC (base)
5953 && !DECL_EXTERNAL (base)
5954 && (TREE_CODE (base) != VAR_DECL
5955 || !DECL_HAS_VALUE_EXPR_P (base)))
5956 DECL_NONSHAREABLE (base) = 1;
5957 }
5958
5959 /* Create new names for all the definitions created by COPY and
5960 add replacement mappings for each new name. */
5961 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5962 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5963 }
5964
5965 return new_bb;
5966 }
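
/* The PHI arguments deliberately left empty here are filled in by
   add_phi_args_after_copy_* below, once the duplicated edges
   exist. */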
5967
5968 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5969
5970 static void
5971 add_phi_args_after_copy_edge (edge e_copy)
5972 {
5973 basic_block bb, bb_copy = e_copy->src, dest;
5974 edge e;
5975 edge_iterator ei;
5976 gphi *phi, *phi_copy;
5977 tree def;
5978 gphi_iterator psi, psi_copy;
5979
5980 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5981 return;
5982
5983 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5984
5985 if (e_copy->dest->flags & BB_DUPLICATED)
5986 dest = get_bb_original (e_copy->dest);
5987 else
5988 dest = e_copy->dest;
5989
5990 e = find_edge (bb, dest);
5991 if (!e)
5992 {
5993 /* During loop unrolling the target of the latch edge is copied.
5994 In this case we are not looking for the edge to DEST, but for
5995 the edge to the duplicated block whose original was DEST. */
5996 FOR_EACH_EDGE (e, ei, bb->succs)
5997 {
5998 if ((e->dest->flags & BB_DUPLICATED)
5999 && get_bb_original (e->dest) == dest)
6000 break;
6001 }
6002
6003 gcc_assert (e != NULL);
6004 }
6005
6006 for (psi = gsi_start_phis (e->dest),
6007 psi_copy = gsi_start_phis (e_copy->dest);
6008 !gsi_end_p (psi);
6009 gsi_next (&psi), gsi_next (&psi_copy))
6010 {
6011 phi = psi.phi ();
6012 phi_copy = psi_copy.phi ();
6013 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6014 add_phi_arg (phi_copy, def, e_copy,
6015 gimple_phi_arg_location_from_edge (phi, e));
6016 }
6017 }
6018
6019
6020 /* Basic block BB_COPY was created by code duplication. Add phi node
6021 arguments for edges going out of BB_COPY. The blocks that were
6022 duplicated have BB_DUPLICATED set. */
6023
6024 void
6025 add_phi_args_after_copy_bb (basic_block bb_copy)
6026 {
6027 edge e_copy;
6028 edge_iterator ei;
6029
6030 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6031 {
6032 add_phi_args_after_copy_edge (e_copy);
6033 }
6034 }
6035
6036 /* Blocks in REGION_COPY array of length N_REGION were created by
6037 duplication of basic blocks. Add phi node arguments for edges
6038 going from these blocks. If E_COPY is not NULL, also add
6039 phi node arguments for its destination.  */
6040
6041 void
6042 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6043 edge e_copy)
6044 {
6045 unsigned i;
6046
6047 for (i = 0; i < n_region; i++)
6048 region_copy[i]->flags |= BB_DUPLICATED;
6049
6050 for (i = 0; i < n_region; i++)
6051 add_phi_args_after_copy_bb (region_copy[i]);
6052 if (e_copy)
6053 add_phi_args_after_copy_edge (e_copy);
6054
6055 for (i = 0; i < n_region; i++)
6056 region_copy[i]->flags &= ~BB_DUPLICATED;
6057 }
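
/* Usage sketch (hypothetical caller): the helpers above assume the
   original/copy tables are still populated, since BB_DUPLICATED is set
   temporarily to map each copy back to its original:

     copy_bbs (region, n, region_copy, &exit, 1, &exit_copy, loop,
	       after_bb, true);
     add_phi_args_after_copy (region_copy, n, NULL);

   gimple_duplicate_sese_region below follows exactly this pattern.  */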
6058
6059 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6060 important exit edge EXIT. By important we mean that no SSA name defined
6061 inside region is live over the other exit edges of the region. All entry
6062 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6063 to the duplicate of the region. Dominance and loop information is
6064 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6065 UPDATE_DOMINANCE is false then we assume that the caller will update the
6066 dominance information after calling this function. The new basic
6067 blocks are stored to REGION_COPY in the same order as in REGION,
6068 provided that REGION_COPY is not NULL.
6069 The function returns false if it is unable to copy the region,
6070 true otherwise. */
6071
6072 bool
6073 gimple_duplicate_sese_region (edge entry, edge exit,
6074 basic_block *region, unsigned n_region,
6075 basic_block *region_copy,
6076 bool update_dominance)
6077 {
6078 unsigned i;
6079 bool free_region_copy = false, copying_header = false;
6080 struct loop *loop = entry->dest->loop_father;
6081 edge exit_copy;
6082 vec<basic_block> doms;
6083 edge redirected;
6084 int total_freq = 0, entry_freq = 0;
6085 gcov_type total_count = 0, entry_count = 0;
6086
6087 if (!can_copy_bbs_p (region, n_region))
6088 return false;
6089
6090 /* Some sanity checking. Note that we do not check for all possible
6091 misuses of the function. That is, if you ask to copy something weird,
6092 it will work, but the state of structures probably will not be
6093 correct. */
6094 for (i = 0; i < n_region; i++)
6095 {
6096 /* We do not handle subloops, i.e. all the blocks must belong to the
6097 same loop. */
6098 if (region[i]->loop_father != loop)
6099 return false;
6100
6101 if (region[i] != entry->dest
6102 && region[i] == loop->header)
6103 return false;
6104 }
6105
6106 /* In case the function is used for loop header copying (which is the primary
6107 use), ensure that EXIT and its copy will be new latch and entry edges. */
6108 if (loop->header == entry->dest)
6109 {
6110 copying_header = true;
6111
6112 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6113 return false;
6114
6115 for (i = 0; i < n_region; i++)
6116 if (region[i] != exit->src
6117 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6118 return false;
6119 }
6120
6121 initialize_original_copy_tables ();
6122
6123 if (copying_header)
6124 set_loop_copy (loop, loop_outer (loop));
6125 else
6126 set_loop_copy (loop, loop);
6127
6128 if (!region_copy)
6129 {
6130 region_copy = XNEWVEC (basic_block, n_region);
6131 free_region_copy = true;
6132 }
6133
6134 /* Record blocks outside the region that are dominated by something
6135 inside. */
6136 if (update_dominance)
6137 {
6138 doms.create (0);
6139 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6140 }
6141
6142 if (entry->dest->count)
6143 {
6144 total_count = entry->dest->count;
6145 entry_count = entry->count;
6146 /* Fix up corner cases, to avoid division by zero or creation of negative
6147 frequencies. */
6148 if (entry_count > total_count)
6149 entry_count = total_count;
6150 }
6151 else
6152 {
6153 total_freq = entry->dest->frequency;
6154 entry_freq = EDGE_FREQUENCY (entry);
6155 /* Fix up corner cases, to avoid division by zero or creation of negative
6156 frequencies. */
6157 if (total_freq == 0)
6158 total_freq = 1;
6159 else if (entry_freq > total_freq)
6160 entry_freq = total_freq;
6161 }
6162
6163 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6164 split_edge_bb_loc (entry), update_dominance);
6165 if (total_count)
6166 {
6167 scale_bbs_frequencies_gcov_type (region, n_region,
6168 total_count - entry_count,
6169 total_count);
6170 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6171 total_count);
6172 }
6173 else
6174 {
6175 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6176 total_freq);
6177 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6178 }
6179
6180 if (copying_header)
6181 {
6182 loop->header = exit->dest;
6183 loop->latch = exit->src;
6184 }
6185
6186 /* Redirect the entry and add the phi node arguments. */
6187 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6188 gcc_assert (redirected != NULL);
6189 flush_pending_stmts (entry);
6190
6191 /* Concerning updating of dominators: We must recount dominators
6192 for entry block and its copy. Anything that is outside of the
6193 region, but was dominated by something inside needs recounting as
6194 well. */
6195 if (update_dominance)
6196 {
6197 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6198 doms.safe_push (get_bb_original (entry->dest));
6199 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6200 doms.release ();
6201 }
6202
6203 /* Add the other PHI node arguments. */
6204 add_phi_args_after_copy (region_copy, n_region, NULL);
6205
6206 if (free_region_copy)
6207 free (region_copy);
6208
6209 free_original_copy_tables ();
6210 return true;
6211 }
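
/* Usage sketch (hypothetical helper, modeled on loop header copying,
   the primary client of gimple_duplicate_sese_region): ENTRY, EXIT and
   BBS are assumed to satisfy the contract documented above.  */

static bool
example_copy_header_region (struct loop *loop, edge exit,
			    basic_block *bbs, unsigned n_bbs)
{
  edge entry = loop_preheader_edge (loop);
  basic_block *copied = XNEWVEC (basic_block, n_bbs);
  bool ok = gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
					  copied, true);
  if (ok)
    /* The callee deliberately leaves the SSA web to the caller.  */
    update_ssa (TODO_update_ssa);
  free (copied);
  return ok;
}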
6212
6213 /* Checks if BB is part of the region defined by N_REGION BBS. */
6214 static bool
6215 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6216 {
6217 unsigned int n;
6218
6219 for (n = 0; n < n_region; n++)
6220 {
6221 if (bb == bbs[n])
6222 return true;
6223 }
6224 return false;
6225 }
6226
6227 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6228 are stored to REGION_COPY in the same order as they appear
6229 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6230 the region, EXIT an exit from it. The condition guarding EXIT
6231 is moved to ENTRY. Returns true if duplication succeeds, false
6232 otherwise.
6233
6234 For example,
6235
6236 some_code;
6237 if (cond)
6238 A;
6239 else
6240 B;
6241
6242 is transformed to
6243
6244 if (cond)
6245 {
6246 some_code;
6247 A;
6248 }
6249 else
6250 {
6251 some_code;
6252 B;
6253 }
6254 */
6255
6256 bool
6257 gimple_duplicate_sese_tail (edge entry, edge exit,
6258 basic_block *region, unsigned n_region,
6259 basic_block *region_copy)
6260 {
6261 unsigned i;
6262 bool free_region_copy = false;
6263 struct loop *loop = exit->dest->loop_father;
6264 struct loop *orig_loop = entry->dest->loop_father;
6265 basic_block switch_bb, entry_bb, nentry_bb;
6266 vec<basic_block> doms;
6267 int total_freq = 0, exit_freq = 0;
6268 gcov_type total_count = 0, exit_count = 0;
6269 edge exits[2], nexits[2], e;
6270 gimple_stmt_iterator gsi;
6271 gimple *cond_stmt;
6272 edge sorig, snew;
6273 basic_block exit_bb;
6274 gphi_iterator psi;
6275 gphi *phi;
6276 tree def;
6277 struct loop *target, *aloop, *cloop;
6278
6279 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6280 exits[0] = exit;
6281 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6282
6283 if (!can_copy_bbs_p (region, n_region))
6284 return false;
6285
6286 initialize_original_copy_tables ();
6287 set_loop_copy (orig_loop, loop);
6288
6289 target = loop;
6290 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6291 {
6292 if (bb_part_of_region_p (aloop->header, region, n_region))
6293 {
6294 cloop = duplicate_loop (aloop, target);
6295 duplicate_subloops (aloop, cloop);
6296 }
6297 }
6298
6299 if (!region_copy)
6300 {
6301 region_copy = XNEWVEC (basic_block, n_region);
6302 free_region_copy = true;
6303 }
6304
6305 gcc_assert (!need_ssa_update_p (cfun));
6306
6307 /* Record blocks outside the region that are dominated by something
6308 inside. */
6309 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6310
6311 if (exit->src->count)
6312 {
6313 total_count = exit->src->count;
6314 exit_count = exit->count;
6315 /* Fix up corner cases, to avoid division by zero or creation of negative
6316 frequencies. */
6317 if (exit_count > total_count)
6318 exit_count = total_count;
6319 }
6320 else
6321 {
6322 total_freq = exit->src->frequency;
6323 exit_freq = EDGE_FREQUENCY (exit);
6324 /* Fix up corner cases, to avoid division by zero or creation of negative
6325 frequencies. */
6326 if (total_freq == 0)
6327 total_freq = 1;
6328 if (exit_freq > total_freq)
6329 exit_freq = total_freq;
6330 }
6331
6332 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6333 split_edge_bb_loc (exit), true);
6334 if (total_count)
6335 {
6336 scale_bbs_frequencies_gcov_type (region, n_region,
6337 total_count - exit_count,
6338 total_count);
6339 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6340 total_count);
6341 }
6342 else
6343 {
6344 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6345 total_freq);
6346 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6347 }
6348
6349 /* Create the switch block, and put the exit condition into it. */
6350 entry_bb = entry->dest;
6351 nentry_bb = get_bb_copy (entry_bb);
6352 if (!last_stmt (entry->src)
6353 || !stmt_ends_bb_p (last_stmt (entry->src)))
6354 switch_bb = entry->src;
6355 else
6356 switch_bb = split_edge (entry);
6357 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6358
6359 gsi = gsi_last_bb (switch_bb);
6360 cond_stmt = last_stmt (exit->src);
6361 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6362 cond_stmt = gimple_copy (cond_stmt);
6363
6364 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6365
6366 sorig = single_succ_edge (switch_bb);
6367 sorig->flags = exits[1]->flags;
6368 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6369
6370 /* Register the new edge from SWITCH_BB in loop exit lists. */
6371 rescan_loop_exit (snew, true, false);
6372
6373 /* Add the PHI node arguments. */
6374 add_phi_args_after_copy (region_copy, n_region, snew);
6375
6376 /* Get rid of now superfluous conditions and associated edges (and phi node
6377 arguments). */
6378 exit_bb = exit->dest;
6379
6380 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6381 PENDING_STMT (e) = NULL;
6382
6383 /* The latch of ORIG_LOOP was copied, and so was the backedge
6384 to the original header. We redirect this backedge to EXIT_BB. */
6385 for (i = 0; i < n_region; i++)
6386 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6387 {
6388 gcc_assert (single_succ_edge (region_copy[i]));
6389 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6390 PENDING_STMT (e) = NULL;
6391 for (psi = gsi_start_phis (exit_bb);
6392 !gsi_end_p (psi);
6393 gsi_next (&psi))
6394 {
6395 phi = psi.phi ();
6396 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6397 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6398 }
6399 }
6400 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6401 PENDING_STMT (e) = NULL;
6402
6403 /* Anything that is outside of the region, but was dominated by something
6404 inside, needs its dominance info updated. */
6405 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6406 doms.release ();
6407 /* Update the SSA web. */
6408 update_ssa (TODO_update_ssa);
6409
6410 if (free_region_copy)
6411 free (region_copy);
6412
6413 free_original_copy_tables ();
6414 return true;
6415 }
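
/* Usage sketch (hypothetical): for a loop whose exit test sits at the
   bottom, duplicating the tail as pictured above effectively hoists the
   test in front of the body.  A caller gathers the region between ENTRY
   and EXIT and then does

     if (gimple_duplicate_sese_tail (entry, exit, bbs, n_bbs, NULL))
       ... the loop is now in exit-first form ...

   Unlike gimple_duplicate_sese_region, this variant updates the SSA web
   itself (see the update_ssa call above).  */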
6416
6417 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6418 adding blocks when the dominator traversal reaches EXIT. This
6419 function silently assumes that ENTRY strictly dominates EXIT. */
6420
6421 void
6422 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6423 vec<basic_block> *bbs_p)
6424 {
6425 basic_block son;
6426
6427 for (son = first_dom_son (CDI_DOMINATORS, entry);
6428 son;
6429 son = next_dom_son (CDI_DOMINATORS, son))
6430 {
6431 bbs_p->safe_push (son);
6432 if (son != exit)
6433 gather_blocks_in_sese_region (son, exit, bbs_p);
6434 }
6435 }
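
/* Usage sketch: this is how move_sese_region_to_fn below collects a
   region body; ENTRY_BB must be pushed by hand because the dominator
   walk starts at its children:

     auto_vec<basic_block> bbs;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */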
6436
6437 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6438 The duplicates are recorded in VARS_MAP. */
6439
6440 static void
6441 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6442 tree to_context)
6443 {
6444 tree t = *tp, new_t;
6445 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6446
6447 if (DECL_CONTEXT (t) == to_context)
6448 return;
6449
6450 bool existed;
6451 tree &loc = vars_map->get_or_insert (t, &existed);
6452
6453 if (!existed)
6454 {
6455 if (SSA_VAR_P (t))
6456 {
6457 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6458 add_local_decl (f, new_t);
6459 }
6460 else
6461 {
6462 gcc_assert (TREE_CODE (t) == CONST_DECL);
6463 new_t = copy_node (t);
6464 }
6465 DECL_CONTEXT (new_t) = to_context;
6466
6467 loc = new_t;
6468 }
6469 else
6470 new_t = loc;
6471
6472 *tp = new_t;
6473 }
6474
6475
6476 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6477 VARS_MAP maps old ssa names and var_decls to the new ones. */
6478
6479 static tree
6480 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6481 tree to_context)
6482 {
6483 tree new_name;
6484
6485 gcc_assert (!virtual_operand_p (name));
6486
6487 tree *loc = vars_map->get (name);
6488
6489 if (!loc)
6490 {
6491 tree decl = SSA_NAME_VAR (name);
6492 if (decl)
6493 {
6494 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6495 replace_by_duplicate_decl (&decl, vars_map, to_context);
6496 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6497 decl, SSA_NAME_DEF_STMT (name));
6498 }
6499 else
6500 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6501 name, SSA_NAME_DEF_STMT (name));
6502
6503 /* Now that we've used the def stmt to define new_name, make sure it
6504 doesn't define name anymore. */
6505 SSA_NAME_DEF_STMT (name) = NULL;
6506
6507 vars_map->put (name, new_name);
6508 }
6509 else
6510 new_name = *loc;
6511
6512 return new_name;
6513 }
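
/* Usage sketch: a single VARS_MAP is threaded through both
   replace_by_duplicate_decl and replace_ssa_name while a region is
   being moved, so decl and SSA-name replacements stay consistent:

     hash_map<tree, tree> vars_map;
     tree new_name = replace_ssa_name (old_name, &vars_map, dest_decl);

   where DEST_DECL is the FUNCTION_DECL of the destination function.  */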
6514
6515 struct move_stmt_d
6516 {
6517 tree orig_block;
6518 tree new_block;
6519 tree from_context;
6520 tree to_context;
6521 hash_map<tree, tree> *vars_map;
6522 htab_t new_label_map;
6523 hash_map<void *, void *> *eh_map;
6524 bool remap_decls_p;
6525 };
6526
6527 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6528 contained in *TP whose block was previously ORIG_BLOCK, and change the
6529 DECL_CONTEXT of every local variable referenced in *TP. */
6530
6531 static tree
6532 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6533 {
6534 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6535 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6536 tree t = *tp;
6537
6538 if (EXPR_P (t))
6539 {
6540 tree block = TREE_BLOCK (t);
6541 if (block == p->orig_block
6542 || (p->orig_block == NULL_TREE
6543 && block != NULL_TREE))
6544 TREE_SET_BLOCK (t, p->new_block);
6545 else if (flag_checking && block != NULL_TREE)
6546 {
6547 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6548 block = BLOCK_SUPERCONTEXT (block);
6549 gcc_assert (block == p->orig_block);
6550 }
6551 }
6552 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6553 {
6554 if (TREE_CODE (t) == SSA_NAME)
6555 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6556 else if (TREE_CODE (t) == PARM_DECL
6557 && gimple_in_ssa_p (cfun))
6558 *tp = *(p->vars_map->get (t));
6559 else if (TREE_CODE (t) == LABEL_DECL)
6560 {
6561 if (p->new_label_map)
6562 {
6563 struct tree_map in, *out;
6564 in.base.from = t;
6565 out = (struct tree_map *)
6566 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6567 if (out)
6568 *tp = t = out->to;
6569 }
6570
6571 DECL_CONTEXT (t) = p->to_context;
6572 }
6573 else if (p->remap_decls_p)
6574 {
6575 /* Replace T with its duplicate. T should no longer appear in the
6576 parent function, so this looks wasteful; however, it may appear
6577 in referenced_vars, and more importantly, as virtual operands of
6578 statements, and in alias lists of other variables. It would be
6579 quite difficult to expunge it from all those places. ??? It might
6580 suffice to do this for addressable variables. */
6581 if ((TREE_CODE (t) == VAR_DECL
6582 && !is_global_var (t))
6583 || TREE_CODE (t) == CONST_DECL)
6584 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6585 }
6586 *walk_subtrees = 0;
6587 }
6588 else if (TYPE_P (t))
6589 *walk_subtrees = 0;
6590
6591 return NULL_TREE;
6592 }
6593
6594 /* Helper for move_stmt_r. Given an EH region number for the source
6595 function, map that to the duplicate EH region number in the dest. */
6596
6597 static int
6598 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6599 {
6600 eh_region old_r, new_r;
6601
6602 old_r = get_eh_region_from_number (old_nr);
6603 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6604
6605 return new_r->index;
6606 }
6607
6608 /* Similar, but operate on INTEGER_CSTs. */
6609
6610 static tree
6611 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6612 {
6613 int old_nr, new_nr;
6614
6615 old_nr = tree_to_shwi (old_t_nr);
6616 new_nr = move_stmt_eh_region_nr (old_nr, p);
6617
6618 return build_int_cst (integer_type_node, new_nr);
6619 }
6620
6621 /* Like move_stmt_op, but for gimple statements.
6622
6623 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6624 contained in the current statement in *GSI_P and change the
6625 DECL_CONTEXT of every local variable referenced in the current
6626 statement. */
6627
6628 static tree
6629 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6630 struct walk_stmt_info *wi)
6631 {
6632 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6633 gimple *stmt = gsi_stmt (*gsi_p);
6634 tree block = gimple_block (stmt);
6635
6636 if (block == p->orig_block
6637 || (p->orig_block == NULL_TREE
6638 && block != NULL_TREE))
6639 gimple_set_block (stmt, p->new_block);
6640
6641 switch (gimple_code (stmt))
6642 {
6643 case GIMPLE_CALL:
6644 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6645 {
6646 tree r, fndecl = gimple_call_fndecl (stmt);
6647 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6648 switch (DECL_FUNCTION_CODE (fndecl))
6649 {
6650 case BUILT_IN_EH_COPY_VALUES:
6651 r = gimple_call_arg (stmt, 1);
6652 r = move_stmt_eh_region_tree_nr (r, p);
6653 gimple_call_set_arg (stmt, 1, r);
6654 /* FALLTHRU */
6655
6656 case BUILT_IN_EH_POINTER:
6657 case BUILT_IN_EH_FILTER:
6658 r = gimple_call_arg (stmt, 0);
6659 r = move_stmt_eh_region_tree_nr (r, p);
6660 gimple_call_set_arg (stmt, 0, r);
6661 break;
6662
6663 default:
6664 break;
6665 }
6666 }
6667 break;
6668
6669 case GIMPLE_RESX:
6670 {
6671 gresx *resx_stmt = as_a <gresx *> (stmt);
6672 int r = gimple_resx_region (resx_stmt);
6673 r = move_stmt_eh_region_nr (r, p);
6674 gimple_resx_set_region (resx_stmt, r);
6675 }
6676 break;
6677
6678 case GIMPLE_EH_DISPATCH:
6679 {
6680 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6681 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6682 r = move_stmt_eh_region_nr (r, p);
6683 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6684 }
6685 break;
6686
6687 case GIMPLE_OMP_RETURN:
6688 case GIMPLE_OMP_CONTINUE:
6689 break;
6690 default:
6691 if (is_gimple_omp (stmt))
6692 {
6693 /* Do not remap variables inside OMP directives. Variables
6694 referenced in clauses and directive header belong to the
6695 parent function and should not be moved into the child
6696 function. */
6697 bool save_remap_decls_p = p->remap_decls_p;
6698 p->remap_decls_p = false;
6699 *handled_ops_p = true;
6700
6701 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6702 move_stmt_op, wi);
6703
6704 p->remap_decls_p = save_remap_decls_p;
6705 }
6706 break;
6707 }
6708
6709 return NULL_TREE;
6710 }
6711
6712 /* Move basic block BB from function CFUN to function DEST_FN. The
6713 block is moved out of the original linked list and placed after
6714 block AFTER in the new list. Also, the block is removed from the
6715 original array of blocks and placed in DEST_FN's array of blocks.
6716 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6717 updated to reflect the moved edges.
6718
6719 The local variables are remapped to new instances; VARS_MAP is used
6720 to record the mapping. */
6721
6722 static void
6723 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6724 basic_block after, bool update_edge_count_p,
6725 struct move_stmt_d *d)
6726 {
6727 struct control_flow_graph *cfg;
6728 edge_iterator ei;
6729 edge e;
6730 gimple_stmt_iterator si;
6731 unsigned old_len, new_len;
6732
6733 /* Remove BB from dominance structures. */
6734 delete_from_dominance_info (CDI_DOMINATORS, bb);
6735
6736 /* Move BB from its current loop to the copy in the new function. */
6737 if (current_loops)
6738 {
6739 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6740 if (new_loop)
6741 bb->loop_father = new_loop;
6742 }
6743
6744 /* Link BB to the new linked list. */
6745 move_block_after (bb, after);
6746
6747 /* Update the edge count in the corresponding flowgraphs. */
6748 if (update_edge_count_p)
6749 FOR_EACH_EDGE (e, ei, bb->succs)
6750 {
6751 cfun->cfg->x_n_edges--;
6752 dest_cfun->cfg->x_n_edges++;
6753 }
6754
6755 /* Remove BB from the original basic block array. */
6756 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6757 cfun->cfg->x_n_basic_blocks--;
6758
6759 /* Grow DEST_CFUN's basic block array if needed. */
6760 cfg = dest_cfun->cfg;
6761 cfg->x_n_basic_blocks++;
6762 if (bb->index >= cfg->x_last_basic_block)
6763 cfg->x_last_basic_block = bb->index + 1;
6764
6765 old_len = vec_safe_length (cfg->x_basic_block_info);
6766 if ((unsigned) cfg->x_last_basic_block >= old_len)
6767 {
6768 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6769 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6770 }
6771
6772 (*cfg->x_basic_block_info)[bb->index] = bb;
6773
6774 /* Remap the variables in phi nodes. */
6775 for (gphi_iterator psi = gsi_start_phis (bb);
6776 !gsi_end_p (psi); )
6777 {
6778 gphi *phi = psi.phi ();
6779 use_operand_p use;
6780 tree op = PHI_RESULT (phi);
6781 ssa_op_iter oi;
6782 unsigned i;
6783
6784 if (virtual_operand_p (op))
6785 {
6786 /* Remove the phi nodes for virtual operands (alias analysis will be
6787 run for the new function, anyway). */
6788 remove_phi_node (&psi, true);
6789 continue;
6790 }
6791
6792 SET_PHI_RESULT (phi,
6793 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6794 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6795 {
6796 op = USE_FROM_PTR (use);
6797 if (TREE_CODE (op) == SSA_NAME)
6798 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6799 }
6800
6801 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6802 {
6803 location_t locus = gimple_phi_arg_location (phi, i);
6804 tree block = LOCATION_BLOCK (locus);
6805
6806 if (locus == UNKNOWN_LOCATION)
6807 continue;
6808 if (d->orig_block == NULL_TREE || block == d->orig_block)
6809 {
6810 locus = set_block (locus, d->new_block);
6811 gimple_phi_arg_set_location (phi, i, locus);
6812 }
6813 }
6814
6815 gsi_next (&psi);
6816 }
6817
6818 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6819 {
6820 gimple *stmt = gsi_stmt (si);
6821 struct walk_stmt_info wi;
6822
6823 memset (&wi, 0, sizeof (wi));
6824 wi.info = d;
6825 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6826
6827 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6828 {
6829 tree label = gimple_label_label (label_stmt);
6830 int uid = LABEL_DECL_UID (label);
6831
6832 gcc_assert (uid > -1);
6833
6834 old_len = vec_safe_length (cfg->x_label_to_block_map);
6835 if (old_len <= (unsigned) uid)
6836 {
6837 new_len = 3 * uid / 2 + 1;
6838 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6839 }
6840
6841 (*cfg->x_label_to_block_map)[uid] = bb;
6842 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6843
6844 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6845
6846 if (uid >= dest_cfun->cfg->last_label_uid)
6847 dest_cfun->cfg->last_label_uid = uid + 1;
6848 }
6849
6850 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6851 remove_stmt_from_eh_lp_fn (cfun, stmt);
6852
6853 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6854 gimple_remove_stmt_histograms (cfun, stmt);
6855
6856 /* We cannot leave any operands allocated from the operand caches of
6857 the current function. */
6858 free_stmt_operands (cfun, stmt);
6859 push_cfun (dest_cfun);
6860 update_stmt (stmt);
6861 pop_cfun ();
6862 }
6863
6864 FOR_EACH_EDGE (e, ei, bb->succs)
6865 if (e->goto_locus != UNKNOWN_LOCATION)
6866 {
6867 tree block = LOCATION_BLOCK (e->goto_locus);
6868 if (d->orig_block == NULL_TREE
6869 || block == d->orig_block)
6870 e->goto_locus = set_block (e->goto_locus, d->new_block);
6871 }
6872 }
6873
6874 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6875 the outermost EH region. Use REGION as the incoming base EH region. */
6876
6877 static eh_region
6878 find_outermost_region_in_block (struct function *src_cfun,
6879 basic_block bb, eh_region region)
6880 {
6881 gimple_stmt_iterator si;
6882
6883 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6884 {
6885 gimple *stmt = gsi_stmt (si);
6886 eh_region stmt_region;
6887 int lp_nr;
6888
6889 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6890 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6891 if (stmt_region)
6892 {
6893 if (region == NULL)
6894 region = stmt_region;
6895 else if (stmt_region != region)
6896 {
6897 region = eh_region_outermost (src_cfun, stmt_region, region);
6898 gcc_assert (region != NULL);
6899 }
6900 }
6901 }
6902
6903 return region;
6904 }
6905
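/* Callback for duplicate_eh_regions.  Create a fresh artificial label
   for DECL and record the DECL -> new-label mapping in the hash table
   passed in DATA (see move_sese_region_to_fn).  */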
6906 static tree
6907 new_label_mapper (tree decl, void *data)
6908 {
6909 htab_t hash = (htab_t) data;
6910 struct tree_map *m;
6911 void **slot;
6912
6913 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6914
6915 m = XNEW (struct tree_map);
6916 m->hash = DECL_UID (decl);
6917 m->base.from = decl;
6918 m->to = create_artificial_label (UNKNOWN_LOCATION);
6919 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6920 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6921 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6922
6923 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6924 gcc_assert (*slot == NULL);
6925
6926 *slot = m;
6927
6928 return m->to;
6929 }
6930
6931 /* Tree walker to replace the decls used inside value expressions by
6932 duplicates. */
6933
6934 static tree
6935 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6936 {
6937 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6938
6939 switch (TREE_CODE (*tp))
6940 {
6941 case VAR_DECL:
6942 case PARM_DECL:
6943 case RESULT_DECL:
6944 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6945 break;
6946 default:
6947 break;
6948 }
6949
6950 if (IS_TYPE_OR_DECL_P (*tp))
6951 *walk_subtrees = false;
6952
6953 return NULL;
6954 }
6955
6956 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6957 subblocks. */
6958
6959 static void
6960 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6961 tree to_context)
6962 {
6963 tree *tp, t;
6964
6965 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6966 {
6967 t = *tp;
6968 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6969 continue;
6970 replace_by_duplicate_decl (&t, vars_map, to_context);
6971 if (t != *tp)
6972 {
6973 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6974 {
6975 tree x = DECL_VALUE_EXPR (*tp);
6976 struct replace_decls_d rd = { vars_map, to_context };
6977 x = unshare_expr (x);
6978 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6979 SET_DECL_VALUE_EXPR (t, x);
6980 DECL_HAS_VALUE_EXPR_P (t) = 1;
6981 }
6982 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6983 *tp = t;
6984 }
6985 }
6986
6987 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6988 replace_block_vars_by_duplicates (block, vars_map, to_context);
6989 }
6990
6991 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6992 from FN1 to FN2. */
6993
6994 static void
6995 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6996 struct loop *loop)
6997 {
6998 /* Discard it from the old loop array. */
6999 (*get_loops (fn1))[loop->num] = NULL;
7000
7001 /* Place it in the new loop array, assigning it a new number. */
7002 loop->num = number_of_loops (fn2);
7003 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7004
7005 /* Recurse to children. */
7006 for (loop = loop->inner; loop; loop = loop->next)
7007 fixup_loop_arrays_after_move (fn1, fn2, loop);
7008 }
7009
7010 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7011 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7012
7013 DEBUG_FUNCTION void
7014 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7015 {
7016 basic_block bb;
7017 edge_iterator ei;
7018 edge e;
7019 bitmap bbs = BITMAP_ALLOC (NULL);
7020 int i;
7021
7022 gcc_assert (entry != NULL);
7023 gcc_assert (entry != exit);
7024 gcc_assert (bbs_p != NULL);
7025
7026 gcc_assert (bbs_p->length () > 0);
7027
7028 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7029 bitmap_set_bit (bbs, bb->index);
7030
7031 gcc_assert (bitmap_bit_p (bbs, entry->index));
7032 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7033
7034 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7035 {
7036 if (bb == entry)
7037 {
7038 gcc_assert (single_pred_p (entry));
7039 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7040 }
7041 else
7042 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7043 {
7044 e = ei_edge (ei);
7045 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7046 }
7047
7048 if (bb == exit)
7049 {
7050 gcc_assert (single_succ_p (exit));
7051 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7052 }
7053 else
7054 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7055 {
7056 e = ei_edge (ei);
7057 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7058 }
7059 }
7060
7061 BITMAP_FREE (bbs);
7062 }
7063
7064 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7065
7066 bool
7067 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7068 {
7069 bitmap release_names = (bitmap)data;
7070
7071 if (TREE_CODE (from) != SSA_NAME)
7072 return true;
7073
7074 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7075 return true;
7076 }
7077
7078 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7079 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7080 single basic block in the original CFG and the new basic block is
7081 returned. DEST_CFUN must not have a CFG yet.
7082
7083 Note that the region need not be a pure SESE region. Blocks inside
7084 the region may contain calls to abort/exit. The only restriction
7085 is that ENTRY_BB should be the only entry point and it must
7086 dominate EXIT_BB.
7087
7088 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7089 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7090 to the new function.
7091
7092 All local variables referenced in the region are assumed to be in
7093 the corresponding BLOCK_VARS and unexpanded variable lists
7094 associated with DEST_CFUN.
7095
7096 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7097 reimplement move_sese_region_to_fn by duplicating the region rather than
7098 moving it. */
7099
7100 basic_block
7101 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7102 basic_block exit_bb, tree orig_block)
7103 {
7104 vec<basic_block> bbs, dom_bbs;
7105 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7106 basic_block after, bb, *entry_pred, *exit_succ, abb;
7107 struct function *saved_cfun = cfun;
7108 int *entry_flag, *exit_flag;
7109 unsigned *entry_prob, *exit_prob;
7110 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7111 edge e;
7112 edge_iterator ei;
7113 htab_t new_label_map;
7114 hash_map<void *, void *> *eh_map;
7115 struct loop *loop = entry_bb->loop_father;
7116 struct loop *loop0 = get_loop (saved_cfun, 0);
7117 struct move_stmt_d d;
7118
7119 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7120 region. */
7121 gcc_assert (entry_bb != exit_bb
7122 && (!exit_bb
7123 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7124
7125 /* Collect all the blocks in the region. Manually add ENTRY_BB
7126 because it won't be added by dfs_enumerate_from. */
7127 bbs.create (0);
7128 bbs.safe_push (entry_bb);
7129 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7130
7131 if (flag_checking)
7132 verify_sese (entry_bb, exit_bb, &bbs);
7133
7134 /* The blocks that used to be dominated by something in BBS will now be
7135 dominated by the new block. */
7136 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7137 bbs.address (),
7138 bbs.length ());
7139
7140 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7141 the predecessor edges to ENTRY_BB and the successor edges to
7142 EXIT_BB so that we can re-attach them to the new basic block that
7143 will replace the region. */
7144 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7145 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7146 entry_flag = XNEWVEC (int, num_entry_edges);
7147 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7148 i = 0;
7149 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7150 {
7151 entry_prob[i] = e->probability;
7152 entry_flag[i] = e->flags;
7153 entry_pred[i++] = e->src;
7154 remove_edge (e);
7155 }
7156
7157 if (exit_bb)
7158 {
7159 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7160 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7161 exit_flag = XNEWVEC (int, num_exit_edges);
7162 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7163 i = 0;
7164 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7165 {
7166 exit_prob[i] = e->probability;
7167 exit_flag[i] = e->flags;
7168 exit_succ[i++] = e->dest;
7169 remove_edge (e);
7170 }
7171 }
7172 else
7173 {
7174 num_exit_edges = 0;
7175 exit_succ = NULL;
7176 exit_flag = NULL;
7177 exit_prob = NULL;
7178 }
7179
7180 /* Switch context to the child function to initialize DEST_FN's CFG. */
7181 gcc_assert (dest_cfun->cfg == NULL);
7182 push_cfun (dest_cfun);
7183
7184 init_empty_tree_cfg ();
7185
7186 /* Initialize EH information for the new function. */
7187 eh_map = NULL;
7188 new_label_map = NULL;
7189 if (saved_cfun->eh)
7190 {
7191 eh_region region = NULL;
7192
7193 FOR_EACH_VEC_ELT (bbs, i, bb)
7194 region = find_outermost_region_in_block (saved_cfun, bb, region);
7195
7196 init_eh_for_function ();
7197 if (region != NULL)
7198 {
7199 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7200 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7201 new_label_mapper, new_label_map);
7202 }
7203 }
7204
7205 /* Initialize an empty loop tree. */
7206 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7207 init_loops_structure (dest_cfun, loops, 1);
7208 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7209 set_loops_for_fn (dest_cfun, loops);
7210
7211 /* Move the outlined loop tree part. */
7212 num_nodes = bbs.length ();
7213 FOR_EACH_VEC_ELT (bbs, i, bb)
7214 {
7215 if (bb->loop_father->header == bb)
7216 {
7217 struct loop *this_loop = bb->loop_father;
7218 struct loop *outer = loop_outer (this_loop);
7219 if (outer == loop
7220 /* If the SESE region contains some bbs ending with
7221 a noreturn call, those are considered to belong
7222 to the outermost loop in saved_cfun, rather than
7223 the entry_bb's loop_father. */
7224 || outer == loop0)
7225 {
7226 if (outer != loop)
7227 num_nodes -= this_loop->num_nodes;
7228 flow_loop_tree_node_remove (bb->loop_father);
7229 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7230 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7231 }
7232 }
7233 else if (bb->loop_father == loop0 && loop0 != loop)
7234 num_nodes--;
7235
7236 /* Remove loop exits from the outlined region. */
7237 if (loops_for_fn (saved_cfun)->exits)
7238 FOR_EACH_EDGE (e, ei, bb->succs)
7239 {
7240 struct loops *l = loops_for_fn (saved_cfun);
7241 loop_exit **slot
7242 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7243 NO_INSERT);
7244 if (slot)
7245 l->exits->clear_slot (slot);
7246 }
7247 }
7248
7249
7250 /* Adjust the number of blocks in the tree root of the outlined part. */
7251 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7252
7253 /* Setup a mapping to be used by move_block_to_fn. */
7254 loop->aux = current_loops->tree_root;
7255 loop0->aux = current_loops->tree_root;
7256
7257 pop_cfun ();
7258
7259 /* Move blocks from BBS into DEST_CFUN. */
7260 gcc_assert (bbs.length () >= 2);
7261 after = dest_cfun->cfg->x_entry_block_ptr;
7262 hash_map<tree, tree> vars_map;
7263
7264 memset (&d, 0, sizeof (d));
7265 d.orig_block = orig_block;
7266 d.new_block = DECL_INITIAL (dest_cfun->decl);
7267 d.from_context = cfun->decl;
7268 d.to_context = dest_cfun->decl;
7269 d.vars_map = &vars_map;
7270 d.new_label_map = new_label_map;
7271 d.eh_map = eh_map;
7272 d.remap_decls_p = true;
7273
7274 if (gimple_in_ssa_p (cfun))
7275 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7276 {
7277 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7278 set_ssa_default_def (dest_cfun, arg, narg);
7279 vars_map.put (arg, narg);
7280 }
7281
7282 FOR_EACH_VEC_ELT (bbs, i, bb)
7283 {
7284 /* No need to update edge counts on the last block. It has
7285 already been updated earlier when we detached the region from
7286 the original CFG. */
7287 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7288 after = bb;
7289 }
7290
7291 loop->aux = NULL;
7292 loop0->aux = NULL;
7293 /* Loop sizes are no longer correct; fix them up. */
7294 loop->num_nodes -= num_nodes;
7295 for (struct loop *outer = loop_outer (loop);
7296 outer; outer = loop_outer (outer))
7297 outer->num_nodes -= num_nodes;
7298 loop0->num_nodes -= bbs.length () - num_nodes;
7299
7300 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7301 {
7302 struct loop *aloop;
7303 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7304 if (aloop != NULL)
7305 {
7306 if (aloop->simduid)
7307 {
7308 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7309 d.to_context);
7310 dest_cfun->has_simduid_loops = true;
7311 }
7312 if (aloop->force_vectorize)
7313 dest_cfun->has_force_vectorize_loops = true;
7314 }
7315 }
7316
7317 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7318 if (orig_block)
7319 {
7320 tree block;
7321 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7322 == NULL_TREE);
7323 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7324 = BLOCK_SUBBLOCKS (orig_block);
7325 for (block = BLOCK_SUBBLOCKS (orig_block);
7326 block; block = BLOCK_CHAIN (block))
7327 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7328 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7329 }
7330
7331 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7332 &vars_map, dest_cfun->decl);
7333
7334 if (new_label_map)
7335 htab_delete (new_label_map);
7336 if (eh_map)
7337 delete eh_map;
7338
7339 if (gimple_in_ssa_p (cfun))
7340 {
7341 /* We need to release ssa-names in a defined order, so first find them,
7342 and then iterate in ascending version order. */
7343 bitmap release_names = BITMAP_ALLOC (NULL);
7344 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7345 bitmap_iterator bi;
7346 unsigned i;
7347 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7348 release_ssa_name (ssa_name (i));
7349 BITMAP_FREE (release_names);
7350 }
7351
7352 /* Rewire the entry and exit blocks. The successor to the entry
7353 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7354 the child function. Similarly, the predecessor of DEST_FN's
7355 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7356 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7357 various CFG manipulation functions get to the right CFG.
7358
7359 FIXME, this is silly. The CFG ought to become a parameter to
7360 these helpers. */
7361 push_cfun (dest_cfun);
7362 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7363 if (exit_bb)
7364 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7365 pop_cfun ();
7366
7367 /* Back in the original function, the SESE region has disappeared;
7368 create a new basic block in its place. */
7369 bb = create_empty_bb (entry_pred[0]);
7370 if (current_loops)
7371 add_bb_to_loop (bb, loop);
7372 for (i = 0; i < num_entry_edges; i++)
7373 {
7374 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7375 e->probability = entry_prob[i];
7376 }
7377
7378 for (i = 0; i < num_exit_edges; i++)
7379 {
7380 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7381 e->probability = exit_prob[i];
7382 }
7383
7384 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7385 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7386 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7387 dom_bbs.release ();
7388
7389 if (exit_bb)
7390 {
7391 free (exit_prob);
7392 free (exit_flag);
7393 free (exit_succ);
7394 }
7395 free (entry_prob);
7396 free (entry_flag);
7397 free (entry_pred);
7398 bbs.release ();
7399
7400 return bb;
7401 }
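
/* Usage sketch (hypothetical caller, loosely following the OMP
   expansion code): outline the region between REGION_ENTRY and
   REGION_EXIT into CHILD_FN, whose struct function must not yet have a
   CFG.  On return, the whole region has been replaced by a single new
   block in the current (parent) CFG.  */

static basic_block
example_outline_region (tree child_fn, basic_block region_entry,
			basic_block region_exit, tree block)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  return move_sese_region_to_fn (child_cfun, region_entry, region_exit,
				 block);
}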
7402
7403 /* Dump default def DEF to file FILE using FLAGS and indentation
7404 SPC. */
7405
7406 static void
7407 dump_default_def (FILE *file, tree def, int spc, int flags)
7408 {
7409 for (int i = 0; i < spc; ++i)
7410 fprintf (file, " ");
7411 dump_ssaname_info_to_file (file, def, spc);
7412
7413 print_generic_expr (file, TREE_TYPE (def), flags);
7414 fprintf (file, " ");
7415 print_generic_expr (file, def, flags);
7416 fprintf (file, " = ");
7417 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7418 fprintf (file, ";\n");
7419 }
7420
7421 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7422 dumpfile.h).  */
7423
7424 void
7425 dump_function_to_file (tree fndecl, FILE *file, int flags)
7426 {
7427 tree arg, var, old_current_fndecl = current_function_decl;
7428 struct function *dsf;
7429 bool ignore_topmost_bind = false, any_var = false;
7430 basic_block bb;
7431 tree chain;
7432 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7433 && decl_is_tm_clone (fndecl));
7434 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7435
7436 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7437 {
7438 fprintf (file, "__attribute__((");
7439
7440 bool first = true;
7441 tree chain;
7442 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7443 first = false, chain = TREE_CHAIN (chain))
7444 {
7445 if (!first)
7446 fprintf (file, ", ");
7447
7448 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7449 if (TREE_VALUE (chain) != NULL_TREE)
7450 {
7451 fprintf (file, " (");
7452 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7453 fprintf (file, ")");
7454 }
7455 }
7456
7457 fprintf (file, "))\n");
7458 }
7459
7460 current_function_decl = fndecl;
7461 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7462
7463 arg = DECL_ARGUMENTS (fndecl);
7464 while (arg)
7465 {
7466 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7467 fprintf (file, " ");
7468 print_generic_expr (file, arg, dump_flags);
7469 if (flags & TDF_VERBOSE)
7470 print_node (file, "", arg, 4);
7471 if (DECL_CHAIN (arg))
7472 fprintf (file, ", ");
7473 arg = DECL_CHAIN (arg);
7474 }
7475 fprintf (file, ")\n");
7476
7477 if (flags & TDF_VERBOSE)
7478 print_node (file, "", fndecl, 2);
7479
7480 dsf = DECL_STRUCT_FUNCTION (fndecl);
7481 if (dsf && (flags & TDF_EH))
7482 dump_eh_tree (file, dsf);
7483
7484 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7485 {
7486 dump_node (fndecl, TDF_SLIM | flags, file);
7487 current_function_decl = old_current_fndecl;
7488 return;
7489 }
7490
7491 /* When GIMPLE is lowered, the variables are no longer available in
7492 BIND_EXPRs, so display them separately. */
7493 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7494 {
7495 unsigned ix;
7496 ignore_topmost_bind = true;
7497
7498 fprintf (file, "{\n");
7499 if (gimple_in_ssa_p (fun)
7500 && (flags & TDF_ALIAS))
7501 {
7502 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7503 arg = DECL_CHAIN (arg))
7504 {
7505 tree def = ssa_default_def (fun, arg);
7506 if (def)
7507 dump_default_def (file, def, 2, flags);
7508 }
7509
7510 tree res = DECL_RESULT (fun->decl);
7511 if (res != NULL_TREE
7512 && DECL_BY_REFERENCE (res))
7513 {
7514 tree def = ssa_default_def (fun, res);
7515 if (def)
7516 dump_default_def (file, def, 2, flags);
7517 }
7518
7519 tree static_chain = fun->static_chain_decl;
7520 if (static_chain != NULL_TREE)
7521 {
7522 tree def = ssa_default_def (fun, static_chain);
7523 if (def)
7524 dump_default_def (file, def, 2, flags);
7525 }
7526 }
7527
7528 if (!vec_safe_is_empty (fun->local_decls))
7529 FOR_EACH_LOCAL_DECL (fun, ix, var)
7530 {
7531 print_generic_decl (file, var, flags);
7532 if (flags & TDF_VERBOSE)
7533 print_node (file, "", var, 4);
7534 fprintf (file, "\n");
7535
7536 any_var = true;
7537 }
7538 if (gimple_in_ssa_p (cfun))
7539 for (ix = 1; ix < num_ssa_names; ++ix)
7540 {
7541 tree name = ssa_name (ix);
7542 if (name && !SSA_NAME_VAR (name))
7543 {
7544 fprintf (file, " ");
7545 print_generic_expr (file, TREE_TYPE (name), flags);
7546 fprintf (file, " ");
7547 print_generic_expr (file, name, flags);
7548 fprintf (file, ";\n");
7549
7550 any_var = true;
7551 }
7552 }
7553 }
7554
7555 if (fun && fun->decl == fndecl
7556 && fun->cfg
7557 && basic_block_info_for_fn (fun))
7558 {
7559 /* If the CFG has been built, emit a CFG-based dump. */
7560 if (!ignore_topmost_bind)
7561 fprintf (file, "{\n");
7562
7563 if (any_var && n_basic_blocks_for_fn (fun))
7564 fprintf (file, "\n");
7565
7566 FOR_EACH_BB_FN (bb, fun)
7567 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7568
7569 fprintf (file, "}\n");
7570 }
7571 else if (DECL_SAVED_TREE (fndecl) == NULL)
7572 {
7573 /* The function is now in GIMPLE form but the CFG has not been
7574 built yet. Emit the single sequence of GIMPLE statements
7575 that make up its body. */
7576 gimple_seq body = gimple_body (fndecl);
7577
7578 if (gimple_seq_first_stmt (body)
7579 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7580 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7581 print_gimple_seq (file, body, 0, flags);
7582 else
7583 {
7584 if (!ignore_topmost_bind)
7585 fprintf (file, "{\n");
7586
7587 if (any_var)
7588 fprintf (file, "\n");
7589
7590 print_gimple_seq (file, body, 2, flags);
7591 fprintf (file, "}\n");
7592 }
7593 }
7594 else
7595 {
7596 int indent;
7597
7598 /* Make a tree based dump. */
7599 chain = DECL_SAVED_TREE (fndecl);
7600 if (chain && TREE_CODE (chain) == BIND_EXPR)
7601 {
7602 if (ignore_topmost_bind)
7603 {
7604 chain = BIND_EXPR_BODY (chain);
7605 indent = 2;
7606 }
7607 else
7608 indent = 0;
7609 }
7610 else
7611 {
7612 if (!ignore_topmost_bind)
7613 {
7614 fprintf (file, "{\n");
7615 /* No topmost bind, pretend it's ignored for later. */
7616 ignore_topmost_bind = true;
7617 }
7618 indent = 2;
7619 }
7620
7621 if (any_var)
7622 fprintf (file, "\n");
7623
7624 print_generic_stmt_indented (file, chain, flags, indent);
7625 if (ignore_topmost_bind)
7626 fprintf (file, "}\n");
7627 }
7628
7629 if (flags & TDF_ENUMERATE_LOCALS)
7630 dump_enumerated_decls (file, flags);
7631 fprintf (file, "\n\n");
7632
7633 current_function_decl = old_current_fndecl;
7634 }
7635
7636 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7637
7638 DEBUG_FUNCTION void
7639 debug_function (tree fn, int flags)
7640 {
7641 dump_function_to_file (fn, stderr, flags);
7642 }
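
/* Typical interactive use is from a debugger session, e.g.

     (gdb) call debug_function (cfun->decl, 0)

   where the second argument is a mask of TDF_* flags from dumpfile.h
   (0 gives the default dump).  */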
7643
7644
7645 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7646
7647 static void
7648 print_pred_bbs (FILE *file, basic_block bb)
7649 {
7650 edge e;
7651 edge_iterator ei;
7652
7653 FOR_EACH_EDGE (e, ei, bb->preds)
7654 fprintf (file, "bb_%d ", e->src->index);
7655 }
7656
7657
7658 /* Print on FILE the indexes for the successors of basic_block BB. */
7659
7660 static void
7661 print_succ_bbs (FILE *file, basic_block bb)
7662 {
7663 edge e;
7664 edge_iterator ei;
7665
7666 FOR_EACH_EDGE (e, ei, bb->succs)
7667 fprintf (file, "bb_%d ", e->dest->index);
7668 }
7669
7670 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7671
7672 void
7673 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7674 {
7675 char *s_indent = (char *) alloca ((size_t) indent + 1);
7676 memset ((void *) s_indent, ' ', (size_t) indent);
7677 s_indent[indent] = '\0';
7678
7679 /* Print basic_block's header. */
7680 if (verbosity >= 2)
7681 {
7682 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7683 print_pred_bbs (file, bb);
7684 fprintf (file, "}, succs = {");
7685 print_succ_bbs (file, bb);
7686 fprintf (file, "})\n");
7687 }
7688
7689 /* Print basic_block's body. */
7690 if (verbosity >= 3)
7691 {
7692 fprintf (file, "%s {\n", s_indent);
7693 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7694 fprintf (file, "%s }\n", s_indent);
7695 }
7696 }
7697
7698 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7699
7700 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7701 VERBOSITY level, this outputs the contents of the loop, or just its
7702 structure. */
7703
7704 static void
7705 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7706 {
7707 char *s_indent;
7708 basic_block bb;
7709
7710 if (loop == NULL)
7711 return;
7712
7713 s_indent = (char *) alloca ((size_t) indent + 1);
7714 memset ((void *) s_indent, ' ', (size_t) indent);
7715 s_indent[indent] = '\0';
7716
7717 /* Print loop's header. */
7718 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7719 if (loop->header)
7720 fprintf (file, "header = %d", loop->header->index);
7721 else
7722 {
7723 fprintf (file, "deleted)\n");
7724 return;
7725 }
7726 if (loop->latch)
7727 fprintf (file, ", latch = %d", loop->latch->index);
7728 else
7729 fprintf (file, ", multiple latches");
7730 fprintf (file, ", niter = ");
7731 print_generic_expr (file, loop->nb_iterations, 0);
7732
7733 if (loop->any_upper_bound)
7734 {
7735 fprintf (file, ", upper_bound = ");
7736 print_decu (loop->nb_iterations_upper_bound, file);
7737 }
7738
7739 if (loop->any_estimate)
7740 {
7741 fprintf (file, ", estimate = ");
7742 print_decu (loop->nb_iterations_estimate, file);
7743 }
7744 fprintf (file, ")\n");
7745
7746 /* Print loop's body. */
7747 if (verbosity >= 1)
7748 {
7749 fprintf (file, "%s{\n", s_indent);
7750 FOR_EACH_BB_FN (bb, cfun)
7751 if (bb->loop_father == loop)
7752 print_loops_bb (file, bb, indent, verbosity);
7753
7754 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7755 fprintf (file, "%s}\n", s_indent);
7756 }
7757 }
7758
7759 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7760 spaces. Depending on the VERBOSITY level, this outputs the contents of the
7761 loop, or just its structure. */
7762
7763 static void
7764 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7765 int verbosity)
7766 {
7767 if (loop == NULL)
7768 return;
7769
7770 print_loop (file, loop, indent, verbosity);
7771 print_loop_and_siblings (file, loop->next, indent, verbosity);
7772 }
7773
7774 /* Follow a CFG edge from the entry point of the program, and on entry
7775 of a loop, pretty print the loop structure on FILE. */
7776
7777 void
7778 print_loops (FILE *file, int verbosity)
7779 {
7780 basic_block bb;
7781
7782 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7783 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7784 if (bb && bb->loop_father)
7785 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7786 }
7787
7788 /* Dump a loop. */
7789
7790 DEBUG_FUNCTION void
7791 debug (struct loop &ref)
7792 {
7793 print_loop (stderr, &ref, 0, /*verbosity*/0);
7794 }
7795
7796 DEBUG_FUNCTION void
7797 debug (struct loop *ptr)
7798 {
7799 if (ptr)
7800 debug (*ptr);
7801 else
7802 fprintf (stderr, "<nil>\n");
7803 }
7804
7805 /* Dump a loop verbosely. */
7806
7807 DEBUG_FUNCTION void
7808 debug_verbose (struct loop &ref)
7809 {
7810 print_loop (stderr, &ref, 0, /*verbosity*/3);
7811 }
7812
7813 DEBUG_FUNCTION void
7814 debug_verbose (struct loop *ptr)
7815 {
7816 if (ptr)
7817 debug_verbose (*ptr);
7818 else
7819 fprintf (stderr, "<nil>\n");
7820 }
7821
7822
7823 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7824
7825 DEBUG_FUNCTION void
7826 debug_loops (int verbosity)
7827 {
7828 print_loops (stderr, verbosity);
7829 }
7830
7831 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7832
7833 DEBUG_FUNCTION void
7834 debug_loop (struct loop *loop, int verbosity)
7835 {
7836 print_loop (stderr, loop, 0, verbosity);
7837 }
7838
7839 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7840 level. */
7841
7842 DEBUG_FUNCTION void
7843 debug_loop_num (unsigned num, int verbosity)
7844 {
7845 debug_loop (get_loop (cfun, num), verbosity);
7846 }
7847
7848 /* Return true if BB ends with a call, possibly followed by some
7849 instructions that must stay with the call. Return false
7850 otherwise. */
7851
7852 static bool
7853 gimple_block_ends_with_call_p (basic_block bb)
7854 {
7855 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7856 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7857 }
7858
7859
7860 /* Return true if BB ends with a conditional branch. Return false
7861 otherwise. */
7862
7863 static bool
7864 gimple_block_ends_with_condjump_p (const_basic_block bb)
7865 {
7866 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7867 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7868 }
7869
7870
7871 /* Return true if we need to add fake edge to exit at statement T.
7872 Helper function for gimple_flow_call_edges_add. */
7873
7874 static bool
7875 need_fake_edge_p (gimple *t)
7876 {
7877 tree fndecl = NULL_TREE;
7878 int call_flags = 0;
7879
7880 /* NORETURN and LONGJMP calls already have an edge to exit.
7881 CONST and PURE calls do not need one.
7882 We don't currently check for CONST and PURE here, although
7883 it would be a good idea, because those attributes are
7884 figured out from the RTL in mark_constant_function, and
7885 the counter incrementation code from -fprofile-arcs
7886 leads to different results from -fbranch-probabilities. */
7887 if (is_gimple_call (t))
7888 {
7889 fndecl = gimple_call_fndecl (t);
7890 call_flags = gimple_call_flags (t);
7891 }
7892
7893 if (is_gimple_call (t)
7894 && fndecl
7895 && DECL_BUILT_IN (fndecl)
7896 && (call_flags & ECF_NOTHROW)
7897 && !(call_flags & ECF_RETURNS_TWICE)
7898 /* fork() doesn't really return twice, but wrapping it in
7899 __gcov_fork(), which calls __gcov_flush() and clears the
7900 counters before forking, has the same effect as returning
7901 twice. Force a fake edge. */
7902 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7903 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7904 return false;
7905
7906 if (is_gimple_call (t))
7907 {
7908 edge_iterator ei;
7909 edge e;
7910 basic_block bb;
7911
7912 if (!(call_flags & ECF_NORETURN))
7913 return true;
7914
7915 bb = gimple_bb (t);
7916 FOR_EACH_EDGE (e, ei, bb->succs)
7917 if ((e->flags & EDGE_FAKE) == 0)
7918 return true;
7919 }
7920
7921 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7922 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7923 return true;
7924
7925 return false;
7926 }
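/* For illustration (not code from this file), consider:

     void f (void)
     {
       g ();      <-- g may call exit ()
       x = 1;
     }

   If g () never returns, control leaves the block in the middle of
   it, so a fake edge from the call's block to EXIT is needed for the
   profiler's spanning-tree computation to stay consistent.  */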
7927
7928
7929 /* Add fake edges to the function exit for any non-constant and
7930 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7931 for volatile inline assembly, in the bitmap of blocks specified by
7932 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number
7933 of blocks that were split.
7934
7935 The goal is to expose cases in which entering a basic block does
7936 not imply that all subsequent instructions must be executed. */
7937
7938 static int
7939 gimple_flow_call_edges_add (sbitmap blocks)
7940 {
7941 int i;
7942 int blocks_split = 0;
7943 int last_bb = last_basic_block_for_fn (cfun);
7944 bool check_last_block = false;
7945
7946 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7947 return 0;
7948
7949 if (! blocks)
7950 check_last_block = true;
7951 else
7952 check_last_block = bitmap_bit_p (blocks,
7953 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7954
7955 /* In the last basic block, before epilogue generation, there will be
7956 a fallthru edge to EXIT. Special care is required if the last insn
7957 of the last basic block is a call because make_edge folds duplicate
7958 edges, which would result in the fallthru edge also being marked
7959 fake, which would result in the fallthru edge being removed by
7960 remove_fake_edges, which would result in an invalid CFG.
7961
7962 Moreover, we can't elide the outgoing fake edge, since the block
7963 profiler needs to take this into account in order to solve the minimal
7964 spanning tree in the case that the call doesn't return.
7965
7966 Handle this by adding a dummy instruction in a new last basic block. */
7967 if (check_last_block)
7968 {
7969 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7970 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7971 gimple *t = NULL;
7972
7973 if (!gsi_end_p (gsi))
7974 t = gsi_stmt (gsi);
7975
7976 if (t && need_fake_edge_p (t))
7977 {
7978 edge e;
7979
7980 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7981 if (e)
7982 {
7983 gsi_insert_on_edge (e, gimple_build_nop ());
7984 gsi_commit_edge_inserts ();
7985 }
7986 }
7987 }
7988
7989 /* Now add fake edges to the function exit for any non-constant
7990 calls, since there is no way to determine whether they will
7991 return or not... */
7992 for (i = 0; i < last_bb; i++)
7993 {
7994 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7995 gimple_stmt_iterator gsi;
7996 gimple *stmt, *last_stmt;
7997
7998 if (!bb)
7999 continue;
8000
8001 if (blocks && !bitmap_bit_p (blocks, i))
8002 continue;
8003
8004 gsi = gsi_last_nondebug_bb (bb);
8005 if (!gsi_end_p (gsi))
8006 {
8007 last_stmt = gsi_stmt (gsi);
8008 do
8009 {
8010 stmt = gsi_stmt (gsi);
8011 if (need_fake_edge_p (stmt))
8012 {
8013 edge e;
8014
8015 /* The handling above of the final block before the
8016 epilogue should be enough to verify that there is
8017 no edge to the exit block in the CFG already.
8018 Calling make_edge in such a case would cause us to
8019 mark that edge as fake and remove it later. */
8020 if (flag_checking && stmt == last_stmt)
8021 {
8022 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8023 gcc_assert (e == NULL);
8024 }
8025
8026 /* Note that the following may create a new basic block
8027 and renumber the existing basic blocks. */
8028 if (stmt != last_stmt)
8029 {
8030 e = split_block (bb, stmt);
8031 if (e)
8032 blocks_split++;
8033 }
8034 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8035 }
8036 gsi_prev (&gsi);
8037 }
8038 while (!gsi_end_p (gsi));
8039 }
8040 }
8041
8042 if (blocks_split)
8043 verify_flow_info ();
8044
8045 return blocks_split;
8046 }
8047
8048 /* Removes edge E and all the blocks dominated by it, and updates dominance
8049 information. The IL in E->src needs to be updated separately.
8050 If dominance info is not available, only the edge E is removed. */
8051
8052 void
8053 remove_edge_and_dominated_blocks (edge e)
8054 {
8055 vec<basic_block> bbs_to_remove = vNULL;
8056 vec<basic_block> bbs_to_fix_dom = vNULL;
8057 bitmap df, df_idom;
8058 edge f;
8059 edge_iterator ei;
8060 bool none_removed = false;
8061 unsigned i;
8062 basic_block bb, dbb;
8063 bitmap_iterator bi;
8064
8065 /* If we are removing a path inside a non-root loop, that may change
8066 loop ownership of blocks or remove loops; mark loops for fixup. */
8067 if (current_loops
8068 && loop_outer (e->src->loop_father) != NULL
8069 && e->src->loop_father == e->dest->loop_father)
8070 loops_state_set (LOOPS_NEED_FIXUP);
8071
8072 if (!dom_info_available_p (CDI_DOMINATORS))
8073 {
8074 remove_edge (e);
8075 return;
8076 }
8077
8078 /* No updating is needed for edges to exit. */
8079 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8080 {
8081 if (cfgcleanup_altered_bbs)
8082 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8083 remove_edge (e);
8084 return;
8085 }
8086
8087 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8088 that is not dominated by E->dest, then this set is empty. Otherwise,
8089 all the basic blocks dominated by E->dest are removed.
8090
8091 Also, to DF_IDOM we store the immediate dominators of the blocks in
8092 the dominance frontier of E (i.e., of the successors of the
8093 removed blocks, if there are any, and of E->dest otherwise). */
8094 FOR_EACH_EDGE (f, ei, e->dest->preds)
8095 {
8096 if (f == e)
8097 continue;
8098
8099 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8100 {
8101 none_removed = true;
8102 break;
8103 }
8104 }
8105
8106 df = BITMAP_ALLOC (NULL);
8107 df_idom = BITMAP_ALLOC (NULL);
8108
8109 if (none_removed)
8110 bitmap_set_bit (df_idom,
8111 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8112 else
8113 {
8114 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8115 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8116 {
8117 FOR_EACH_EDGE (f, ei, bb->succs)
8118 {
8119 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8120 bitmap_set_bit (df, f->dest->index);
8121 }
8122 }
8123 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8124 bitmap_clear_bit (df, bb->index);
8125
8126 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8127 {
8128 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8129 bitmap_set_bit (df_idom,
8130 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8131 }
8132 }
8133
8134 if (cfgcleanup_altered_bbs)
8135 {
8136 /* Record the set of altered basic blocks. */
8137 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8138 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8139 }
8140
8141 /* Remove E and the cancelled blocks. */
8142 if (none_removed)
8143 remove_edge (e);
8144 else
8145 {
8146 /* Walk backwards so as to get a chance to substitute all
8147 released DEFs into debug stmts. See
8148 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8149 details. */
8150 for (i = bbs_to_remove.length (); i-- > 0; )
8151 delete_basic_block (bbs_to_remove[i]);
8152 }
8153
8154 /* Update the dominance information. The immediate dominator may change only
8155 for blocks whose immediate dominator belongs to DF_IDOM:
8156
8157 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8158 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8159 Z dominates X after the removal. Before removal, there exists a path P
8160 from Y to X that avoids Z. Let F be the last edge on P that is
8161 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8162 dominates W, and because of P, Z does not dominate W), and W belongs to
8163 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8164 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8165 {
8166 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8167 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8168 dbb;
8169 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8170 bbs_to_fix_dom.safe_push (dbb);
8171 }
8172
8173 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8174
8175 BITMAP_FREE (df);
8176 BITMAP_FREE (df_idom);
8177 bbs_to_remove.release ();
8178 bbs_to_fix_dom.release ();
8179 }
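/* A minimal sketch of the above (illustrative diamond CFG):

        A
       / \
      B   C
       \ /
        D

   Removing edge A->B also removes block B, since every path to B used
   that edge. B's successor D lands in the dominance frontier, so
   idom (D) = A is recorded in DF_IDOM, and only the dominator-tree
   children of A need their immediate dominator recomputed.  */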
8180
8181 /* Purge dead EH edges from basic block BB. */
8182
8183 bool
8184 gimple_purge_dead_eh_edges (basic_block bb)
8185 {
8186 bool changed = false;
8187 edge e;
8188 edge_iterator ei;
8189 gimple *stmt = last_stmt (bb);
8190
8191 if (stmt && stmt_can_throw_internal (stmt))
8192 return false;
8193
8194 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8195 {
8196 if (e->flags & EDGE_EH)
8197 {
8198 remove_edge_and_dominated_blocks (e);
8199 changed = true;
8200 }
8201 else
8202 ei_next (&ei);
8203 }
8204
8205 return changed;
8206 }
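/* Illustrative situation handled above: once a call has been proven
   nothrow,

     BB:  x_1 = foo ();         [formerly could throw]
       fallthru edge -> BB2
       EH edge       -> landing pad   <-- now dead, purged

   the stale EH edge and any blocks it alone dominated are removed.  */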
8207
8208 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8209
8210 bool
8211 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8212 {
8213 bool changed = false;
8214 unsigned i;
8215 bitmap_iterator bi;
8216
8217 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8218 {
8219 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8220
8221 /* Earlier gimple_purge_dead_eh_edges could have removed
8222 this basic block already. */
8223 gcc_assert (bb || changed);
8224 if (bb != NULL)
8225 changed |= gimple_purge_dead_eh_edges (bb);
8226 }
8227
8228 return changed;
8229 }
8230
8231 /* Purge dead abnormal call edges from basic block BB. */
8232
8233 bool
8234 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8235 {
8236 bool changed = false;
8237 edge e;
8238 edge_iterator ei;
8239 gimple *stmt = last_stmt (bb);
8240
8241 if (!cfun->has_nonlocal_label
8242 && !cfun->calls_setjmp)
8243 return false;
8244
8245 if (stmt && stmt_can_make_abnormal_goto (stmt))
8246 return false;
8247
8248 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8249 {
8250 if (e->flags & EDGE_ABNORMAL)
8251 {
8252 if (e->flags & EDGE_FALLTHRU)
8253 e->flags &= ~EDGE_ABNORMAL;
8254 else
8255 remove_edge_and_dominated_blocks (e);
8256 changed = true;
8257 }
8258 else
8259 ei_next (&ei);
8260 }
8261
8262 return changed;
8263 }
8264
8265 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8266
8267 bool
8268 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8269 {
8270 bool changed = false;
8271 unsigned i;
8272 bitmap_iterator bi;
8273
8274 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8275 {
8276 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8277
8278 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8279 this basic block already. */
8280 gcc_assert (bb || changed);
8281 if (bb != NULL)
8282 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8283 }
8284
8285 return changed;
8286 }
8287
8288 /* This function is called whenever a new edge is created or
8289 redirected. */
8290
8291 static void
8292 gimple_execute_on_growing_pred (edge e)
8293 {
8294 basic_block bb = e->dest;
8295
8296 if (!gimple_seq_empty_p (phi_nodes (bb)))
8297 reserve_phi_args_for_new_edge (bb);
8298 }
8299
8300 /* This function is called immediately before edge E is removed from
8301 the edge vector E->dest->preds. */
8302
8303 static void
8304 gimple_execute_on_shrinking_pred (edge e)
8305 {
8306 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8307 remove_phi_args (e);
8308 }
8309
8310 /*---------------------------------------------------------------------------
8311 Helper functions for Loop versioning
8312 ---------------------------------------------------------------------------*/
8313
8314 /* Adjust PHI nodes for 'first' basic block. 'second' basic block is a copy
8315 of 'first'. Both of them are dominated by 'new_head' basic block. When
8316 'new_head' was created by splitting 'second's incoming edge, the edge
8317 from 'new_head' to 'second' received PHI arguments from split_edge ().
8318 Later, an additional edge 'e' was created to connect 'new_head' and
8319 'first'. This routine now adds, on edge 'e', the PHI args that the
8320 'new_head' to 'second' edge received as part of the edge splitting. */
8321
8322 static void
8323 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8324 basic_block new_head, edge e)
8325 {
8326 gphi *phi1, *phi2;
8327 gphi_iterator psi1, psi2;
8328 tree def;
8329 edge e2 = find_edge (new_head, second);
8330
8331 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8332 edge, we should always have an edge from NEW_HEAD to SECOND. */
8333 gcc_assert (e2 != NULL);
8334
8335 /* Walk all PHI nodes of basic block 'second' and add the corresponding
8336 PHI args to edge 'e' of 'first'. The PHI args are always in order. */
8337
8338 for (psi2 = gsi_start_phis (second),
8339 psi1 = gsi_start_phis (first);
8340 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8341 gsi_next (&psi2), gsi_next (&psi1))
8342 {
8343 phi1 = psi1.phi ();
8344 phi2 = psi2.phi ();
8345 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8346 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8347 }
8348 }
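/* Sketch of the shape this routine operates on (loop versioning):

          new_head
         /        \
      e /          \ e2
     first        second

   The PHI arguments that edge e2 (new_head->second) received from
   split_edge () are copied onto the new edge e (new_head->first).  */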
8349
8350
8351 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8352 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8353 the destination of the ELSE part. */
8354
8355 static void
8356 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8357 basic_block second_head ATTRIBUTE_UNUSED,
8358 basic_block cond_bb, void *cond_e)
8359 {
8360 gimple_stmt_iterator gsi;
8361 gimple *new_cond_expr;
8362 tree cond_expr = (tree) cond_e;
8363 edge e0;
8364
8365 /* Build the new conditional expression. */
8366 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8367 NULL_TREE, NULL_TREE);
8368
8369 /* Add the new condition to COND_BB. */
8370 gsi = gsi_last_bb (cond_bb);
8371 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8372
8373 /* Adjust edges appropriately to connect new head with first head
8374 as well as second head. */
8375 e0 = single_succ_edge (cond_bb);
8376 e0->flags &= ~EDGE_FALLTHRU;
8377 e0->flags |= EDGE_FALSE_VALUE;
8378 }
8379
8380
8381 /* Do book-keeping of basic block BB for the profile consistency checker.
8382 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8383 do post-pass accounting. Store the counts in RECORD. */
8384 static void
8385 gimple_account_profile_record (basic_block bb, int after_pass,
8386 struct profile_record *record)
8387 {
8388 gimple_stmt_iterator i;
8389 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8390 {
8391 record->size[after_pass]
8392 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8393 if (profile_status_for_fn (cfun) == PROFILE_READ)
8394 record->time[after_pass]
8395 += estimate_num_insns (gsi_stmt (i),
8396 &eni_time_weights) * bb->count;
8397 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8398 record->time[after_pass]
8399 += estimate_num_insns (gsi_stmt (i),
8400 &eni_time_weights) * bb->frequency;
8401 }
8402 }
8403
8404 struct cfg_hooks gimple_cfg_hooks = {
8405 "gimple",
8406 gimple_verify_flow_info,
8407 gimple_dump_bb, /* dump_bb */
8408 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8409 create_bb, /* create_basic_block */
8410 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8411 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8412 gimple_can_remove_branch_p, /* can_remove_branch_p */
8413 remove_bb, /* delete_basic_block */
8414 gimple_split_block, /* split_block */
8415 gimple_move_block_after, /* move_block_after */
8416 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8417 gimple_merge_blocks, /* merge_blocks */
8418 gimple_predict_edge, /* predict_edge */
8419 gimple_predicted_by_p, /* predicted_by_p */
8420 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8421 gimple_duplicate_bb, /* duplicate_block */
8422 gimple_split_edge, /* split_edge */
8423 gimple_make_forwarder_block, /* make_forwarder_block */
8424 NULL, /* tidy_fallthru_edge */
8425 NULL, /* force_nonfallthru */
8426 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8427 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8428 gimple_flow_call_edges_add, /* flow_call_edges_add */
8429 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8430 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8431 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8432 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8433 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8434 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8435 flush_pending_stmts, /* flush_pending_stmts */
8436 gimple_empty_block_p, /* block_empty_p */
8437 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8438 gimple_account_profile_record, /* account_profile_record */
8439 };
8440
8441
8442 /* Split all critical edges. */
8443
8444 unsigned int
8445 split_critical_edges (void)
8446 {
8447 basic_block bb;
8448 edge e;
8449 edge_iterator ei;
8450
8451 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8452 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8453 mappings around the calls to split_edge. */
8454 start_recording_case_labels ();
8455 FOR_ALL_BB_FN (bb, cfun)
8456 {
8457 FOR_EACH_EDGE (e, ei, bb->succs)
8458 {
8459 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8460 split_edge (e);
8461 /* PRE inserts statements on edges and expects that, since
8462 split_critical_edges was run beforehand, committing edge insertions
8463 will not split more edges. So we must also split edges whose
8464 destination cannot accept the insertion (it has PHIs, multiple
8465 predecessors, or is the exit block) and whose source ends with a
8466 control flow statement such as RESX. This matches the logic in
8467 gimple_find_edge_insert_loc. */
8468 else if ((!single_pred_p (e->dest)
8469 || !gimple_seq_empty_p (phi_nodes (e->dest))
8470 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8471 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8472 && !(e->flags & EDGE_ABNORMAL))
8473 {
8474 gimple_stmt_iterator gsi;
8475
8476 gsi = gsi_last_bb (e->src);
8477 if (!gsi_end_p (gsi)
8478 && stmt_ends_bb_p (gsi_stmt (gsi))
8479 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8480 && !gimple_call_builtin_p (gsi_stmt (gsi),
8481 BUILT_IN_RETURN)))
8482 split_edge (e);
8483 }
8484 }
8485 }
8486 end_recording_case_labels ();
8487 return 0;
8488 }
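/* A critical edge has a source with multiple successors and a
   destination with multiple predecessors, e.g. A->C below:

       A   B            A   B
      / \ /            / \ /
     D   C      =>    D   N
                           |
                           C

   Splitting inserts the new block N so that statements can later be
   committed on the former edge without disturbing other paths.  */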
8489
8490 namespace {
8491
8492 const pass_data pass_data_split_crit_edges =
8493 {
8494 GIMPLE_PASS, /* type */
8495 "crited", /* name */
8496 OPTGROUP_NONE, /* optinfo_flags */
8497 TV_TREE_SPLIT_EDGES, /* tv_id */
8498 PROP_cfg, /* properties_required */
8499 PROP_no_crit_edges, /* properties_provided */
8500 0, /* properties_destroyed */
8501 0, /* todo_flags_start */
8502 0, /* todo_flags_finish */
8503 };
8504
8505 class pass_split_crit_edges : public gimple_opt_pass
8506 {
8507 public:
8508 pass_split_crit_edges (gcc::context *ctxt)
8509 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8510 {}
8511
8512 /* opt_pass methods: */
8513 virtual unsigned int execute (function *) { return split_critical_edges (); }
8514
8515 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8516 }; // class pass_split_crit_edges
8517
8518 } // anon namespace
8519
8520 gimple_opt_pass *
8521 make_pass_split_crit_edges (gcc::context *ctxt)
8522 {
8523 return new pass_split_crit_edges (ctxt);
8524 }
8525
8526
8527 /* Insert COND expression which is GIMPLE_COND after STMT
8528 in basic block BB with appropriate basic block split
8529 and creation of a new conditionally executed basic block.
8530 Return created basic block. */
8531 basic_block
8532 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8533 {
8534 edge fall = split_block (bb, stmt);
8535 gimple_stmt_iterator iter = gsi_last_bb (bb);
8536 basic_block new_bb;
8537
8538 /* Insert cond statement. */
8539 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8540 if (gsi_end_p (iter))
8541 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8542 else
8543 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8544
8545 /* Create conditionally executed block. */
8546 new_bb = create_empty_bb (bb);
8547 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8548 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8549
8550 /* Fix edge for split bb. */
8551 fall->flags = EDGE_FALSE_VALUE;
8552
8553 /* Update dominance info. */
8554 if (dom_info_available_p (CDI_DOMINATORS))
8555 {
8556 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8557 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8558 }
8559
8560 /* Update loop info. */
8561 if (current_loops)
8562 add_bb_to_loop (new_bb, bb->loop_father);
8563
8564 return new_bb;
8565 }
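/* Shape produced by insert_cond_bb (sketch):

       BB: ...; STMT; if (COND)
        | true           \ false
        v                 \
      NEW_BB --fallthru--> REST

   where REST is the block created by split_block to hold the
   statements that followed STMT.  */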
8566
8567 /* Build a ternary operation and gimplify it. Emit code before GSI.
8568 Return the gimple_val holding the result. */
8569
8570 tree
8571 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8572 tree type, tree a, tree b, tree c)
8573 {
8574 tree ret;
8575 location_t loc = gimple_location (gsi_stmt (*gsi));
8576
8577 ret = fold_build3_loc (loc, code, type, a, b, c);
8578 STRIP_NOPS (ret);
8579
8580 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8581 GSI_SAME_STMT);
8582 }
8583
8584 /* Build a binary operation and gimplify it. Emit code before GSI.
8585 Return the gimple_val holding the result. */
8586
8587 tree
8588 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8589 tree type, tree a, tree b)
8590 {
8591 tree ret;
8592
8593 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8594 STRIP_NOPS (ret);
8595
8596 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8597 GSI_SAME_STMT);
8598 }
8599
8600 /* Build a unary operation and gimplify it. Emit code before GSI.
8601 Return the gimple_val holding the result. */
8602
8603 tree
8604 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8605 tree a)
8606 {
8607 tree ret;
8608
8609 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8610 STRIP_NOPS (ret);
8611
8612 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8613 GSI_SAME_STMT);
8614 }
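/* Illustrative use of the gimplify_build helpers (hypothetical
   caller-side code, not from this file):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree sel = gimplify_build3 (&gsi, COND_EXPR, type, cond, sum, c);

   Each call folds the expression, gimplifies it, emits any needed
   statements before GSI and returns a gimple value.  */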
8615
8616
8617 \f
8618 /* Given a basic block B which ends with a conditional and has
8619 precisely two successors, determine which of the edges is taken if
8620 the conditional is true and which is taken if the conditional is
8621 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8622
8623 void
8624 extract_true_false_edges_from_block (basic_block b,
8625 edge *true_edge,
8626 edge *false_edge)
8627 {
8628 edge e = EDGE_SUCC (b, 0);
8629
8630 if (e->flags & EDGE_TRUE_VALUE)
8631 {
8632 *true_edge = e;
8633 *false_edge = EDGE_SUCC (b, 1);
8634 }
8635 else
8636 {
8637 *false_edge = e;
8638 *true_edge = EDGE_SUCC (b, 1);
8639 }
8640 }
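/* E.g. for a block B ending in

     if (x_1 > 0) goto <bb 4>; else goto <bb 5>;

   *TRUE_EDGE is set to the edge B-><bb 4> and *FALSE_EDGE to
   B-><bb 5> (illustrative block numbers).  */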
8641
8642
8643 /* From a controlling predicate in the immediate dominator DOM of
8644 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8645 predicate evaluates to true and false and store them to
8646 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8647 they are non-NULL. Return true if the edges can be determined,
8648 otherwise return false. */
8649
8650 bool
8651 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8652 edge *true_controlled_edge,
8653 edge *false_controlled_edge)
8654 {
8655 basic_block bb = phiblock;
8656 edge true_edge, false_edge, tem;
8657 edge e0 = NULL, e1 = NULL;
8658
8659 /* We have to verify that one edge into the PHI node is dominated
8660 by the true edge of the predicate block and the other edge
8661 dominated by the false edge. This ensures that the PHI argument
8662 we are going to take is completely determined by the path we
8663 take from the predicate block.
8664 We can only use BB dominance checks below if the destination of
8665 the true/false edges are dominated by their edge, thus only
8666 have a single predecessor. */
8667 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8668 tem = EDGE_PRED (bb, 0);
8669 if (tem == true_edge
8670 || (single_pred_p (true_edge->dest)
8671 && (tem->src == true_edge->dest
8672 || dominated_by_p (CDI_DOMINATORS,
8673 tem->src, true_edge->dest))))
8674 e0 = tem;
8675 else if (tem == false_edge
8676 || (single_pred_p (false_edge->dest)
8677 && (tem->src == false_edge->dest
8678 || dominated_by_p (CDI_DOMINATORS,
8679 tem->src, false_edge->dest))))
8680 e1 = tem;
8681 else
8682 return false;
8683 tem = EDGE_PRED (bb, 1);
8684 if (tem == true_edge
8685 || (single_pred_p (true_edge->dest)
8686 && (tem->src == true_edge->dest
8687 || dominated_by_p (CDI_DOMINATORS,
8688 tem->src, true_edge->dest))))
8689 e0 = tem;
8690 else if (tem == false_edge
8691 || (single_pred_p (false_edge->dest)
8692 && (tem->src == false_edge->dest
8693 || dominated_by_p (CDI_DOMINATORS,
8694 tem->src, false_edge->dest))))
8695 e1 = tem;
8696 else
8697 return false;
8698 if (!e0 || !e1)
8699 return false;
8700
8701 if (true_controlled_edge)
8702 *true_controlled_edge = e0;
8703 if (false_controlled_edge)
8704 *false_controlled_edge = e1;
8705
8706 return true;
8707 }
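/* The shape being verified, sketched (either arm may also pass
   through a single-predecessor forwarder block):

       DOM: if (p)
           /      \
      true edge  false edge
           \      /
           PHIBLOCK

   With this shape the PHI argument chosen in PHIBLOCK is fully
   determined by the value of p in DOM.  */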
8708
8709
8710
8711 /* Emit return warnings. */
8712
8713 namespace {
8714
8715 const pass_data pass_data_warn_function_return =
8716 {
8717 GIMPLE_PASS, /* type */
8718 "*warn_function_return", /* name */
8719 OPTGROUP_NONE, /* optinfo_flags */
8720 TV_NONE, /* tv_id */
8721 PROP_cfg, /* properties_required */
8722 0, /* properties_provided */
8723 0, /* properties_destroyed */
8724 0, /* todo_flags_start */
8725 0, /* todo_flags_finish */
8726 };
8727
8728 class pass_warn_function_return : public gimple_opt_pass
8729 {
8730 public:
8731 pass_warn_function_return (gcc::context *ctxt)
8732 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8733 {}
8734
8735 /* opt_pass methods: */
8736 virtual unsigned int execute (function *);
8737
8738 }; // class pass_warn_function_return
8739
8740 unsigned int
8741 pass_warn_function_return::execute (function *fun)
8742 {
8743 source_location location;
8744 gimple *last;
8745 edge e;
8746 edge_iterator ei;
8747
8748 if (!targetm.warn_func_return (fun->decl))
8749 return 0;
8750
8751 /* If we have a path to EXIT, then we do return. */
8752 if (TREE_THIS_VOLATILE (fun->decl)
8753 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8754 {
8755 location = UNKNOWN_LOCATION;
8756 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8757 {
8758 last = last_stmt (e->src);
8759 if ((gimple_code (last) == GIMPLE_RETURN
8760 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8761 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8762 break;
8763 }
8764 if (location == UNKNOWN_LOCATION)
8765 location = cfun->function_end_locus;
8766 warning_at (location, 0, "%<noreturn%> function does return");
8767 }
8768
8769 /* If we see "return;" in some basic block, then we do reach the end
8770 without returning a value. */
8771 else if (warn_return_type
8772 && !TREE_NO_WARNING (fun->decl)
8773 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8774 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8775 {
8776 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8777 {
8778 gimple *last = last_stmt (e->src);
8779 greturn *return_stmt = dyn_cast <greturn *> (last);
8780 if (return_stmt
8781 && gimple_return_retval (return_stmt) == NULL
8782 && !gimple_no_warning_p (last))
8783 {
8784 location = gimple_location (last);
8785 if (location == UNKNOWN_LOCATION)
8786 location = fun->function_end_locus;
8787 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8788 TREE_NO_WARNING (fun->decl) = 1;
8789 break;
8790 }
8791 }
8792 }
8793 return 0;
8794 }
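/* Examples of code this pass diagnoses (illustrative):

     __attribute__ ((noreturn)) int f (void) { return 1; }
       -> warning: 'noreturn' function does return

     int g (int x) { if (x) return 1; }
       -> warning: control reaches end of non-void function  */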
8795
8796 } // anon namespace
8797
8798 gimple_opt_pass *
8799 make_pass_warn_function_return (gcc::context *ctxt)
8800 {
8801 return new pass_warn_function_return (ctxt);
8802 }
8803
8804 /* Walk a gimplified function and warn about calls whose return value
8805 is ignored and whose callee has attribute((warn_unused_result)) set.
8806 This is done before inlining, so we don't have to worry about that. */
8807
8808 static void
8809 do_warn_unused_result (gimple_seq seq)
8810 {
8811 tree fdecl, ftype;
8812 gimple_stmt_iterator i;
8813
8814 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8815 {
8816 gimple *g = gsi_stmt (i);
8817
8818 switch (gimple_code (g))
8819 {
8820 case GIMPLE_BIND:
8821 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8822 break;
8823 case GIMPLE_TRY:
8824 do_warn_unused_result (gimple_try_eval (g));
8825 do_warn_unused_result (gimple_try_cleanup (g));
8826 break;
8827 case GIMPLE_CATCH:
8828 do_warn_unused_result (gimple_catch_handler (
8829 as_a <gcatch *> (g)));
8830 break;
8831 case GIMPLE_EH_FILTER:
8832 do_warn_unused_result (gimple_eh_filter_failure (g));
8833 break;
8834
8835 case GIMPLE_CALL:
8836 if (gimple_call_lhs (g))
8837 break;
8838 if (gimple_call_internal_p (g))
8839 break;
8840
8841 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8842 LHS. All calls whose value is ignored should be
8843 represented like this. Look for the attribute. */
8844 fdecl = gimple_call_fndecl (g);
8845 ftype = gimple_call_fntype (g);
8846
8847 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8848 {
8849 location_t loc = gimple_location (g);
8850
8851 if (fdecl)
8852 warning_at (loc, OPT_Wunused_result,
8853 "ignoring return value of %qD, "
8854 "declared with attribute warn_unused_result",
8855 fdecl);
8856 else
8857 warning_at (loc, OPT_Wunused_result,
8858 "ignoring return value of function "
8859 "declared with attribute warn_unused_result");
8860 }
8861 break;
8862
8863 default:
8864 /* Not a container, not a call, or a call whose value is used. */
8865 break;
8866 }
8867 }
8868 }
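/* A source-level example that triggers the warning above
   (illustrative):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void)
     {
       must_check ();   <-- warning: ignoring return value of
                            'must_check', declared with attribute
                            warn_unused_result
     }  */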
8869
8870 namespace {
8871
8872 const pass_data pass_data_warn_unused_result =
8873 {
8874 GIMPLE_PASS, /* type */
8875 "*warn_unused_result", /* name */
8876 OPTGROUP_NONE, /* optinfo_flags */
8877 TV_NONE, /* tv_id */
8878 PROP_gimple_any, /* properties_required */
8879 0, /* properties_provided */
8880 0, /* properties_destroyed */
8881 0, /* todo_flags_start */
8882 0, /* todo_flags_finish */
8883 };
8884
8885 class pass_warn_unused_result : public gimple_opt_pass
8886 {
8887 public:
8888 pass_warn_unused_result (gcc::context *ctxt)
8889 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8890 {}
8891
8892 /* opt_pass methods: */
8893 virtual bool gate (function *) { return flag_warn_unused_result; }
8894 virtual unsigned int execute (function *)
8895 {
8896 do_warn_unused_result (gimple_body (current_function_decl));
8897 return 0;
8898 }
8899
8900 }; // class pass_warn_unused_result
8901
8902 } // anon namespace
8903
8904 gimple_opt_pass *
8905 make_pass_warn_unused_result (gcc::context *ctxt)
8906 {
8907 return new pass_warn_unused_result (ctxt);
8908 }
8909
8910 /* IPA passes, compilation of earlier functions or inlining
8911 might have changed some properties, such as marking functions nothrow,
8912 pure, const or noreturn.
8913 Remove redundant edges and basic blocks, and create new ones if necessary.
8914
8915 This pass can't be executed as a standalone pass from the pass manager,
8916 because in between inlining and this fixup verify_flow_info would fail. */
8917
8918 unsigned int
8919 execute_fixup_cfg (void)
8920 {
8921 basic_block bb;
8922 gimple_stmt_iterator gsi;
8923 int todo = 0;
8924 gcov_type count_scale;
8925 edge e;
8926 edge_iterator ei;
8927
8928 count_scale
8929 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8930 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8931
8932 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8933 cgraph_node::get (current_function_decl)->count;
8934 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8935 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8936 count_scale);
8937
8938 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8939 e->count = apply_scale (e->count, count_scale);
8940
8941 FOR_EACH_BB_FN (bb, cfun)
8942 {
8943 bb->count = apply_scale (bb->count, count_scale);
8944 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8945 {
8946 gimple *stmt = gsi_stmt (gsi);
8947 tree decl = is_gimple_call (stmt)
8948 ? gimple_call_fndecl (stmt)
8949 : NULL;
8950 if (decl)
8951 {
8952 int flags = gimple_call_flags (stmt);
8953 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8954 {
8955 if (gimple_purge_dead_abnormal_call_edges (bb))
8956 todo |= TODO_cleanup_cfg;
8957
8958 if (gimple_in_ssa_p (cfun))
8959 {
8960 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8961 update_stmt (stmt);
8962 }
8963 }
8964
8965 if (flags & ECF_NORETURN
8966 && fixup_noreturn_call (stmt))
8967 todo |= TODO_cleanup_cfg;
8968 }
8969
8970 /* Remove stores to variables we marked write-only.
8971 Keep the access when the store has a side effect, i.e. when the
8972 source is volatile. */
8973 if (gimple_store_p (stmt)
8974 && !gimple_has_side_effects (stmt))
8975 {
8976 tree lhs = get_base_address (gimple_get_lhs (stmt));
8977
8978 if (TREE_CODE (lhs) == VAR_DECL
8979 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8980 && varpool_node::get (lhs)->writeonly)
8981 {
8982 unlink_stmt_vdef (stmt);
8983 gsi_remove (&gsi, true);
8984 release_defs (stmt);
8985 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8986 continue;
8987 }
8988 }
8989 /* For calls we can simply remove the LHS when it is known
8990 to be write-only. */
8991 if (is_gimple_call (stmt)
8992 && gimple_get_lhs (stmt))
8993 {
8994 tree lhs = get_base_address (gimple_get_lhs (stmt));
8995
8996 if (TREE_CODE (lhs) == VAR_DECL
8997 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8998 && varpool_node::get (lhs)->writeonly)
8999 {
9000 gimple_call_set_lhs (stmt, NULL);
9001 update_stmt (stmt);
9002 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9003 }
9004 }
9005
9006 if (maybe_clean_eh_stmt (stmt)
9007 && gimple_purge_dead_eh_edges (bb))
9008 todo |= TODO_cleanup_cfg;
9009 gsi_next (&gsi);
9010 }
9011
9012 FOR_EACH_EDGE (e, ei, bb->succs)
9013 e->count = apply_scale (e->count, count_scale);
9014
9015 /* If we have a basic block with no successors that does not
9016 end with a control statement or a noreturn call, end it with
9017 a call to __builtin_unreachable. This situation can occur
9018 when inlining a noreturn call that does in fact return. */
9019 if (EDGE_COUNT (bb->succs) == 0)
9020 {
9021 gimple *stmt = last_stmt (bb);
9022 if (!stmt
9023 || (!is_ctrl_stmt (stmt)
9024 && (!is_gimple_call (stmt)
9025 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
9026 {
9027 if (stmt && is_gimple_call (stmt))
9028 gimple_call_set_ctrl_altering (stmt, false);
9029 stmt = gimple_build_call
9030 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
9031 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9032 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9033 }
9034 }
9035 }
9036 if (count_scale != REG_BR_PROB_BASE)
9037 compute_function_frequency ();
9038
9039 if (current_loops
9040 && (todo & TODO_cleanup_cfg))
9041 loops_state_set (LOOPS_NEED_FIXUP);
9042
9043 return todo;
9044 }
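/* Illustrative scaling arithmetic for the above: if the cgraph node
   records a count of 500 while the entry block still carries 1000
   (e.g. after cloning), count_scale becomes REG_BR_PROB_BASE * 500
   / 1000, and apply_scale halves every block and edge count.  */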
9045
9046 namespace {
9047
9048 const pass_data pass_data_fixup_cfg =
9049 {
9050 GIMPLE_PASS, /* type */
9051 "fixup_cfg", /* name */
9052 OPTGROUP_NONE, /* optinfo_flags */
9053 TV_NONE, /* tv_id */
9054 PROP_cfg, /* properties_required */
9055 0, /* properties_provided */
9056 0, /* properties_destroyed */
9057 0, /* todo_flags_start */
9058 0, /* todo_flags_finish */
9059 };
9060
9061 class pass_fixup_cfg : public gimple_opt_pass
9062 {
9063 public:
9064 pass_fixup_cfg (gcc::context *ctxt)
9065 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9066 {}
9067
9068 /* opt_pass methods: */
9069 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9070 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9071
9072 }; // class pass_fixup_cfg
9073
9074 } // anon namespace
9075
9076 gimple_opt_pass *
9077 make_pass_fixup_cfg (gcc::context *ctxt)
9078 {
9079 return new pass_fixup_cfg (ctxt);
9080 }
9081
9082 /* Garbage collection support for edge_def. */
9083
9084 extern void gt_ggc_mx (tree&);
9085 extern void gt_ggc_mx (gimple *&);
9086 extern void gt_ggc_mx (rtx&);
9087 extern void gt_ggc_mx (basic_block&);
9088
9089 static void
9090 gt_ggc_mx (rtx_insn *& x)
9091 {
9092 if (x)
9093 gt_ggc_mx_rtx_def ((void *) x);
9094 }
9095
9096 void
9097 gt_ggc_mx (edge_def *e)
9098 {
9099 tree block = LOCATION_BLOCK (e->goto_locus);
9100 gt_ggc_mx (e->src);
9101 gt_ggc_mx (e->dest);
9102 if (current_ir_type () == IR_GIMPLE)
9103 gt_ggc_mx (e->insns.g);
9104 else
9105 gt_ggc_mx (e->insns.r);
9106 gt_ggc_mx (block);
9107 }
9108
9109 /* PCH support for edge_def. */
9110
9111 extern void gt_pch_nx (tree&);
9112 extern void gt_pch_nx (gimple *&);
9113 extern void gt_pch_nx (rtx&);
9114 extern void gt_pch_nx (basic_block&);
9115
9116 static void
9117 gt_pch_nx (rtx_insn *& x)
9118 {
9119 if (x)
9120 gt_pch_nx_rtx_def ((void *) x);
9121 }
9122
9123 void
9124 gt_pch_nx (edge_def *e)
9125 {
9126 tree block = LOCATION_BLOCK (e->goto_locus);
9127 gt_pch_nx (e->src);
9128 gt_pch_nx (e->dest);
9129 if (current_ir_type () == IR_GIMPLE)
9130 gt_pch_nx (e->insns.g);
9131 else
9132 gt_pch_nx (e->insns.r);
9133 gt_pch_nx (block);
9134 }
9135
9136 void
9137 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9138 {
9139 tree block = LOCATION_BLOCK (e->goto_locus);
9140 op (&(e->src), cookie);
9141 op (&(e->dest), cookie);
9142 if (current_ir_type () == IR_GIMPLE)
9143 op (&(e->insns.g), cookie);
9144 else
9145 op (&(e->insns.r), cookie);
9146 op (&(block), cookie);
9147 }