/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
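
/* An illustrative example (not part of the original sources): a loop
   annotated with "#pragma GCC ivdep", e.g.

       #pragma GCC ivdep
       for (i = 0; i < n; i++)
	 a[i] = b[i] + c[i];

   is gimplified so that the loop condition is preceded by an internal
   call along the lines of

       D.2 = .ANNOTATE (D.1, annot_expr_ivdep_kind);
       if (D.2 != 0) goto <body>; else goto <exit>;

   The function above strips the .ANNOTATE call, replaces it with the
   plain copy "D.2 = D.1", and records the hint on the loop, here by
   setting loop->safelen = INT_MAX.  */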

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
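
/* For illustration (not part of the original sources):

       void *targets[2] = { &&l1, &&l2 };
       goto *targets[i];   (a GIMPLE_GOTO whose destination is a pointer
			    value, not a LABEL_DECL)
       goto l1;            (a GIMPLE_GOTO whose destination is the
			    LABEL_DECL for l1)

   computed_goto_p returns true only for the first form.  */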

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
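
/* Sketch of the effect (illustrative only): in a sequence such as

       foo ();
       abort ();   (declared noreturn, so gimple_call_flags () has
		    ECF_NORETURN set)
       bar ();

   the call to abort is marked control-altering, which makes
   stmt_ends_bb_p return true for it; make_blocks_1 below therefore
   terminates the basic block right after the call, and bar () starts
   a new (unreachable) block.  */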


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
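
/* Illustrative example of the LHS rewrite above (assumed GIMPLE, not
   taken from the original sources): for a returns-twice call such as
   "x = setjmp (buf);" the loop produces

       tmp = setjmp (buf);   (the call keeps only the new temporary)
       x = tmp;              (copy inserted right after the call)

   so the old value of x is still available on the abnormal edge
   leaving the call and the SSA names for x get non-overlapping
   life ranges.  */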

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
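
/* The growth policy above, worked through (illustrative): the array
   grows by roughly 25% whenever it fills up.  E.g. with
   last_basic_block == 20 the new size is 20 + (20 + 3) / 4 = 25,
   and at 25 it becomes 25 + (25 + 3) / 4 = 32.  */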


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
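
/* Illustration of the factoring (not part of the original sources):
   two computed gotos

       <bb 3>: goto *p_1;
       <bb 5>: goto *q_2;

   are rewritten to store their destination and fall through to the
   single dispatcher block:

       <bb 3>: gotovar = p_1; goto <bb D>;
       <bb 5>: gotovar = q_2; goto <bb D>;
       <bb D>: <factored_label>: goto *gotovar;

   Only <bb D> then needs an abnormal edge to each FORCED_LABEL block,
   instead of every computed goto having an edge to every potential
   target.  */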

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
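
/* Worked example (illustrative, assumed source): with everything on
   one source line,

       if (c) x = 1; else x = 2;

   the condition and both successor blocks share one locus, so
   sample-based profiling could not tell them apart by line number
   alone.  The loop above hands out distinct discriminators for that
   line via next_discriminator_for_locus, so the profile can attribute
   samples to the individual blocks.  */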

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
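
/* Illustrative lowering (not from the original sources): at this point
   a GIMPLE_COND still carries its destination labels,

       if (a_1 < b_2) goto <L1>; else goto <L2>;

   make_cond_expr_edges turns this into an EDGE_TRUE_VALUE edge to the
   block holding <L1> and an EDGE_FALSE_VALUE edge to the block holding
   <L2>, and then clears both labels from the statement, as they are
   redundant with the edges.  */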


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
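
/* Example of the recorded chains (illustrative): for

       switch (x) { case 1: case 2: goto A; case 3: goto B; }

   the edge to A's block maps to a chain linking the CASE_LABEL_EXPRs
   for 1 and 2 through their CASE_CHAIN fields, and the edge to B's
   block maps to the entry for 3.  Redirecting one edge can thus update
   every affected case label without rescanning the whole vector.  */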

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
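
/* A small worked example (illustrative, not from the original
   sources): if a block starts with a user label and an artificial one,

       L1:
       D.2345:
       x = 1;

   step 1 picks L1 as the leading label, step 2 rewrites any reference
   "goto D.2345;" into "goto L1;", and step 3 deletes D.2345, which is
   artificial, no longer referenced, and neither forced nor the target
   of a non-local goto.  */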

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
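
/* Worked example (illustrative): for

       switch (x) { case 1: case 2: case 3: goto A; case 4: goto B; }

   represented as four separate case labels with the same destination
   for 1, 2 and 3, the loop above folds them into the single range
   label "case 1 ... 3:", shrinking the label vector from five entries
   (default plus four cases) to three.  */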

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1875
1876 /* Merge block B into block A. */
1877
1878 static void
1879 gimple_merge_blocks (basic_block a, basic_block b)
1880 {
1881 gimple_stmt_iterator last, gsi;
1882 gphi_iterator psi;
1883
1884 if (dump_file)
1885 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1886
1887 /* Remove all single-valued PHI nodes from block B of the form
1888 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1889 gsi = gsi_last_bb (a);
1890 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1891 {
1892 gimple *phi = gsi_stmt (psi);
1893 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1894 gimple *copy;
1895 bool may_replace_uses = (virtual_operand_p (def)
1896 || may_propagate_copy (def, use));
1897
1898 /* If we maintain loop-closed SSA form, do not propagate the arguments
1899 of loop exit phi nodes. */
1900 if (current_loops
1901 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1902 && !virtual_operand_p (def)
1903 && TREE_CODE (use) == SSA_NAME
1904 && a->loop_father != b->loop_father)
1905 may_replace_uses = false;
1906
1907 if (!may_replace_uses)
1908 {
1909 gcc_assert (!virtual_operand_p (def));
1910
1911 /* Note that just emitting the copies is fine -- there is no problem
1912 with ordering of phi nodes. This is because A is the single
1913 predecessor of B, therefore the results of the phi nodes cannot
1914 appear as arguments of the phi nodes. */
1915 copy = gimple_build_assign (def, use);
1916 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1917 remove_phi_node (&psi, false);
1918 }
1919 else
1920 {
1921 /* If we deal with a PHI for virtual operands, we can simply
1922 propagate these without fussing with folding or updating
1923 the stmt. */
1924 if (virtual_operand_p (def))
1925 {
1926 imm_use_iterator iter;
1927 use_operand_p use_p;
1928 gimple *stmt;
1929
1930 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1931 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1932 SET_USE (use_p, use);
1933
1934 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1935 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1936 }
1937 else
1938 replace_uses_by (def, use);
1939
1940 remove_phi_node (&psi, true);
1941 }
1942 }
1943
1944 /* Ensure that B follows A. */
1945 move_block_after (b, a);
1946
1947 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1948 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1949
1950 /* Remove labels from B and set gimple_bb to A for other statements. */
1951 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1952 {
1953 gimple *stmt = gsi_stmt (gsi);
1954 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1955 {
1956 tree label = gimple_label_label (label_stmt);
1957 int lp_nr;
1958
1959 gsi_remove (&gsi, false);
1960
1961 /* Now that we can thread computed gotos, we might have
1962 a situation where we have a forced label in block B.
1963 However, the label at the start of block B might still be
1964 used in other ways (think about the runtime checking for
1965 Fortran assigned gotos). So we cannot just delete the
1966 label. Instead we move the label to the start of block A. */
1967 if (FORCED_LABEL (label))
1968 {
1969 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1970 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1971 }
1972 /* Other user labels are kept around in the form of a debug stmt. */
1973 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1974 {
1975 gimple *dbg = gimple_build_debug_bind (label,
1976 integer_zero_node,
1977 stmt);
1978 gimple_debug_bind_reset_value (dbg);
1979 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1980 }
1981
1982 lp_nr = EH_LANDING_PAD_NR (label);
1983 if (lp_nr)
1984 {
1985 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1986 lp->post_landing_pad = NULL;
1987 }
1988 }
1989 else
1990 {
1991 gimple_set_bb (stmt, a);
1992 gsi_next (&gsi);
1993 }
1994 }
1995
1996 /* When merging two BBs, if their counts are different, the larger count
1997 is selected as the new bb count. This is to handle inconsistent
1998 profiles. */
1999 if (a->loop_father == b->loop_father)
2000 {
2001 a->count = MAX (a->count, b->count);
2002 a->frequency = MAX (a->frequency, b->frequency);
2003 }
2004
2005 /* Merge the sequences. */
2006 last = gsi_last_bb (a);
2007 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2008 set_bb_seq (b, NULL);
2009
2010 if (cfgcleanup_altered_bbs)
2011 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2012 }
2013
2014
2015 /* Return the one of BB's two successors that is not reachable by a
2016 complex edge, if there is one. Otherwise, return BB. We use
2017 this in optimizations that use post-dominators for their heuristics,
2018 to catch the cases in C++ where function calls are involved. */
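/* For illustration (hypothetical CFG): if BB ends in a call with a
   fallthru edge to C and an EH (complex) edge to a landing pad, we
   return C. If BB does not have exactly two successors, we return
   BB itself. */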
2019
2020 basic_block
2021 single_noncomplex_succ (basic_block bb)
2022 {
2023 edge e0, e1;
2024 if (EDGE_COUNT (bb->succs) != 2)
2025 return bb;
2026
2027 e0 = EDGE_SUCC (bb, 0);
2028 e1 = EDGE_SUCC (bb, 1);
2029 if (e0->flags & EDGE_COMPLEX)
2030 return e1->dest;
2031 if (e1->flags & EDGE_COMPLEX)
2032 return e0->dest;
2033
2034 return bb;
2035 }
2036
2037 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags it implies. */
2038
2039 void
2040 notice_special_calls (gcall *call)
2041 {
2042 int flags = gimple_call_flags (call);
2043
2044 if (flags & ECF_MAY_BE_ALLOCA)
2045 cfun->calls_alloca = true;
2046 if (flags & ECF_RETURNS_TWICE)
2047 cfun->calls_setjmp = true;
2048 }
2049
2050
2051 /* Clear flags set by notice_special_calls. Used by dead code removal
2052 to update the flags. */
2053
2054 void
2055 clear_special_calls (void)
2056 {
2057 cfun->calls_alloca = false;
2058 cfun->calls_setjmp = false;
2059 }
2060
2061 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2062
2063 static void
2064 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2065 {
2066 /* Since this block is no longer reachable, we can just delete all
2067 of its PHI nodes. */
2068 remove_phi_nodes (bb);
2069
2070 /* Remove edges to BB's successors. */
2071 while (EDGE_COUNT (bb->succs) > 0)
2072 remove_edge (EDGE_SUCC (bb, 0));
2073 }
2074
2075
2076 /* Remove statements of basic block BB. */
2077
2078 static void
2079 remove_bb (basic_block bb)
2080 {
2081 gimple_stmt_iterator i;
2082
2083 if (dump_file)
2084 {
2085 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2086 if (dump_flags & TDF_DETAILS)
2087 {
2088 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2089 fprintf (dump_file, "\n");
2090 }
2091 }
2092
2093 if (current_loops)
2094 {
2095 struct loop *loop = bb->loop_father;
2096
2097 /* If a loop gets removed, clean up the information associated
2098 with it. */
2099 if (loop->latch == bb
2100 || loop->header == bb)
2101 free_numbers_of_iterations_estimates_loop (loop);
2102 }
2103
2104 /* Remove all the instructions in the block. */
2105 if (bb_seq (bb) != NULL)
2106 {
2107 /* Walk backwards so as to get a chance to substitute all
2108 released DEFs into debug stmts. See
2109 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2110 details. */
2111 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2112 {
2113 gimple *stmt = gsi_stmt (i);
2114 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2115 if (label_stmt
2116 && (FORCED_LABEL (gimple_label_label (label_stmt))
2117 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2118 {
2119 basic_block new_bb;
2120 gimple_stmt_iterator new_gsi;
2121
2122 /* A non-reachable non-local label may still be referenced.
2123 But it no longer needs to carry the extra semantics of
2124 non-locality. */
2125 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2126 {
2127 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2128 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2129 }
2130
2131 new_bb = bb->prev_bb;
2132 new_gsi = gsi_start_bb (new_bb);
2133 gsi_remove (&i, false);
2134 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2135 }
2136 else
2137 {
2138 /* Release SSA definitions if we are in SSA. Note that we
2139 may be called when not in SSA. For example,
2140 final_cleanup calls this function via
2141 cleanup_tree_cfg. */
2142 if (gimple_in_ssa_p (cfun))
2143 release_defs (stmt);
2144
2145 gsi_remove (&i, true);
2146 }
2147
2148 if (gsi_end_p (i))
2149 i = gsi_last_bb (bb);
2150 else
2151 gsi_prev (&i);
2152 }
2153 }
2154
2155 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2156 bb->il.gimple.seq = NULL;
2157 bb->il.gimple.phi_nodes = NULL;
2158 }
2159
2160
2161 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2162 predicate VAL, return the edge that will be taken out of the block.
2163 If VAL does not match a unique edge, NULL is returned. */
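/* For illustration (hypothetical IL): if BB ends in

     if (x_1 > 5) goto <bb 3>; else goto <bb 4>;

   then a constant VAL of 0 selects the false edge to <bb 4>, and a
   non-constant VAL yields NULL. */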
2164
2165 edge
2166 find_taken_edge (basic_block bb, tree val)
2167 {
2168 gimple *stmt;
2169
2170 stmt = last_stmt (bb);
2171
2172 gcc_assert (stmt);
2173 gcc_assert (is_ctrl_stmt (stmt));
2174
2175 if (val == NULL)
2176 return NULL;
2177
2178 if (!is_gimple_min_invariant (val))
2179 return NULL;
2180
2181 if (gimple_code (stmt) == GIMPLE_COND)
2182 return find_taken_edge_cond_expr (bb, val);
2183
2184 if (gimple_code (stmt) == GIMPLE_SWITCH)
2185 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2186
2187 if (computed_goto_p (stmt))
2188 {
2189 /* Only optimize if the argument is a label; if the argument is
2190 not a label then we cannot construct a proper CFG.
2191
2192 It may be the case that we only need to allow the LABEL_REF to
2193 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2194 appear inside a LABEL_EXPR just to be safe. */
2195 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2196 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2197 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2198 return NULL;
2199 }
2200
2201 gcc_unreachable ();
2202 }
2203
2204 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2205 statement, determine which of the outgoing edges will be taken out of the
2206 block. Return NULL if any edge may be taken. */
2207
2208 static edge
2209 find_taken_edge_computed_goto (basic_block bb, tree val)
2210 {
2211 basic_block dest;
2212 edge e = NULL;
2213
2214 dest = label_to_block (val);
2215 if (dest)
2216 {
2217 e = find_edge (bb, dest);
2218 gcc_assert (e != NULL);
2219 }
2220
2221 return e;
2222 }
2223
2224 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2225 statement, determine which of the two edges will be taken out of the
2226 block. Return NULL if either edge may be taken. */
2227
2228 static edge
2229 find_taken_edge_cond_expr (basic_block bb, tree val)
2230 {
2231 edge true_edge, false_edge;
2232
2233 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2234
2235 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2236 return (integer_zerop (val) ? false_edge : true_edge);
2237 }
2238
2239 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2240 statement, determine which edge will be taken out of the block. Return
2241 NULL if any edge may be taken. */
2242
2243 static edge
2244 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2245 tree val)
2246 {
2247 basic_block dest_bb;
2248 edge e;
2249 tree taken_case;
2250
2251 taken_case = find_case_label_for_value (switch_stmt, val);
2252 dest_bb = label_to_block (CASE_LABEL (taken_case));
2253
2254 e = find_edge (bb, dest_bb);
2255 gcc_assert (e);
2256 return e;
2257 }
2258
2259
2260 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2261 We can make optimal use here of the fact that the case labels are
2262 sorted: We can do a binary search for a case matching VAL. */
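/* For illustration (hypothetical case vector): with labels for 1,
   5 ... 10 and a default, a VAL of 7 narrows the search interval
   onto the 5 ... 10 range label and returns it, while a VAL of 3
   matches no label and falls back to the default. */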
2263
2264 static tree
2265 find_case_label_for_value (gswitch *switch_stmt, tree val)
2266 {
2267 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2268 tree default_case = gimple_switch_default_label (switch_stmt);
2269
2270 for (low = 0, high = n; high - low > 1; )
2271 {
2272 size_t i = (high + low) / 2;
2273 tree t = gimple_switch_label (switch_stmt, i);
2274 int cmp;
2275
2276 /* Cache the result of comparing CASE_LOW and val. */
2277 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2278
2279 if (cmp > 0)
2280 high = i;
2281 else
2282 low = i;
2283
2284 if (CASE_HIGH (t) == NULL)
2285 {
2286 /* A single-valued case label. */
2287 if (cmp == 0)
2288 return t;
2289 }
2290 else
2291 {
2292 /* A case range. We can only handle integer ranges. */
2293 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2294 return t;
2295 }
2296 }
2297
2298 return default_case;
2299 }
2300
2301
2302 /* Dump a basic block on stderr. */
2303
2304 void
2305 gimple_debug_bb (basic_block bb)
2306 {
2307 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2308 }
2309
2310
2311 /* Dump basic block with index N on stderr. */
2312
2313 basic_block
2314 gimple_debug_bb_n (int n)
2315 {
2316 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2317 return BASIC_BLOCK_FOR_FN (cfun, n);
2318 }
2319
2320
2321 /* Dump the CFG on stderr.
2322
2323 FLAGS are the same used by the tree dumping functions
2324 (see TDF_* in dumpfile.h). */
2325
2326 void
2327 gimple_debug_cfg (int flags)
2328 {
2329 gimple_dump_cfg (stderr, flags);
2330 }
2331
2332
2333 /* Dump the program showing basic block boundaries on the given FILE.
2334
2335 FLAGS are the same used by the tree dumping functions (see TDF_* in
2336 tree.h). */
2337
2338 void
2339 gimple_dump_cfg (FILE *file, int flags)
2340 {
2341 if (flags & TDF_DETAILS)
2342 {
2343 dump_function_header (file, current_function_decl, flags);
2344 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2345 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2346 last_basic_block_for_fn (cfun));
2347
2348 brief_dump_cfg (file, flags | TDF_COMMENT);
2349 fprintf (file, "\n");
2350 }
2351
2352 if (flags & TDF_STATS)
2353 dump_cfg_stats (file);
2354
2355 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2356 }
2357
2358
2359 /* Dump CFG statistics on FILE. */
2360
2361 void
2362 dump_cfg_stats (FILE *file)
2363 {
2364 static long max_num_merged_labels = 0;
2365 unsigned long size, total = 0;
2366 long num_edges;
2367 basic_block bb;
2368 const char * const fmt_str = "%-30s%-13s%12s\n";
2369 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2370 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2371 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2372 const char *funcname = current_function_name ();
2373
2374 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2375
2376 fprintf (file, "---------------------------------------------------------\n");
2377 fprintf (file, fmt_str, "", " Number of ", "Memory");
2378 fprintf (file, fmt_str, "", " instances ", "used ");
2379 fprintf (file, "---------------------------------------------------------\n");
2380
2381 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2382 total += size;
2383 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2384 SCALE (size), LABEL (size));
2385
2386 num_edges = 0;
2387 FOR_EACH_BB_FN (bb, cfun)
2388 num_edges += EDGE_COUNT (bb->succs);
2389 size = num_edges * sizeof (struct edge_def);
2390 total += size;
2391 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2392
2393 fprintf (file, "---------------------------------------------------------\n");
2394 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2395 LABEL (total));
2396 fprintf (file, "---------------------------------------------------------\n");
2397 fprintf (file, "\n");
2398
2399 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2400 max_num_merged_labels = cfg_stats.num_merged_labels;
2401
2402 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2403 cfg_stats.num_merged_labels, max_num_merged_labels);
2404
2405 fprintf (file, "\n");
2406 }
2407
2408
2409 /* Dump CFG statistics on stderr. Keep extern so that it's always
2410 linked in the final executable. */
2411
2412 DEBUG_FUNCTION void
2413 debug_cfg_stats (void)
2414 {
2415 dump_cfg_stats (stderr);
2416 }
2417
2418 /*---------------------------------------------------------------------------
2419 Miscellaneous helpers
2420 ---------------------------------------------------------------------------*/
2421
2422 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2423 flow. Transfers of control flow associated with EH are excluded. */
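/* For illustration (hypothetical source): in a function containing

     if (setjmp (buf)) ...

   a later call with side effects may longjmp back and thus make an
   abnormal goto, whereas a leaf or side-effect-free call is never
   considered to. */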
2424
2425 static bool
2426 call_can_make_abnormal_goto (gimple *t)
2427 {
2428 /* If the function has no non-local labels, then a call cannot make an
2429 abnormal transfer of control. */
2430 if (!cfun->has_nonlocal_label
2431 && !cfun->calls_setjmp)
2432 return false;
2433
2434 /* Likewise if the call has no side effects. */
2435 if (!gimple_has_side_effects (t))
2436 return false;
2437
2438 /* Likewise if the called function is leaf. */
2439 if (gimple_call_flags (t) & ECF_LEAF)
2440 return false;
2441
2442 return true;
2443 }
2444
2445
2446 /* Return true if T can make an abnormal transfer of control flow.
2447 Transfers of control flow associated with EH are excluded. */
2448
2449 bool
2450 stmt_can_make_abnormal_goto (gimple *t)
2451 {
2452 if (computed_goto_p (t))
2453 return true;
2454 if (is_gimple_call (t))
2455 return call_can_make_abnormal_goto (t);
2456 return false;
2457 }
2458
2459
2460 /* Return true if T represents a stmt that always transfers control. */
2461
2462 bool
2463 is_ctrl_stmt (gimple *t)
2464 {
2465 switch (gimple_code (t))
2466 {
2467 case GIMPLE_COND:
2468 case GIMPLE_SWITCH:
2469 case GIMPLE_GOTO:
2470 case GIMPLE_RETURN:
2471 case GIMPLE_RESX:
2472 return true;
2473 default:
2474 return false;
2475 }
2476 }
2477
2478
2479 /* Return true if T is a statement that may alter the flow of control
2480 (e.g., a call to a non-returning function). */
2481
2482 bool
2483 is_ctrl_altering_stmt (gimple *t)
2484 {
2485 gcc_assert (t);
2486
2487 switch (gimple_code (t))
2488 {
2489 case GIMPLE_CALL:
2490 /* The per-stmt call flag indicates whether the call could alter
2491 control flow. */
2492 if (gimple_call_ctrl_altering_p (t))
2493 return true;
2494 break;
2495
2496 case GIMPLE_EH_DISPATCH:
2497 /* EH_DISPATCH branches to the individual catch handlers at
2498 this level of a try or allowed-exceptions region. It can
2499 fallthru to the next statement as well. */
2500 return true;
2501
2502 case GIMPLE_ASM:
2503 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2504 return true;
2505 break;
2506
2507 CASE_GIMPLE_OMP:
2508 /* OpenMP directives alter control flow. */
2509 return true;
2510
2511 case GIMPLE_TRANSACTION:
2512 /* A transaction start alters control flow. */
2513 return true;
2514
2515 default:
2516 break;
2517 }
2518
2519 /* If a statement can throw, it alters control flow. */
2520 return stmt_can_throw_internal (t);
2521 }
2522
2523
2524 /* Return true if T is a simple local goto. */
2525
2526 bool
2527 simple_goto_p (gimple *t)
2528 {
2529 return (gimple_code (t) == GIMPLE_GOTO
2530 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2531 }
2532
2533
2534 /* Return true if STMT should start a new basic block. PREV_STMT is
2535 the statement preceding STMT. It is used when STMT is a label or a
2536 case label. Labels should only start a new basic block if their
2537 previous statement wasn't a label. Otherwise, a sequence of labels
2538 would generate unnecessary basic blocks that only contain a single
2539 label. */
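/* For illustration (hypothetical IL): in

     L1: L2: x_1 = 1;

   only L1 starts a block; L2 is counted as a merged label. Nonlocal
   and forced labels, and calls to setjmp, start a new block no
   matter what precedes them. */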
2540
2541 static inline bool
2542 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2543 {
2544 if (stmt == NULL)
2545 return false;
2546
2547 /* Labels start a new basic block only if the preceding statement
2548 wasn't a label of the same type. This prevents the creation of
2549 consecutive blocks that have nothing but a single label. */
2550 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2551 {
2552 /* Nonlocal and computed GOTO targets always start a new block. */
2553 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2554 || FORCED_LABEL (gimple_label_label (label_stmt)))
2555 return true;
2556
2557 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2558 {
2559 if (DECL_NONLOCAL (gimple_label_label (
2560 as_a <glabel *> (prev_stmt))))
2561 return true;
2562
2563 cfg_stats.num_merged_labels++;
2564 return false;
2565 }
2566 else
2567 return true;
2568 }
2569 else if (gimple_code (stmt) == GIMPLE_CALL
2570 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2571 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2572 start a new block. */
2573 return true;
2574
2575 return false;
2576 }
2577
2578
2579 /* Return true if T should end a basic block. */
2580
2581 bool
2582 stmt_ends_bb_p (gimple *t)
2583 {
2584 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2585 }
2586
2587 /* Remove block annotations and other data structures. */
2588
2589 void
2590 delete_tree_cfg_annotations (struct function *fn)
2591 {
2592 vec_free (label_to_block_map_for_fn (fn));
2593 }
2594
2595 /* Return the virtual phi in BB. */
2596
2597 gphi *
2598 get_virtual_phi (basic_block bb)
2599 {
2600 for (gphi_iterator gsi = gsi_start_phis (bb);
2601 !gsi_end_p (gsi);
2602 gsi_next (&gsi))
2603 {
2604 gphi *phi = gsi.phi ();
2605
2606 if (virtual_operand_p (PHI_RESULT (phi)))
2607 return phi;
2608 }
2609
2610 return NULL;
2611 }
2612
2613 /* Return the first statement in basic block BB. */
2614
2615 gimple *
2616 first_stmt (basic_block bb)
2617 {
2618 gimple_stmt_iterator i = gsi_start_bb (bb);
2619 gimple *stmt = NULL;
2620
2621 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2622 {
2623 gsi_next (&i);
2624 stmt = NULL;
2625 }
2626 return stmt;
2627 }
2628
2629 /* Return the first non-label statement in basic block BB. */
2630
2631 static gimple *
2632 first_non_label_stmt (basic_block bb)
2633 {
2634 gimple_stmt_iterator i = gsi_start_bb (bb);
2635 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2636 gsi_next (&i);
2637 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2638 }
2639
2640 /* Return the last statement in basic block BB. */
2641
2642 gimple *
2643 last_stmt (basic_block bb)
2644 {
2645 gimple_stmt_iterator i = gsi_last_bb (bb);
2646 gimple *stmt = NULL;
2647
2648 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2649 {
2650 gsi_prev (&i);
2651 stmt = NULL;
2652 }
2653 return stmt;
2654 }
2655
2656 /* Return the last statement of an otherwise empty block. Return NULL
2657 if the block is totally empty, or if it contains more than one
2658 statement. */
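/* For illustration (hypothetical IL): for a block containing only

     L1: return x_1;

   we return the GIMPLE_RETURN; for an empty block, or one with more
   than one executable statement, we return NULL. */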
2659
2660 gimple *
2661 last_and_only_stmt (basic_block bb)
2662 {
2663 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2664 gimple *last, *prev;
2665
2666 if (gsi_end_p (i))
2667 return NULL;
2668
2669 last = gsi_stmt (i);
2670 gsi_prev_nondebug (&i);
2671 if (gsi_end_p (i))
2672 return last;
2673
2674 /* Empty statements should no longer appear in the instruction stream.
2675 Everything that might have appeared before should be deleted by
2676 remove_useless_stmts, and the optimizers should just gsi_remove
2677 instead of smashing with build_empty_stmt.
2678
2679 Thus the only thing that should appear here in a block containing
2680 one executable statement is a label. */
2681 prev = gsi_stmt (i);
2682 if (gimple_code (prev) == GIMPLE_LABEL)
2683 return last;
2684 else
2685 return NULL;
2686 }
2687
2688 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2689
2690 static void
2691 reinstall_phi_args (edge new_edge, edge old_edge)
2692 {
2693 edge_var_map *vm;
2694 int i;
2695 gphi_iterator phis;
2696
2697 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2698 if (!v)
2699 return;
2700
2701 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2702 v->iterate (i, &vm) && !gsi_end_p (phis);
2703 i++, gsi_next (&phis))
2704 {
2705 gphi *phi = phis.phi ();
2706 tree result = redirect_edge_var_map_result (vm);
2707 tree arg = redirect_edge_var_map_def (vm);
2708
2709 gcc_assert (result == gimple_phi_result (phi));
2710
2711 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2712 }
2713
2714 redirect_edge_var_map_clear (old_edge);
2715 }
2716
2717 /* Returns the basic block after which the new basic block created
2718 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2719 near its "logical" location. This is of most help to humans looking
2720 at debugging dumps. */
2721
2722 basic_block
2723 split_edge_bb_loc (edge edge_in)
2724 {
2725 basic_block dest = edge_in->dest;
2726 basic_block dest_prev = dest->prev_bb;
2727
2728 if (dest_prev)
2729 {
2730 edge e = find_edge (dest_prev, dest);
2731 if (e && !(e->flags & EDGE_COMPLEX))
2732 return edge_in->src;
2733 }
2734 return dest_prev;
2735 }
2736
2737 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2738 Abort on abnormal edges. */
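/* For illustration (hypothetical CFG): splitting the critical edge
   A->B, where A has two successors and B two predecessors, creates
   a fresh empty block N so that control flows A->N->B; any PHI
   arguments queued on the old edge are reinstalled on the new
   fallthru edge N->B. */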
2739
2740 static basic_block
2741 gimple_split_edge (edge edge_in)
2742 {
2743 basic_block new_bb, after_bb, dest;
2744 edge new_edge, e;
2745
2746 /* Abnormal edges cannot be split. */
2747 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2748
2749 dest = edge_in->dest;
2750
2751 after_bb = split_edge_bb_loc (edge_in);
2752
2753 new_bb = create_empty_bb (after_bb);
2754 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2755 new_bb->count = edge_in->count;
2756 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2757 new_edge->probability = REG_BR_PROB_BASE;
2758 new_edge->count = edge_in->count;
2759
2760 e = redirect_edge_and_branch (edge_in, new_bb);
2761 gcc_assert (e == edge_in);
2762 reinstall_phi_args (new_edge, e);
2763
2764 return new_bb;
2765 }
2766
2767
2768 /* Verify properties of the address expression T with base object BASE. */
2769
2770 static tree
2771 verify_address (tree t, tree base)
2772 {
2773 bool old_constant;
2774 bool old_side_effects;
2775 bool new_constant;
2776 bool new_side_effects;
2777
2778 old_constant = TREE_CONSTANT (t);
2779 old_side_effects = TREE_SIDE_EFFECTS (t);
2780
2781 recompute_tree_invariant_for_addr_expr (t);
2782 new_side_effects = TREE_SIDE_EFFECTS (t);
2783 new_constant = TREE_CONSTANT (t);
2784
2785 if (old_constant != new_constant)
2786 {
2787 error ("constant not recomputed when ADDR_EXPR changed");
2788 return t;
2789 }
2790 if (old_side_effects != new_side_effects)
2791 {
2792 error ("side effects not recomputed when ADDR_EXPR changed");
2793 return t;
2794 }
2795
2796 if (!(TREE_CODE (base) == VAR_DECL
2797 || TREE_CODE (base) == PARM_DECL
2798 || TREE_CODE (base) == RESULT_DECL))
2799 return NULL_TREE;
2800
2801 if (DECL_GIMPLE_REG_P (base))
2802 {
2803 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2804 return base;
2805 }
2806
2807 return NULL_TREE;
2808 }
2809
2810 /* Callback for walk_tree, check that all elements with address taken are
2811 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2812 inside a PHI node. */
2813
2814 static tree
2815 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2816 {
2817 tree t = *tp, x;
2818
2819 if (TYPE_P (t))
2820 *walk_subtrees = 0;
2821
2822 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2823 #define CHECK_OP(N, MSG) \
2824 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2825 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2826
2827 switch (TREE_CODE (t))
2828 {
2829 case SSA_NAME:
2830 if (SSA_NAME_IN_FREE_LIST (t))
2831 {
2832 error ("SSA name in freelist but still referenced");
2833 return *tp;
2834 }
2835 break;
2836
2837 case INDIRECT_REF:
2838 error ("INDIRECT_REF in gimple IL");
2839 return t;
2840
2841 case MEM_REF:
2842 x = TREE_OPERAND (t, 0);
2843 if (!POINTER_TYPE_P (TREE_TYPE (x))
2844 || !is_gimple_mem_ref_addr (x))
2845 {
2846 error ("invalid first operand of MEM_REF");
2847 return x;
2848 }
2849 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2850 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2851 {
2852 error ("invalid offset operand of MEM_REF");
2853 return TREE_OPERAND (t, 1);
2854 }
2855 if (TREE_CODE (x) == ADDR_EXPR
2856 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2857 return x;
2858 *walk_subtrees = 0;
2859 break;
2860
2861 case ASSERT_EXPR:
2862 x = fold (ASSERT_EXPR_COND (t));
2863 if (x == boolean_false_node)
2864 {
2865 error ("ASSERT_EXPR with an always-false condition");
2866 return *tp;
2867 }
2868 break;
2869
2870 case MODIFY_EXPR:
2871 error ("MODIFY_EXPR not expected while having tuples");
2872 return *tp;
2873
2874 case ADDR_EXPR:
2875 {
2876 tree tem;
2877
2878 gcc_assert (is_gimple_address (t));
2879
2880 /* Skip any references (they will be checked when we recurse down the
2881 tree) and ensure that any variable used as a prefix is marked
2882 addressable. */
2883 for (x = TREE_OPERAND (t, 0);
2884 handled_component_p (x);
2885 x = TREE_OPERAND (x, 0))
2886 ;
2887
2888 if ((tem = verify_address (t, x)))
2889 return tem;
2890
2891 if (!(TREE_CODE (x) == VAR_DECL
2892 || TREE_CODE (x) == PARM_DECL
2893 || TREE_CODE (x) == RESULT_DECL))
2894 return NULL;
2895
2896 if (!TREE_ADDRESSABLE (x))
2897 {
2898 error ("address taken, but ADDRESSABLE bit not set");
2899 return x;
2900 }
2901
2902 break;
2903 }
2904
2905 case COND_EXPR:
2906 x = COND_EXPR_COND (t);
2907 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2908 {
2909 error ("non-integral used in condition");
2910 return x;
2911 }
2912 if (!is_gimple_condexpr (x))
2913 {
2914 error ("invalid conditional operand");
2915 return x;
2916 }
2917 break;
2918
2919 case NON_LVALUE_EXPR:
2920 case TRUTH_NOT_EXPR:
2921 gcc_unreachable ();
2922
2923 CASE_CONVERT:
2924 case FIX_TRUNC_EXPR:
2925 case FLOAT_EXPR:
2926 case NEGATE_EXPR:
2927 case ABS_EXPR:
2928 case BIT_NOT_EXPR:
2929 CHECK_OP (0, "invalid operand to unary operator");
2930 break;
2931
2932 case REALPART_EXPR:
2933 case IMAGPART_EXPR:
2934 case BIT_FIELD_REF:
2935 if (!is_gimple_reg_type (TREE_TYPE (t)))
2936 {
2937 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2938 return t;
2939 }
2940
2941 if (TREE_CODE (t) == BIT_FIELD_REF)
2942 {
2943 tree t0 = TREE_OPERAND (t, 0);
2944 tree t1 = TREE_OPERAND (t, 1);
2945 tree t2 = TREE_OPERAND (t, 2);
2946 if (!tree_fits_uhwi_p (t1)
2947 || !tree_fits_uhwi_p (t2))
2948 {
2949 error ("invalid position or size operand to BIT_FIELD_REF");
2950 return t;
2951 }
2952 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2953 && (TYPE_PRECISION (TREE_TYPE (t))
2954 != tree_to_uhwi (t1)))
2955 {
2956 error ("integral result type precision does not match "
2957 "field size of BIT_FIELD_REF");
2958 return t;
2959 }
2960 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2961 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2962 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2963 != tree_to_uhwi (t1)))
2964 {
2965 error ("mode precision of non-integral result does not "
2966 "match field size of BIT_FIELD_REF");
2967 return t;
2968 }
2969 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2970 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2971 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2972 {
2973 error ("position plus size exceeds size of referenced object in "
2974 "BIT_FIELD_REF");
2975 return t;
2976 }
2977 }
2978 t = TREE_OPERAND (t, 0);
2979
2980 /* Fall-through. */
2981 case COMPONENT_REF:
2982 case ARRAY_REF:
2983 case ARRAY_RANGE_REF:
2984 case VIEW_CONVERT_EXPR:
2985 /* We have a nest of references. Verify that each of the operands
2986 that determine where to reference is either a constant or a variable,
2987 verify that the base is valid, and then show we've already checked
2988 the subtrees. */
2989 while (handled_component_p (t))
2990 {
2991 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2992 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2993 else if (TREE_CODE (t) == ARRAY_REF
2994 || TREE_CODE (t) == ARRAY_RANGE_REF)
2995 {
2996 CHECK_OP (1, "invalid array index");
2997 if (TREE_OPERAND (t, 2))
2998 CHECK_OP (2, "invalid array lower bound");
2999 if (TREE_OPERAND (t, 3))
3000 CHECK_OP (3, "invalid array stride");
3001 }
3002 else if (TREE_CODE (t) == BIT_FIELD_REF
3003 || TREE_CODE (t) == REALPART_EXPR
3004 || TREE_CODE (t) == IMAGPART_EXPR)
3005 {
3006 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3007 "REALPART_EXPR");
3008 return t;
3009 }
3010
3011 t = TREE_OPERAND (t, 0);
3012 }
3013
3014 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3015 {
3016 error ("invalid reference prefix");
3017 return t;
3018 }
3019 *walk_subtrees = 0;
3020 break;
3021 case PLUS_EXPR:
3022 case MINUS_EXPR:
3023 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3024 should be done using POINTER_PLUS_EXPR. */
3025 if (POINTER_TYPE_P (TREE_TYPE (t)))
3026 {
3027 error ("invalid operand to plus/minus, type is a pointer");
3028 return t;
3029 }
3030 CHECK_OP (0, "invalid operand to binary operator");
3031 CHECK_OP (1, "invalid operand to binary operator");
3032 break;
3033
3034 case POINTER_PLUS_EXPR:
3035 /* Check to make sure the first operand is a pointer or reference type. */
3036 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3037 {
3038 error ("invalid operand to pointer plus, first operand is not a pointer");
3039 return t;
3040 }
3041 /* Check to make sure the second operand is a ptrofftype. */
3042 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3043 {
3044 error ("invalid operand to pointer plus, second operand is not an "
3045 "integer type of appropriate width");
3046 return t;
3047 }
3048 /* FALLTHROUGH */
3049 case LT_EXPR:
3050 case LE_EXPR:
3051 case GT_EXPR:
3052 case GE_EXPR:
3053 case EQ_EXPR:
3054 case NE_EXPR:
3055 case UNORDERED_EXPR:
3056 case ORDERED_EXPR:
3057 case UNLT_EXPR:
3058 case UNLE_EXPR:
3059 case UNGT_EXPR:
3060 case UNGE_EXPR:
3061 case UNEQ_EXPR:
3062 case LTGT_EXPR:
3063 case MULT_EXPR:
3064 case TRUNC_DIV_EXPR:
3065 case CEIL_DIV_EXPR:
3066 case FLOOR_DIV_EXPR:
3067 case ROUND_DIV_EXPR:
3068 case TRUNC_MOD_EXPR:
3069 case CEIL_MOD_EXPR:
3070 case FLOOR_MOD_EXPR:
3071 case ROUND_MOD_EXPR:
3072 case RDIV_EXPR:
3073 case EXACT_DIV_EXPR:
3074 case MIN_EXPR:
3075 case MAX_EXPR:
3076 case LSHIFT_EXPR:
3077 case RSHIFT_EXPR:
3078 case LROTATE_EXPR:
3079 case RROTATE_EXPR:
3080 case BIT_IOR_EXPR:
3081 case BIT_XOR_EXPR:
3082 case BIT_AND_EXPR:
3083 CHECK_OP (0, "invalid operand to binary operator");
3084 CHECK_OP (1, "invalid operand to binary operator");
3085 break;
3086
3087 case CONSTRUCTOR:
3088 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3089 *walk_subtrees = 0;
3090 break;
3091
3092 case CASE_LABEL_EXPR:
3093 if (CASE_CHAIN (t))
3094 {
3095 error ("invalid CASE_CHAIN");
3096 return t;
3097 }
3098 break;
3099
3100 default:
3101 break;
3102 }
3103 return NULL;
3104
3105 #undef CHECK_OP
3106 }
3107
3108
3109 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3110 Returns true if there is an error, otherwise false. */
3111
3112 static bool
3113 verify_types_in_gimple_min_lval (tree expr)
3114 {
3115 tree op;
3116
3117 if (is_gimple_id (expr))
3118 return false;
3119
3120 if (TREE_CODE (expr) != TARGET_MEM_REF
3121 && TREE_CODE (expr) != MEM_REF)
3122 {
3123 error ("invalid expression for min lvalue");
3124 return true;
3125 }
3126
3127 /* TARGET_MEM_REFs are strange beasts. */
3128 if (TREE_CODE (expr) == TARGET_MEM_REF)
3129 return false;
3130
3131 op = TREE_OPERAND (expr, 0);
3132 if (!is_gimple_val (op))
3133 {
3134 error ("invalid operand in indirect reference");
3135 debug_generic_stmt (op);
3136 return true;
3137 }
3138 /* Memory references now generally can involve a value conversion. */
3139
3140 return false;
3141 }
3142
3143 /* Verify if EXPR is a valid GIMPLE reference expression. If
3144 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3145 if there is an error, otherwise false. */
3146
3147 static bool
3148 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3149 {
3150 while (handled_component_p (expr))
3151 {
3152 tree op = TREE_OPERAND (expr, 0);
3153
3154 if (TREE_CODE (expr) == ARRAY_REF
3155 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3156 {
3157 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3158 || (TREE_OPERAND (expr, 2)
3159 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3160 || (TREE_OPERAND (expr, 3)
3161 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3162 {
3163 error ("invalid operands to array reference");
3164 debug_generic_stmt (expr);
3165 return true;
3166 }
3167 }
3168
3169 /* Verify if the reference array element types are compatible. */
3170 if (TREE_CODE (expr) == ARRAY_REF
3171 && !useless_type_conversion_p (TREE_TYPE (expr),
3172 TREE_TYPE (TREE_TYPE (op))))
3173 {
3174 error ("type mismatch in array reference");
3175 debug_generic_stmt (TREE_TYPE (expr));
3176 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3177 return true;
3178 }
3179 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3180 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3181 TREE_TYPE (TREE_TYPE (op))))
3182 {
3183 error ("type mismatch in array range reference");
3184 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3185 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3186 return true;
3187 }
3188
3189 if ((TREE_CODE (expr) == REALPART_EXPR
3190 || TREE_CODE (expr) == IMAGPART_EXPR)
3191 && !useless_type_conversion_p (TREE_TYPE (expr),
3192 TREE_TYPE (TREE_TYPE (op))))
3193 {
3194 error ("type mismatch in real/imagpart reference");
3195 debug_generic_stmt (TREE_TYPE (expr));
3196 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3197 return true;
3198 }
3199
3200 if (TREE_CODE (expr) == COMPONENT_REF
3201 && !useless_type_conversion_p (TREE_TYPE (expr),
3202 TREE_TYPE (TREE_OPERAND (expr, 1))))
3203 {
3204 error ("type mismatch in component reference");
3205 debug_generic_stmt (TREE_TYPE (expr));
3206 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3207 return true;
3208 }
3209
3210 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3211 {
3212 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3213 that their operand is not an SSA name or an invariant when
3214 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3215 bug). Otherwise there is nothing to verify, gross mismatches at
3216 most invoke undefined behavior. */
3217 if (require_lvalue
3218 && (TREE_CODE (op) == SSA_NAME
3219 || is_gimple_min_invariant (op)))
3220 {
3221 error ("conversion of an SSA_NAME on the left hand side");
3222 debug_generic_stmt (expr);
3223 return true;
3224 }
3225 else if (TREE_CODE (op) == SSA_NAME
3226 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3227 {
3228 error ("conversion of register to a different size");
3229 debug_generic_stmt (expr);
3230 return true;
3231 }
3232 else if (!handled_component_p (op))
3233 return false;
3234 }
3235
3236 expr = op;
3237 }
3238
3239 if (TREE_CODE (expr) == MEM_REF)
3240 {
3241 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3242 {
3243 error ("invalid address operand in MEM_REF");
3244 debug_generic_stmt (expr);
3245 return true;
3246 }
3247 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3248 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3249 {
3250 error ("invalid offset operand in MEM_REF");
3251 debug_generic_stmt (expr);
3252 return true;
3253 }
3254 }
3255 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3256 {
3257 if (!TMR_BASE (expr)
3258 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3259 {
3260 error ("invalid address operand in TARGET_MEM_REF");
3261 return true;
3262 }
3263 if (!TMR_OFFSET (expr)
3264 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3265 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3266 {
3267 error ("invalid offset operand in TARGET_MEM_REF");
3268 debug_generic_stmt (expr);
3269 return true;
3270 }
3271 }
3272
3273 return ((require_lvalue || !is_gimple_min_invariant (expr))
3274 && verify_types_in_gimple_min_lval (expr));
3275 }
3276
3277 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3278 list of pointer-to types that is trivially convertible to DEST. */
3279
3280 static bool
3281 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3282 {
3283 tree src;
3284
3285 if (!TYPE_POINTER_TO (src_obj))
3286 return true;
3287
3288 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3289 if (useless_type_conversion_p (dest, src))
3290 return true;
3291
3292 return false;
3293 }
3294
3295 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3296 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3297
3298 static bool
3299 valid_fixed_convert_types_p (tree type1, tree type2)
3300 {
3301 return (FIXED_POINT_TYPE_P (type1)
3302 && (INTEGRAL_TYPE_P (type2)
3303 || SCALAR_FLOAT_TYPE_P (type2)
3304 || FIXED_POINT_TYPE_P (type2)));
3305 }
3306
3307 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3308 is a problem, otherwise false. */
3309
3310 static bool
3311 verify_gimple_call (gcall *stmt)
3312 {
3313 tree fn = gimple_call_fn (stmt);
3314 tree fntype, fndecl;
3315 unsigned i;
3316
3317 if (gimple_call_internal_p (stmt))
3318 {
3319 if (fn)
3320 {
3321 error ("gimple call has two targets");
3322 debug_generic_stmt (fn);
3323 return true;
3324 }
3325 }
3326 else
3327 {
3328 if (!fn)
3329 {
3330 error ("gimple call has no target");
3331 return true;
3332 }
3333 }
3334
3335 if (fn && !is_gimple_call_addr (fn))
3336 {
3337 error ("invalid function in gimple call");
3338 debug_generic_stmt (fn);
3339 return true;
3340 }
3341
3342 if (fn
3343 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3344 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3345 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3346 {
3347 error ("non-function in gimple call");
3348 return true;
3349 }
3350
3351 fndecl = gimple_call_fndecl (stmt);
3352 if (fndecl
3353 && TREE_CODE (fndecl) == FUNCTION_DECL
3354 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3355 && !DECL_PURE_P (fndecl)
3356 && !TREE_READONLY (fndecl))
3357 {
3358 error ("invalid pure const state for function");
3359 return true;
3360 }
3361
3362 tree lhs = gimple_call_lhs (stmt);
3363 if (lhs
3364 && (!is_gimple_lvalue (lhs)
3365 || verify_types_in_gimple_reference (lhs, true)))
3366 {
3367 error ("invalid LHS in gimple call");
3368 return true;
3369 }
3370
3371 if (lhs
3372 && gimple_call_ctrl_altering_p (stmt)
3373 && gimple_call_noreturn_p (stmt)
3374 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3375 {
3376 error ("LHS in noreturn call");
3377 return true;
3378 }
3379
3380 fntype = gimple_call_fntype (stmt);
3381 if (fntype
3382 && lhs
3383 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3384 /* ??? At least C++ misses conversions at assignments from
3385 void * call results.
3386 ??? Java is completely off. Especially with functions
3387 returning java.lang.Object.
3388 For now simply allow arbitrary pointer type conversions. */
3389 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3390 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3391 {
3392 error ("invalid conversion in gimple call");
3393 debug_generic_stmt (TREE_TYPE (lhs));
3394 debug_generic_stmt (TREE_TYPE (fntype));
3395 return true;
3396 }
3397
3398 if (gimple_call_chain (stmt)
3399 && !is_gimple_val (gimple_call_chain (stmt)))
3400 {
3401 error ("invalid static chain in gimple call");
3402 debug_generic_stmt (gimple_call_chain (stmt));
3403 return true;
3404 }
3405
3406 /* If there is a static chain argument, the call should either be
3407 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3408 if (gimple_call_chain (stmt)
3409 && fndecl
3410 && !DECL_STATIC_CHAIN (fndecl))
3411 {
3412 error ("static chain with function that doesn%'t use one");
3413 return true;
3414 }
3415
3416 /* ??? The C frontend passes unpromoted arguments in case it
3417 didn't see a function declaration before the call. So for now
3418 leave the call arguments mostly unverified. Once we gimplify
3419 unit-at-a-time we have a chance to fix this. */
3420
3421 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3422 {
3423 tree arg = gimple_call_arg (stmt, i);
3424 if ((is_gimple_reg_type (TREE_TYPE (arg))
3425 && !is_gimple_val (arg))
3426 || (!is_gimple_reg_type (TREE_TYPE (arg))
3427 && !is_gimple_lvalue (arg)))
3428 {
3429 error ("invalid argument to gimple call");
3430 debug_generic_expr (arg);
3431 return true;
3432 }
3433 }
3434
3435 return false;
3436 }
3437
3438 /* Verifies the gimple comparison with the result type TYPE and
3439 the operands OP0 and OP1. */
3440
3441 static bool
3442 verify_gimple_comparison (tree type, tree op0, tree op1)
3443 {
3444 tree op0_type = TREE_TYPE (op0);
3445 tree op1_type = TREE_TYPE (op1);
3446
3447 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3448 {
3449 error ("invalid operands in gimple comparison");
3450 return true;
3451 }
3452
3453 /* For comparisons there is no separate operation type giving the
3454 effective type the comparison is carried out in. Instead
3455 we require that either the first operand is trivially
3456 convertible into the second, or the other way around.
3457 Because we special-case pointers to void we allow
3458 comparisons of pointers with the same mode as well. */
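/* For illustration (hypothetical types): comparing an int * with a
   void * of the same mode is accepted even though neither type
   trivially converts to the other, while comparing two integer
   types of different precision is rejected. */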
3459 if (!useless_type_conversion_p (op0_type, op1_type)
3460 && !useless_type_conversion_p (op1_type, op0_type)
3461 && (!POINTER_TYPE_P (op0_type)
3462 || !POINTER_TYPE_P (op1_type)
3463 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3464 {
3465 error ("mismatching comparison operand types");
3466 debug_generic_expr (op0_type);
3467 debug_generic_expr (op1_type);
3468 return true;
3469 }
3470
3471 /* The resulting type of a comparison may be an effective boolean type. */
3472 if (INTEGRAL_TYPE_P (type)
3473 && (TREE_CODE (type) == BOOLEAN_TYPE
3474 || TYPE_PRECISION (type) == 1))
3475 {
3476 if (TREE_CODE (op0_type) == VECTOR_TYPE
3477 || TREE_CODE (op1_type) == VECTOR_TYPE)
3478 {
3479 error ("vector comparison returning a boolean");
3480 debug_generic_expr (op0_type);
3481 debug_generic_expr (op1_type);
3482 return true;
3483 }
3484 }
3485 /* Or a boolean vector type with the same element count
3486 as the comparison operand types. */
3487 else if (TREE_CODE (type) == VECTOR_TYPE
3488 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3489 {
3490 if (TREE_CODE (op0_type) != VECTOR_TYPE
3491 || TREE_CODE (op1_type) != VECTOR_TYPE)
3492 {
3493 error ("non-vector operands in vector comparison");
3494 debug_generic_expr (op0_type);
3495 debug_generic_expr (op1_type);
3496 return true;
3497 }
3498
3499 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3500 {
3501 error ("invalid vector comparison resulting type");
3502 debug_generic_expr (type);
3503 return true;
3504 }
3505 }
3506 else
3507 {
3508 error ("bogus comparison result type");
3509 debug_generic_expr (type);
3510 return true;
3511 }
3512
3513 return false;
3514 }
3515
3516 /* Verify a gimple assignment statement STMT with a unary rhs.
3517 Returns true if anything is wrong. */
3518
3519 static bool
3520 verify_gimple_assign_unary (gassign *stmt)
3521 {
3522 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3523 tree lhs = gimple_assign_lhs (stmt);
3524 tree lhs_type = TREE_TYPE (lhs);
3525 tree rhs1 = gimple_assign_rhs1 (stmt);
3526 tree rhs1_type = TREE_TYPE (rhs1);
3527
3528 if (!is_gimple_reg (lhs))
3529 {
3530 error ("non-register as LHS of unary operation");
3531 return true;
3532 }
3533
3534 if (!is_gimple_val (rhs1))
3535 {
3536 error ("invalid operand in unary operation");
3537 return true;
3538 }
3539
3540 /* First handle conversions. */
3541 switch (rhs_code)
3542 {
3543 CASE_CONVERT:
3544 {
3545 /* Allow conversions from pointer type to integral type only if
3546 there is no sign or zero extension involved.
3547 For targets where the precision of ptrofftype doesn't match that
3548 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3549 if ((POINTER_TYPE_P (lhs_type)
3550 && INTEGRAL_TYPE_P (rhs1_type))
3551 || (POINTER_TYPE_P (rhs1_type)
3552 && INTEGRAL_TYPE_P (lhs_type)
3553 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3554 || ptrofftype_p (sizetype))))
3555 return false;
3556
3557 /* Allow conversion from integral to offset type and vice versa. */
3558 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3559 && INTEGRAL_TYPE_P (rhs1_type))
3560 || (INTEGRAL_TYPE_P (lhs_type)
3561 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3562 return false;
3563
3564 /* Otherwise assert we are converting between types of the
3565 same kind. */
3566 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3567 {
3568 error ("invalid types in nop conversion");
3569 debug_generic_expr (lhs_type);
3570 debug_generic_expr (rhs1_type);
3571 return true;
3572 }
3573
3574 return false;
3575 }
3576
3577 case ADDR_SPACE_CONVERT_EXPR:
3578 {
3579 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3580 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3581 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3582 {
3583 error ("invalid types in address space conversion");
3584 debug_generic_expr (lhs_type);
3585 debug_generic_expr (rhs1_type);
3586 return true;
3587 }
3588
3589 return false;
3590 }
3591
3592 case FIXED_CONVERT_EXPR:
3593 {
3594 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3595 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3596 {
3597 error ("invalid types in fixed-point conversion");
3598 debug_generic_expr (lhs_type);
3599 debug_generic_expr (rhs1_type);
3600 return true;
3601 }
3602
3603 return false;
3604 }
3605
3606 case FLOAT_EXPR:
3607 {
3608 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3609 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3610 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3611 {
3612 error ("invalid types in conversion to floating point");
3613 debug_generic_expr (lhs_type);
3614 debug_generic_expr (rhs1_type);
3615 return true;
3616 }
3617
3618 return false;
3619 }
3620
3621 case FIX_TRUNC_EXPR:
3622 {
3623 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3624 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3625 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3626 {
3627 error ("invalid types in conversion to integer");
3628 debug_generic_expr (lhs_type);
3629 debug_generic_expr (rhs1_type);
3630 return true;
3631 }
3632
3633 return false;
3634 }
3635 case REDUC_MAX_EXPR:
3636 case REDUC_MIN_EXPR:
3637 case REDUC_PLUS_EXPR:
3638 if (!VECTOR_TYPE_P (rhs1_type)
3639 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3640 {
3641 error ("reduction should convert from vector to element type");
3642 debug_generic_expr (lhs_type);
3643 debug_generic_expr (rhs1_type);
3644 return true;
3645 }
3646 return false;
3647
3648 case VEC_UNPACK_HI_EXPR:
3649 case VEC_UNPACK_LO_EXPR:
3650 case VEC_UNPACK_FLOAT_HI_EXPR:
3651 case VEC_UNPACK_FLOAT_LO_EXPR:
3652 /* FIXME. */
3653 return false;
3654
3655 case NEGATE_EXPR:
3656 case ABS_EXPR:
3657 case BIT_NOT_EXPR:
3658 case PAREN_EXPR:
3659 case CONJ_EXPR:
3660 break;
3661
3662 default:
3663 gcc_unreachable ();
3664 }
3665
3666 /* For the remaining codes assert there is no conversion involved. */
3667 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3668 {
3669 error ("non-trivial conversion in unary operation");
3670 debug_generic_expr (lhs_type);
3671 debug_generic_expr (rhs1_type);
3672 return true;
3673 }
3674
3675 return false;
3676 }
3677
3678 /* Verify a gimple assignment statement STMT with a binary rhs.
3679 Returns true if anything is wrong. */
3680
3681 static bool
3682 verify_gimple_assign_binary (gassign *stmt)
3683 {
3684 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3685 tree lhs = gimple_assign_lhs (stmt);
3686 tree lhs_type = TREE_TYPE (lhs);
3687 tree rhs1 = gimple_assign_rhs1 (stmt);
3688 tree rhs1_type = TREE_TYPE (rhs1);
3689 tree rhs2 = gimple_assign_rhs2 (stmt);
3690 tree rhs2_type = TREE_TYPE (rhs2);
3691
3692 if (!is_gimple_reg (lhs))
3693 {
3694 error ("non-register as LHS of binary operation");
3695 return true;
3696 }
3697
3698 if (!is_gimple_val (rhs1)
3699 || !is_gimple_val (rhs2))
3700 {
3701 error ("invalid operands in binary operation");
3702 return true;
3703 }
3704
3705 /* First handle operations that involve different types. */
3706 switch (rhs_code)
3707 {
3708 case COMPLEX_EXPR:
3709 {
3710 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3711 || !(INTEGRAL_TYPE_P (rhs1_type)
3712 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3713 || !(INTEGRAL_TYPE_P (rhs2_type)
3714 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3715 {
3716 error ("type mismatch in complex expression");
3717 debug_generic_expr (lhs_type);
3718 debug_generic_expr (rhs1_type);
3719 debug_generic_expr (rhs2_type);
3720 return true;
3721 }
3722
3723 return false;
3724 }
3725
3726 case LSHIFT_EXPR:
3727 case RSHIFT_EXPR:
3728 case LROTATE_EXPR:
3729 case RROTATE_EXPR:
3730 {
3731 /* Shifts and rotates are ok on integral types, fixed-point
3732 types and integer vector types. */
3733 if ((!INTEGRAL_TYPE_P (rhs1_type)
3734 && !FIXED_POINT_TYPE_P (rhs1_type)
3735 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3736 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3737 || (!INTEGRAL_TYPE_P (rhs2_type)
3738 /* Vector shifts of vectors are also ok. */
3739 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3740 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3741 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3742 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3743 || !useless_type_conversion_p (lhs_type, rhs1_type))
3744 {
3745 error ("type mismatch in shift expression");
3746 debug_generic_expr (lhs_type);
3747 debug_generic_expr (rhs1_type);
3748 debug_generic_expr (rhs2_type);
3749 return true;
3750 }
3751
3752 return false;
3753 }
3754
3755 case WIDEN_LSHIFT_EXPR:
3756 {
3757 if (!INTEGRAL_TYPE_P (lhs_type)
3758 || !INTEGRAL_TYPE_P (rhs1_type)
3759 || TREE_CODE (rhs2) != INTEGER_CST
3760 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3761 {
3762 error ("type mismatch in widening vector shift expression");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 debug_generic_expr (rhs2_type);
3766 return true;
3767 }
3768
3769 return false;
3770 }
3771
3772 case VEC_WIDEN_LSHIFT_HI_EXPR:
3773 case VEC_WIDEN_LSHIFT_LO_EXPR:
3774 {
3775 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3776 || TREE_CODE (lhs_type) != VECTOR_TYPE
3777 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3778 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3779 || TREE_CODE (rhs2) != INTEGER_CST
3780 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3781 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3782 {
3783 error ("type mismatch in widening vector shift expression");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 debug_generic_expr (rhs2_type);
3787 return true;
3788 }
3789
3790 return false;
3791 }
3792
3793 case PLUS_EXPR:
3794 case MINUS_EXPR:
3795 {
3796 tree lhs_etype = lhs_type;
3797 tree rhs1_etype = rhs1_type;
3798 tree rhs2_etype = rhs2_type;
3799 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3800 {
3801 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3802 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3803 {
3804 error ("invalid non-vector operands to vector valued plus");
3805 return true;
3806 }
3807 lhs_etype = TREE_TYPE (lhs_type);
3808 rhs1_etype = TREE_TYPE (rhs1_type);
3809 rhs2_etype = TREE_TYPE (rhs2_type);
3810 }
3811 if (POINTER_TYPE_P (lhs_etype)
3812 || POINTER_TYPE_P (rhs1_etype)
3813 || POINTER_TYPE_P (rhs2_etype))
3814 {
3815 error ("invalid (pointer) operands to plus/minus");
3816 return true;
3817 }
3818
3819 /* Continue with generic binary expression handling. */
3820 break;
3821 }
3822
3823 case POINTER_PLUS_EXPR:
3824 {
3825 if (!POINTER_TYPE_P (rhs1_type)
3826 || !useless_type_conversion_p (lhs_type, rhs1_type)
3827 || !ptrofftype_p (rhs2_type))
3828 {
3829 error ("type mismatch in pointer plus expression");
3830 debug_generic_stmt (lhs_type);
3831 debug_generic_stmt (rhs1_type);
3832 debug_generic_stmt (rhs2_type);
3833 return true;
3834 }
3835
3836 return false;
3837 }
3838
3839 case TRUTH_ANDIF_EXPR:
3840 case TRUTH_ORIF_EXPR:
3841 case TRUTH_AND_EXPR:
3842 case TRUTH_OR_EXPR:
3843 case TRUTH_XOR_EXPR:
3844
3845 gcc_unreachable ();
3846
3847 case LT_EXPR:
3848 case LE_EXPR:
3849 case GT_EXPR:
3850 case GE_EXPR:
3851 case EQ_EXPR:
3852 case NE_EXPR:
3853 case UNORDERED_EXPR:
3854 case ORDERED_EXPR:
3855 case UNLT_EXPR:
3856 case UNLE_EXPR:
3857 case UNGT_EXPR:
3858 case UNGE_EXPR:
3859 case UNEQ_EXPR:
3860 case LTGT_EXPR:
3861 /* Comparisons are also binary, but the result type is not
3862 connected to the operand types. */
3863 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3864
3865 case WIDEN_MULT_EXPR:
3866 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3867 return true;
3868 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3869 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3870
3871 case WIDEN_SUM_EXPR:
3872 case VEC_WIDEN_MULT_HI_EXPR:
3873 case VEC_WIDEN_MULT_LO_EXPR:
3874 case VEC_WIDEN_MULT_EVEN_EXPR:
3875 case VEC_WIDEN_MULT_ODD_EXPR:
3876 case VEC_PACK_TRUNC_EXPR:
3877 case VEC_PACK_SAT_EXPR:
3878 case VEC_PACK_FIX_TRUNC_EXPR:
3879 /* FIXME. */
3880 return false;
3881
3882 case MULT_EXPR:
3883 case MULT_HIGHPART_EXPR:
3884 case TRUNC_DIV_EXPR:
3885 case CEIL_DIV_EXPR:
3886 case FLOOR_DIV_EXPR:
3887 case ROUND_DIV_EXPR:
3888 case TRUNC_MOD_EXPR:
3889 case CEIL_MOD_EXPR:
3890 case FLOOR_MOD_EXPR:
3891 case ROUND_MOD_EXPR:
3892 case RDIV_EXPR:
3893 case EXACT_DIV_EXPR:
3894 case MIN_EXPR:
3895 case MAX_EXPR:
3896 case BIT_IOR_EXPR:
3897 case BIT_XOR_EXPR:
3898 case BIT_AND_EXPR:
3899 /* Continue with generic binary expression handling. */
3900 break;
3901
3902 default:
3903 gcc_unreachable ();
3904 }
3905
3906 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3907 || !useless_type_conversion_p (lhs_type, rhs2_type))
3908 {
3909 error ("type mismatch in binary expression");
3910 debug_generic_stmt (lhs_type);
3911 debug_generic_stmt (rhs1_type);
3912 debug_generic_stmt (rhs2_type);
3913 return true;
3914 }
3915
3916 return false;
3917 }
3918
3919 /* Verify a gimple assignment statement STMT with a ternary rhs.
3920 Returns true if anything is wrong. */
3921
3922 static bool
3923 verify_gimple_assign_ternary (gassign *stmt)
3924 {
3925 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3926 tree lhs = gimple_assign_lhs (stmt);
3927 tree lhs_type = TREE_TYPE (lhs);
3928 tree rhs1 = gimple_assign_rhs1 (stmt);
3929 tree rhs1_type = TREE_TYPE (rhs1);
3930 tree rhs2 = gimple_assign_rhs2 (stmt);
3931 tree rhs2_type = TREE_TYPE (rhs2);
3932 tree rhs3 = gimple_assign_rhs3 (stmt);
3933 tree rhs3_type = TREE_TYPE (rhs3);
3934
3935 if (!is_gimple_reg (lhs))
3936 {
3937 error ("non-register as LHS of ternary operation");
3938 return true;
3939 }
3940
3941 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3942 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3943 || !is_gimple_val (rhs2)
3944 || !is_gimple_val (rhs3))
3945 {
3946 error ("invalid operands in ternary operation");
3947 return true;
3948 }
3949
3950 /* First handle operations that involve different types. */
3951 switch (rhs_code)
3952 {
3953 case WIDEN_MULT_PLUS_EXPR:
3954 case WIDEN_MULT_MINUS_EXPR:
3955 if ((!INTEGRAL_TYPE_P (rhs1_type)
3956 && !FIXED_POINT_TYPE_P (rhs1_type))
3957 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3958 || !useless_type_conversion_p (lhs_type, rhs3_type)
3959 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3960 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3961 {
3962 error ("type mismatch in widening multiply-accumulate expression");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs1_type);
3965 debug_generic_expr (rhs2_type);
3966 debug_generic_expr (rhs3_type);
3967 return true;
3968 }
3969 break;
3970
3971 case FMA_EXPR:
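/* E.g. (illustrative): an FMA computing a_2 * b_3 + c_4 is well
   formed only if a_2, b_3, c_4 and the result all share one type.  */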
3972 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3973 || !useless_type_conversion_p (lhs_type, rhs2_type)
3974 || !useless_type_conversion_p (lhs_type, rhs3_type))
3975 {
3976 error ("type mismatch in fused multiply-add expression");
3977 debug_generic_expr (lhs_type);
3978 debug_generic_expr (rhs1_type);
3979 debug_generic_expr (rhs2_type);
3980 debug_generic_expr (rhs3_type);
3981 return true;
3982 }
3983 break;
3984
3985 case VEC_COND_EXPR:
3986 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3987 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3988 != TYPE_VECTOR_SUBPARTS (lhs_type))
3989 {
3990 error ("the first argument of a VEC_COND_EXPR must be of a "
3991 "boolean vector type of the same number of elements "
3992 "as the result");
3993 debug_generic_expr (lhs_type);
3994 debug_generic_expr (rhs1_type);
3995 return true;
3996 }
3997 /* Fallthrough. */
3998 case COND_EXPR:
3999 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4000 || !useless_type_conversion_p (lhs_type, rhs3_type))
4001 {
4002 error ("type mismatch in conditional expression");
4003 debug_generic_expr (lhs_type);
4004 debug_generic_expr (rhs2_type);
4005 debug_generic_expr (rhs3_type);
4006 return true;
4007 }
4008 break;
4009
4010 case VEC_PERM_EXPR:
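/* E.g. (illustrative): "v4si _1 = VEC_PERM_EXPR <_2, _3, { 0, 4, 1, 5 }>"
   interleaves the low halves of _2 and _3.  The checks below require
   all four vectors to have the same element count and the mask to
   have integral elements as wide as the data elements.  */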
4011 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4012 || !useless_type_conversion_p (lhs_type, rhs2_type))
4013 {
4014 error ("type mismatch in vector permute expression");
4015 debug_generic_expr (lhs_type);
4016 debug_generic_expr (rhs1_type);
4017 debug_generic_expr (rhs2_type);
4018 debug_generic_expr (rhs3_type);
4019 return true;
4020 }
4021
4022 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4023 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4024 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4025 {
4026 error ("vector types expected in vector permute expression");
4027 debug_generic_expr (lhs_type);
4028 debug_generic_expr (rhs1_type);
4029 debug_generic_expr (rhs2_type);
4030 debug_generic_expr (rhs3_type);
4031 return true;
4032 }
4033
4034 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4035 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4036 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4037 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4038 != TYPE_VECTOR_SUBPARTS (lhs_type))
4039 {
4040 error ("vectors with different element number found "
4041 "in vector permute expression");
4042 debug_generic_expr (lhs_type);
4043 debug_generic_expr (rhs1_type);
4044 debug_generic_expr (rhs2_type);
4045 debug_generic_expr (rhs3_type);
4046 return true;
4047 }
4048
4049 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4050 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4051 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4052 {
4053 error ("invalid mask type in vector permute expression");
4054 debug_generic_expr (lhs_type);
4055 debug_generic_expr (rhs1_type);
4056 debug_generic_expr (rhs2_type);
4057 debug_generic_expr (rhs3_type);
4058 return true;
4059 }
4060
4061 return false;
4062
4063 case SAD_EXPR:
4064 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4065 || !useless_type_conversion_p (lhs_type, rhs3_type)
4066 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4067 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4068 {
4069 error ("type mismatch in sad expression");
4070 debug_generic_expr (lhs_type);
4071 debug_generic_expr (rhs1_type);
4072 debug_generic_expr (rhs2_type);
4073 debug_generic_expr (rhs3_type);
4074 return true;
4075 }
4076
4077 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4078 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4079 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4080 {
4081 error ("vector types expected in sad expression");
4082 debug_generic_expr (lhs_type);
4083 debug_generic_expr (rhs1_type);
4084 debug_generic_expr (rhs2_type);
4085 debug_generic_expr (rhs3_type);
4086 return true;
4087 }
4088
4089 return false;
4090
4091 case DOT_PROD_EXPR:
4092 case REALIGN_LOAD_EXPR:
4093 /* FIXME. */
4094 return false;
4095
4096 default:
4097 gcc_unreachable ();
4098 }
4099 return false;
4100 }
4101
4102 /* Verify a gimple assignment statement STMT with a single rhs.
4103 Returns true if anything is wrong. */
4104
4105 static bool
4106 verify_gimple_assign_single (gassign *stmt)
4107 {
4108 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4109 tree lhs = gimple_assign_lhs (stmt);
4110 tree lhs_type = TREE_TYPE (lhs);
4111 tree rhs1 = gimple_assign_rhs1 (stmt);
4112 tree rhs1_type = TREE_TYPE (rhs1);
4113 bool res = false;
4114
4115 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4116 {
4117 error ("non-trivial conversion at assignment");
4118 debug_generic_expr (lhs_type);
4119 debug_generic_expr (rhs1_type);
4120 return true;
4121 }
4122
4123 if (gimple_clobber_p (stmt)
4124 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4125 {
4126 error ("non-decl/MEM_REF LHS in clobber statement");
4127 debug_generic_expr (lhs);
4128 return true;
4129 }
4130
4131 if (handled_component_p (lhs)
4132 || TREE_CODE (lhs) == MEM_REF
4133 || TREE_CODE (lhs) == TARGET_MEM_REF)
4134 res |= verify_types_in_gimple_reference (lhs, true);
4135
4136 /* Special codes we cannot handle via their class. */
4137 switch (rhs_code)
4138 {
4139 case ADDR_EXPR:
4140 {
4141 tree op = TREE_OPERAND (rhs1, 0);
4142 if (!is_gimple_addressable (op))
4143 {
4144 error ("invalid operand in unary expression");
4145 return true;
4146 }
4147
4148 /* Technically there is no longer a need for matching types, but
4149 gimple hygiene asks for this check. In LTO we can end up
4150 combining incompatible units and thus end up with addresses
4151 of globals that change their type to a common one. */
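/* E.g. (illustrative): one unit may declare "extern int g[]" while
   another defines "int g[4]"; after LTO symbol merging the type of
   the address expression and the type of the merged decl need not
   match exactly.  */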
4152 if (!in_lto_p
4153 && !types_compatible_p (TREE_TYPE (op),
4154 TREE_TYPE (TREE_TYPE (rhs1)))
4155 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4156 TREE_TYPE (op)))
4157 {
4158 error ("type mismatch in address expression");
4159 debug_generic_stmt (TREE_TYPE (rhs1));
4160 debug_generic_stmt (TREE_TYPE (op));
4161 return true;
4162 }
4163
4164 return verify_types_in_gimple_reference (op, true);
4165 }
4166
4167 /* tcc_reference */
4168 case INDIRECT_REF:
4169 error ("INDIRECT_REF in gimple IL");
4170 return true;
4171
4172 case COMPONENT_REF:
4173 case BIT_FIELD_REF:
4174 case ARRAY_REF:
4175 case ARRAY_RANGE_REF:
4176 case VIEW_CONVERT_EXPR:
4177 case REALPART_EXPR:
4178 case IMAGPART_EXPR:
4179 case TARGET_MEM_REF:
4180 case MEM_REF:
4181 if (!is_gimple_reg (lhs)
4182 && is_gimple_reg_type (TREE_TYPE (lhs)))
4183 {
4184 error ("invalid rhs for gimple memory store");
4185 debug_generic_stmt (lhs);
4186 debug_generic_stmt (rhs1);
4187 return true;
4188 }
4189 return res || verify_types_in_gimple_reference (rhs1, false);
4190
4191 /* tcc_constant */
4192 case SSA_NAME:
4193 case INTEGER_CST:
4194 case REAL_CST:
4195 case FIXED_CST:
4196 case COMPLEX_CST:
4197 case VECTOR_CST:
4198 case STRING_CST:
4199 return res;
4200
4201 /* tcc_declaration */
4202 case CONST_DECL:
4203 return res;
4204 case VAR_DECL:
4205 case PARM_DECL:
4206 if (!is_gimple_reg (lhs)
4207 && !is_gimple_reg (rhs1)
4208 && is_gimple_reg_type (TREE_TYPE (lhs)))
4209 {
4210 error ("invalid rhs for gimple memory store");
4211 debug_generic_stmt (lhs);
4212 debug_generic_stmt (rhs1);
4213 return true;
4214 }
4215 return res;
4216
4217 case CONSTRUCTOR:
4218 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4219 {
4220 unsigned int i;
4221 tree elt_i, elt_v, elt_t = NULL_TREE;
4222
4223 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4224 return res;
4225 /* For vector CONSTRUCTORs we require that either it is an empty
4226 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4227 (then the element count must be correct to cover the whole
4228 outer vector and the index must be NULL on all elements), or it
4229 is a CONSTRUCTOR of scalar elements, where as an exception we
4230 allow a smaller number of elements (assuming zero filling) and
4231 consecutive indexes as compared to NULL indexes (such
4232 CONSTRUCTORs can appear in the IL from FEs). */
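/* For instance (illustrative, hypothetical IL):
     vector(4) int _1 = { _2, _3 };       <- two vector(2) int halves,
                                             covering all lanes exactly
     vector(4) int _4 = { _5, _6, _7 };   <- scalar elements, last lane
                                             implicitly zero filled
   Both forms pass the checks below; mixed element kinds, excess
   elements, or non-consecutive indexes are rejected.  */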
4233 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4234 {
4235 if (elt_t == NULL_TREE)
4236 {
4237 elt_t = TREE_TYPE (elt_v);
4238 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4239 {
4240 tree elt_t = TREE_TYPE (elt_v);
4241 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4242 TREE_TYPE (elt_t)))
4243 {
4244 error ("incorrect type of vector CONSTRUCTOR"
4245 " elements");
4246 debug_generic_stmt (rhs1);
4247 return true;
4248 }
4249 else if (CONSTRUCTOR_NELTS (rhs1)
4250 * TYPE_VECTOR_SUBPARTS (elt_t)
4251 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4252 {
4253 error ("incorrect number of vector CONSTRUCTOR"
4254 " elements");
4255 debug_generic_stmt (rhs1);
4256 return true;
4257 }
4258 }
4259 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4260 elt_t))
4261 {
4262 error ("incorrect type of vector CONSTRUCTOR elements");
4263 debug_generic_stmt (rhs1);
4264 return true;
4265 }
4266 else if (CONSTRUCTOR_NELTS (rhs1)
4267 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4268 {
4269 error ("incorrect number of vector CONSTRUCTOR elements");
4270 debug_generic_stmt (rhs1);
4271 return true;
4272 }
4273 }
4274 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4275 {
4276 error ("incorrect type of vector CONSTRUCTOR elements");
4277 debug_generic_stmt (rhs1);
4278 return true;
4279 }
4280 if (elt_i != NULL_TREE
4281 && (TREE_CODE (elt_t) == VECTOR_TYPE
4282 || TREE_CODE (elt_i) != INTEGER_CST
4283 || compare_tree_int (elt_i, i) != 0))
4284 {
4285 error ("vector CONSTRUCTOR with non-NULL element index");
4286 debug_generic_stmt (rhs1);
4287 return true;
4288 }
4289 if (!is_gimple_val (elt_v))
4290 {
4291 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4292 debug_generic_stmt (rhs1);
4293 return true;
4294 }
4295 }
4296 }
4297 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4298 {
4299 error ("non-vector CONSTRUCTOR with elements");
4300 debug_generic_stmt (rhs1);
4301 return true;
4302 }
4303 return res;
4304 case OBJ_TYPE_REF:
4305 case ASSERT_EXPR:
4306 case WITH_SIZE_EXPR:
4307 /* FIXME. */
4308 return res;
4309
4310 default:;
4311 }
4312
4313 return res;
4314 }
4315
4316 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4317 is a problem, otherwise false. */
4318
4319 static bool
4320 verify_gimple_assign (gassign *stmt)
4321 {
4322 switch (gimple_assign_rhs_class (stmt))
4323 {
4324 case GIMPLE_SINGLE_RHS:
4325 return verify_gimple_assign_single (stmt);
4326
4327 case GIMPLE_UNARY_RHS:
4328 return verify_gimple_assign_unary (stmt);
4329
4330 case GIMPLE_BINARY_RHS:
4331 return verify_gimple_assign_binary (stmt);
4332
4333 case GIMPLE_TERNARY_RHS:
4334 return verify_gimple_assign_ternary (stmt);
4335
4336 default:
4337 gcc_unreachable ();
4338 }
4339 }
4340
4341 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4342 is a problem, otherwise false. */
4343
4344 static bool
4345 verify_gimple_return (greturn *stmt)
4346 {
4347 tree op = gimple_return_retval (stmt);
4348 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4349
4350 /* We cannot test for present return values as we do not fix up missing
4351 return values from the original source. */
4352 if (op == NULL)
4353 return false;
4354
4355 if (!is_gimple_val (op)
4356 && TREE_CODE (op) != RESULT_DECL)
4357 {
4358 error ("invalid operand in return statement");
4359 debug_generic_stmt (op);
4360 return true;
4361 }
4362
4363 if ((TREE_CODE (op) == RESULT_DECL
4364 && DECL_BY_REFERENCE (op))
4365 || (TREE_CODE (op) == SSA_NAME
4366 && SSA_NAME_VAR (op)
4367 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4368 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4369 op = TREE_TYPE (op);
4370
4371 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4372 {
4373 error ("invalid conversion in return statement");
4374 debug_generic_stmt (restype);
4375 debug_generic_stmt (TREE_TYPE (op));
4376 return true;
4377 }
4378
4379 return false;
4380 }
4381
4382
4383 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4384 is a problem, otherwise false. */
4385
4386 static bool
4387 verify_gimple_goto (ggoto *stmt)
4388 {
4389 tree dest = gimple_goto_dest (stmt);
4390
4391 /* ??? We have two canonical forms of direct goto destinations, a
4392 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
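/* E.g. (illustrative): a direct "goto L;" carries a bare LABEL_DECL,
   whereas a computed goto such as "goto *p_1;" carries a
   pointer-typed GIMPLE value, for instance the address of a label
   taken with GNU C's &&L.  */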
4393 if (TREE_CODE (dest) != LABEL_DECL
4394 && (!is_gimple_val (dest)
4395 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4396 {
4397 error ("goto destination is neither a label nor a pointer");
4398 return true;
4399 }
4400
4401 return false;
4402 }
4403
4404 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4405 is a problem, otherwise false. */
4406
4407 static bool
4408 verify_gimple_switch (gswitch *stmt)
4409 {
4410 unsigned int i, n;
4411 tree elt, prev_upper_bound = NULL_TREE;
4412 tree index_type, elt_type = NULL_TREE;
4413
4414 if (!is_gimple_val (gimple_switch_index (stmt)))
4415 {
4416 error ("invalid operand to switch statement");
4417 debug_generic_stmt (gimple_switch_index (stmt));
4418 return true;
4419 }
4420
4421 index_type = TREE_TYPE (gimple_switch_index (stmt));
4422 if (! INTEGRAL_TYPE_P (index_type))
4423 {
4424 error ("non-integral type switch statement");
4425 debug_generic_expr (index_type);
4426 return true;
4427 }
4428
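/* Illustrative layout of a well-formed case vector (hypothetical):
     label 0: default        CASE_LOW == NULL_TREE
     label 1: case 1         CASE_LOW == 1, CASE_HIGH == NULL_TREE
     label 2: case 3 ... 7   CASE_LOW == 3, CASE_HIGH == 7
   The checks below enforce exactly this shape: the default label
   comes first, every other label has a CASE_LOW, ranges satisfy
   CASE_LOW < CASE_HIGH, all labels share one type, and lower
   bounds strictly increase.  */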
4429 elt = gimple_switch_label (stmt, 0);
4430 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4431 {
4432 error ("invalid default case label in switch statement");
4433 debug_generic_expr (elt);
4434 return true;
4435 }
4436
4437 n = gimple_switch_num_labels (stmt);
4438 for (i = 1; i < n; i++)
4439 {
4440 elt = gimple_switch_label (stmt, i);
4441
4442 if (! CASE_LOW (elt))
4443 {
4444 error ("invalid case label in switch statement");
4445 debug_generic_expr (elt);
4446 return true;
4447 }
4448 if (CASE_HIGH (elt)
4449 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4450 {
4451 error ("invalid case range in switch statement");
4452 debug_generic_expr (elt);
4453 return true;
4454 }
4455
4456 if (elt_type)
4457 {
4458 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4459 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4460 {
4461 error ("type mismatch for case label in switch statement");
4462 debug_generic_expr (elt);
4463 return true;
4464 }
4465 }
4466 else
4467 {
4468 elt_type = TREE_TYPE (CASE_LOW (elt));
4469 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4470 {
4471 error ("type precision mismatch in switch statement");
4472 return true;
4473 }
4474 }
4475
4476 if (prev_upper_bound)
4477 {
4478 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4479 {
4480 error ("case labels not sorted in switch statement");
4481 return true;
4482 }
4483 }
4484
4485 prev_upper_bound = CASE_HIGH (elt);
4486 if (! prev_upper_bound)
4487 prev_upper_bound = CASE_LOW (elt);
4488 }
4489
4490 return false;
4491 }
4492
4493 /* Verify a gimple debug statement STMT.
4494 Returns true if anything is wrong. */
4495
4496 static bool
4497 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4498 {
4499 /* There isn't much that could be wrong in a gimple debug stmt. A
4500 gimple debug bind stmt, for example, maps a tree that's usually
4501 a VAR_DECL or a PARM_DECL, but could also be some scalarized
4502 component or member of an aggregate type, to another tree that
4503 can be an arbitrary expression. These stmts expand into debug
4504 insns, and are converted to debug notes by var-tracking.c. */
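/* In dump form a debug bind looks like (illustrative):
     # DEBUG x => y_1 + 1
   binding the user variable x to an expression over SSA names.  */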
4505 return false;
4506 }
4507
4508 /* Verify a gimple label statement STMT.
4509 Returns true if anything is wrong. */
4510
4511 static bool
4512 verify_gimple_label (glabel *stmt)
4513 {
4514 tree decl = gimple_label_label (stmt);
4515 int uid;
4516 bool err = false;
4517
4518 if (TREE_CODE (decl) != LABEL_DECL)
4519 return true;
4520 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4521 && DECL_CONTEXT (decl) != current_function_decl)
4522 {
4523 error ("label's context is not the current function decl");
4524 err |= true;
4525 }
4526
4527 uid = LABEL_DECL_UID (decl);
4528 if (cfun->cfg
4529 && (uid == -1
4530 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4531 {
4532 error ("incorrect entry in label_to_block_map");
4533 err |= true;
4534 }
4535
4536 uid = EH_LANDING_PAD_NR (decl);
4537 if (uid)
4538 {
4539 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4540 if (decl != lp->post_landing_pad)
4541 {
4542 error ("incorrect setting of landing pad number");
4543 err |= true;
4544 }
4545 }
4546
4547 return err;
4548 }
4549
4550 /* Verify a gimple cond statement STMT.
4551 Returns true if anything is wrong. */
4552
4553 static bool
4554 verify_gimple_cond (gcond *stmt)
4555 {
4556 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4557 {
4558 error ("invalid comparison code in gimple cond");
4559 return true;
4560 }
4561 if (!(!gimple_cond_true_label (stmt)
4562 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4563 || !(!gimple_cond_false_label (stmt)
4564 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4565 {
4566 error ("invalid labels in gimple cond");
4567 return true;
4568 }
4569
4570 return verify_gimple_comparison (boolean_type_node,
4571 gimple_cond_lhs (stmt),
4572 gimple_cond_rhs (stmt));
4573 }
4574
4575 /* Verify the GIMPLE statement STMT. Returns true if there is an
4576 error, otherwise false. */
4577
4578 static bool
4579 verify_gimple_stmt (gimple *stmt)
4580 {
4581 switch (gimple_code (stmt))
4582 {
4583 case GIMPLE_ASSIGN:
4584 return verify_gimple_assign (as_a <gassign *> (stmt));
4585
4586 case GIMPLE_LABEL:
4587 return verify_gimple_label (as_a <glabel *> (stmt));
4588
4589 case GIMPLE_CALL:
4590 return verify_gimple_call (as_a <gcall *> (stmt));
4591
4592 case GIMPLE_COND:
4593 return verify_gimple_cond (as_a <gcond *> (stmt));
4594
4595 case GIMPLE_GOTO:
4596 return verify_gimple_goto (as_a <ggoto *> (stmt));
4597
4598 case GIMPLE_SWITCH:
4599 return verify_gimple_switch (as_a <gswitch *> (stmt));
4600
4601 case GIMPLE_RETURN:
4602 return verify_gimple_return (as_a <greturn *> (stmt));
4603
4604 case GIMPLE_ASM:
4605 return false;
4606
4607 case GIMPLE_TRANSACTION:
4608 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4609
4610 /* Tuples that do not have tree operands. */
4611 case GIMPLE_NOP:
4612 case GIMPLE_PREDICT:
4613 case GIMPLE_RESX:
4614 case GIMPLE_EH_DISPATCH:
4615 case GIMPLE_EH_MUST_NOT_THROW:
4616 return false;
4617
4618 CASE_GIMPLE_OMP:
4619 /* OpenMP directives are validated by the FE and never operated
4620 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4621 non-gimple expressions when the main index variable has had
4622 its address taken. This does not affect the loop itself
4623 because the header of a GIMPLE_OMP_FOR is merely used to determine
4624 how to set up the parallel iteration. */
4625 return false;
4626
4627 case GIMPLE_DEBUG:
4628 return verify_gimple_debug (stmt);
4629
4630 default:
4631 gcc_unreachable ();
4632 }
4633 }
4634
4635 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4636 and false otherwise. */
4637
4638 static bool
4639 verify_gimple_phi (gimple *phi)
4640 {
4641 bool err = false;
4642 unsigned i;
4643 tree phi_result = gimple_phi_result (phi);
4644 bool virtual_p;
4645
4646 if (!phi_result)
4647 {
4648 error ("invalid PHI result");
4649 return true;
4650 }
4651
4652 virtual_p = virtual_operand_p (phi_result);
4653 if (TREE_CODE (phi_result) != SSA_NAME
4654 || (virtual_p
4655 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4656 {
4657 error ("invalid PHI result");
4658 err = true;
4659 }
4660
4661 for (i = 0; i < gimple_phi_num_args (phi); i++)
4662 {
4663 tree t = gimple_phi_arg_def (phi, i);
4664
4665 if (!t)
4666 {
4667 error ("missing PHI def");
4668 err |= true;
4669 continue;
4670 }
4671 /* Addressable variables do have SSA_NAMEs but they
4672 are not considered gimple values. */
4673 else if ((TREE_CODE (t) == SSA_NAME
4674 && virtual_p != virtual_operand_p (t))
4675 || (virtual_p
4676 && (TREE_CODE (t) != SSA_NAME
4677 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4678 || (!virtual_p
4679 && !is_gimple_val (t)))
4680 {
4681 error ("invalid PHI argument");
4682 debug_generic_expr (t);
4683 err |= true;
4684 }
4685 #ifdef ENABLE_TYPES_CHECKING
4686 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4687 {
4688 error ("incompatible types in PHI argument %u", i);
4689 debug_generic_stmt (TREE_TYPE (phi_result));
4690 debug_generic_stmt (TREE_TYPE (t));
4691 err |= true;
4692 }
4693 #endif
4694 }
4695
4696 return err;
4697 }
4698
4699 /* Verify the GIMPLE statements inside the sequence STMTS. */
4700
4701 static bool
4702 verify_gimple_in_seq_2 (gimple_seq stmts)
4703 {
4704 gimple_stmt_iterator ittr;
4705 bool err = false;
4706
4707 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4708 {
4709 gimple *stmt = gsi_stmt (ittr);
4710
4711 switch (gimple_code (stmt))
4712 {
4713 case GIMPLE_BIND:
4714 err |= verify_gimple_in_seq_2 (
4715 gimple_bind_body (as_a <gbind *> (stmt)));
4716 break;
4717
4718 case GIMPLE_TRY:
4719 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4720 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4721 break;
4722
4723 case GIMPLE_EH_FILTER:
4724 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4725 break;
4726
4727 case GIMPLE_EH_ELSE:
4728 {
4729 geh_else *eh_else = as_a <geh_else *> (stmt);
4730 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4731 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4732 }
4733 break;
4734
4735 case GIMPLE_CATCH:
4736 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4737 as_a <gcatch *> (stmt)));
4738 break;
4739
4740 case GIMPLE_TRANSACTION:
4741 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4742 break;
4743
4744 default:
4745 {
4746 bool err2 = verify_gimple_stmt (stmt);
4747 if (err2)
4748 debug_gimple_stmt (stmt);
4749 err |= err2;
4750 }
4751 }
4752 }
4753
4754 return err;
4755 }
4756
4757 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4758 is a problem, otherwise false. */
4759
4760 static bool
4761 verify_gimple_transaction (gtransaction *stmt)
4762 {
4763 tree lab;
4764
4765 lab = gimple_transaction_label_norm (stmt);
4766 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4767 return true;
4768 lab = gimple_transaction_label_uninst (stmt);
4769 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4770 return true;
4771 lab = gimple_transaction_label_over (stmt);
4772 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4773 return true;
4774
4775 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4776 }
4777
4778
4779 /* Verify the GIMPLE statements inside the statement list STMTS. */
4780
4781 DEBUG_FUNCTION void
4782 verify_gimple_in_seq (gimple_seq stmts)
4783 {
4784 timevar_push (TV_TREE_STMT_VERIFY);
4785 if (verify_gimple_in_seq_2 (stmts))
4786 internal_error ("verify_gimple failed");
4787 timevar_pop (TV_TREE_STMT_VERIFY);
4788 }
4789
4790 /* Return true when T can be shared. */
4791
4792 static bool
4793 tree_node_can_be_shared (tree t)
4794 {
4795 if (IS_TYPE_OR_DECL_P (t)
4796 || is_gimple_min_invariant (t)
4797 || TREE_CODE (t) == SSA_NAME
4798 || t == error_mark_node
4799 || TREE_CODE (t) == IDENTIFIER_NODE)
4800 return true;
4801
4802 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4803 return true;
4804
4805 if (DECL_P (t))
4806 return true;
4807
4808 return false;
4809 }
4810
4811 /* Called via walk_tree. Verify tree sharing. */
4812
4813 static tree
4814 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4815 {
4816 hash_set<void *> *visited = (hash_set<void *> *) data;
4817
4818 if (tree_node_can_be_shared (*tp))
4819 {
4820 *walk_subtrees = false;
4821 return NULL;
4822 }
4823
4824 if (visited->add (*tp))
4825 return *tp;
4826
4827 return NULL;
4828 }
4829
4830 /* Called via walk_gimple_stmt. Verify tree sharing. */
4831
4832 static tree
4833 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4834 {
4835 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4836 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4837 }
4838
4839 static bool eh_error_found;
4840 bool
4841 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4842 hash_set<gimple *> *visited)
4843 {
4844 if (!visited->contains (stmt))
4845 {
4846 error ("dead STMT in EH table");
4847 debug_gimple_stmt (stmt);
4848 eh_error_found = true;
4849 }
4850 return true;
4851 }
4852
4853 /* Verify that location LOC's block is in BLOCKS. */
4854
4855 static bool
4856 verify_location (hash_set<tree> *blocks, location_t loc)
4857 {
4858 tree block = LOCATION_BLOCK (loc);
4859 if (block != NULL_TREE
4860 && !blocks->contains (block))
4861 {
4862 error ("location references block not in block tree");
4863 return true;
4864 }
4865 if (block != NULL_TREE)
4866 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4867 return false;
4868 }
4869
4870 /* Called via walk_tree. Verify that expressions have no blocks. */
4871
4872 static tree
4873 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4874 {
4875 if (!EXPR_P (*tp))
4876 {
4877 *walk_subtrees = false;
4878 return NULL;
4879 }
4880
4881 location_t loc = EXPR_LOCATION (*tp);
4882 if (LOCATION_BLOCK (loc) != NULL)
4883 return *tp;
4884
4885 return NULL;
4886 }
4887
4888 /* Called via walk_tree. Verify locations of expressions. */
4889
4890 static tree
4891 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4892 {
4893 hash_set<tree> *blocks = (hash_set<tree> *) data;
4894
4895 if (TREE_CODE (*tp) == VAR_DECL
4896 && DECL_HAS_DEBUG_EXPR_P (*tp))
4897 {
4898 tree t = DECL_DEBUG_EXPR (*tp);
4899 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4900 if (addr)
4901 return addr;
4902 }
4903 if ((TREE_CODE (*tp) == VAR_DECL
4904 || TREE_CODE (*tp) == PARM_DECL
4905 || TREE_CODE (*tp) == RESULT_DECL)
4906 && DECL_HAS_VALUE_EXPR_P (*tp))
4907 {
4908 tree t = DECL_VALUE_EXPR (*tp);
4909 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4910 if (addr)
4911 return addr;
4912 }
4913
4914 if (!EXPR_P (*tp))
4915 {
4916 *walk_subtrees = false;
4917 return NULL;
4918 }
4919
4920 location_t loc = EXPR_LOCATION (*tp);
4921 if (verify_location (blocks, loc))
4922 return *tp;
4923
4924 return NULL;
4925 }
4926
4927 /* Called via walk_gimple_op. Verify locations of expressions. */
4928
4929 static tree
4930 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4931 {
4932 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4933 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4934 }
4935
4936 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4937
4938 static void
4939 collect_subblocks (hash_set<tree> *blocks, tree block)
4940 {
4941 tree t;
4942 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4943 {
4944 blocks->add (t);
4945 collect_subblocks (blocks, t);
4946 }
4947 }
4948
4949 /* Verify the GIMPLE statements in the CFG of FN. */
4950
4951 DEBUG_FUNCTION void
4952 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4953 {
4954 basic_block bb;
4955 bool err = false;
4956
4957 timevar_push (TV_TREE_STMT_VERIFY);
4958 hash_set<void *> visited;
4959 hash_set<gimple *> visited_stmts;
4960
4961 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4962 hash_set<tree> blocks;
4963 if (DECL_INITIAL (fn->decl))
4964 {
4965 blocks.add (DECL_INITIAL (fn->decl));
4966 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4967 }
4968
4969 FOR_EACH_BB_FN (bb, fn)
4970 {
4971 gimple_stmt_iterator gsi;
4972
4973 for (gphi_iterator gpi = gsi_start_phis (bb);
4974 !gsi_end_p (gpi);
4975 gsi_next (&gpi))
4976 {
4977 gphi *phi = gpi.phi ();
4978 bool err2 = false;
4979 unsigned i;
4980
4981 visited_stmts.add (phi);
4982
4983 if (gimple_bb (phi) != bb)
4984 {
4985 error ("gimple_bb (phi) is set to a wrong basic block");
4986 err2 = true;
4987 }
4988
4989 err2 |= verify_gimple_phi (phi);
4990
4991 /* Only PHI arguments have locations. */
4992 if (gimple_location (phi) != UNKNOWN_LOCATION)
4993 {
4994 error ("PHI node with location");
4995 err2 = true;
4996 }
4997
4998 for (i = 0; i < gimple_phi_num_args (phi); i++)
4999 {
5000 tree arg = gimple_phi_arg_def (phi, i);
5001 tree addr = walk_tree (&arg, verify_node_sharing_1,
5002 &visited, NULL);
5003 if (addr)
5004 {
5005 error ("incorrect sharing of tree nodes");
5006 debug_generic_expr (addr);
5007 err2 |= true;
5008 }
5009 location_t loc = gimple_phi_arg_location (phi, i);
5010 if (virtual_operand_p (gimple_phi_result (phi))
5011 && loc != UNKNOWN_LOCATION)
5012 {
5013 error ("virtual PHI with argument locations");
5014 err2 = true;
5015 }
5016 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5017 if (addr)
5018 {
5019 debug_generic_expr (addr);
5020 err2 = true;
5021 }
5022 err2 |= verify_location (&blocks, loc);
5023 }
5024
5025 if (err2)
5026 debug_gimple_stmt (phi);
5027 err |= err2;
5028 }
5029
5030 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5031 {
5032 gimple *stmt = gsi_stmt (gsi);
5033 bool err2 = false;
5034 struct walk_stmt_info wi;
5035 tree addr;
5036 int lp_nr;
5037
5038 visited_stmts.add (stmt);
5039
5040 if (gimple_bb (stmt) != bb)
5041 {
5042 error ("gimple_bb (stmt) is set to a wrong basic block");
5043 err2 = true;
5044 }
5045
5046 err2 |= verify_gimple_stmt (stmt);
5047 err2 |= verify_location (&blocks, gimple_location (stmt));
5048
5049 memset (&wi, 0, sizeof (wi));
5050 wi.info = (void *) &visited;
5051 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5052 if (addr)
5053 {
5054 error ("incorrect sharing of tree nodes");
5055 debug_generic_expr (addr);
5056 err2 |= true;
5057 }
5058
5059 memset (&wi, 0, sizeof (wi));
5060 wi.info = (void *) &blocks;
5061 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5062 if (addr)
5063 {
5064 debug_generic_expr (addr);
5065 err2 |= true;
5066 }
5067
5068 /* ??? Instead of not checking these stmts at all, the walker
5069 should know its context via wi. */
5070 if (!is_gimple_debug (stmt)
5071 && !is_gimple_omp (stmt))
5072 {
5073 memset (&wi, 0, sizeof (wi));
5074 addr = walk_gimple_op (stmt, verify_expr, &wi);
5075 if (addr)
5076 {
5077 debug_generic_expr (addr);
5078 inform (gimple_location (stmt), "in statement");
5079 err2 |= true;
5080 }
5081 }
5082
5083 /* If the statement is marked as part of an EH region, then it is
5084 expected that the statement could throw. Verify that when
5085 optimizations simplify a statement such that we can prove it
5086 cannot throw, we also update the other data structures
5087 to match. */
5088 lp_nr = lookup_stmt_eh_lp (stmt);
5089 if (lp_nr > 0)
5090 {
5091 if (!stmt_could_throw_p (stmt))
5092 {
5093 if (verify_nothrow)
5094 {
5095 error ("statement marked for throw, but doesn%'t");
5096 err2 |= true;
5097 }
5098 }
5099 else if (!gsi_one_before_end_p (gsi))
5100 {
5101 error ("statement marked for throw in middle of block");
5102 err2 |= true;
5103 }
5104 }
5105
5106 if (err2)
5107 debug_gimple_stmt (stmt);
5108 err |= err2;
5109 }
5110 }
5111
5112 eh_error_found = false;
5113 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5114 if (eh_table)
5115 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5116 (&visited_stmts);
5117
5118 if (err || eh_error_found)
5119 internal_error ("verify_gimple failed");
5120
5121 verify_histograms ();
5122 timevar_pop (TV_TREE_STMT_VERIFY);
5123 }
5124
5125
5126 /* Verifies that the flow information is OK. */
5127
5128 static int
5129 gimple_verify_flow_info (void)
5130 {
5131 int err = 0;
5132 basic_block bb;
5133 gimple_stmt_iterator gsi;
5134 gimple *stmt;
5135 edge e;
5136 edge_iterator ei;
5137
5138 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5139 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5140 {
5141 error ("ENTRY_BLOCK has IL associated with it");
5142 err = 1;
5143 }
5144
5145 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5146 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5147 {
5148 error ("EXIT_BLOCK has IL associated with it");
5149 err = 1;
5150 }
5151
5152 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5153 if (e->flags & EDGE_FALLTHRU)
5154 {
5155 error ("fallthru to exit from bb %d", e->src->index);
5156 err = 1;
5157 }
5158
5159 FOR_EACH_BB_FN (bb, cfun)
5160 {
5161 bool found_ctrl_stmt = false;
5162
5163 stmt = NULL;
5164
5165 /* Skip labels at the start of the basic block. */
5166 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5167 {
5168 tree label;
5169 gimple *prev_stmt = stmt;
5170
5171 stmt = gsi_stmt (gsi);
5172
5173 if (gimple_code (stmt) != GIMPLE_LABEL)
5174 break;
5175
5176 label = gimple_label_label (as_a <glabel *> (stmt));
5177 if (prev_stmt && DECL_NONLOCAL (label))
5178 {
5179 error ("nonlocal label ");
5180 print_generic_expr (stderr, label, 0);
5181 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5182 bb->index);
5183 err = 1;
5184 }
5185
5186 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5187 {
5188 error ("EH landing pad label ");
5189 print_generic_expr (stderr, label, 0);
5190 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5191 bb->index);
5192 err = 1;
5193 }
5194
5195 if (label_to_block (label) != bb)
5196 {
5197 error ("label ");
5198 print_generic_expr (stderr, label, 0);
5199 fprintf (stderr, " to block does not match in bb %d",
5200 bb->index);
5201 err = 1;
5202 }
5203
5204 if (decl_function_context (label) != current_function_decl)
5205 {
5206 error ("label ");
5207 print_generic_expr (stderr, label, 0);
5208 fprintf (stderr, " has incorrect context in bb %d",
5209 bb->index);
5210 err = 1;
5211 }
5212 }
5213
5214 /* Verify that the body of basic block BB is free of control flow. */
5215 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5216 {
5217 gimple *stmt = gsi_stmt (gsi);
5218
5219 if (found_ctrl_stmt)
5220 {
5221 error ("control flow in the middle of basic block %d",
5222 bb->index);
5223 err = 1;
5224 }
5225
5226 if (stmt_ends_bb_p (stmt))
5227 found_ctrl_stmt = true;
5228
5229 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5230 {
5231 error ("label ");
5232 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5233 fprintf (stderr, " in the middle of basic block %d", bb->index);
5234 err = 1;
5235 }
5236 }
5237
5238 gsi = gsi_last_bb (bb);
5239 if (gsi_end_p (gsi))
5240 continue;
5241
5242 stmt = gsi_stmt (gsi);
5243
5244 if (gimple_code (stmt) == GIMPLE_LABEL)
5245 continue;
5246
5247 err |= verify_eh_edges (stmt);
5248
5249 if (is_ctrl_stmt (stmt))
5250 {
5251 FOR_EACH_EDGE (e, ei, bb->succs)
5252 if (e->flags & EDGE_FALLTHRU)
5253 {
5254 error ("fallthru edge after a control statement in bb %d",
5255 bb->index);
5256 err = 1;
5257 }
5258 }
5259
5260 if (gimple_code (stmt) != GIMPLE_COND)
5261 {
5262 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5263 after anything other than a GIMPLE_COND statement. */
5264 FOR_EACH_EDGE (e, ei, bb->succs)
5265 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5266 {
5267 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5268 bb->index);
5269 err = 1;
5270 }
5271 }
5272
5273 switch (gimple_code (stmt))
5274 {
5275 case GIMPLE_COND:
5276 {
5277 edge true_edge;
5278 edge false_edge;
5279
5280 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5281
5282 if (!true_edge
5283 || !false_edge
5284 || !(true_edge->flags & EDGE_TRUE_VALUE)
5285 || !(false_edge->flags & EDGE_FALSE_VALUE)
5286 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5287 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5288 || EDGE_COUNT (bb->succs) >= 3)
5289 {
5290 error ("wrong outgoing edge flags at end of bb %d",
5291 bb->index);
5292 err = 1;
5293 }
5294 }
5295 break;
5296
5297 case GIMPLE_GOTO:
5298 if (simple_goto_p (stmt))
5299 {
5300 error ("explicit goto at end of bb %d", bb->index);
5301 err = 1;
5302 }
5303 else
5304 {
5305 /* FIXME. We should double check that the labels in the
5306 destination blocks have their address taken. */
5307 FOR_EACH_EDGE (e, ei, bb->succs)
5308 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5309 | EDGE_FALSE_VALUE))
5310 || !(e->flags & EDGE_ABNORMAL))
5311 {
5312 error ("wrong outgoing edge flags at end of bb %d",
5313 bb->index);
5314 err = 1;
5315 }
5316 }
5317 break;
5318
5319 case GIMPLE_CALL:
5320 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5321 break;
5322 /* ... fallthru ... */
5323 case GIMPLE_RETURN:
5324 if (!single_succ_p (bb)
5325 || (single_succ_edge (bb)->flags
5326 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5327 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5328 {
5329 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5330 err = 1;
5331 }
5332 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5333 {
5334 error ("return edge does not point to exit in bb %d",
5335 bb->index);
5336 err = 1;
5337 }
5338 break;
5339
5340 case GIMPLE_SWITCH:
5341 {
5342 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5343 tree prev;
5344 edge e;
5345 size_t i, n;
5346
5347 n = gimple_switch_num_labels (switch_stmt);
5348
5349 /* Mark all the destination basic blocks. */
5350 for (i = 0; i < n; ++i)
5351 {
5352 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5353 basic_block label_bb = label_to_block (lab);
5354 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5355 label_bb->aux = (void *)1;
5356 }
5357
5358 /* Verify that the case labels are sorted. */
5359 prev = gimple_switch_label (switch_stmt, 0);
5360 for (i = 1; i < n; ++i)
5361 {
5362 tree c = gimple_switch_label (switch_stmt, i);
5363 if (!CASE_LOW (c))
5364 {
5365 error ("found default case not at the start of "
5366 "case vector");
5367 err = 1;
5368 continue;
5369 }
5370 if (CASE_LOW (prev)
5371 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5372 {
5373 error ("case labels not sorted: ");
5374 print_generic_expr (stderr, prev, 0);
5375 fprintf (stderr, " is greater than ");
5376 print_generic_expr (stderr, c, 0);
5377 fprintf (stderr, " but comes before it.\n");
5378 err = 1;
5379 }
5380 prev = c;
5381 }
5382 /* VRP will remove the default case if it can prove it will
5383 never be executed. So do not verify there always exists
5384 a default case here. */
5385
5386 FOR_EACH_EDGE (e, ei, bb->succs)
5387 {
5388 if (!e->dest->aux)
5389 {
5390 error ("extra outgoing edge %d->%d",
5391 bb->index, e->dest->index);
5392 err = 1;
5393 }
5394
5395 e->dest->aux = (void *)2;
5396 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5397 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5398 {
5399 error ("wrong outgoing edge flags at end of bb %d",
5400 bb->index);
5401 err = 1;
5402 }
5403 }
5404
5405 /* Check that we have all of them. */
5406 for (i = 0; i < n; ++i)
5407 {
5408 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5409 basic_block label_bb = label_to_block (lab);
5410
5411 if (label_bb->aux != (void *)2)
5412 {
5413 error ("missing edge %i->%i", bb->index, label_bb->index);
5414 err = 1;
5415 }
5416 }
5417
5418 FOR_EACH_EDGE (e, ei, bb->succs)
5419 e->dest->aux = (void *)0;
5420 }
5421 break;
5422
5423 case GIMPLE_EH_DISPATCH:
5424 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5425 break;
5426
5427 default:
5428 break;
5429 }
5430 }
5431
5432 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5433 verify_dominators (CDI_DOMINATORS);
5434
5435 return err;
5436 }
5437
5438
5439 /* Updates phi nodes after creating a forwarder block joined
5440 by edge FALLTHRU. */
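/* Illustrative (hypothetical) shape: the forwarder DUMMY keeps the
   original PHIs while some predecessors have been redirected to BB:

        p1  p2  p3                p1  p2
          \ |  /                    \ /
           BB           ==>        DUMMY   p3
                                       \   /
                                        BB

   BB therefore needs fresh PHIs whose argument on FALLTHRU is the
   corresponding DUMMY PHI result; the arguments for the redirected
   edges are flushed from their pending lists below.  */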
5441
5442 static void
5443 gimple_make_forwarder_block (edge fallthru)
5444 {
5445 edge e;
5446 edge_iterator ei;
5447 basic_block dummy, bb;
5448 tree var;
5449 gphi_iterator gsi;
5450
5451 dummy = fallthru->src;
5452 bb = fallthru->dest;
5453
5454 if (single_pred_p (bb))
5455 return;
5456
5457 /* If we redirected a branch we must create new PHI nodes at the
5458 start of BB. */
5459 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5460 {
5461 gphi *phi, *new_phi;
5462
5463 phi = gsi.phi ();
5464 var = gimple_phi_result (phi);
5465 new_phi = create_phi_node (var, bb);
5466 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5467 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5468 UNKNOWN_LOCATION);
5469 }
5470
5471 /* Add the arguments we have stored on edges. */
5472 FOR_EACH_EDGE (e, ei, bb->preds)
5473 {
5474 if (e == fallthru)
5475 continue;
5476
5477 flush_pending_stmts (e);
5478 }
5479 }
5480
5481
5482 /* Return a non-special label in the head of basic block BB.
5483 Create one if it doesn't exist. */
5484
5485 tree
5486 gimple_block_label (basic_block bb)
5487 {
5488 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5489 bool first = true;
5490 tree label;
5491 glabel *stmt;
5492
5493 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5494 {
5495 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5496 if (!stmt)
5497 break;
5498 label = gimple_label_label (stmt);
5499 if (!DECL_NONLOCAL (label))
5500 {
5501 if (!first)
5502 gsi_move_before (&i, &s);
5503 return label;
5504 }
5505 }
5506
5507 label = create_artificial_label (UNKNOWN_LOCATION);
5508 stmt = gimple_build_label (label);
5509 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5510 return label;
5511 }
5512
5513
5514 /* Attempt to perform edge redirection by replacing a possibly complex
5515 jump instruction by a goto or by removing the jump completely.
5516 This can apply only if all edges now point to the same block. The
5517 parameters and return values are equivalent to
5518 redirect_edge_and_branch. */
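/* E.g. (illustrative): if SRC ends in "if (x_1 != 0) goto L1; else
   goto L2;" and both L1 and L2 now denote TARGET, the GIMPLE_COND
   is removed and the single surviving edge becomes a fallthru.  */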
5519
5520 static edge
5521 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5522 {
5523 basic_block src = e->src;
5524 gimple_stmt_iterator i;
5525 gimple *stmt;
5526
5527 /* We can replace or remove a complex jump only when we have exactly
5528 two edges. */
5529 if (EDGE_COUNT (src->succs) != 2
5530 /* Verify that all targets will be TARGET. Specifically, the
5531 edge that is not E must also go to TARGET. */
5532 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5533 return NULL;
5534
5535 i = gsi_last_bb (src);
5536 if (gsi_end_p (i))
5537 return NULL;
5538
5539 stmt = gsi_stmt (i);
5540
5541 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5542 {
5543 gsi_remove (&i, true);
5544 e = ssa_redirect_edge (e, target);
5545 e->flags = EDGE_FALLTHRU;
5546 return e;
5547 }
5548
5549 return NULL;
5550 }
5551
5552
5553 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5554 edge representing the redirected branch. */
5555
5556 static edge
5557 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5558 {
5559 basic_block bb = e->src;
5560 gimple_stmt_iterator gsi;
5561 edge ret;
5562 gimple *stmt;
5563
5564 if (e->flags & EDGE_ABNORMAL)
5565 return NULL;
5566
5567 if (e->dest == dest)
5568 return NULL;
5569
5570 if (e->flags & EDGE_EH)
5571 return redirect_eh_edge (e, dest);
5572
5573 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5574 {
5575 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5576 if (ret)
5577 return ret;
5578 }
5579
5580 gsi = gsi_last_bb (bb);
5581 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5582
5583 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5584 {
5585 case GIMPLE_COND:
5586 /* For COND_EXPR, we only need to redirect the edge. */
5587 break;
5588
5589 case GIMPLE_GOTO:
5590 /* No non-abnormal edges should lead from a non-simple goto, and
5591 simple ones should be represented implicitly. */
5592 gcc_unreachable ();
5593
5594 case GIMPLE_SWITCH:
5595 {
5596 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5597 tree label = gimple_block_label (dest);
5598 tree cases = get_cases_for_edge (e, switch_stmt);
5599
5600 /* If we have a list of cases associated with E, then use it
5601 as it's a lot faster than walking the entire case vector. */
5602 if (cases)
5603 {
5604 edge e2 = find_edge (e->src, dest);
5605 tree last, first;
5606
5607 first = cases;
5608 while (cases)
5609 {
5610 last = cases;
5611 CASE_LABEL (cases) = label;
5612 cases = CASE_CHAIN (cases);
5613 }
5614
5615 /* If there was already an edge in the CFG, then we need
5616 to move all the cases associated with E to E2. */
5617 if (e2)
5618 {
5619 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5620
5621 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5622 CASE_CHAIN (cases2) = first;
5623 }
5624 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5625 }
5626 else
5627 {
5628 size_t i, n = gimple_switch_num_labels (switch_stmt);
5629
5630 for (i = 0; i < n; i++)
5631 {
5632 tree elt = gimple_switch_label (switch_stmt, i);
5633 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5634 CASE_LABEL (elt) = label;
5635 }
5636 }
5637 }
5638 break;
5639
5640 case GIMPLE_ASM:
5641 {
5642 gasm *asm_stmt = as_a <gasm *> (stmt);
5643 int i, n = gimple_asm_nlabels (asm_stmt);
5644 tree label = NULL;
5645
5646 for (i = 0; i < n; ++i)
5647 {
5648 tree cons = gimple_asm_label_op (asm_stmt, i);
5649 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5650 {
5651 if (!label)
5652 label = gimple_block_label (dest);
5653 TREE_VALUE (cons) = label;
5654 }
5655 }
5656
5657 /* If we didn't find any label matching the former edge in the
5658 asm labels, we must be redirecting the fallthrough
5659 edge. */
5660 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5661 }
5662 break;
5663
5664 case GIMPLE_RETURN:
5665 gsi_remove (&gsi, true);
5666 e->flags |= EDGE_FALLTHRU;
5667 break;
5668
5669 case GIMPLE_OMP_RETURN:
5670 case GIMPLE_OMP_CONTINUE:
5671 case GIMPLE_OMP_SECTIONS_SWITCH:
5672 case GIMPLE_OMP_FOR:
5673 /* The edges from OMP constructs can be simply redirected. */
5674 break;
5675
5676 case GIMPLE_EH_DISPATCH:
5677 if (!(e->flags & EDGE_FALLTHRU))
5678 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5679 break;
5680
5681 case GIMPLE_TRANSACTION:
5682 if (e->flags & EDGE_TM_ABORT)
5683 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5684 gimple_block_label (dest));
5685 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5686 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5687 gimple_block_label (dest));
5688 else
5689 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5690 gimple_block_label (dest));
5691 break;
5692
5693 default:
5694 /* Otherwise it must be a fallthru edge, and we don't need to
5695 do anything besides redirecting it. */
5696 gcc_assert (e->flags & EDGE_FALLTHRU);
5697 break;
5698 }
5699
5700 /* Update/insert PHI nodes as necessary. */
5701
5702 /* Now update the edges in the CFG. */
5703 e = ssa_redirect_edge (e, dest);
5704
5705 return e;
5706 }
5707
5708 /* Returns true if it is possible to remove edge E by redirecting
5709 it to the destination of the other edge from E->src. */
5710
5711 static bool
5712 gimple_can_remove_branch_p (const_edge e)
5713 {
5714 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5715 return false;
5716
5717 return true;
5718 }
5719
5720 /* Simple wrapper, as we can always redirect fallthru edges. */
5721
5722 static basic_block
5723 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5724 {
5725 e = gimple_redirect_edge_and_branch (e, dest);
5726 gcc_assert (e);
5727
5728 return NULL;
5729 }
5730
5731
5732 /* Splits basic block BB after statement STMT (but at least after the
5733 labels). If STMT is NULL, BB is split just after the labels. */
5734
5735 static basic_block
5736 gimple_split_block (basic_block bb, void *stmt)
5737 {
5738 gimple_stmt_iterator gsi;
5739 gimple_stmt_iterator gsi_tgt;
5740 gimple_seq list;
5741 basic_block new_bb;
5742 edge e;
5743 edge_iterator ei;
5744
5745 new_bb = create_empty_bb (bb);
5746
5747 /* Redirect the outgoing edges. */
5748 new_bb->succs = bb->succs;
5749 bb->succs = NULL;
5750 FOR_EACH_EDGE (e, ei, new_bb->succs)
5751 e->src = new_bb;
5752
5753 /* Get a stmt iterator pointing to the first stmt to move. */
5754 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5755 gsi = gsi_after_labels (bb);
5756 else
5757 {
5758 gsi = gsi_for_stmt ((gimple *) stmt);
5759 gsi_next (&gsi);
5760 }
5761
5762 /* Move everything from GSI to the new basic block. */
5763 if (gsi_end_p (gsi))
5764 return new_bb;
5765
5766 /* Split the statement list - avoid re-creating new containers as this
5767 brings ugly quadratic memory consumption in the inliner.
5768 (We are still quadratic since we need to update stmt BB pointers,
5769 sadly.) */
5770 gsi_split_seq_before (&gsi, &list);
5771 set_bb_seq (new_bb, list);
5772 for (gsi_tgt = gsi_start (list);
5773 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5774 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5775
5776 return new_bb;
5777 }
5778
5779
5780 /* Moves basic block BB after block AFTER. */
5781
5782 static bool
5783 gimple_move_block_after (basic_block bb, basic_block after)
5784 {
5785 if (bb->prev_bb == after)
5786 return true;
5787
5788 unlink_block (bb);
5789 link_block (bb, after);
5790
5791 return true;
5792 }
5793
5794
5795 /* Return TRUE if block BB has no executable statements, otherwise return
5796 FALSE. */
5797
5798 static bool
5799 gimple_empty_block_p (basic_block bb)
5800 {
5801 /* BB must have no executable statements. */
5802 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5803 if (phi_nodes (bb))
5804 return false;
5805 if (gsi_end_p (gsi))
5806 return true;
5807 if (is_gimple_debug (gsi_stmt (gsi)))
5808 gsi_next_nondebug (&gsi);
5809 return gsi_end_p (gsi);
5810 }
5811
5812
5813 /* Split a basic block if it ends with a conditional branch and if the
5814 other part of the block is not empty. */
5815
5816 static basic_block
5817 gimple_split_block_before_cond_jump (basic_block bb)
5818 {
5819 gimple *last, *split_point;
5820 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5821 if (gsi_end_p (gsi))
5822 return NULL;
5823 last = gsi_stmt (gsi);
5824 if (gimple_code (last) != GIMPLE_COND
5825 && gimple_code (last) != GIMPLE_SWITCH)
5826 return NULL;
5827 gsi_prev (&gsi);
5828 split_point = gsi_stmt (gsi);
5829 return split_block (bb, split_point)->dest;
5830 }
5831
5832
5833 /* Return true if basic_block can be duplicated. */
5834
5835 static bool
5836 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5837 {
5838 return true;
5839 }
5840
5841 /* Create a duplicate of the basic block BB. NOTE: This does not
5842 preserve SSA form. */
5843
5844 static basic_block
5845 gimple_duplicate_bb (basic_block bb)
5846 {
5847 basic_block new_bb;
5848 gimple_stmt_iterator gsi_tgt;
5849
5850 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5851
5852 /* Copy the PHI nodes. We ignore PHI node arguments here because
5853 the incoming edges have not been set up yet. */
5854 for (gphi_iterator gpi = gsi_start_phis (bb);
5855 !gsi_end_p (gpi);
5856 gsi_next (&gpi))
5857 {
5858 gphi *phi, *copy;
5859 phi = gpi.phi ();
5860 copy = create_phi_node (NULL_TREE, new_bb);
5861 create_new_def_for (gimple_phi_result (phi), copy,
5862 gimple_phi_result_ptr (copy));
5863 gimple_set_uid (copy, gimple_uid (phi));
5864 }
5865
5866 gsi_tgt = gsi_start_bb (new_bb);
5867 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5868 !gsi_end_p (gsi);
5869 gsi_next (&gsi))
5870 {
5871 def_operand_p def_p;
5872 ssa_op_iter op_iter;
5873 tree lhs;
5874 gimple *stmt, *copy;
5875
5876 stmt = gsi_stmt (gsi);
5877 if (gimple_code (stmt) == GIMPLE_LABEL)
5878 continue;
5879
5880 /* Don't duplicate label debug stmts. */
5881 if (gimple_debug_bind_p (stmt)
5882 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5883 == LABEL_DECL)
5884 continue;
5885
5886 /* Create a new copy of STMT and duplicate STMT's virtual
5887 operands. */
5888 copy = gimple_copy (stmt);
5889 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5890
5891 maybe_duplicate_eh_stmt (copy, stmt);
5892 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5893
5894 /* When copying around a stmt writing into a local non-user
5895 aggregate, make sure it won't share a stack slot with other
5896 vars. */
5897 lhs = gimple_get_lhs (stmt);
5898 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5899 {
5900 tree base = get_base_address (lhs);
5901 if (base
5902 && (TREE_CODE (base) == VAR_DECL
5903 || TREE_CODE (base) == RESULT_DECL)
5904 && DECL_IGNORED_P (base)
5905 && !TREE_STATIC (base)
5906 && !DECL_EXTERNAL (base)
5907 && (TREE_CODE (base) != VAR_DECL
5908 || !DECL_HAS_VALUE_EXPR_P (base)))
5909 DECL_NONSHAREABLE (base) = 1;
5910 }
5911
5912 /* Create new names for all the definitions created by COPY and
5913 add replacement mappings for each new name. */
5914 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5915 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5916 }
5917
5918 return new_bb;
5919 }
5920
5921 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5922
5923 static void
5924 add_phi_args_after_copy_edge (edge e_copy)
5925 {
5926 basic_block bb, bb_copy = e_copy->src, dest;
5927 edge e;
5928 edge_iterator ei;
5929 gphi *phi, *phi_copy;
5930 tree def;
5931 gphi_iterator psi, psi_copy;
5932
5933 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5934 return;
5935
5936 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5937
5938 if (e_copy->dest->flags & BB_DUPLICATED)
5939 dest = get_bb_original (e_copy->dest);
5940 else
5941 dest = e_copy->dest;
5942
5943 e = find_edge (bb, dest);
5944 if (!e)
5945 {
5946 /* During loop unrolling the target of the latch edge is copied.
5947 In this case we are not looking for the edge to DEST, but for
5948 the edge to the duplicated block whose original was DEST. */
5949 FOR_EACH_EDGE (e, ei, bb->succs)
5950 {
5951 if ((e->dest->flags & BB_DUPLICATED)
5952 && get_bb_original (e->dest) == dest)
5953 break;
5954 }
5955
5956 gcc_assert (e != NULL);
5957 }
5958
5959 for (psi = gsi_start_phis (e->dest),
5960 psi_copy = gsi_start_phis (e_copy->dest);
5961 !gsi_end_p (psi);
5962 gsi_next (&psi), gsi_next (&psi_copy))
5963 {
5964 phi = psi.phi ();
5965 phi_copy = psi_copy.phi ();
5966 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5967 add_phi_arg (phi_copy, def, e_copy,
5968 gimple_phi_arg_location_from_edge (phi, e));
5969 }
5970 }
5971
5972
5973 /* Basic block BB_COPY was created by code duplication. Add phi node
5974 arguments for edges going out of BB_COPY. The blocks that were
5975 duplicated have BB_DUPLICATED set. */
5976
5977 void
5978 add_phi_args_after_copy_bb (basic_block bb_copy)
5979 {
5980 edge e_copy;
5981 edge_iterator ei;
5982
5983 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5984 {
5985 add_phi_args_after_copy_edge (e_copy);
5986 }
5987 }
5988
5989 /* Blocks in REGION_COPY array of length N_REGION were created by
5990 duplication of basic blocks. Add phi node arguments for edges
5991 going from these blocks. If E_COPY is not NULL, also add
5992 phi node arguments for its destination. */
5993
5994 void
5995 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5996 edge e_copy)
5997 {
5998 unsigned i;
5999
6000 for (i = 0; i < n_region; i++)
6001 region_copy[i]->flags |= BB_DUPLICATED;
6002
6003 for (i = 0; i < n_region; i++)
6004 add_phi_args_after_copy_bb (region_copy[i]);
6005 if (e_copy)
6006 add_phi_args_after_copy_edge (e_copy);
6007
6008 for (i = 0; i < n_region; i++)
6009 region_copy[i]->flags &= ~BB_DUPLICATED;
6010 }
6011
6012 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6013 important exit edge EXIT. By important we mean that no SSA name defined
6014 inside the region is live over the other exit edges of the region. All entry
6015 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6016 to the duplicate of the region. Dominance and loop information is
6017 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6018 UPDATE_DOMINANCE is false then we assume that the caller will update the
6019 dominance information after calling this function. The new basic
6020 blocks are stored to REGION_COPY in the same order as in REGION,
6021 provided that REGION_COPY is not NULL.
6022 The function returns false if it is unable to copy the region,
6023 true otherwise. */
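
/* As an illustration of the loop-header-copying use mentioned above,
   the duplication has roughly the effect of transforming

     while (cond)
       body;

   into

     if (cond)
       {
         do
           body;
         while (cond);
       }

   though the function itself only manipulates basic blocks and edges,
   not source constructs.  */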
6024
6025 bool
6026 gimple_duplicate_sese_region (edge entry, edge exit,
6027 basic_block *region, unsigned n_region,
6028 basic_block *region_copy,
6029 bool update_dominance)
6030 {
6031 unsigned i;
6032 bool free_region_copy = false, copying_header = false;
6033 struct loop *loop = entry->dest->loop_father;
6034 edge exit_copy;
6035 vec<basic_block> doms;
6036 edge redirected;
6037 int total_freq = 0, entry_freq = 0;
6038 gcov_type total_count = 0, entry_count = 0;
6039
6040 if (!can_copy_bbs_p (region, n_region))
6041 return false;
6042
6043 /* Some sanity checking. Note that we do not check for all possible
6044 misuses of the function. I.e. if you ask to copy something weird,
6045 it will work, but the state of structures probably will not be
6046 correct. */
6047 for (i = 0; i < n_region; i++)
6048 {
6049 /* We do not handle subloops, i.e. all the blocks must belong to the
6050 same loop. */
6051 if (region[i]->loop_father != loop)
6052 return false;
6053
6054 if (region[i] != entry->dest
6055 && region[i] == loop->header)
6056 return false;
6057 }
6058
6059 /* In case the function is used for loop header copying (which is the primary
6060 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6061 if (loop->header == entry->dest)
6062 {
6063 copying_header = true;
6064
6065 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6066 return false;
6067
6068 for (i = 0; i < n_region; i++)
6069 if (region[i] != exit->src
6070 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6071 return false;
6072 }
6073
6074 initialize_original_copy_tables ();
6075
6076 if (copying_header)
6077 set_loop_copy (loop, loop_outer (loop));
6078 else
6079 set_loop_copy (loop, loop);
6080
6081 if (!region_copy)
6082 {
6083 region_copy = XNEWVEC (basic_block, n_region);
6084 free_region_copy = true;
6085 }
6086
6087 /* Record blocks outside the region that are dominated by something
6088 inside. */
6089 if (update_dominance)
6090 {
6091 doms.create (0);
6092 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6093 }
6094
6095 if (entry->dest->count)
6096 {
6097 total_count = entry->dest->count;
6098 entry_count = entry->count;
6099 /* Fix up corner cases, to avoid division by zero or creation of negative
6100 frequencies. */
6101 if (entry_count > total_count)
6102 entry_count = total_count;
6103 }
6104 else
6105 {
6106 total_freq = entry->dest->frequency;
6107 entry_freq = EDGE_FREQUENCY (entry);
6108 /* Fix up corner cases, to avoid division by zero or creation of negative
6109 frequencies. */
6110 if (total_freq == 0)
6111 total_freq = 1;
6112 else if (entry_freq > total_freq)
6113 entry_freq = total_freq;
6114 }
6115
6116 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6117 split_edge_bb_loc (entry), update_dominance);
6118 if (total_count)
6119 {
6120 scale_bbs_frequencies_gcov_type (region, n_region,
6121 total_count - entry_count,
6122 total_count);
6123 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6124 total_count);
6125 }
6126 else
6127 {
6128 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6129 total_freq);
6130 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6131 }
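
  /* The scaling just performed preserves totals: e.g. (illustrative
     numbers), if ENTRY->dest->count is 100 and ENTRY->count is 60, the
     original region keeps 40/100 of its counts and the copy receives
     60/100, so both together still account for all 100 executions.  */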
6132
6133 if (copying_header)
6134 {
6135 loop->header = exit->dest;
6136 loop->latch = exit->src;
6137 }
6138
6139 /* Redirect the entry and add the phi node arguments. */
6140 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6141 gcc_assert (redirected != NULL);
6142 flush_pending_stmts (entry);
6143
6144 /* Concerning updating of dominators: We must recount dominators
6145 for the entry block and its copy. Anything that is outside of the
6146 region but was dominated by something inside needs recounting as
6147 well. */
6148 if (update_dominance)
6149 {
6150 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6151 doms.safe_push (get_bb_original (entry->dest));
6152 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6153 doms.release ();
6154 }
6155
6156 /* Add the other PHI node arguments. */
6157 add_phi_args_after_copy (region_copy, n_region, NULL);
6158
6159 if (free_region_copy)
6160 free (region_copy);
6161
6162 free_original_copy_tables ();
6163 return true;
6164 }
6165
6166 /* Checks if BB is part of the region defined by N_REGION BBS. */
6167 static bool
6168 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6169 {
6170 unsigned int n;
6171
6172 for (n = 0; n < n_region; n++)
6173 {
6174 if (bb == bbs[n])
6175 return true;
6176 }
6177 return false;
6178 }
6179
6180 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6181 are stored to REGION_COPY in the same order in which they appear
6182 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6183 the region, EXIT an exit from it. The condition guarding EXIT
6184 is moved to ENTRY. Returns true if duplication succeeds, false
6185 otherwise.
6186
6187 For example,
6188
6189 some_code;
6190 if (cond)
6191 A;
6192 else
6193 B;
6194
6195 is transformed to
6196
6197 if (cond)
6198 {
6199 some_code;
6200 A;
6201 }
6202 else
6203 {
6204 some_code;
6205 B;
6206 }
6207 */
6208
6209 bool
6210 gimple_duplicate_sese_tail (edge entry, edge exit,
6211 basic_block *region, unsigned n_region,
6212 basic_block *region_copy)
6213 {
6214 unsigned i;
6215 bool free_region_copy = false;
6216 struct loop *loop = exit->dest->loop_father;
6217 struct loop *orig_loop = entry->dest->loop_father;
6218 basic_block switch_bb, entry_bb, nentry_bb;
6219 vec<basic_block> doms;
6220 int total_freq = 0, exit_freq = 0;
6221 gcov_type total_count = 0, exit_count = 0;
6222 edge exits[2], nexits[2], e;
6223 gimple_stmt_iterator gsi;
6224 gimple *cond_stmt;
6225 edge sorig, snew;
6226 basic_block exit_bb;
6227 gphi_iterator psi;
6228 gphi *phi;
6229 tree def;
6230 struct loop *target, *aloop, *cloop;
6231
6232 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6233 exits[0] = exit;
6234 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6235
6236 if (!can_copy_bbs_p (region, n_region))
6237 return false;
6238
6239 initialize_original_copy_tables ();
6240 set_loop_copy (orig_loop, loop);
6241
6242 target = loop;
6243 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6244 {
6245 if (bb_part_of_region_p (aloop->header, region, n_region))
6246 {
6247 cloop = duplicate_loop (aloop, target);
6248 duplicate_subloops (aloop, cloop);
6249 }
6250 }
6251
6252 if (!region_copy)
6253 {
6254 region_copy = XNEWVEC (basic_block, n_region);
6255 free_region_copy = true;
6256 }
6257
6258 gcc_assert (!need_ssa_update_p (cfun));
6259
6260 /* Record blocks outside the region that are dominated by something
6261 inside. */
6262 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6263
6264 if (exit->src->count)
6265 {
6266 total_count = exit->src->count;
6267 exit_count = exit->count;
6268 /* Fix up corner cases, to avoid division by zero or creation of negative
6269 frequencies. */
6270 if (exit_count > total_count)
6271 exit_count = total_count;
6272 }
6273 else
6274 {
6275 total_freq = exit->src->frequency;
6276 exit_freq = EDGE_FREQUENCY (exit);
6277 /* Fix up corner cases, to avoid division by zero or creation of negative
6278 frequencies. */
6279 if (total_freq == 0)
6280 total_freq = 1;
6281 if (exit_freq > total_freq)
6282 exit_freq = total_freq;
6283 }
6284
6285 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6286 split_edge_bb_loc (exit), true);
6287 if (total_count)
6288 {
6289 scale_bbs_frequencies_gcov_type (region, n_region,
6290 total_count - exit_count,
6291 total_count);
6292 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6293 total_count);
6294 }
6295 else
6296 {
6297 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6298 total_freq);
6299 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6300 }
6301
6302 /* Create the switch block, and put the exit condition to it. */
6303 entry_bb = entry->dest;
6304 nentry_bb = get_bb_copy (entry_bb);
6305 if (!last_stmt (entry->src)
6306 || !stmt_ends_bb_p (last_stmt (entry->src)))
6307 switch_bb = entry->src;
6308 else
6309 switch_bb = split_edge (entry);
6310 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6311
6312 gsi = gsi_last_bb (switch_bb);
6313 cond_stmt = last_stmt (exit->src);
6314 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6315 cond_stmt = gimple_copy (cond_stmt);
6316
6317 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6318
6319 sorig = single_succ_edge (switch_bb);
6320 sorig->flags = exits[1]->flags;
6321 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6322
6323 /* Register the new edge from SWITCH_BB in loop exit lists. */
6324 rescan_loop_exit (snew, true, false);
6325
6326 /* Add the PHI node arguments. */
6327 add_phi_args_after_copy (region_copy, n_region, snew);
6328
6329 /* Get rid of now superfluous conditions and associated edges (and phi node
6330 arguments). */
6331 exit_bb = exit->dest;
6332
6333 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6334 PENDING_STMT (e) = NULL;
6335
6336 /* The latch of ORIG_LOOP was copied, and so was the backedge
6337 to the original header. We redirect this backedge to EXIT_BB. */
6338 for (i = 0; i < n_region; i++)
6339 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6340 {
6341 gcc_assert (single_succ_edge (region_copy[i]));
6342 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6343 PENDING_STMT (e) = NULL;
6344 for (psi = gsi_start_phis (exit_bb);
6345 !gsi_end_p (psi);
6346 gsi_next (&psi))
6347 {
6348 phi = psi.phi ();
6349 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6350 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6351 }
6352 }
6353 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6354 PENDING_STMT (e) = NULL;
6355
6356 /* Anything that is outside of the region, but was dominated by something
6357 inside needs its dominance info updated. */
6358 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6359 doms.release ();
6360 /* Update the SSA web. */
6361 update_ssa (TODO_update_ssa);
6362
6363 if (free_region_copy)
6364 free (region_copy);
6365
6366 free_original_copy_tables ();
6367 return true;
6368 }
6369
6370 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6371 adding blocks when the dominator traversal reaches EXIT. This
6372 function silently assumes that ENTRY strictly dominates EXIT. */
6373
6374 void
6375 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6376 vec<basic_block> *bbs_p)
6377 {
6378 basic_block son;
6379
6380 for (son = first_dom_son (CDI_DOMINATORS, entry);
6381 son;
6382 son = next_dom_son (CDI_DOMINATORS, son))
6383 {
6384 bbs_p->safe_push (son);
6385 if (son != exit)
6386 gather_blocks_in_sese_region (son, exit, bbs_p);
6387 }
6388 }
6389
6390 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6391 The duplicates are recorded in VARS_MAP. */
6392
6393 static void
6394 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6395 tree to_context)
6396 {
6397 tree t = *tp, new_t;
6398 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6399
6400 if (DECL_CONTEXT (t) == to_context)
6401 return;
6402
6403 bool existed;
6404 tree &loc = vars_map->get_or_insert (t, &existed);
6405
6406 if (!existed)
6407 {
6408 if (SSA_VAR_P (t))
6409 {
6410 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6411 add_local_decl (f, new_t);
6412 }
6413 else
6414 {
6415 gcc_assert (TREE_CODE (t) == CONST_DECL);
6416 new_t = copy_node (t);
6417 }
6418 DECL_CONTEXT (new_t) = to_context;
6419
6420 loc = new_t;
6421 }
6422 else
6423 new_t = loc;
6424
6425 *tp = new_t;
6426 }
6427
6428
6429 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6430 VARS_MAP maps old ssa names and var_decls to the new ones. */
6431
6432 static tree
6433 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6434 tree to_context)
6435 {
6436 tree new_name;
6437
6438 gcc_assert (!virtual_operand_p (name));
6439
6440 tree *loc = vars_map->get (name);
6441
6442 if (!loc)
6443 {
6444 tree decl = SSA_NAME_VAR (name);
6445 if (decl)
6446 {
6447 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6448 replace_by_duplicate_decl (&decl, vars_map, to_context);
6449 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6450 decl, SSA_NAME_DEF_STMT (name));
6451 }
6452 else
6453 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6454 name, SSA_NAME_DEF_STMT (name));
6455
6456 /* Now that we've used the def stmt to define new_name, make sure it
6457 doesn't define name anymore. */
6458 SSA_NAME_DEF_STMT (name) = NULL;
6459
6460 vars_map->put (name, new_name);
6461 }
6462 else
6463 new_name = *loc;
6464
6465 return new_name;
6466 }
6467
6468 struct move_stmt_d
6469 {
6470 tree orig_block;
6471 tree new_block;
6472 tree from_context;
6473 tree to_context;
6474 hash_map<tree, tree> *vars_map;
6475 htab_t new_label_map;
6476 hash_map<void *, void *> *eh_map;
6477 bool remap_decls_p;
6478 };
6479
6480 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6481 contained in *TP whose TREE_BLOCK was previously ORIG_BLOCK, and change the
6482 DECL_CONTEXT of every local variable referenced in *TP. */
6483
6484 static tree
6485 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6486 {
6487 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6488 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6489 tree t = *tp;
6490
6491 if (EXPR_P (t))
6492 {
6493 tree block = TREE_BLOCK (t);
6494 if (block == p->orig_block
6495 || (p->orig_block == NULL_TREE
6496 && block != NULL_TREE))
6497 TREE_SET_BLOCK (t, p->new_block);
6498 else if (flag_checking && block != NULL_TREE)
6499 {
6500 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6501 block = BLOCK_SUPERCONTEXT (block);
6502 gcc_assert (block == p->orig_block);
6503 }
6504 }
6505 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6506 {
6507 if (TREE_CODE (t) == SSA_NAME)
6508 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6509 else if (TREE_CODE (t) == PARM_DECL
6510 && gimple_in_ssa_p (cfun))
6511 *tp = *(p->vars_map->get (t));
6512 else if (TREE_CODE (t) == LABEL_DECL)
6513 {
6514 if (p->new_label_map)
6515 {
6516 struct tree_map in, *out;
6517 in.base.from = t;
6518 out = (struct tree_map *)
6519 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6520 if (out)
6521 *tp = t = out->to;
6522 }
6523
6524 DECL_CONTEXT (t) = p->to_context;
6525 }
6526 else if (p->remap_decls_p)
6527 {
6528 /* Replace T with its duplicate. T should no longer appear in the
6529 parent function, so this looks wasteful; however, it may appear
6530 in referenced_vars, and more importantly, as virtual operands of
6531 statements, and in alias lists of other variables. It would be
6532 quite difficult to expunge it from all those places. ??? It might
6533 suffice to do this for addressable variables. */
6534 if ((TREE_CODE (t) == VAR_DECL
6535 && !is_global_var (t))
6536 || TREE_CODE (t) == CONST_DECL)
6537 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6538 }
6539 *walk_subtrees = 0;
6540 }
6541 else if (TYPE_P (t))
6542 *walk_subtrees = 0;
6543
6544 return NULL_TREE;
6545 }
6546
6547 /* Helper for move_stmt_r. Given an EH region number for the source
6548 function, map that to the duplicate EH region number in the dest. */
6549
6550 static int
6551 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6552 {
6553 eh_region old_r, new_r;
6554
6555 old_r = get_eh_region_from_number (old_nr);
6556 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6557
6558 return new_r->index;
6559 }
6560
6561 /* Similar, but operate on INTEGER_CSTs. */
6562
6563 static tree
6564 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6565 {
6566 int old_nr, new_nr;
6567
6568 old_nr = tree_to_shwi (old_t_nr);
6569 new_nr = move_stmt_eh_region_nr (old_nr, p);
6570
6571 return build_int_cst (integer_type_node, new_nr);
6572 }
6573
6574 /* Like move_stmt_op, but for gimple statements.
6575
6576 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6577 contained in the current statement in *GSI_P and change the
6578 DECL_CONTEXT of every local variable referenced in the current
6579 statement. */
6580
6581 static tree
6582 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6583 struct walk_stmt_info *wi)
6584 {
6585 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6586 gimple *stmt = gsi_stmt (*gsi_p);
6587 tree block = gimple_block (stmt);
6588
6589 if (block == p->orig_block
6590 || (p->orig_block == NULL_TREE
6591 && block != NULL_TREE))
6592 gimple_set_block (stmt, p->new_block);
6593
6594 switch (gimple_code (stmt))
6595 {
6596 case GIMPLE_CALL:
6597 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6598 {
6599 tree r, fndecl = gimple_call_fndecl (stmt);
6600 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6601 switch (DECL_FUNCTION_CODE (fndecl))
6602 {
6603 case BUILT_IN_EH_COPY_VALUES:
6604 r = gimple_call_arg (stmt, 1);
6605 r = move_stmt_eh_region_tree_nr (r, p);
6606 gimple_call_set_arg (stmt, 1, r);
6607 /* FALLTHRU */
6608
6609 case BUILT_IN_EH_POINTER:
6610 case BUILT_IN_EH_FILTER:
6611 r = gimple_call_arg (stmt, 0);
6612 r = move_stmt_eh_region_tree_nr (r, p);
6613 gimple_call_set_arg (stmt, 0, r);
6614 break;
6615
6616 default:
6617 break;
6618 }
6619 }
6620 break;
6621
6622 case GIMPLE_RESX:
6623 {
6624 gresx *resx_stmt = as_a <gresx *> (stmt);
6625 int r = gimple_resx_region (resx_stmt);
6626 r = move_stmt_eh_region_nr (r, p);
6627 gimple_resx_set_region (resx_stmt, r);
6628 }
6629 break;
6630
6631 case GIMPLE_EH_DISPATCH:
6632 {
6633 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6634 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6635 r = move_stmt_eh_region_nr (r, p);
6636 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6637 }
6638 break;
6639
6640 case GIMPLE_OMP_RETURN:
6641 case GIMPLE_OMP_CONTINUE:
6642 break;
6643 default:
6644 if (is_gimple_omp (stmt))
6645 {
6646 /* Do not remap variables inside OMP directives. Variables
6647 referenced in clauses and the directive header belong to the
6648 parent function and should not be moved into the child
6649 function. */
6650 bool save_remap_decls_p = p->remap_decls_p;
6651 p->remap_decls_p = false;
6652 *handled_ops_p = true;
6653
6654 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6655 move_stmt_op, wi);
6656
6657 p->remap_decls_p = save_remap_decls_p;
6658 }
6659 break;
6660 }
6661
6662 return NULL_TREE;
6663 }
6664
6665 /* Move basic block BB from function CFUN to function DEST_FN. The
6666 block is moved out of the original linked list and placed after
6667 block AFTER in the new list. Also, the block is removed from the
6668 original array of blocks and placed in DEST_FN's array of blocks.
6669 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6670 updated to reflect the moved edges.
6671
6672 The local variables are remapped to new instances; VARS_MAP is used
6673 to record the mapping. */
6674
6675 static void
6676 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6677 basic_block after, bool update_edge_count_p,
6678 struct move_stmt_d *d)
6679 {
6680 struct control_flow_graph *cfg;
6681 edge_iterator ei;
6682 edge e;
6683 gimple_stmt_iterator si;
6684 unsigned old_len, new_len;
6685
6686 /* Remove BB from dominance structures. */
6687 delete_from_dominance_info (CDI_DOMINATORS, bb);
6688
6689 /* Move BB from its current loop to the copy in the new function. */
6690 if (current_loops)
6691 {
6692 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6693 if (new_loop)
6694 bb->loop_father = new_loop;
6695 }
6696
6697 /* Link BB to the new linked list. */
6698 move_block_after (bb, after);
6699
6700 /* Update the edge count in the corresponding flowgraphs. */
6701 if (update_edge_count_p)
6702 FOR_EACH_EDGE (e, ei, bb->succs)
6703 {
6704 cfun->cfg->x_n_edges--;
6705 dest_cfun->cfg->x_n_edges++;
6706 }
6707
6708 /* Remove BB from the original basic block array. */
6709 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6710 cfun->cfg->x_n_basic_blocks--;
6711
6712 /* Grow DEST_CFUN's basic block array if needed. */
6713 cfg = dest_cfun->cfg;
6714 cfg->x_n_basic_blocks++;
6715 if (bb->index >= cfg->x_last_basic_block)
6716 cfg->x_last_basic_block = bb->index + 1;
6717
6718 old_len = vec_safe_length (cfg->x_basic_block_info);
6719 if ((unsigned) cfg->x_last_basic_block >= old_len)
6720 {
6721 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6722 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6723 }
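
  /* The growth formula above reserves roughly 25% headroom; e.g., with
     x_last_basic_block == 20 (an illustrative value) the vector grows
     to 20 + (20 + 3) / 4 == 25 slots.  */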
6724
6725 (*cfg->x_basic_block_info)[bb->index] = bb;
6726
6727 /* Remap the variables in phi nodes. */
6728 for (gphi_iterator psi = gsi_start_phis (bb);
6729 !gsi_end_p (psi); )
6730 {
6731 gphi *phi = psi.phi ();
6732 use_operand_p use;
6733 tree op = PHI_RESULT (phi);
6734 ssa_op_iter oi;
6735 unsigned i;
6736
6737 if (virtual_operand_p (op))
6738 {
6739 /* Remove the phi nodes for virtual operands (alias analysis will be
6740 run for the new function, anyway). */
6741 remove_phi_node (&psi, true);
6742 continue;
6743 }
6744
6745 SET_PHI_RESULT (phi,
6746 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6747 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6748 {
6749 op = USE_FROM_PTR (use);
6750 if (TREE_CODE (op) == SSA_NAME)
6751 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6752 }
6753
6754 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6755 {
6756 location_t locus = gimple_phi_arg_location (phi, i);
6757 tree block = LOCATION_BLOCK (locus);
6758
6759 if (locus == UNKNOWN_LOCATION)
6760 continue;
6761 if (d->orig_block == NULL_TREE || block == d->orig_block)
6762 {
6763 locus = set_block (locus, d->new_block);
6764 gimple_phi_arg_set_location (phi, i, locus);
6765 }
6766 }
6767
6768 gsi_next (&psi);
6769 }
6770
6771 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6772 {
6773 gimple *stmt = gsi_stmt (si);
6774 struct walk_stmt_info wi;
6775
6776 memset (&wi, 0, sizeof (wi));
6777 wi.info = d;
6778 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6779
6780 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6781 {
6782 tree label = gimple_label_label (label_stmt);
6783 int uid = LABEL_DECL_UID (label);
6784
6785 gcc_assert (uid > -1);
6786
6787 old_len = vec_safe_length (cfg->x_label_to_block_map);
6788 if (old_len <= (unsigned) uid)
6789 {
6790 new_len = 3 * uid / 2 + 1;
6791 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6792 }
6793
6794 (*cfg->x_label_to_block_map)[uid] = bb;
6795 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6796
6797 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6798
6799 if (uid >= dest_cfun->cfg->last_label_uid)
6800 dest_cfun->cfg->last_label_uid = uid + 1;
6801 }
6802
6803 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6804 remove_stmt_from_eh_lp_fn (cfun, stmt);
6805
6806 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6807 gimple_remove_stmt_histograms (cfun, stmt);
6808
6809 /* We cannot leave any operands allocated from the operand caches of
6810 the current function. */
6811 free_stmt_operands (cfun, stmt);
6812 push_cfun (dest_cfun);
6813 update_stmt (stmt);
6814 pop_cfun ();
6815 }
6816
6817 FOR_EACH_EDGE (e, ei, bb->succs)
6818 if (e->goto_locus != UNKNOWN_LOCATION)
6819 {
6820 tree block = LOCATION_BLOCK (e->goto_locus);
6821 if (d->orig_block == NULL_TREE
6822 || block == d->orig_block)
6823 e->goto_locus = set_block (e->goto_locus, d->new_block);
6824 }
6825 }
6826
6827 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6828 the outermost EH region. Use REGION as the incoming base EH region. */
6829
6830 static eh_region
6831 find_outermost_region_in_block (struct function *src_cfun,
6832 basic_block bb, eh_region region)
6833 {
6834 gimple_stmt_iterator si;
6835
6836 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6837 {
6838 gimple *stmt = gsi_stmt (si);
6839 eh_region stmt_region;
6840 int lp_nr;
6841
6842 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6843 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6844 if (stmt_region)
6845 {
6846 if (region == NULL)
6847 region = stmt_region;
6848 else if (stmt_region != region)
6849 {
6850 region = eh_region_outermost (src_cfun, stmt_region, region);
6851 gcc_assert (region != NULL);
6852 }
6853 }
6854 }
6855
6856 return region;
6857 }
6858
6859 static tree
6860 new_label_mapper (tree decl, void *data)
6861 {
6862 htab_t hash = (htab_t) data;
6863 struct tree_map *m;
6864 void **slot;
6865
6866 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6867
6868 m = XNEW (struct tree_map);
6869 m->hash = DECL_UID (decl);
6870 m->base.from = decl;
6871 m->to = create_artificial_label (UNKNOWN_LOCATION);
6872 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6873 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6874 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6875
6876 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6877 gcc_assert (*slot == NULL);
6878
6879 *slot = m;
6880
6881 return m->to;
6882 }
6883
6884 /* Tree walker to replace the decls used inside value expressions by
6885 duplicates. */
6886
6887 static tree
6888 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6889 {
6890 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6891
6892 switch (TREE_CODE (*tp))
6893 {
6894 case VAR_DECL:
6895 case PARM_DECL:
6896 case RESULT_DECL:
6897 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6898 break;
6899 default:
6900 break;
6901 }
6902
6903 if (IS_TYPE_OR_DECL_P (*tp))
6904 *walk_subtrees = false;
6905
6906 return NULL;
6907 }
6908
6909 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6910 subblocks. */
6911
6912 static void
6913 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6914 tree to_context)
6915 {
6916 tree *tp, t;
6917
6918 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6919 {
6920 t = *tp;
6921 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6922 continue;
6923 replace_by_duplicate_decl (&t, vars_map, to_context);
6924 if (t != *tp)
6925 {
6926 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6927 {
6928 tree x = DECL_VALUE_EXPR (*tp);
6929 struct replace_decls_d rd = { vars_map, to_context };
6930 x = unshare_expr (x);
6931 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6932 SET_DECL_VALUE_EXPR (t, x);
6933 DECL_HAS_VALUE_EXPR_P (t) = 1;
6934 }
6935 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6936 *tp = t;
6937 }
6938 }
6939
6940 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6941 replace_block_vars_by_duplicates (block, vars_map, to_context);
6942 }
6943
6944 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6945 from FN1 to FN2. */
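
/* For instance (numbers illustrative): a loop that was number 3 in FN1
   leaves a NULL slot 3 behind in FN1's array and is appended to FN2's
   larray as loop number number_of_loops (FN2).  */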
6946
6947 static void
6948 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6949 struct loop *loop)
6950 {
6951 /* Discard it from the old loop array. */
6952 (*get_loops (fn1))[loop->num] = NULL;
6953
6954 /* Place it in the new loop array, assigning it a new number. */
6955 loop->num = number_of_loops (fn2);
6956 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6957
6958 /* Recurse to children. */
6959 for (loop = loop->inner; loop; loop = loop->next)
6960 fixup_loop_arrays_after_move (fn1, fn2, loop);
6961 }
6962
6963 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6964 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6965
6966 DEBUG_FUNCTION void
6967 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6968 {
6969 basic_block bb;
6970 edge_iterator ei;
6971 edge e;
6972 bitmap bbs = BITMAP_ALLOC (NULL);
6973 int i;
6974
6975 gcc_assert (entry != NULL);
6976 gcc_assert (entry != exit);
6977 gcc_assert (bbs_p != NULL);
6978
6979 gcc_assert (bbs_p->length () > 0);
6980
6981 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6982 bitmap_set_bit (bbs, bb->index);
6983
6984 gcc_assert (bitmap_bit_p (bbs, entry->index));
6985 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6986
6987 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6988 {
6989 if (bb == entry)
6990 {
6991 gcc_assert (single_pred_p (entry));
6992 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6993 }
6994 else
6995 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6996 {
6997 e = ei_edge (ei);
6998 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6999 }
7000
7001 if (bb == exit)
7002 {
7003 gcc_assert (single_succ_p (exit));
7004 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7005 }
7006 else
7007 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7008 {
7009 e = ei_edge (ei);
7010 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7011 }
7012 }
7013
7014 BITMAP_FREE (bbs);
7015 }
7016
7017 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7018
7019 bool
7020 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7021 {
7022 bitmap release_names = (bitmap)data;
7023
7024 if (TREE_CODE (from) != SSA_NAME)
7025 return true;
7026
7027 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7028 return true;
7029 }
7030
7031 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7032 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7033 single basic block in the original CFG and the new basic block is
7034 returned. DEST_CFUN must not have a CFG yet.
7035
7036 Note that the region need not be a pure SESE region. Blocks inside
7037 the region may contain calls to abort/exit. The only restriction
7038 is that ENTRY_BB should be the only entry point and it must
7039 dominate EXIT_BB.
7040
7041 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7042 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7043 to the new function.
7044
7045 All local variables referenced in the region are assumed to be in
7046 the corresponding BLOCK_VARS and unexpanded variable lists
7047 associated with DEST_CFUN.
7048
7049 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7050 reimplement move_sese_region_to_fn by duplicating the region rather than
7051 moving it. */
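
/* A typical use is outlining, e.g. of an OpenMP parallel region (a
   sketch, not a complete call sequence):

     #pragma omp parallel
       { body; }

   Here the blocks of BODY form the SESE region moved into the child
   function, and the single block returned below replaces them in the
   parent.  */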
7052
7053 basic_block
7054 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7055 basic_block exit_bb, tree orig_block)
7056 {
7057 vec<basic_block> bbs, dom_bbs;
7058 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7059 basic_block after, bb, *entry_pred, *exit_succ, abb;
7060 struct function *saved_cfun = cfun;
7061 int *entry_flag, *exit_flag;
7062 unsigned *entry_prob, *exit_prob;
7063 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7064 edge e;
7065 edge_iterator ei;
7066 htab_t new_label_map;
7067 hash_map<void *, void *> *eh_map;
7068 struct loop *loop = entry_bb->loop_father;
7069 struct loop *loop0 = get_loop (saved_cfun, 0);
7070 struct move_stmt_d d;
7071
7072 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7073 region. */
7074 gcc_assert (entry_bb != exit_bb
7075 && (!exit_bb
7076 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7077
7078 /* Collect all the blocks in the region. Manually add ENTRY_BB
7079 because it won't be added by dfs_enumerate_from. */
7080 bbs.create (0);
7081 bbs.safe_push (entry_bb);
7082 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7083
7084 if (flag_checking)
7085 verify_sese (entry_bb, exit_bb, &bbs);
7086
7087 /* The blocks that used to be dominated by something in BBS will now be
7088 dominated by the new block. */
7089 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7090 bbs.address (),
7091 bbs.length ());
7092
7093 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7094 the predecessor edges to ENTRY_BB and the successor edges to
7095 EXIT_BB so that we can re-attach them to the new basic block that
7096 will replace the region. */
7097 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7098 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7099 entry_flag = XNEWVEC (int, num_entry_edges);
7100 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7101 i = 0;
7102 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7103 {
7104 entry_prob[i] = e->probability;
7105 entry_flag[i] = e->flags;
7106 entry_pred[i++] = e->src;
7107 remove_edge (e);
7108 }
7109
7110 if (exit_bb)
7111 {
7112 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7113 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7114 exit_flag = XNEWVEC (int, num_exit_edges);
7115 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7116 i = 0;
7117 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7118 {
7119 exit_prob[i] = e->probability;
7120 exit_flag[i] = e->flags;
7121 exit_succ[i++] = e->dest;
7122 remove_edge (e);
7123 }
7124 }
7125 else
7126 {
7127 num_exit_edges = 0;
7128 exit_succ = NULL;
7129 exit_flag = NULL;
7130 exit_prob = NULL;
7131 }
7132
7133 /* Switch context to the child function to initialize DEST_FN's CFG. */
7134 gcc_assert (dest_cfun->cfg == NULL);
7135 push_cfun (dest_cfun);
7136
7137 init_empty_tree_cfg ();
7138
7139 /* Initialize EH information for the new function. */
7140 eh_map = NULL;
7141 new_label_map = NULL;
7142 if (saved_cfun->eh)
7143 {
7144 eh_region region = NULL;
7145
7146 FOR_EACH_VEC_ELT (bbs, i, bb)
7147 region = find_outermost_region_in_block (saved_cfun, bb, region);
7148
7149 init_eh_for_function ();
7150 if (region != NULL)
7151 {
7152 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7153 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7154 new_label_mapper, new_label_map);
7155 }
7156 }
7157
7158 /* Initialize an empty loop tree. */
7159 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7160 init_loops_structure (dest_cfun, loops, 1);
7161 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7162 set_loops_for_fn (dest_cfun, loops);
7163
7164 /* Move the outlined loop tree part. */
7165 num_nodes = bbs.length ();
7166 FOR_EACH_VEC_ELT (bbs, i, bb)
7167 {
7168 if (bb->loop_father->header == bb)
7169 {
7170 struct loop *this_loop = bb->loop_father;
7171 struct loop *outer = loop_outer (this_loop);
7172 if (outer == loop
7173 /* If the SESE region contains some bbs ending with
7174 a noreturn call, those are considered to belong
7175 to the outermost loop in saved_cfun, rather than
7176 the entry_bb's loop_father. */
7177 || outer == loop0)
7178 {
7179 if (outer != loop)
7180 num_nodes -= this_loop->num_nodes;
7181 flow_loop_tree_node_remove (bb->loop_father);
7182 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7183 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7184 }
7185 }
7186 else if (bb->loop_father == loop0 && loop0 != loop)
7187 num_nodes--;
7188
7189 /* Remove loop exits from the outlined region. */
7190 if (loops_for_fn (saved_cfun)->exits)
7191 FOR_EACH_EDGE (e, ei, bb->succs)
7192 {
7193 struct loops *l = loops_for_fn (saved_cfun);
7194 loop_exit **slot
7195 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7196 NO_INSERT);
7197 if (slot)
7198 l->exits->clear_slot (slot);
7199 }
7200 }
7201
7202
7203 /* Adjust the number of blocks in the tree root of the outlined part. */
7204 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7205
7206 /* Setup a mapping to be used by move_block_to_fn. */
7207 loop->aux = current_loops->tree_root;
7208 loop0->aux = current_loops->tree_root;
7209
7210 pop_cfun ();
7211
7212 /* Move blocks from BBS into DEST_CFUN. */
7213 gcc_assert (bbs.length () >= 2);
7214 after = dest_cfun->cfg->x_entry_block_ptr;
7215 hash_map<tree, tree> vars_map;
7216
7217 memset (&d, 0, sizeof (d));
7218 d.orig_block = orig_block;
7219 d.new_block = DECL_INITIAL (dest_cfun->decl);
7220 d.from_context = cfun->decl;
7221 d.to_context = dest_cfun->decl;
7222 d.vars_map = &vars_map;
7223 d.new_label_map = new_label_map;
7224 d.eh_map = eh_map;
7225 d.remap_decls_p = true;
7226
7227 if (gimple_in_ssa_p (cfun))
7228 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7229 {
7230 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7231 set_ssa_default_def (dest_cfun, arg, narg);
7232 vars_map.put (arg, narg);
7233 }
7234
7235 FOR_EACH_VEC_ELT (bbs, i, bb)
7236 {
7237 /* No need to update edge counts on the last block. It has
7238 already been updated earlier when we detached the region from
7239 the original CFG. */
7240 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7241 after = bb;
7242 }
7243
7244 loop->aux = NULL;
7245 loop0->aux = NULL;
7246 /* Loop sizes are no longer correct, fix them up. */
7247 loop->num_nodes -= num_nodes;
7248 for (struct loop *outer = loop_outer (loop);
7249 outer; outer = loop_outer (outer))
7250 outer->num_nodes -= num_nodes;
7251 loop0->num_nodes -= bbs.length () - num_nodes;
7252
7253 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7254 {
7255 struct loop *aloop;
7256 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7257 if (aloop != NULL)
7258 {
7259 if (aloop->simduid)
7260 {
7261 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7262 d.to_context);
7263 dest_cfun->has_simduid_loops = true;
7264 }
7265 if (aloop->force_vectorize)
7266 dest_cfun->has_force_vectorize_loops = true;
7267 }
7268 }
7269
7270 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7271 if (orig_block)
7272 {
7273 tree block;
7274 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7275 == NULL_TREE);
7276 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7277 = BLOCK_SUBBLOCKS (orig_block);
7278 for (block = BLOCK_SUBBLOCKS (orig_block);
7279 block; block = BLOCK_CHAIN (block))
7280 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7281 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7282 }
7283
7284 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7285 &vars_map, dest_cfun->decl);
7286
7287 if (new_label_map)
7288 htab_delete (new_label_map);
7289 if (eh_map)
7290 delete eh_map;
7291
7292 if (gimple_in_ssa_p (cfun))
7293 {
7294 /* We need to release ssa-names in a defined order, so first find them,
7295 and then iterate in ascending version order. */
7296 bitmap release_names = BITMAP_ALLOC (NULL);
7297 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7298 bitmap_iterator bi;
7299 unsigned i;
7300 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7301 release_ssa_name (ssa_name (i));
7302 BITMAP_FREE (release_names);
7303 }
7304
7305 /* Rewire the entry and exit blocks. The successor to the entry
7306 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7307 the child function. Similarly, the predecessor of DEST_FN's
7308 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7309 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7310 various CFG manipulation function get to the right CFG.
7311
7312 FIXME, this is silly. The CFG ought to become a parameter to
7313 these helpers. */
7314 push_cfun (dest_cfun);
7315 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7316 if (exit_bb)
7317 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7318 pop_cfun ();
7319
7320 /* Back in the original function, the SESE region has disappeared,
7321 create a new basic block in its place. */
7322 bb = create_empty_bb (entry_pred[0]);
7323 if (current_loops)
7324 add_bb_to_loop (bb, loop);
7325 for (i = 0; i < num_entry_edges; i++)
7326 {
7327 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7328 e->probability = entry_prob[i];
7329 }
7330
7331 for (i = 0; i < num_exit_edges; i++)
7332 {
7333 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7334 e->probability = exit_prob[i];
7335 }
7336
7337 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7338 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7339 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7340 dom_bbs.release ();
7341
7342 if (exit_bb)
7343 {
7344 free (exit_prob);
7345 free (exit_flag);
7346 free (exit_succ);
7347 }
7348 free (entry_prob);
7349 free (entry_flag);
7350 free (entry_pred);
7351 bbs.release ();
7352
7353 return bb;
7354 }
7355
7356 /* Dump default def DEF to file FILE using FLAGS and indentation
7357 SPC. */
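
/* E.g., for a parameter i of type int whose default definition is
   i_1(D), the output (after any SSA name info) looks like

     int i_1(D) = i;

   with illustrative names.  */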
7358
7359 static void
7360 dump_default_def (FILE *file, tree def, int spc, int flags)
7361 {
7362 for (int i = 0; i < spc; ++i)
7363 fprintf (file, " ");
7364 dump_ssaname_info_to_file (file, def, spc);
7365
7366 print_generic_expr (file, TREE_TYPE (def), flags);
7367 fprintf (file, " ");
7368 print_generic_expr (file, def, flags);
7369 fprintf (file, " = ");
7370 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7371 fprintf (file, ";\n");
7372 }
7373
7374 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7375 dumpfile.h). */
7376
7377 void
7378 dump_function_to_file (tree fndecl, FILE *file, int flags)
7379 {
7380 tree arg, var, old_current_fndecl = current_function_decl;
7381 struct function *dsf;
7382 bool ignore_topmost_bind = false, any_var = false;
7383 basic_block bb;
7384 tree chain;
7385 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7386 && decl_is_tm_clone (fndecl));
7387 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7388
7389 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7390 {
7391 fprintf (file, "__attribute__((");
7392
7393 bool first = true;
7394 tree chain;
7395 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7396 first = false, chain = TREE_CHAIN (chain))
7397 {
7398 if (!first)
7399 fprintf (file, ", ");
7400
7401 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7402 if (TREE_VALUE (chain) != NULL_TREE)
7403 {
7404 fprintf (file, " (");
7405 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7406 fprintf (file, ")");
7407 }
7408 }
7409
7410 fprintf (file, "))\n");
7411 }
7412
7413 current_function_decl = fndecl;
7414 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7415
7416 arg = DECL_ARGUMENTS (fndecl);
7417 while (arg)
7418 {
7419 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7420 fprintf (file, " ");
7421 print_generic_expr (file, arg, dump_flags);
7422 if (flags & TDF_VERBOSE)
7423 print_node (file, "", arg, 4);
7424 if (DECL_CHAIN (arg))
7425 fprintf (file, ", ");
7426 arg = DECL_CHAIN (arg);
7427 }
7428 fprintf (file, ")\n");
7429
7430 if (flags & TDF_VERBOSE)
7431 print_node (file, "", fndecl, 2);
7432
7433 dsf = DECL_STRUCT_FUNCTION (fndecl);
7434 if (dsf && (flags & TDF_EH))
7435 dump_eh_tree (file, dsf);
7436
7437 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7438 {
7439 dump_node (fndecl, TDF_SLIM | flags, file);
7440 current_function_decl = old_current_fndecl;
7441 return;
7442 }
7443
7444 /* When GIMPLE is lowered, the variables are no longer available in
7445 BIND_EXPRs, so display them separately. */
7446 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7447 {
7448 unsigned ix;
7449 ignore_topmost_bind = true;
7450
7451 fprintf (file, "{\n");
7452 if (gimple_in_ssa_p (fun)
7453 && (flags & TDF_ALIAS))
7454 {
7455 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7456 arg = DECL_CHAIN (arg))
7457 {
7458 tree def = ssa_default_def (fun, arg);
7459 if (def)
7460 dump_default_def (file, def, 2, flags);
7461 }
7462
7463 tree res = DECL_RESULT (fun->decl);
7464 if (res != NULL_TREE
7465 && DECL_BY_REFERENCE (res))
7466 {
7467 tree def = ssa_default_def (fun, res);
7468 if (def)
7469 dump_default_def (file, def, 2, flags);
7470 }
7471
7472 tree static_chain = fun->static_chain_decl;
7473 if (static_chain != NULL_TREE)
7474 {
7475 tree def = ssa_default_def (fun, static_chain);
7476 if (def)
7477 dump_default_def (file, def, 2, flags);
7478 }
7479 }
7480
7481 if (!vec_safe_is_empty (fun->local_decls))
7482 FOR_EACH_LOCAL_DECL (fun, ix, var)
7483 {
7484 print_generic_decl (file, var, flags);
7485 if (flags & TDF_VERBOSE)
7486 print_node (file, "", var, 4);
7487 fprintf (file, "\n");
7488
7489 any_var = true;
7490 }
7491 if (gimple_in_ssa_p (cfun))
7492 for (ix = 1; ix < num_ssa_names; ++ix)
7493 {
7494 tree name = ssa_name (ix);
7495 if (name && !SSA_NAME_VAR (name))
7496 {
7497 fprintf (file, " ");
7498 print_generic_expr (file, TREE_TYPE (name), flags);
7499 fprintf (file, " ");
7500 print_generic_expr (file, name, flags);
7501 fprintf (file, ";\n");
7502
7503 any_var = true;
7504 }
7505 }
7506 }
7507
7508 if (fun && fun->decl == fndecl
7509 && fun->cfg
7510 && basic_block_info_for_fn (fun))
7511 {
7512 /* If the CFG has been built, emit a CFG-based dump. */
7513 if (!ignore_topmost_bind)
7514 fprintf (file, "{\n");
7515
7516 if (any_var && n_basic_blocks_for_fn (fun))
7517 fprintf (file, "\n");
7518
7519 FOR_EACH_BB_FN (bb, fun)
7520 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7521
7522 fprintf (file, "}\n");
7523 }
7524 else if (DECL_SAVED_TREE (fndecl) == NULL)
7525 {
7526 /* The function is now in GIMPLE form but the CFG has not been
7527 built yet. Emit the single sequence of GIMPLE statements
7528 that make up its body. */
7529 gimple_seq body = gimple_body (fndecl);
7530
7531 if (gimple_seq_first_stmt (body)
7532 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7533 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7534 print_gimple_seq (file, body, 0, flags);
7535 else
7536 {
7537 if (!ignore_topmost_bind)
7538 fprintf (file, "{\n");
7539
7540 if (any_var)
7541 fprintf (file, "\n");
7542
7543 print_gimple_seq (file, body, 2, flags);
7544 fprintf (file, "}\n");
7545 }
7546 }
7547 else
7548 {
7549 int indent;
7550
7551 /* Make a tree based dump. */
7552 chain = DECL_SAVED_TREE (fndecl);
7553 if (chain && TREE_CODE (chain) == BIND_EXPR)
7554 {
7555 if (ignore_topmost_bind)
7556 {
7557 chain = BIND_EXPR_BODY (chain);
7558 indent = 2;
7559 }
7560 else
7561 indent = 0;
7562 }
7563 else
7564 {
7565 if (!ignore_topmost_bind)
7566 {
7567 fprintf (file, "{\n");
7568 /* No topmost bind, pretend it's ignored for later. */
7569 ignore_topmost_bind = true;
7570 }
7571 indent = 2;
7572 }
7573
7574 if (any_var)
7575 fprintf (file, "\n");
7576
7577 print_generic_stmt_indented (file, chain, flags, indent);
7578 if (ignore_topmost_bind)
7579 fprintf (file, "}\n");
7580 }
7581
7582 if (flags & TDF_ENUMERATE_LOCALS)
7583 dump_enumerated_decls (file, flags);
7584 fprintf (file, "\n\n");
7585
7586 current_function_decl = old_current_fndecl;
7587 }
7588
7589 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7590
7591 DEBUG_FUNCTION void
7592 debug_function (tree fn, int flags)
7593 {
7594 dump_function_to_file (fn, stderr, flags);
7595 }
7596
7597
7598 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7599
7600 static void
7601 print_pred_bbs (FILE *file, basic_block bb)
7602 {
7603 edge e;
7604 edge_iterator ei;
7605
7606 FOR_EACH_EDGE (e, ei, bb->preds)
7607 fprintf (file, "bb_%d ", e->src->index);
7608 }
7609
7610
7611 /* Print on FILE the indexes for the successors of basic_block BB. */
7612
7613 static void
7614 print_succ_bbs (FILE *file, basic_block bb)
7615 {
7616 edge e;
7617 edge_iterator ei;
7618
7619 FOR_EACH_EDGE (e, ei, bb->succs)
7620 fprintf (file, "bb_%d ", e->dest->index);
7621 }
7622
7623 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7624
7625 void
7626 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7627 {
7628 char *s_indent = (char *) alloca ((size_t) indent + 1);
7629 memset ((void *) s_indent, ' ', (size_t) indent);
7630 s_indent[indent] = '\0';
7631
7632 /* Print basic_block's header. */
7633 if (verbosity >= 2)
7634 {
7635 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7636 print_pred_bbs (file, bb);
7637 fprintf (file, "}, succs = {");
7638 print_succ_bbs (file, bb);
7639 fprintf (file, "})\n");
7640 }
7641
7642 /* Print basic_block's body. */
7643 if (verbosity >= 3)
7644 {
7645 fprintf (file, "%s {\n", s_indent);
7646 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7647 fprintf (file, "%s }\n", s_indent);
7648 }
7649 }
7650
7651 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7652
7653 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7654 VERBOSITY level, this outputs the contents of the loop, or just its
7655 structure. */
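
/* The header line emitted below has the form (values illustrative)

     loop_1 (header = 3, latch = 4, niter = 7, upper_bound = 7, estimate = 7)

   where the upper_bound and estimate parts appear only when known.  */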
7656
7657 static void
7658 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7659 {
7660 char *s_indent;
7661 basic_block bb;
7662
7663 if (loop == NULL)
7664 return;
7665
7666 s_indent = (char *) alloca ((size_t) indent + 1);
7667 memset ((void *) s_indent, ' ', (size_t) indent);
7668 s_indent[indent] = '\0';
7669
7670 /* Print loop's header. */
7671 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7672 if (loop->header)
7673 fprintf (file, "header = %d", loop->header->index);
7674 else
7675 {
7676 fprintf (file, "deleted)\n");
7677 return;
7678 }
7679 if (loop->latch)
7680 fprintf (file, ", latch = %d", loop->latch->index);
7681 else
7682 fprintf (file, ", multiple latches");
7683 fprintf (file, ", niter = ");
7684 print_generic_expr (file, loop->nb_iterations, 0);
7685
7686 if (loop->any_upper_bound)
7687 {
7688 fprintf (file, ", upper_bound = ");
7689 print_decu (loop->nb_iterations_upper_bound, file);
7690 }
7691
7692 if (loop->any_estimate)
7693 {
7694 fprintf (file, ", estimate = ");
7695 print_decu (loop->nb_iterations_estimate, file);
7696 }
7697 fprintf (file, ")\n");
7698
7699 /* Print loop's body. */
7700 if (verbosity >= 1)
7701 {
7702 fprintf (file, "%s{\n", s_indent);
7703 FOR_EACH_BB_FN (bb, cfun)
7704 if (bb->loop_father == loop)
7705 print_loops_bb (file, bb, indent, verbosity);
7706
7707 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7708 fprintf (file, "%s}\n", s_indent);
7709 }
7710 }
7711
7712 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7713 spaces. Depending on the VERBOSITY level, this outputs the contents of the
7714 loop, or just its structure. */
7715
7716 static void
7717 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7718 int verbosity)
7719 {
7720 if (loop == NULL)
7721 return;
7722
7723 print_loop (file, loop, indent, verbosity);
7724 print_loop_and_siblings (file, loop->next, indent, verbosity);
7725 }
7726
7727 /* Follow a CFG edge from the entry point of the current function, and on
7728 entry of a loop, pretty print the loop structure on FILE. */
7729
7730 void
7731 print_loops (FILE *file, int verbosity)
7732 {
7733 basic_block bb;
7734
7735 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7736 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7737 if (bb && bb->loop_father)
7738 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7739 }
7740
7741 /* Dump a loop. */
7742
7743 DEBUG_FUNCTION void
7744 debug (struct loop &ref)
7745 {
7746 print_loop (stderr, &ref, 0, /*verbosity*/0);
7747 }
7748
7749 DEBUG_FUNCTION void
7750 debug (struct loop *ptr)
7751 {
7752 if (ptr)
7753 debug (*ptr);
7754 else
7755 fprintf (stderr, "<nil>\n");
7756 }
7757
7758 /* Dump a loop verbosely. */
7759
7760 DEBUG_FUNCTION void
7761 debug_verbose (struct loop &ref)
7762 {
7763 print_loop (stderr, &ref, 0, /*verbosity*/3);
7764 }
7765
7766 DEBUG_FUNCTION void
7767 debug_verbose (struct loop *ptr)
7768 {
7769 if (ptr)
7770 debug_verbose (*ptr);
7771 else
7772 fprintf (stderr, "<nil>\n");
7773 }
7774
7775
7776 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7777
7778 DEBUG_FUNCTION void
7779 debug_loops (int verbosity)
7780 {
7781 print_loops (stderr, verbosity);
7782 }
7783
7784 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7785
7786 DEBUG_FUNCTION void
7787 debug_loop (struct loop *loop, int verbosity)
7788 {
7789 print_loop (stderr, loop, 0, verbosity);
7790 }
7791
7792 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7793 level. */
7794
7795 DEBUG_FUNCTION void
7796 debug_loop_num (unsigned num, int verbosity)
7797 {
7798 debug_loop (get_loop (cfun, num), verbosity);
7799 }
7800
7801 /* Return true if BB ends with a call, possibly followed by some
7802 instructions that must stay with the call. Return false
7803 otherwise. */
7804
7805 static bool
7806 gimple_block_ends_with_call_p (basic_block bb)
7807 {
7808 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7809 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7810 }
7811
7812
7813 /* Return true if BB ends with a conditional branch. Return false
7814 otherwise. */
7815
7816 static bool
7817 gimple_block_ends_with_condjump_p (const_basic_block bb)
7818 {
7819 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7820 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7821 }
7822
7823
7824 /* Return true if we need to add a fake edge to exit at statement T.
7825 Helper function for gimple_flow_call_edges_add. */
7826
7827 static bool
7828 need_fake_edge_p (gimple *t)
7829 {
7830 tree fndecl = NULL_TREE;
7831 int call_flags = 0;
7832
7833 /* NORETURN and LONGJMP calls already have an edge to exit.
7834 CONST and PURE calls do not need one.
7835 We don't currently check for CONST and PURE here, although
7836 it would be a good idea, because those attributes are
7837 figured out from the RTL in mark_constant_function, and
7838 the counter incrementation code from -fprofile-arcs
7839 leads to different results from -fbranch-probabilities. */
7840 if (is_gimple_call (t))
7841 {
7842 fndecl = gimple_call_fndecl (t);
7843 call_flags = gimple_call_flags (t);
7844 }
7845
7846 if (is_gimple_call (t)
7847 && fndecl
7848 && DECL_BUILT_IN (fndecl)
7849 && (call_flags & ECF_NOTHROW)
7850 && !(call_flags & ECF_RETURNS_TWICE)
7851 /* fork() doesn't really return twice, but the effect of
7852 wrapping it in __gcov_fork() which calls __gcov_flush()
7853 and clears the counters before forking has the same
7854 effect as returning twice. Force a fake edge. */
7855 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7856 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7857 return false;
7858
7859 if (is_gimple_call (t))
7860 {
7861 edge_iterator ei;
7862 edge e;
7863 basic_block bb;
7864
7865 if (!(call_flags & ECF_NORETURN))
7866 return true;
7867
7868 bb = gimple_bb (t);
7869 FOR_EACH_EDGE (e, ei, bb->succs)
7870 if ((e->flags & EDGE_FAKE) == 0)
7871 return true;
7872 }
7873
7874 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7875 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7876 return true;
7877
7878 return false;
7879 }
7880
7881
7882 /* Add fake edges to the function exit for any non-constant and
7883 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7884 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7885 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7886 that were split.
7887
7888 The goal is to expose cases in which entering a basic block does
7889 not imply that all subsequent instructions must be executed. */
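
/* For example (illustrative): after

     foo ();
     x = 1;

   where foo can call exit, entering the block does not guarantee that
   "x = 1" executes, so a fake edge from the call's block to EXIT keeps
   the -fprofile-arcs instrumentation and the minimal spanning tree
   computation consistent.  */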
7890
7891 static int
7892 gimple_flow_call_edges_add (sbitmap blocks)
7893 {
7894 int i;
7895 int blocks_split = 0;
7896 int last_bb = last_basic_block_for_fn (cfun);
7897 bool check_last_block = false;
7898
7899 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7900 return 0;
7901
7902 if (! blocks)
7903 check_last_block = true;
7904 else
7905 check_last_block = bitmap_bit_p (blocks,
7906 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7907
7908 /* In the last basic block, before epilogue generation, there will be
7909 a fallthru edge to EXIT. Special care is required if the last insn
7910 of the last basic block is a call because make_edge folds duplicate
7911 edges, which would result in the fallthru edge also being marked
7912 fake, which would result in the fallthru edge being removed by
7913 remove_fake_edges, which would result in an invalid CFG.
7914
7915 Moreover, we can't elide the outgoing fake edge, since the block
7916 profiler needs to take this into account in order to solve the minimal
7917 spanning tree in the case that the call doesn't return.
7918
7919 Handle this by adding a dummy instruction in a new last basic block. */
7920 if (check_last_block)
7921 {
7922 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7923 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7924 gimple *t = NULL;
7925
7926 if (!gsi_end_p (gsi))
7927 t = gsi_stmt (gsi);
7928
7929 if (t && need_fake_edge_p (t))
7930 {
7931 edge e;
7932
7933 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7934 if (e)
7935 {
7936 gsi_insert_on_edge (e, gimple_build_nop ());
7937 gsi_commit_edge_inserts ();
7938 }
7939 }
7940 }
7941
7942 /* Now add fake edges to the function exit for any non-constant
7943 calls since there is no way that we can determine if they will
7944 return or not... */
7945 for (i = 0; i < last_bb; i++)
7946 {
7947 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7948 gimple_stmt_iterator gsi;
7949 gimple *stmt, *last_stmt;
7950
7951 if (!bb)
7952 continue;
7953
7954 if (blocks && !bitmap_bit_p (blocks, i))
7955 continue;
7956
7957 gsi = gsi_last_nondebug_bb (bb);
7958 if (!gsi_end_p (gsi))
7959 {
7960 last_stmt = gsi_stmt (gsi);
7961 do
7962 {
7963 stmt = gsi_stmt (gsi);
7964 if (need_fake_edge_p (stmt))
7965 {
7966 edge e;
7967
7968 /* The handling above of the final block before the
7969 epilogue should be enough to verify that there is
7970 no edge to the exit block in CFG already.
7971 Calling make_edge in such case would cause us to
7972 mark that edge as fake and remove it later. */
7973 if (flag_checking && stmt == last_stmt)
7974 {
7975 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7976 gcc_assert (e == NULL);
7977 }
7978
7979 /* Note that the following may create a new basic block
7980 and renumber the existing basic blocks. */
7981 if (stmt != last_stmt)
7982 {
7983 e = split_block (bb, stmt);
7984 if (e)
7985 blocks_split++;
7986 }
7987 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7988 }
7989 gsi_prev (&gsi);
7990 }
7991 while (!gsi_end_p (gsi));
7992 }
7993 }
7994
7995 if (blocks_split)
7996 verify_flow_info ();
7997
7998 return blocks_split;
7999 }
8000
8001 /* Removes edge E and all the blocks dominated by it, and updates dominance
8002 information. The IL in E->src needs to be updated separately.
8003 If dominance info is not available, only the edge E is removed. */
8004
8005 void
8006 remove_edge_and_dominated_blocks (edge e)
8007 {
8008 vec<basic_block> bbs_to_remove = vNULL;
8009 vec<basic_block> bbs_to_fix_dom = vNULL;
8010 bitmap df, df_idom;
8011 edge f;
8012 edge_iterator ei;
8013 bool none_removed = false;
8014 unsigned i;
8015 basic_block bb, dbb;
8016 bitmap_iterator bi;
8017
8018 /* Removing a path inside a non-root loop may change loop ownership
8019 of blocks or remove loops altogether. Mark loops for fixup. */
8020 if (current_loops
8021 && loop_outer (e->src->loop_father) != NULL
8022 && e->src->loop_father == e->dest->loop_father)
8023 loops_state_set (LOOPS_NEED_FIXUP);
8024
8025 if (!dom_info_available_p (CDI_DOMINATORS))
8026 {
8027 remove_edge (e);
8028 return;
8029 }
8030
8031 /* No updating is needed for edges to exit. */
8032 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8033 {
8034 if (cfgcleanup_altered_bbs)
8035 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8036 remove_edge (e);
8037 return;
8038 }
8039
8040 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8041 that is not dominated by E->dest, then this set is empty. Otherwise,
8042 all the basic blocks dominated by E->dest are removed.
8043
8044 Also, to DF_IDOM we store the immediate dominators of the blocks in
8045 the dominance frontier of E (i.e., of the successors of the
8046 removed blocks, if there are any, and of E->dest otherwise). */
8047 FOR_EACH_EDGE (f, ei, e->dest->preds)
8048 {
8049 if (f == e)
8050 continue;
8051
8052 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8053 {
8054 none_removed = true;
8055 break;
8056 }
8057 }
8058
8059 df = BITMAP_ALLOC (NULL);
8060 df_idom = BITMAP_ALLOC (NULL);
8061
8062 if (none_removed)
8063 bitmap_set_bit (df_idom,
8064 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8065 else
8066 {
8067 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8068 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8069 {
8070 FOR_EACH_EDGE (f, ei, bb->succs)
8071 {
8072 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8073 bitmap_set_bit (df, f->dest->index);
8074 }
8075 }
8076 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8077 bitmap_clear_bit (df, bb->index);
8078
8079 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8080 {
8081 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8082 bitmap_set_bit (df_idom,
8083 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8084 }
8085 }
8086
8087 if (cfgcleanup_altered_bbs)
8088 {
8089 /* Record the set of the altered basic blocks. */
8090 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8091 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8092 }
8093
8094 /* Remove E and the cancelled blocks. */
8095 if (none_removed)
8096 remove_edge (e);
8097 else
8098 {
8099 /* Walk backwards so as to get a chance to substitute all
8100 released DEFs into debug stmts. See
8101 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8102 details. */
8103 for (i = bbs_to_remove.length (); i-- > 0; )
8104 delete_basic_block (bbs_to_remove[i]);
8105 }
8106
8107 /* Update the dominance information. The immediate dominator may change only
8108 for blocks whose immediate dominator belongs to DF_IDOM:
8109
8110 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8111 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8112 Z dominates X after the removal. Before removal, there exists a path P
8113 from Y to X that avoids Z. Let F be the last edge on P that is
8114 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8115 dominates W, and because of P, Z does not dominate W), and W belongs to
8116 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8117 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8118 {
8119 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8120 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8121 dbb;
8122 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8123 bbs_to_fix_dom.safe_push (dbb);
8124 }
8125
8126 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8127
8128 BITMAP_FREE (df);
8129 BITMAP_FREE (df_idom);
8130 bbs_to_remove.release ();
8131 bbs_to_fix_dom.release ();
8132 }
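
/* A minimal usage sketch, with a hypothetical block BB ending in a
   GIMPLE_COND whose true arm was proven unreachable:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
     remove_edge_and_dominated_blocks (true_edge);

   The GIMPLE_COND in BB still needs to be simplified by the caller,
   since the IL in E->src is not updated here.  */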
8133
8134 /* Purge dead EH edges from basic block BB. */
8135
8136 bool
8137 gimple_purge_dead_eh_edges (basic_block bb)
8138 {
8139 bool changed = false;
8140 edge e;
8141 edge_iterator ei;
8142 gimple *stmt = last_stmt (bb);
8143
8144 if (stmt && stmt_can_throw_internal (stmt))
8145 return false;
8146
8147 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8148 {
8149 if (e->flags & EDGE_EH)
8150 {
8151 remove_edge_and_dominated_blocks (e);
8152 changed = true;
8153 }
8154 else
8155 ei_next (&ei);
8156 }
8157
8158 return changed;
8159 }
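
/* A typical caller pattern (a sketch; execute_fixup_cfg below uses the
   same idiom): once STMT is known to no longer throw,

     if (maybe_clean_eh_stmt (stmt)
         && gimple_purge_dead_eh_edges (bb))
       todo |= TODO_cleanup_cfg;

   removes the now-dead EH successor edges of BB.  */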
8160
8161 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8162
8163 bool
8164 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8165 {
8166 bool changed = false;
8167 unsigned i;
8168 bitmap_iterator bi;
8169
8170 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8171 {
8172 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8173
8174 /* Earlier gimple_purge_dead_eh_edges could have removed
8175 this basic block already. */
8176 gcc_assert (bb || changed);
8177 if (bb != NULL)
8178 changed |= gimple_purge_dead_eh_edges (bb);
8179 }
8180
8181 return changed;
8182 }
8183
8184 /* Purge dead abnormal call edges from basic block BB. */
8185
8186 bool
8187 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8188 {
8189 bool changed = false;
8190 edge e;
8191 edge_iterator ei;
8192 gimple *stmt = last_stmt (bb);
8193
8194 if (!cfun->has_nonlocal_label
8195 && !cfun->calls_setjmp)
8196 return false;
8197
8198 if (stmt && stmt_can_make_abnormal_goto (stmt))
8199 return false;
8200
8201 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8202 {
8203 if (e->flags & EDGE_ABNORMAL)
8204 {
8205 if (e->flags & EDGE_FALLTHRU)
8206 e->flags &= ~EDGE_ABNORMAL;
8207 else
8208 remove_edge_and_dominated_blocks (e);
8209 changed = true;
8210 }
8211 else
8212 ei_next (&ei);
8213 }
8214
8215 return changed;
8216 }
8217
8218 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8219
8220 bool
8221 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8222 {
8223 bool changed = false;
8224 unsigned i;
8225 bitmap_iterator bi;
8226
8227 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8228 {
8229 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8230
8231 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8232 this basic block already. */
8233 gcc_assert (bb || changed);
8234 if (bb != NULL)
8235 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8236 }
8237
8238 return changed;
8239 }
8240
8241 /* This function is called whenever a new edge is created or
8242 redirected. */
8243
8244 static void
8245 gimple_execute_on_growing_pred (edge e)
8246 {
8247 basic_block bb = e->dest;
8248
8249 if (!gimple_seq_empty_p (phi_nodes (bb)))
8250 reserve_phi_args_for_new_edge (bb);
8251 }
8252
8253 /* This function is called immediately before edge E is removed from
8254 the edge vector E->dest->preds. */
8255
8256 static void
8257 gimple_execute_on_shrinking_pred (edge e)
8258 {
8259 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8260 remove_phi_args (e);
8261 }
8262
8263 /*---------------------------------------------------------------------------
8264 Helper functions for Loop versioning
8265 ---------------------------------------------------------------------------*/
8266
8267 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8268 of 'first'. Both of them are dominated by 'new_head' basic block. When
8269 'new_head' was created by splitting 'second's incoming edge, it received
8270 phi arguments on that edge from split_edge(). Later, an additional edge
8271 'e' was created to connect 'new_head' and 'first'. Now this routine adds
8272 to edge 'e' the same phi args that the 'new_head' to 'second' edge
8273 received as part of the edge splitting. */
8274
8275 static void
8276 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8277 basic_block new_head, edge e)
8278 {
8279 gphi *phi1, *phi2;
8280 gphi_iterator psi1, psi2;
8281 tree def;
8282 edge e2 = find_edge (new_head, second);
8283
8284 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8285 edge, we should always have an edge from NEW_HEAD to SECOND. */
8286 gcc_assert (e2 != NULL);
8287
8288 /* Browse all 'second' basic block phi nodes and add phi args to
8289 edge 'e' for 'first' head. PHI args are always in correct order. */
8290
8291 for (psi2 = gsi_start_phis (second),
8292 psi1 = gsi_start_phis (first);
8293 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8294 gsi_next (&psi2), gsi_next (&psi1))
8295 {
8296 phi1 = psi1.phi ();
8297 phi2 = psi2.phi ();
8298 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8299 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8300 }
8301 }
8302
8303
8304 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8305 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8306 the destination of the ELSE part. */
8307
8308 static void
8309 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8310 basic_block second_head ATTRIBUTE_UNUSED,
8311 basic_block cond_bb, void *cond_e)
8312 {
8313 gimple_stmt_iterator gsi;
8314 gimple *new_cond_expr;
8315 tree cond_expr = (tree) cond_e;
8316 edge e0;
8317
8318 /* Build the new conditional expr. */
8319 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8320 NULL_TREE, NULL_TREE);
8321
8322 /* Add new cond in cond_bb. */
8323 gsi = gsi_last_bb (cond_bb);
8324 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8325
8326 /* Adjust edges appropriately to connect new head with first head
8327 as well as second head. */
8328 e0 = single_succ_edge (cond_bb);
8329 e0->flags &= ~EDGE_FALLTHRU;
8330 e0->flags |= EDGE_FALSE_VALUE;
8331 }
8332
8333
8334 /* Do book-keeping of basic block BB for the profile consistency checker.
8335 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8336 do post-pass accounting. Store the counts in RECORD. */
8337 static void
8338 gimple_account_profile_record (basic_block bb, int after_pass,
8339 struct profile_record *record)
8340 {
8341 gimple_stmt_iterator i;
8342 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8343 {
8344 record->size[after_pass]
8345 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8346 if (profile_status_for_fn (cfun) == PROFILE_READ)
8347 record->time[after_pass]
8348 += estimate_num_insns (gsi_stmt (i),
8349 &eni_time_weights) * bb->count;
8350 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8351 record->time[after_pass]
8352 += estimate_num_insns (gsi_stmt (i),
8353 &eni_time_weights) * bb->frequency;
8354 }
8355 }
8356
8357 struct cfg_hooks gimple_cfg_hooks = {
8358 "gimple",
8359 gimple_verify_flow_info,
8360 gimple_dump_bb, /* dump_bb */
8361 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8362 create_bb, /* create_basic_block */
8363 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8364 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8365 gimple_can_remove_branch_p, /* can_remove_branch_p */
8366 remove_bb, /* delete_basic_block */
8367 gimple_split_block, /* split_block */
8368 gimple_move_block_after, /* move_block_after */
8369 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8370 gimple_merge_blocks, /* merge_blocks */
8371 gimple_predict_edge, /* predict_edge */
8372 gimple_predicted_by_p, /* predicted_by_p */
8373 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8374 gimple_duplicate_bb, /* duplicate_block */
8375 gimple_split_edge, /* split_edge */
8376 gimple_make_forwarder_block, /* make_forwarder_block */
8377 NULL, /* tidy_fallthru_edge */
8378 NULL, /* force_nonfallthru */
8379 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8380 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8381 gimple_flow_call_edges_add, /* flow_call_edges_add */
8382 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8383 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8384 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8385 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8386 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8387 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8388 flush_pending_stmts, /* flush_pending_stmts */
8389 gimple_empty_block_p, /* block_empty_p */
8390 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8391 gimple_account_profile_record,
8392 };
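
/* These hooks are what let the generic routines in cfghooks.c operate
   on GIMPLE: once gimple_register_cfg_hooks () points cfg_hooks at the
   table above, a generic call such as split_block () dispatches to
   gimple_split_block (a sketch of the dispatch; see cfghooks.c).  */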
8393
8394
8395 /* Split all critical edges. */
8396
8397 unsigned int
8398 split_critical_edges (void)
8399 {
8400 basic_block bb;
8401 edge e;
8402 edge_iterator ei;
8403
8404 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8405 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8406 mappings around the calls to split_edge. */
8407 start_recording_case_labels ();
8408 FOR_ALL_BB_FN (bb, cfun)
8409 {
8410 FOR_EACH_EDGE (e, ei, bb->succs)
8411 {
8412 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8413 split_edge (e);
8414 /* PRE inserts statements on edges and expects that,
8415 since split_critical_edges was run beforehand, committing edge
8416 insertions will not split more edges. In addition to critical
8417 edges we must therefore also split edges out of blocks ending in a
8418 control flow statement such as RESX when their destination cannot
8419 accept an insertion (it has PHIs, multiple predecessors, or is the
8420 exit block). This matches the logic in gimple_find_edge_insert_loc. */
8421 else if ((!single_pred_p (e->dest)
8422 || !gimple_seq_empty_p (phi_nodes (e->dest))
8423 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8424 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8425 && !(e->flags & EDGE_ABNORMAL))
8426 {
8427 gimple_stmt_iterator gsi;
8428
8429 gsi = gsi_last_bb (e->src);
8430 if (!gsi_end_p (gsi)
8431 && stmt_ends_bb_p (gsi_stmt (gsi))
8432 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8433 && !gimple_call_builtin_p (gsi_stmt (gsi),
8434 BUILT_IN_RETURN)))
8435 split_edge (e);
8436 }
8437 }
8438 }
8439 end_recording_case_labels ();
8440 return 0;
8441 }
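
/* An edge is critical when its source has more than one successor and
   its destination has more than one predecessor, e.g. A->C below:

        A   B
       / \ /
      D   C

   No statement can be inserted on A->C without affecting either the
   A->D or the B->C path, so split_edge places a new empty block on
   the edge (an illustrative sketch of what EDGE_CRITICAL_P tests).  */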
8442
8443 namespace {
8444
8445 const pass_data pass_data_split_crit_edges =
8446 {
8447 GIMPLE_PASS, /* type */
8448 "crited", /* name */
8449 OPTGROUP_NONE, /* optinfo_flags */
8450 TV_TREE_SPLIT_EDGES, /* tv_id */
8451 PROP_cfg, /* properties_required */
8452 PROP_no_crit_edges, /* properties_provided */
8453 0, /* properties_destroyed */
8454 0, /* todo_flags_start */
8455 0, /* todo_flags_finish */
8456 };
8457
8458 class pass_split_crit_edges : public gimple_opt_pass
8459 {
8460 public:
8461 pass_split_crit_edges (gcc::context *ctxt)
8462 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8463 {}
8464
8465 /* opt_pass methods: */
8466 virtual unsigned int execute (function *) { return split_critical_edges (); }
8467
8468 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8469 }; // class pass_split_crit_edges
8470
8471 } // anon namespace
8472
8473 gimple_opt_pass *
8474 make_pass_split_crit_edges (gcc::context *ctxt)
8475 {
8476 return new pass_split_crit_edges (ctxt);
8477 }
8478
8479
8480 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8481 in basic block BB, splitting the block as appropriate and
8482 creating a new conditionally executed basic block.
8483 Return the created basic block. */
8484 basic_block
8485 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8486 {
8487 edge fall = split_block (bb, stmt);
8488 gimple_stmt_iterator iter = gsi_last_bb (bb);
8489 basic_block new_bb;
8490
8491 /* Insert cond statement. */
8492 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8493 if (gsi_end_p (iter))
8494 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8495 else
8496 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8497
8498 /* Create conditionally executed block. */
8499 new_bb = create_empty_bb (bb);
8500 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8501 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8502
8503 /* Fix edge for split bb. */
8504 fall->flags = EDGE_FALSE_VALUE;
8505
8506 /* Update dominance info. */
8507 if (dom_info_available_p (CDI_DOMINATORS))
8508 {
8509 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8510 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8511 }
8512
8513 /* Update loop info. */
8514 if (current_loops)
8515 add_bb_to_loop (new_bb, bb->loop_father);
8516
8517 return new_bb;
8518 }
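
/* A minimal usage sketch with hypothetical BB, STMT and VAL: to make
   code conditional on VAL != 0 right after STMT, a caller could do

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   and then populate THEN_BB; the block that originally followed STMT
   becomes the join block reached on both paths.  */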
8519
8520 /* Build a ternary operation and gimplify it. Emit code before GSI.
8521 Return the gimple_val holding the result. */
8522
8523 tree
8524 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8525 tree type, tree a, tree b, tree c)
8526 {
8527 tree ret;
8528 location_t loc = gimple_location (gsi_stmt (*gsi));
8529
8530 ret = fold_build3_loc (loc, code, type, a, b, c);
8531 STRIP_NOPS (ret);
8532
8533 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8534 GSI_SAME_STMT);
8535 }
8536
8537 /* Build a binary operation and gimplify it. Emit code before GSI.
8538 Return the gimple_val holding the result. */
8539
8540 tree
8541 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8542 tree type, tree a, tree b)
8543 {
8544 tree ret;
8545
8546 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8547 STRIP_NOPS (ret);
8548
8549 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8550 GSI_SAME_STMT);
8551 }
8552
8553 /* Build a unary operation and gimplify it. Emit code before GSI.
8554 Return the gimple_val holding the result. */
8555
8556 tree
8557 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8558 tree a)
8559 {
8560 tree ret;
8561
8562 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8563 STRIP_NOPS (ret);
8564
8565 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8566 GSI_SAME_STMT);
8567 }
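
/* For example, to emit code computing (A + B) * C before GSI (a
   minimal sketch with hypothetical operands A, B, C of type TYPE):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, gimplifies it before GSI, and
   returns a gimple val holding the result.  */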
8568
8569
8570 \f
8571 /* Given a basic block B which ends with a conditional and has
8572 precisely two successors, determine which of the edges is taken if
8573 the conditional is true and which is taken if the conditional is
8574 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8575
8576 void
8577 extract_true_false_edges_from_block (basic_block b,
8578 edge *true_edge,
8579 edge *false_edge)
8580 {
8581 edge e = EDGE_SUCC (b, 0);
8582
8583 if (e->flags & EDGE_TRUE_VALUE)
8584 {
8585 *true_edge = e;
8586 *false_edge = EDGE_SUCC (b, 1);
8587 }
8588 else
8589 {
8590 *false_edge = e;
8591 *true_edge = EDGE_SUCC (b, 1);
8592 }
8593 }
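
/* Typical use, for a block BB known to end in a GIMPLE_COND (sketch):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   TRUE_EDGE->dest is then the block executed when the condition
   holds.  */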
8594
8595
8596 /* From a controlling predicate in the immediate dominator DOM of
8597 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8598 predicate evaluates to true and false and store them to
8599 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8600 they are non-NULL. Return true if the edges can be determined,
8601 else return false. */
8602
8603 bool
8604 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8605 edge *true_controlled_edge,
8606 edge *false_controlled_edge)
8607 {
8608 basic_block bb = phiblock;
8609 edge true_edge, false_edge, tem;
8610 edge e0 = NULL, e1 = NULL;
8611
8612 /* We have to verify that one edge into the PHI node is dominated
8613 by the true edge of the predicate block and the other edge
8614 dominated by the false edge. This ensures that the PHI argument
8615 we are going to take is completely determined by the path we
8616 take from the predicate block.
8617 We can only use BB dominance checks below if the destination of
8618 the true/false edges are dominated by their edge, thus only
8619 have a single predecessor. */
8620 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8621 tem = EDGE_PRED (bb, 0);
8622 if (tem == true_edge
8623 || (single_pred_p (true_edge->dest)
8624 && (tem->src == true_edge->dest
8625 || dominated_by_p (CDI_DOMINATORS,
8626 tem->src, true_edge->dest))))
8627 e0 = tem;
8628 else if (tem == false_edge
8629 || (single_pred_p (false_edge->dest)
8630 && (tem->src == false_edge->dest
8631 || dominated_by_p (CDI_DOMINATORS,
8632 tem->src, false_edge->dest))))
8633 e1 = tem;
8634 else
8635 return false;
8636 tem = EDGE_PRED (bb, 1);
8637 if (tem == true_edge
8638 || (single_pred_p (true_edge->dest)
8639 && (tem->src == true_edge->dest
8640 || dominated_by_p (CDI_DOMINATORS,
8641 tem->src, true_edge->dest))))
8642 e0 = tem;
8643 else if (tem == false_edge
8644 || (single_pred_p (false_edge->dest)
8645 && (tem->src == false_edge->dest
8646 || dominated_by_p (CDI_DOMINATORS,
8647 tem->src, false_edge->dest))))
8648 e1 = tem;
8649 else
8650 return false;
8651 if (!e0 || !e1)
8652 return false;
8653
8654 if (true_controlled_edge)
8655 *true_controlled_edge = e0;
8656 if (false_controlled_edge)
8657 *false_controlled_edge = e1;
8658
8659 return true;
8660 }
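
/* E.g. for the diamond

       DOM
      /   \
    BB1   BB2
      \   /
    PHIBLOCK

   where DOM ends in a predicate, the PHIBLOCK-incoming edge whose path
   goes through DOM's true edge is stored to *TRUE_CONTROLLED_EDGE and
   the other to *FALSE_CONTROLLED_EDGE (an illustrative sketch).  */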
8661
8662
8663
8664 /* Emit return warnings. */
8665
8666 namespace {
8667
8668 const pass_data pass_data_warn_function_return =
8669 {
8670 GIMPLE_PASS, /* type */
8671 "*warn_function_return", /* name */
8672 OPTGROUP_NONE, /* optinfo_flags */
8673 TV_NONE, /* tv_id */
8674 PROP_cfg, /* properties_required */
8675 0, /* properties_provided */
8676 0, /* properties_destroyed */
8677 0, /* todo_flags_start */
8678 0, /* todo_flags_finish */
8679 };
8680
8681 class pass_warn_function_return : public gimple_opt_pass
8682 {
8683 public:
8684 pass_warn_function_return (gcc::context *ctxt)
8685 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8686 {}
8687
8688 /* opt_pass methods: */
8689 virtual unsigned int execute (function *);
8690
8691 }; // class pass_warn_function_return
8692
8693 unsigned int
8694 pass_warn_function_return::execute (function *fun)
8695 {
8696 source_location location;
8697 gimple *last;
8698 edge e;
8699 edge_iterator ei;
8700
8701 if (!targetm.warn_func_return (fun->decl))
8702 return 0;
8703
8704 /* If we have a path to EXIT, then we do return. */
8705 if (TREE_THIS_VOLATILE (fun->decl)
8706 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8707 {
8708 location = UNKNOWN_LOCATION;
8709 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8710 {
8711 last = last_stmt (e->src);
8712 if ((gimple_code (last) == GIMPLE_RETURN
8713 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8714 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8715 break;
8716 }
8717 if (location == UNKNOWN_LOCATION)
8718 location = cfun->function_end_locus;
8719 warning_at (location, 0, "%<noreturn%> function does return");
8720 }
8721
8722 /* If we see "return;" in some basic block, then we do reach the end
8723 without returning a value. */
8724 else if (warn_return_type
8725 && !TREE_NO_WARNING (fun->decl)
8726 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8727 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8728 {
8729 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8730 {
8731 gimple *last = last_stmt (e->src);
8732 greturn *return_stmt = dyn_cast <greturn *> (last);
8733 if (return_stmt
8734 && gimple_return_retval (return_stmt) == NULL
8735 && !gimple_no_warning_p (last))
8736 {
8737 location = gimple_location (last);
8738 if (location == UNKNOWN_LOCATION)
8739 location = fun->function_end_locus;
8740 warning_at (location, OPT_Wreturn_type,
            "control reaches end of non-void function");
8741 TREE_NO_WARNING (fun->decl) = 1;
8742 break;
8743 }
8744 }
8745 }
8746 return 0;
8747 }
8748
8749 } // anon namespace
8750
8751 gimple_opt_pass *
8752 make_pass_warn_function_return (gcc::context *ctxt)
8753 {
8754 return new pass_warn_function_return (ctxt);
8755 }
8756
8757 /* Walk a gimplified function and warn about calls whose return value is
8758 ignored when attribute ((warn_unused_result)) is set. This is done before
8759 inlining, so we don't have to worry about that. */
8760
8761 static void
8762 do_warn_unused_result (gimple_seq seq)
8763 {
8764 tree fdecl, ftype;
8765 gimple_stmt_iterator i;
8766
8767 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8768 {
8769 gimple *g = gsi_stmt (i);
8770
8771 switch (gimple_code (g))
8772 {
8773 case GIMPLE_BIND:
8774 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8775 break;
8776 case GIMPLE_TRY:
8777 do_warn_unused_result (gimple_try_eval (g));
8778 do_warn_unused_result (gimple_try_cleanup (g));
8779 break;
8780 case GIMPLE_CATCH:
8781 do_warn_unused_result (gimple_catch_handler (
8782 as_a <gcatch *> (g)));
8783 break;
8784 case GIMPLE_EH_FILTER:
8785 do_warn_unused_result (gimple_eh_filter_failure (g));
8786 break;
8787
8788 case GIMPLE_CALL:
8789 if (gimple_call_lhs (g))
8790 break;
8791 if (gimple_call_internal_p (g))
8792 break;
8793
8794 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8795 LHS. All calls whose value is ignored should be
8796 represented like this. Look for the attribute. */
8797 fdecl = gimple_call_fndecl (g);
8798 ftype = gimple_call_fntype (g);
8799
8800 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8801 {
8802 location_t loc = gimple_location (g);
8803
8804 if (fdecl)
8805 warning_at (loc, OPT_Wunused_result,
8806 "ignoring return value of %qD, "
8807 "declared with attribute warn_unused_result",
8808 fdecl);
8809 else
8810 warning_at (loc, OPT_Wunused_result,
8811 "ignoring return value of function "
8812 "declared with attribute warn_unused_result");
8813 }
8814 break;
8815
8816 default:
8817 /* Not a container, not a call, or a call whose value is used. */
8818 break;
8819 }
8820 }
8821 }
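
/* For instance (an illustrative sketch):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   The call to f () is gimplified to a GIMPLE_CALL without an LHS, so
   the walk above reports it with -Wunused-result.  */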
8822
8823 namespace {
8824
8825 const pass_data pass_data_warn_unused_result =
8826 {
8827 GIMPLE_PASS, /* type */
8828 "*warn_unused_result", /* name */
8829 OPTGROUP_NONE, /* optinfo_flags */
8830 TV_NONE, /* tv_id */
8831 PROP_gimple_any, /* properties_required */
8832 0, /* properties_provided */
8833 0, /* properties_destroyed */
8834 0, /* todo_flags_start */
8835 0, /* todo_flags_finish */
8836 };
8837
8838 class pass_warn_unused_result : public gimple_opt_pass
8839 {
8840 public:
8841 pass_warn_unused_result (gcc::context *ctxt)
8842 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8843 {}
8844
8845 /* opt_pass methods: */
8846 virtual bool gate (function *) { return flag_warn_unused_result; }
8847 virtual unsigned int execute (function *)
8848 {
8849 do_warn_unused_result (gimple_body (current_function_decl));
8850 return 0;
8851 }
8852
8853 }; // class pass_warn_unused_result
8854
8855 } // anon namespace
8856
8857 gimple_opt_pass *
8858 make_pass_warn_unused_result (gcc::context *ctxt)
8859 {
8860 return new pass_warn_unused_result (ctxt);
8861 }
8862
8863 /* IPA passes, compilation of earlier functions or inlining
8864 might have changed some properties, such as marking functions nothrow,
8865 pure, const or noreturn.
8866 Remove redundant edges and basic blocks, and create new ones if necessary.
8867
8868 This pass can't be executed as a standalone pass from the pass manager,
8869 because between inlining and this fixup verify_flow_info would fail. */
8870
8871 unsigned int
8872 execute_fixup_cfg (void)
8873 {
8874 basic_block bb;
8875 gimple_stmt_iterator gsi;
8876 int todo = 0;
8877 gcov_type count_scale;
8878 edge e;
8879 edge_iterator ei;
8880
8881 count_scale
8882 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8883 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8884
8885 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8886 cgraph_node::get (current_function_decl)->count;
8887 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8888 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8889 count_scale);
8890
8891 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8892 e->count = apply_scale (e->count, count_scale);
8893
8894 FOR_EACH_BB_FN (bb, cfun)
8895 {
8896 bb->count = apply_scale (bb->count, count_scale);
8897 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8898 {
8899 gimple *stmt = gsi_stmt (gsi);
8900 tree decl = is_gimple_call (stmt)
8901 ? gimple_call_fndecl (stmt)
8902 : NULL;
8903 if (decl)
8904 {
8905 int flags = gimple_call_flags (stmt);
8906 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8907 {
8908 if (gimple_purge_dead_abnormal_call_edges (bb))
8909 todo |= TODO_cleanup_cfg;
8910
8911 if (gimple_in_ssa_p (cfun))
8912 {
8913 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8914 update_stmt (stmt);
8915 }
8916 }
8917
8918 if (flags & ECF_NORETURN
8919 && fixup_noreturn_call (stmt))
8920 todo |= TODO_cleanup_cfg;
8921 }
8922
8923 /* Remove stores to variables we marked write-only.
8924 Keep the access when the store has a side effect, i.e. when the
8925 source is volatile. */
8926 if (gimple_store_p (stmt)
8927 && !gimple_has_side_effects (stmt))
8928 {
8929 tree lhs = get_base_address (gimple_get_lhs (stmt));
8930
8931 if (TREE_CODE (lhs) == VAR_DECL
8932 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8933 && varpool_node::get (lhs)->writeonly)
8934 {
8935 unlink_stmt_vdef (stmt);
8936 gsi_remove (&gsi, true);
8937 release_defs (stmt);
8938 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8939 continue;
8940 }
8941 }
8942 /* For calls we can simply remove LHS when it is known
8943 to be write-only. */
8944 if (is_gimple_call (stmt)
8945 && gimple_get_lhs (stmt))
8946 {
8947 tree lhs = get_base_address (gimple_get_lhs (stmt));
8948
8949 if (TREE_CODE (lhs) == VAR_DECL
8950 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8951 && varpool_node::get (lhs)->writeonly)
8952 {
8953 gimple_call_set_lhs (stmt, NULL);
8954 update_stmt (stmt);
8955 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8956 }
8957 }
8958
8959 if (maybe_clean_eh_stmt (stmt)
8960 && gimple_purge_dead_eh_edges (bb))
8961 todo |= TODO_cleanup_cfg;
8962 gsi_next (&gsi);
8963 }
8964
8965 FOR_EACH_EDGE (e, ei, bb->succs)
8966 e->count = apply_scale (e->count, count_scale);
8967
8968 /* If we have a basic block with no successors that does not
8969 end with a control statement or a noreturn call, end it with
8970 a call to __builtin_unreachable. This situation can occur
8971 when inlining a noreturn call that does in fact return. */
8972 if (EDGE_COUNT (bb->succs) == 0)
8973 {
8974 gimple *stmt = last_stmt (bb);
8975 if (!stmt
8976 || (!is_ctrl_stmt (stmt)
8977 && (!is_gimple_call (stmt)
8978 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8979 {
8980 if (stmt && is_gimple_call (stmt))
8981 gimple_call_set_ctrl_altering (stmt, false);
8982 stmt = gimple_build_call
8983 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8984 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8985 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8986 }
8987 }
8988 }
8989 if (count_scale != REG_BR_PROB_BASE)
8990 compute_function_frequency ();
8991
8992 if (current_loops
8993 && (todo & TODO_cleanup_cfg))
8994 loops_state_set (LOOPS_NEED_FIXUP);
8995
8996 return todo;
8997 }
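
/* As an example of the write-only store removal above (a sketch,
   assuming FLAG is static and never read anywhere in the program):

     static int flag;
     void set (void) { flag = 1; }

   varpool marks FLAG write-only, so the store in set () is deleted
   and TODO_update_ssa | TODO_cleanup_cfg is requested.  */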
8998
8999 namespace {
9000
9001 const pass_data pass_data_fixup_cfg =
9002 {
9003 GIMPLE_PASS, /* type */
9004 "fixup_cfg", /* name */
9005 OPTGROUP_NONE, /* optinfo_flags */
9006 TV_NONE, /* tv_id */
9007 PROP_cfg, /* properties_required */
9008 0, /* properties_provided */
9009 0, /* properties_destroyed */
9010 0, /* todo_flags_start */
9011 0, /* todo_flags_finish */
9012 };
9013
9014 class pass_fixup_cfg : public gimple_opt_pass
9015 {
9016 public:
9017 pass_fixup_cfg (gcc::context *ctxt)
9018 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9019 {}
9020
9021 /* opt_pass methods: */
9022 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9023 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9024
9025 }; // class pass_fixup_cfg
9026
9027 } // anon namespace
9028
9029 gimple_opt_pass *
9030 make_pass_fixup_cfg (gcc::context *ctxt)
9031 {
9032 return new pass_fixup_cfg (ctxt);
9033 }
9034
9035 /* Garbage collection support for edge_def. */
9036
9037 extern void gt_ggc_mx (tree&);
9038 extern void gt_ggc_mx (gimple *&);
9039 extern void gt_ggc_mx (rtx&);
9040 extern void gt_ggc_mx (basic_block&);
9041
9042 static void
9043 gt_ggc_mx (rtx_insn *& x)
9044 {
9045 if (x)
9046 gt_ggc_mx_rtx_def ((void *) x);
9047 }
9048
9049 void
9050 gt_ggc_mx (edge_def *e)
9051 {
9052 tree block = LOCATION_BLOCK (e->goto_locus);
9053 gt_ggc_mx (e->src);
9054 gt_ggc_mx (e->dest);
9055 if (current_ir_type () == IR_GIMPLE)
9056 gt_ggc_mx (e->insns.g);
9057 else
9058 gt_ggc_mx (e->insns.r);
9059 gt_ggc_mx (block);
9060 }
9061
9062 /* PCH support for edge_def. */
9063
9064 extern void gt_pch_nx (tree&);
9065 extern void gt_pch_nx (gimple *&);
9066 extern void gt_pch_nx (rtx&);
9067 extern void gt_pch_nx (basic_block&);
9068
9069 static void
9070 gt_pch_nx (rtx_insn *& x)
9071 {
9072 if (x)
9073 gt_pch_nx_rtx_def ((void *) x);
9074 }
9075
9076 void
9077 gt_pch_nx (edge_def *e)
9078 {
9079 tree block = LOCATION_BLOCK (e->goto_locus);
9080 gt_pch_nx (e->src);
9081 gt_pch_nx (e->dest);
9082 if (current_ir_type () == IR_GIMPLE)
9083 gt_pch_nx (e->insns.g);
9084 else
9085 gt_pch_nx (e->insns.r);
9086 gt_pch_nx (block);
9087 }
9088
9089 void
9090 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9091 {
9092 tree block = LOCATION_BLOCK (e->goto_locus);
9093 op (&(e->src), cookie);
9094 op (&(e->dest), cookie);
9095 if (current_ir_type () == IR_GIMPLE)
9096 op (&(e->insns.g), cookie);
9097 else
9098 op (&(e->insns.r), cookie);
9099 op (&(block), cookie);
9100 }