/* Control flow functions for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
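
/* After this initialization the CFG contains just the two fixed
   blocks, ENTRY_BLOCK and EXIT_BLOCK (NUM_FIXED_BLOCKS of them),
   linked directly to each other; make_blocks later inserts the real
   blocks between them.  */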

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
        continue;
      for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
              loop->safelen = INT_MAX;
              break;
            case annot_expr_no_vector_kind:
              loop->dont_vectorize = true;
              break;
            case annot_expr_vector_kind:
              loop->force_vectorize = true;
              cfun->has_force_vectorize_loops = true;
              break;
            default:
              gcc_unreachable ();
            }
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            break;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            break;
          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }
          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
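
/* For example, a loop annotated with the GNU extension

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches this point with an IFN_ANNOTATE call of kind
   annot_expr_ivdep_kind just before the loop condition; the code
   above deletes the call and records the annotation by setting
   loop->safelen to INT_MAX.  */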


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
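
/* For example, the GNU C computed-goto extension

     void *targets[] = { &&lab1, &&lab2 };
     goto *targets[i];

   gimplifies to a GIMPLE_GOTO whose destination is not a LABEL_DECL,
   so the predicate above returns true for it, while a plain
   "goto lab1;" yields false.  */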

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
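
/* For example, if FOO below may transfer control to a non-local label
   in this function (see stmt_can_make_abnormal_goto), then

     x = foo ();

   is rewritten above as "tmp = foo (); x = tmp;", so the old value of
   x is still available on the abnormal edge out of the call and the
   life ranges of abnormal SSA names do not overlap.  */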


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}
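
/* For instance, a statically decidable condition such as

     if (4 > 7) goto <then>; else goto <else>;

   folds to a constant via fold_binary_loc, and the GIMPLE_COND is
   rewritten into its canonical always-false (or always-true) form;
   the resulting dead edge is removed later by CFG cleanup.  */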

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              if (make_goto_expr_edges (bb))
                ab_edge_goto.safe_push (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                ab_edge_call.safe_push (bb);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region,
                                                &cur_omp_region_idx);
              if (cur_region && bb_to_omp_idx == NULL)
                bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple label_stmt = gsi_stmt (gsi);
              tree target;

              if (gimple_code (label_stmt) != GIMPLE_LABEL)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
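
/* Discriminators matter for source such as

     if (p) x = 1; else x = 2;

   written on a single line: the then- and else-blocks share one
   locus, and a sample-based profiler could not attribute counts to
   the right block unless each block carries a distinct
   discriminator.  */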

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
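
/* A lowered conditional such as

     if (a_1 > b_2) goto <D.100>; else goto <D.101>;

   thus gets one EDGE_TRUE_VALUE edge to the block starting at D.100
   and one EDGE_FALSE_VALUE edge to the block starting at D.101; the
   NULL check on the second make_edge covers the degenerate case where
   both labels name the same block and the edge already exists.  */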


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
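
/* Every case label, including the default label at index 0,
   contributes an outgoing edge here; when several cases share one
   destination block, make_edge finds the existing edge and does not
   create a duplicate.  */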


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
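
/* This covers the "asm goto" extension, e.g.

     asm goto ("..." : : : : out, error);

   where each label operand (out, error) names a block that becomes a
   possible successor of BB.  */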

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;
  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
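
/* For example, in

     L1:
     L2:
       x = 1;

   both labels start the same block; every reference to L2 is
   redirected to the leading label L1, after which an artificial,
   now-unreferenced label can be deleted.  */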

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
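
/* For instance, with

     switch (x) { case 1: case 2: case 3: goto L; default: break; }

   the three consecutive cases share a destination and merge into the
   single range entry "case 1 ... 3:"; a case whose destination is the
   default block is dropped from the vector entirely.  */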

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
          && stmt_ends_bb_p (stmt))
        bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement (GIMPLE_CALL).  Set the
   current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, TDF_BLOCKS);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
2061 /* Only optimize if the argument is a label; if the argument is
2062 not a label, then we cannot construct a proper CFG.
2063
2064 It may be the case that we only need to allow the LABEL_REF to
2065 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2066 appear inside a LABEL_EXPR just to be safe. */
2067 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2068 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2069 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2070 return NULL;
2071 }
2072
2073 gcc_unreachable ();
2074 }
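/* For example, once the predicate of a GIMPLE_COND has folded to a
   constant, as in this sketch:

     if (0)
       x_1 = 1;
     else
       x_2 = 2;

   find_taken_edge returns the false edge, and CFG cleanup can remove
   the unreachable true arm.  */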
2075
2076 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2077 statement, determine which of the outgoing edges will be taken out of the
2078 block. Return NULL if the taken edge cannot be determined. */
2079
2080 static edge
2081 find_taken_edge_computed_goto (basic_block bb, tree val)
2082 {
2083 basic_block dest;
2084 edge e = NULL;
2085
2086 dest = label_to_block (val);
2087 if (dest)
2088 {
2089 e = find_edge (bb, dest);
2090 gcc_assert (e != NULL);
2091 }
2092
2093 return e;
2094 }
2095
2096 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2097 statement, determine which of the two edges will be taken out of the
2098 block. Return NULL if either edge may be taken. */
2099
2100 static edge
2101 find_taken_edge_cond_expr (basic_block bb, tree val)
2102 {
2103 edge true_edge, false_edge;
2104
2105 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2106
2107 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2108 return (integer_zerop (val) ? false_edge : true_edge);
2109 }
2110
2111 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2112 statement, determine which edge will be taken out of the block. Return
2113 NULL if any edge may be taken. */
2114
2115 static edge
2116 find_taken_edge_switch_expr (basic_block bb, tree val)
2117 {
2118 basic_block dest_bb;
2119 edge e;
2120 gimple switch_stmt;
2121 tree taken_case;
2122
2123 switch_stmt = last_stmt (bb);
2124 taken_case = find_case_label_for_value (switch_stmt, val);
2125 dest_bb = label_to_block (CASE_LABEL (taken_case));
2126
2127 e = find_edge (bb, dest_bb);
2128 gcc_assert (e);
2129 return e;
2130 }
2131
2132
2133 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2134 We can make optimal use here of the fact that the case labels are
2135 sorted: We can do a binary search for a case matching VAL. */
2136
2137 static tree
2138 find_case_label_for_value (gimple switch_stmt, tree val)
2139 {
2140 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2141 tree default_case = gimple_switch_default_label (switch_stmt);
2142
2143 for (low = 0, high = n; high - low > 1; )
2144 {
2145 size_t i = (high + low) / 2;
2146 tree t = gimple_switch_label (switch_stmt, i);
2147 int cmp;
2148
2149 /* Cache the result of comparing CASE_LOW and val. */
2150 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2151
2152 if (cmp > 0)
2153 high = i;
2154 else
2155 low = i;
2156
2157 if (CASE_HIGH (t) == NULL)
2158 {
2159 /* A single-valued case label. */
2160 if (cmp == 0)
2161 return t;
2162 }
2163 else
2164 {
2165 /* A case range. We can only handle integer ranges. */
2166 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2167 return t;
2168 }
2169 }
2170
2171 return default_case;
2172 }
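/* A minimal standalone sketch of the same idea, assuming a plain
   array of sorted, non-overlapping [low, high] ranges instead of the
   CASE_LABEL_EXPR vector (all names below are hypothetical):

     struct range { long low, high; };

     static long
     find_range (const struct range *r, size_t n, long val)
     {
       size_t lo = 0, hi = n;          search window is r[lo..hi)
       while (lo < hi)
         {
           size_t mid = lo + (hi - lo) / 2;
           if (val < r[mid].low)
             hi = mid;
           else if (val > r[mid].high)
             lo = mid + 1;
           else
             return (long) mid;        val falls inside r[mid]
         }
       return -1;                      no match: use the default case
     }
*/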
2173
2174
2175 /* Dump a basic block on stderr. */
2176
2177 void
2178 gimple_debug_bb (basic_block bb)
2179 {
2180 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2181 }
2182
2183
2184 /* Dump basic block with index N on stderr. */
2185
2186 basic_block
2187 gimple_debug_bb_n (int n)
2188 {
2189 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2190 return BASIC_BLOCK_FOR_FN (cfun, n);
2191 }
2192
2193
2194 /* Dump the CFG on stderr.
2195
2196 FLAGS are the same as those used by the tree dumping functions
2197 (see TDF_* in dumpfile.h). */
2198
2199 void
2200 gimple_debug_cfg (int flags)
2201 {
2202 gimple_dump_cfg (stderr, flags);
2203 }
2204
2205
2206 /* Dump the program showing basic block boundaries on the given FILE.
2207
2208 FLAGS are the same as those used by the tree dumping functions (see
2209 TDF_* in dumpfile.h). */
2210
2211 void
2212 gimple_dump_cfg (FILE *file, int flags)
2213 {
2214 if (flags & TDF_DETAILS)
2215 {
2216 dump_function_header (file, current_function_decl, flags);
2217 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2218 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2219 last_basic_block_for_fn (cfun));
2220
2221 brief_dump_cfg (file, flags | TDF_COMMENT);
2222 fprintf (file, "\n");
2223 }
2224
2225 if (flags & TDF_STATS)
2226 dump_cfg_stats (file);
2227
2228 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2229 }
2230
2231
2232 /* Dump CFG statistics on FILE. */
2233
2234 void
2235 dump_cfg_stats (FILE *file)
2236 {
2237 static long max_num_merged_labels = 0;
2238 unsigned long size, total = 0;
2239 long num_edges;
2240 basic_block bb;
2241 const char * const fmt_str = "%-30s%-13s%12s\n";
2242 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2243 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2244 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2245 const char *funcname = current_function_name ();
2246
2247 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2248
2249 fprintf (file, "---------------------------------------------------------\n");
2250 fprintf (file, fmt_str, "", " Number of ", "Memory");
2251 fprintf (file, fmt_str, "", " instances ", "used ");
2252 fprintf (file, "---------------------------------------------------------\n");
2253
2254 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2255 total += size;
2256 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2257 SCALE (size), LABEL (size));
2258
2259 num_edges = 0;
2260 FOR_EACH_BB_FN (bb, cfun)
2261 num_edges += EDGE_COUNT (bb->succs);
2262 size = num_edges * sizeof (struct edge_def);
2263 total += size;
2264 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2265
2266 fprintf (file, "---------------------------------------------------------\n");
2267 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2268 LABEL (total));
2269 fprintf (file, "---------------------------------------------------------\n");
2270 fprintf (file, "\n");
2271
2272 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2273 max_num_merged_labels = cfg_stats.num_merged_labels;
2274
2275 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2276 cfg_stats.num_merged_labels, max_num_merged_labels);
2277
2278 fprintf (file, "\n");
2279 }
2280
2281
2282 /* Dump CFG statistics on stderr. Keep extern so that it's always
2283 linked in the final executable. */
2284
2285 DEBUG_FUNCTION void
2286 debug_cfg_stats (void)
2287 {
2288 dump_cfg_stats (stderr);
2289 }
2290
2291 /*---------------------------------------------------------------------------
2292 Miscellaneous helpers
2293 ---------------------------------------------------------------------------*/
2294
2295 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2296 flow. Transfers of control flow associated with EH are excluded. */
2297
2298 static bool
2299 call_can_make_abnormal_goto (gimple t)
2300 {
2301 /* If the function has no non-local labels and does not call setjmp,
2302 then a call cannot make an abnormal transfer of control. */
2303 if (!cfun->has_nonlocal_label
2304 && !cfun->calls_setjmp)
2305 return false;
2306
2307 /* Likewise if the call has no side effects. */
2308 if (!gimple_has_side_effects (t))
2309 return false;
2310
2311 /* Likewise if the called function is leaf. */
2312 if (gimple_call_flags (t) & ECF_LEAF)
2313 return false;
2314
2315 return true;
2316 }
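/* A sketch of the kind of user code these checks are about (standard
   <setjmp.h>; the helper name is hypothetical):

     #include <setjmp.h>

     static jmp_buf env;

     void
     f (void)
     {
       if (setjmp (env) == 0)    returns twice: ECF_RETURNS_TWICE
         do_work ();             may longjmp (env, 1), so this call
                                 needs an abnormal outgoing edge
     }
*/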
2317
2318
2319 /* Return true if T can make an abnormal transfer of control flow.
2320 Transfers of control flow associated with EH are excluded. */
2321
2322 bool
2323 stmt_can_make_abnormal_goto (gimple t)
2324 {
2325 if (computed_goto_p (t))
2326 return true;
2327 if (is_gimple_call (t))
2328 return call_can_make_abnormal_goto (t);
2329 return false;
2330 }
2331
2332
2333 /* Return true if T represents a stmt that always transfers control. */
2334
2335 bool
2336 is_ctrl_stmt (gimple t)
2337 {
2338 switch (gimple_code (t))
2339 {
2340 case GIMPLE_COND:
2341 case GIMPLE_SWITCH:
2342 case GIMPLE_GOTO:
2343 case GIMPLE_RETURN:
2344 case GIMPLE_RESX:
2345 return true;
2346 default:
2347 return false;
2348 }
2349 }
2350
2351
2352 /* Return true if T is a statement that may alter the flow of control
2353 (e.g., a call to a non-returning function). */
2354
2355 bool
2356 is_ctrl_altering_stmt (gimple t)
2357 {
2358 gcc_assert (t);
2359
2360 switch (gimple_code (t))
2361 {
2362 case GIMPLE_CALL:
2363 {
2364 int flags = gimple_call_flags (t);
2365
2366 /* A call alters control flow if it can make an abnormal goto. */
2367 if (call_can_make_abnormal_goto (t))
2368 return true;
2369
2370 /* A call also alters control flow if it does not return. */
2371 if (flags & ECF_NORETURN)
2372 return true;
2373
2374 /* TM ending statements have backedges out of the transaction.
2375 Return true so we split the basic block containing them.
2376 Note that the TM_BUILTIN test is merely an optimization. */
2377 if ((flags & ECF_TM_BUILTIN)
2378 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2379 return true;
2380
2381 /* A BUILT_IN_RETURN call is the same as a return statement. */
2382 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2383 return true;
2384 }
2385 break;
2386
2387 case GIMPLE_EH_DISPATCH:
2388 /* EH_DISPATCH branches to the individual catch handlers at
2389 this level of a try or allowed-exceptions region. It can
2390 fallthru to the next statement as well. */
2391 return true;
2392
2393 case GIMPLE_ASM:
2394 if (gimple_asm_nlabels (t) > 0)
2395 return true;
2396 break;
2397
2398 CASE_GIMPLE_OMP:
2399 /* OpenMP directives alter control flow. */
2400 return true;
2401
2402 case GIMPLE_TRANSACTION:
2403 /* A transaction start alters control flow. */
2404 return true;
2405
2406 default:
2407 break;
2408 }
2409
2410 /* If a statement can throw, it alters control flow. */
2411 return stmt_can_throw_internal (t);
2412 }
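/* For instance, a call to abort () is ECF_NORETURN and therefore ends
   its basic block, while a call to a const, nothrow builtin such as
   __builtin_clz does not alter control flow and may appear
   mid-block.  */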
2413
2414
2415 /* Return true if T is a simple local goto. */
2416
2417 bool
2418 simple_goto_p (gimple t)
2419 {
2420 return (gimple_code (t) == GIMPLE_GOTO
2421 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2422 }
2423
2424
2425 /* Return true if STMT should start a new basic block. PREV_STMT is
2426 the statement preceding STMT. It is used when STMT is a label or a
2427 case label. Labels should only start a new basic block if their
2428 previous statement wasn't a label. Otherwise, sequences of labels
2429 would generate unnecessary basic blocks that only contain a single
2430 label. */
2431
2432 static inline bool
2433 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2434 {
2435 if (stmt == NULL)
2436 return false;
2437
2438 /* Labels start a new basic block only if the preceding statement
2439 wasn't a label of the same type. This prevents the creation of
2440 consecutive blocks that have nothing but a single label. */
2441 if (gimple_code (stmt) == GIMPLE_LABEL)
2442 {
2443 /* Nonlocal and computed GOTO targets always start a new block. */
2444 if (DECL_NONLOCAL (gimple_label_label (stmt))
2445 || FORCED_LABEL (gimple_label_label (stmt)))
2446 return true;
2447
2448 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2449 {
2450 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2451 return true;
2452
2453 cfg_stats.num_merged_labels++;
2454 return false;
2455 }
2456 else
2457 return true;
2458 }
2459 else if (gimple_code (stmt) == GIMPLE_CALL
2460 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2461 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2462 start a new block. */
2463 return true;
2464
2465 return false;
2466 }
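/* Illustrative GIMPLE sketch (names hypothetical): here L1 starts a
   new basic block but L2 does not; it is merged into L1's block and
   counted in cfg_stats.num_merged_labels:

     L1:
     L2:
       x_1 = 1;
*/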
2467
2468
2469 /* Return true if T should end a basic block. */
2470
2471 bool
2472 stmt_ends_bb_p (gimple t)
2473 {
2474 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2475 }
2476
2477 /* Remove block annotations and other data structures. */
2478
2479 void
2480 delete_tree_cfg_annotations (void)
2481 {
2482 vec_free (label_to_block_map_for_fn (cfun));
2483 }
2484
2485
2486 /* Return the first statement in basic block BB. */
2487
2488 gimple
2489 first_stmt (basic_block bb)
2490 {
2491 gimple_stmt_iterator i = gsi_start_bb (bb);
2492 gimple stmt = NULL;
2493
2494 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2495 {
2496 gsi_next (&i);
2497 stmt = NULL;
2498 }
2499 return stmt;
2500 }
2501
2502 /* Return the first non-label statement in basic block BB. */
2503
2504 static gimple
2505 first_non_label_stmt (basic_block bb)
2506 {
2507 gimple_stmt_iterator i = gsi_start_bb (bb);
2508 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2509 gsi_next (&i);
2510 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2511 }
2512
2513 /* Return the last statement in basic block BB. */
2514
2515 gimple
2516 last_stmt (basic_block bb)
2517 {
2518 gimple_stmt_iterator i = gsi_last_bb (bb);
2519 gimple stmt = NULL;
2520
2521 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2522 {
2523 gsi_prev (&i);
2524 stmt = NULL;
2525 }
2526 return stmt;
2527 }
2528
2529 /* Return the last statement of an otherwise empty block. Return NULL
2530 if the block is totally empty, or if it contains more than one
2531 statement. */
2532
2533 gimple
2534 last_and_only_stmt (basic_block bb)
2535 {
2536 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2537 gimple last, prev;
2538
2539 if (gsi_end_p (i))
2540 return NULL;
2541
2542 last = gsi_stmt (i);
2543 gsi_prev_nondebug (&i);
2544 if (gsi_end_p (i))
2545 return last;
2546
2547 /* Empty statements should no longer appear in the instruction stream.
2548 Everything that might have appeared before should be deleted by
2549 remove_useless_stmts, and the optimizers should just use gsi_remove
2550 instead of smashing with build_empty_stmt.
2551
2552 Thus the only thing that should appear here in a block containing
2553 one executable statement is a label. */
2554 prev = gsi_stmt (i);
2555 if (gimple_code (prev) == GIMPLE_LABEL)
2556 return last;
2557 else
2558 return NULL;
2559 }
2560
2561 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2562
2563 static void
2564 reinstall_phi_args (edge new_edge, edge old_edge)
2565 {
2566 edge_var_map_vector *v;
2567 edge_var_map *vm;
2568 int i;
2569 gimple_stmt_iterator phis;
2570
2571 v = redirect_edge_var_map_vector (old_edge);
2572 if (!v)
2573 return;
2574
2575 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2576 v->iterate (i, &vm) && !gsi_end_p (phis);
2577 i++, gsi_next (&phis))
2578 {
2579 gimple phi = gsi_stmt (phis);
2580 tree result = redirect_edge_var_map_result (vm);
2581 tree arg = redirect_edge_var_map_def (vm);
2582
2583 gcc_assert (result == gimple_phi_result (phi));
2584
2585 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2586 }
2587
2588 redirect_edge_var_map_clear (old_edge);
2589 }
2590
2591 /* Returns the basic block after which the new basic block created
2592 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2593 near its "logical" location. This is of most help to humans looking
2594 at debugging dumps. */
2595
2596 static basic_block
2597 split_edge_bb_loc (edge edge_in)
2598 {
2599 basic_block dest = edge_in->dest;
2600 basic_block dest_prev = dest->prev_bb;
2601
2602 if (dest_prev)
2603 {
2604 edge e = find_edge (dest_prev, dest);
2605 if (e && !(e->flags & EDGE_COMPLEX))
2606 return edge_in->src;
2607 }
2608 return dest_prev;
2609 }
2610
2611 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2612 Abort on abnormal edges. */
2613
2614 static basic_block
2615 gimple_split_edge (edge edge_in)
2616 {
2617 basic_block new_bb, after_bb, dest;
2618 edge new_edge, e;
2619
2620 /* Abnormal edges cannot be split. */
2621 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2622
2623 dest = edge_in->dest;
2624
2625 after_bb = split_edge_bb_loc (edge_in);
2626
2627 new_bb = create_empty_bb (after_bb);
2628 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2629 new_bb->count = edge_in->count;
2630 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2631 new_edge->probability = REG_BR_PROB_BASE;
2632 new_edge->count = edge_in->count;
2633
2634 e = redirect_edge_and_branch (edge_in, new_bb);
2635 gcc_assert (e == edge_in);
2636 reinstall_phi_args (new_edge, e);
2637
2638 return new_bb;
2639 }
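/* Sketch of the transformation: splitting a (critical) edge A->C
   inserts an empty block N.

     before:  A -> C        (A has other successors,
     after:   A -> N -> C    C has other predecessors)

   N->C is an unconditional fallthru edge, and N inherits the count
   and frequency of the original edge so the profile stays
   consistent.  */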
2640
2641
2642 /* Verify properties of the address expression T with base object BASE. */
2643
2644 static tree
2645 verify_address (tree t, tree base)
2646 {
2647 bool old_constant;
2648 bool old_side_effects;
2649 bool new_constant;
2650 bool new_side_effects;
2651
2652 old_constant = TREE_CONSTANT (t);
2653 old_side_effects = TREE_SIDE_EFFECTS (t);
2654
2655 recompute_tree_invariant_for_addr_expr (t);
2656 new_side_effects = TREE_SIDE_EFFECTS (t);
2657 new_constant = TREE_CONSTANT (t);
2658
2659 if (old_constant != new_constant)
2660 {
2661 error ("constant not recomputed when ADDR_EXPR changed");
2662 return t;
2663 }
2664 if (old_side_effects != new_side_effects)
2665 {
2666 error ("side effects not recomputed when ADDR_EXPR changed");
2667 return t;
2668 }
2669
2670 if (!(TREE_CODE (base) == VAR_DECL
2671 || TREE_CODE (base) == PARM_DECL
2672 || TREE_CODE (base) == RESULT_DECL))
2673 return NULL_TREE;
2674
2675 if (DECL_GIMPLE_REG_P (base))
2676 {
2677 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2678 return base;
2679 }
2680
2681 return NULL_TREE;
2682 }
2683
2684 /* Callback for walk_tree; check that all elements with their address
2685 taken are properly noticed as such. The DATA is an int* that is 1 if
2686 TP was seen inside a PHI node. */
2687
2688 static tree
2689 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2690 {
2691 tree t = *tp, x;
2692
2693 if (TYPE_P (t))
2694 *walk_subtrees = 0;
2695
2696 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2697 #define CHECK_OP(N, MSG) \
2698 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2699 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2700
2701 switch (TREE_CODE (t))
2702 {
2703 case SSA_NAME:
2704 if (SSA_NAME_IN_FREE_LIST (t))
2705 {
2706 error ("SSA name in freelist but still referenced");
2707 return *tp;
2708 }
2709 break;
2710
2711 case INDIRECT_REF:
2712 error ("INDIRECT_REF in gimple IL");
2713 return t;
2714
2715 case MEM_REF:
2716 x = TREE_OPERAND (t, 0);
2717 if (!POINTER_TYPE_P (TREE_TYPE (x))
2718 || !is_gimple_mem_ref_addr (x))
2719 {
2720 error ("invalid first operand of MEM_REF");
2721 return x;
2722 }
2723 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2724 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2725 {
2726 error ("invalid offset operand of MEM_REF");
2727 return TREE_OPERAND (t, 1);
2728 }
2729 if (TREE_CODE (x) == ADDR_EXPR
2730 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2731 return x;
2732 *walk_subtrees = 0;
2733 break;
2734
2735 case ASSERT_EXPR:
2736 x = fold (ASSERT_EXPR_COND (t));
2737 if (x == boolean_false_node)
2738 {
2739 error ("ASSERT_EXPR with an always-false condition");
2740 return *tp;
2741 }
2742 break;
2743
2744 case MODIFY_EXPR:
2745 error ("MODIFY_EXPR not expected while having tuples");
2746 return *tp;
2747
2748 case ADDR_EXPR:
2749 {
2750 tree tem;
2751
2752 gcc_assert (is_gimple_address (t));
2753
2754 /* Skip any references (they will be checked when we recurse down the
2755 tree) and ensure that any variable used as a prefix is marked
2756 addressable. */
2757 for (x = TREE_OPERAND (t, 0);
2758 handled_component_p (x);
2759 x = TREE_OPERAND (x, 0))
2760 ;
2761
2762 if ((tem = verify_address (t, x)))
2763 return tem;
2764
2765 if (!(TREE_CODE (x) == VAR_DECL
2766 || TREE_CODE (x) == PARM_DECL
2767 || TREE_CODE (x) == RESULT_DECL))
2768 return NULL;
2769
2770 if (!TREE_ADDRESSABLE (x))
2771 {
2772 error ("address taken, but ADDRESSABLE bit not set");
2773 return x;
2774 }
2775
2776 break;
2777 }
2778
2779 case COND_EXPR:
2780 x = COND_EXPR_COND (t);
2781 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2782 {
2783 error ("non-integral used in condition");
2784 return x;
2785 }
2786 if (!is_gimple_condexpr (x))
2787 {
2788 error ("invalid conditional operand");
2789 return x;
2790 }
2791 break;
2792
2793 case NON_LVALUE_EXPR:
2794 case TRUTH_NOT_EXPR:
2795 gcc_unreachable ();
2796
2797 CASE_CONVERT:
2798 case FIX_TRUNC_EXPR:
2799 case FLOAT_EXPR:
2800 case NEGATE_EXPR:
2801 case ABS_EXPR:
2802 case BIT_NOT_EXPR:
2803 CHECK_OP (0, "invalid operand to unary operator");
2804 break;
2805
2806 case REALPART_EXPR:
2807 case IMAGPART_EXPR:
2808 case BIT_FIELD_REF:
2809 if (!is_gimple_reg_type (TREE_TYPE (t)))
2810 {
2811 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2812 return t;
2813 }
2814
2815 if (TREE_CODE (t) == BIT_FIELD_REF)
2816 {
2817 tree t0 = TREE_OPERAND (t, 0);
2818 tree t1 = TREE_OPERAND (t, 1);
2819 tree t2 = TREE_OPERAND (t, 2);
2820 if (!tree_fits_uhwi_p (t1)
2821 || !tree_fits_uhwi_p (t2))
2822 {
2823 error ("invalid position or size operand to BIT_FIELD_REF");
2824 return t;
2825 }
2826 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2827 && (TYPE_PRECISION (TREE_TYPE (t))
2828 != tree_to_uhwi (t1)))
2829 {
2830 error ("integral result type precision does not match "
2831 "field size of BIT_FIELD_REF");
2832 return t;
2833 }
2834 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2835 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2836 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2837 != tree_to_uhwi (t1)))
2838 {
2839 error ("mode precision of non-integral result does not "
2840 "match field size of BIT_FIELD_REF");
2841 return t;
2842 }
2843 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2844 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2845 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2846 {
2847 error ("position plus size exceeds size of referenced object in "
2848 "BIT_FIELD_REF");
2849 return t;
2850 }
2851 }
2852 t = TREE_OPERAND (t, 0);
2853
2854 /* Fall-through. */
2855 case COMPONENT_REF:
2856 case ARRAY_REF:
2857 case ARRAY_RANGE_REF:
2858 case VIEW_CONVERT_EXPR:
2859 /* We have a nest of references. Verify that each of the operands
2860 that determines where to reference is either a constant or a variable;
2861 verify that the base is valid; and then show we've already checked
2862 the subtrees. */
2863 while (handled_component_p (t))
2864 {
2865 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2866 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2867 else if (TREE_CODE (t) == ARRAY_REF
2868 || TREE_CODE (t) == ARRAY_RANGE_REF)
2869 {
2870 CHECK_OP (1, "invalid array index");
2871 if (TREE_OPERAND (t, 2))
2872 CHECK_OP (2, "invalid array lower bound");
2873 if (TREE_OPERAND (t, 3))
2874 CHECK_OP (3, "invalid array stride");
2875 }
2876 else if (TREE_CODE (t) == BIT_FIELD_REF
2877 || TREE_CODE (t) == REALPART_EXPR
2878 || TREE_CODE (t) == IMAGPART_EXPR)
2879 {
2880 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2881 "REALPART_EXPR");
2882 return t;
2883 }
2884
2885 t = TREE_OPERAND (t, 0);
2886 }
2887
2888 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2889 {
2890 error ("invalid reference prefix");
2891 return t;
2892 }
2893 *walk_subtrees = 0;
2894 break;
2895 case PLUS_EXPR:
2896 case MINUS_EXPR:
2897 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2898 should be done using POINTER_PLUS_EXPR. */
2899 if (POINTER_TYPE_P (TREE_TYPE (t)))
2900 {
2901 error ("invalid operand to plus/minus, type is a pointer");
2902 return t;
2903 }
2904 CHECK_OP (0, "invalid operand to binary operator");
2905 CHECK_OP (1, "invalid operand to binary operator");
2906 break;
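/* For illustration: for "char *p; ... p + 4" the gimplifier emits a
   POINTER_PLUS_EXPR with a sizetype offset, roughly

     p.0_2 = p_1 + 4;

   never a PLUS_EXPR of pointer type; the POINTER_PLUS_EXPR checks
   below enforce exactly that split.  */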
2907
2908 case POINTER_PLUS_EXPR:
2909 /* Check to make sure the first operand is a pointer or reference type. */
2910 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2911 {
2912 error ("invalid operand to pointer plus, first operand is not a pointer");
2913 return t;
2914 }
2915 /* Check to make sure the second operand is a ptrofftype. */
2916 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2917 {
2918 error ("invalid operand to pointer plus, second operand is not an "
2919 "integer type of appropriate width");
2920 return t;
2921 }
2922 /* FALLTHROUGH */
2923 case LT_EXPR:
2924 case LE_EXPR:
2925 case GT_EXPR:
2926 case GE_EXPR:
2927 case EQ_EXPR:
2928 case NE_EXPR:
2929 case UNORDERED_EXPR:
2930 case ORDERED_EXPR:
2931 case UNLT_EXPR:
2932 case UNLE_EXPR:
2933 case UNGT_EXPR:
2934 case UNGE_EXPR:
2935 case UNEQ_EXPR:
2936 case LTGT_EXPR:
2937 case MULT_EXPR:
2938 case TRUNC_DIV_EXPR:
2939 case CEIL_DIV_EXPR:
2940 case FLOOR_DIV_EXPR:
2941 case ROUND_DIV_EXPR:
2942 case TRUNC_MOD_EXPR:
2943 case CEIL_MOD_EXPR:
2944 case FLOOR_MOD_EXPR:
2945 case ROUND_MOD_EXPR:
2946 case RDIV_EXPR:
2947 case EXACT_DIV_EXPR:
2948 case MIN_EXPR:
2949 case MAX_EXPR:
2950 case LSHIFT_EXPR:
2951 case RSHIFT_EXPR:
2952 case LROTATE_EXPR:
2953 case RROTATE_EXPR:
2954 case BIT_IOR_EXPR:
2955 case BIT_XOR_EXPR:
2956 case BIT_AND_EXPR:
2957 CHECK_OP (0, "invalid operand to binary operator");
2958 CHECK_OP (1, "invalid operand to binary operator");
2959 break;
2960
2961 case CONSTRUCTOR:
2962 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2963 *walk_subtrees = 0;
2964 break;
2965
2966 case CASE_LABEL_EXPR:
2967 if (CASE_CHAIN (t))
2968 {
2969 error ("invalid CASE_CHAIN");
2970 return t;
2971 }
2972 break;
2973
2974 default:
2975 break;
2976 }
2977 return NULL;
2978
2979 #undef CHECK_OP
2980 }
2981
2982
2983 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2984 Returns true if there is an error, otherwise false. */
2985
2986 static bool
2987 verify_types_in_gimple_min_lval (tree expr)
2988 {
2989 tree op;
2990
2991 if (is_gimple_id (expr))
2992 return false;
2993
2994 if (TREE_CODE (expr) != TARGET_MEM_REF
2995 && TREE_CODE (expr) != MEM_REF)
2996 {
2997 error ("invalid expression for min lvalue");
2998 return true;
2999 }
3000
3001 /* TARGET_MEM_REFs are strange beasts. */
3002 if (TREE_CODE (expr) == TARGET_MEM_REF)
3003 return false;
3004
3005 op = TREE_OPERAND (expr, 0);
3006 if (!is_gimple_val (op))
3007 {
3008 error ("invalid operand in indirect reference");
3009 debug_generic_stmt (op);
3010 return true;
3011 }
3012 /* Memory references now generally can involve a value conversion. */
3013
3014 return false;
3015 }
3016
3017 /* Verify if EXPR is a valid GIMPLE reference expression. If
3018 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3019 if there is an error, otherwise false. */
3020
3021 static bool
3022 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3023 {
3024 while (handled_component_p (expr))
3025 {
3026 tree op = TREE_OPERAND (expr, 0);
3027
3028 if (TREE_CODE (expr) == ARRAY_REF
3029 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3030 {
3031 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3032 || (TREE_OPERAND (expr, 2)
3033 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3034 || (TREE_OPERAND (expr, 3)
3035 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3036 {
3037 error ("invalid operands to array reference");
3038 debug_generic_stmt (expr);
3039 return true;
3040 }
3041 }
3042
3043 /* Verify if the reference array element types are compatible. */
3044 if (TREE_CODE (expr) == ARRAY_REF
3045 && !useless_type_conversion_p (TREE_TYPE (expr),
3046 TREE_TYPE (TREE_TYPE (op))))
3047 {
3048 error ("type mismatch in array reference");
3049 debug_generic_stmt (TREE_TYPE (expr));
3050 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3051 return true;
3052 }
3053 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3054 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3055 TREE_TYPE (TREE_TYPE (op))))
3056 {
3057 error ("type mismatch in array range reference");
3058 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3059 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3060 return true;
3061 }
3062
3063 if ((TREE_CODE (expr) == REALPART_EXPR
3064 || TREE_CODE (expr) == IMAGPART_EXPR)
3065 && !useless_type_conversion_p (TREE_TYPE (expr),
3066 TREE_TYPE (TREE_TYPE (op))))
3067 {
3068 error ("type mismatch in real/imagpart reference");
3069 debug_generic_stmt (TREE_TYPE (expr));
3070 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3071 return true;
3072 }
3073
3074 if (TREE_CODE (expr) == COMPONENT_REF
3075 && !useless_type_conversion_p (TREE_TYPE (expr),
3076 TREE_TYPE (TREE_OPERAND (expr, 1))))
3077 {
3078 error ("type mismatch in component reference");
3079 debug_generic_stmt (TREE_TYPE (expr));
3080 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3081 return true;
3082 }
3083
3084 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3085 {
3086 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3087 that their operand is not an SSA name or an invariant when
3088 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3089 bug). Otherwise there is nothing to verify, gross mismatches at
3090 most invoke undefined behavior. */
3091 if (require_lvalue
3092 && (TREE_CODE (op) == SSA_NAME
3093 || is_gimple_min_invariant (op)))
3094 {
3095 error ("conversion of an SSA_NAME on the left hand side");
3096 debug_generic_stmt (expr);
3097 return true;
3098 }
3099 else if (TREE_CODE (op) == SSA_NAME
3100 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3101 {
3102 error ("conversion of register to a different size");
3103 debug_generic_stmt (expr);
3104 return true;
3105 }
3106 else if (!handled_component_p (op))
3107 return false;
3108 }
3109
3110 expr = op;
3111 }
3112
3113 if (TREE_CODE (expr) == MEM_REF)
3114 {
3115 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3116 {
3117 error ("invalid address operand in MEM_REF");
3118 debug_generic_stmt (expr);
3119 return true;
3120 }
3121 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3122 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3123 {
3124 error ("invalid offset operand in MEM_REF");
3125 debug_generic_stmt (expr);
3126 return true;
3127 }
3128 }
3129 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3130 {
3131 if (!TMR_BASE (expr)
3132 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3133 {
3134 error ("invalid address operand in TARGET_MEM_REF");
3135 return true;
3136 }
3137 if (!TMR_OFFSET (expr)
3138 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3139 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3140 {
3141 error ("invalid offset operand in TARGET_MEM_REF");
3142 debug_generic_stmt (expr);
3143 return true;
3144 }
3145 }
3146
3147 return ((require_lvalue || !is_gimple_min_invariant (expr))
3148 && verify_types_in_gimple_min_lval (expr));
3149 }
3150
3151 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3152 list of pointer-to types is trivially convertible to DEST. */
3153
3154 static bool
3155 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3156 {
3157 tree src;
3158
3159 if (!TYPE_POINTER_TO (src_obj))
3160 return true;
3161
3162 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3163 if (useless_type_conversion_p (dest, src))
3164 return true;
3165
3166 return false;
3167 }
3168
3169 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3170 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3171
3172 static bool
3173 valid_fixed_convert_types_p (tree type1, tree type2)
3174 {
3175 return (FIXED_POINT_TYPE_P (type1)
3176 && (INTEGRAL_TYPE_P (type2)
3177 || SCALAR_FLOAT_TYPE_P (type2)
3178 || FIXED_POINT_TYPE_P (type2)));
3179 }
3180
3181 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3182 is a problem, otherwise false. */
3183
3184 static bool
3185 verify_gimple_call (gimple stmt)
3186 {
3187 tree fn = gimple_call_fn (stmt);
3188 tree fntype, fndecl;
3189 unsigned i;
3190
3191 if (gimple_call_internal_p (stmt))
3192 {
3193 if (fn)
3194 {
3195 error ("gimple call has two targets");
3196 debug_generic_stmt (fn);
3197 return true;
3198 }
3199 }
3200 else
3201 {
3202 if (!fn)
3203 {
3204 error ("gimple call has no target");
3205 return true;
3206 }
3207 }
3208
3209 if (fn && !is_gimple_call_addr (fn))
3210 {
3211 error ("invalid function in gimple call");
3212 debug_generic_stmt (fn);
3213 return true;
3214 }
3215
3216 if (fn
3217 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3218 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3219 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3220 {
3221 error ("non-function in gimple call");
3222 return true;
3223 }
3224
3225 fndecl = gimple_call_fndecl (stmt);
3226 if (fndecl
3227 && TREE_CODE (fndecl) == FUNCTION_DECL
3228 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3229 && !DECL_PURE_P (fndecl)
3230 && !TREE_READONLY (fndecl))
3231 {
3232 error ("invalid pure const state for function");
3233 return true;
3234 }
3235
3236 if (gimple_call_lhs (stmt)
3237 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3238 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3239 {
3240 error ("invalid LHS in gimple call");
3241 return true;
3242 }
3243
3244 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3245 {
3246 error ("LHS in noreturn call");
3247 return true;
3248 }
3249
3250 fntype = gimple_call_fntype (stmt);
3251 if (fntype
3252 && gimple_call_lhs (stmt)
3253 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3254 TREE_TYPE (fntype))
3255 /* ??? At least C++ misses conversions at assignments from
3256 void * call results.
3257 ??? Java is completely off. Especially with functions
3258 returning java.lang.Object.
3259 For now simply allow arbitrary pointer type conversions. */
3260 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3261 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3262 {
3263 error ("invalid conversion in gimple call");
3264 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3265 debug_generic_stmt (TREE_TYPE (fntype));
3266 return true;
3267 }
3268
3269 if (gimple_call_chain (stmt)
3270 && !is_gimple_val (gimple_call_chain (stmt)))
3271 {
3272 error ("invalid static chain in gimple call");
3273 debug_generic_stmt (gimple_call_chain (stmt));
3274 return true;
3275 }
3276
3277 /* If there is a static chain argument, this should not be an indirect
3278 call, and the decl should have DECL_STATIC_CHAIN set. */
3279 if (gimple_call_chain (stmt))
3280 {
3281 if (!gimple_call_fndecl (stmt))
3282 {
3283 error ("static chain in indirect gimple call");
3284 return true;
3285 }
3286 fn = TREE_OPERAND (fn, 0);
3287
3288 if (!DECL_STATIC_CHAIN (fn))
3289 {
3290 error ("static chain with function that doesn%'t use one");
3291 return true;
3292 }
3293 }
3294
3295 /* ??? The C frontend passes unpromoted arguments in case it
3296 didn't see a function declaration before the call. So for now
3297 leave the call arguments mostly unverified. Once we gimplify
3298 unit-at-a-time we have a chance to fix this. */
3299
3300 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3301 {
3302 tree arg = gimple_call_arg (stmt, i);
3303 if ((is_gimple_reg_type (TREE_TYPE (arg))
3304 && !is_gimple_val (arg))
3305 || (!is_gimple_reg_type (TREE_TYPE (arg))
3306 && !is_gimple_lvalue (arg)))
3307 {
3308 error ("invalid argument to gimple call");
3309 debug_generic_expr (arg);
3310 return true;
3311 }
3312 }
3313
3314 return false;
3315 }
3316
3317 /* Verifies the gimple comparison with the result type TYPE and
3318 the operands OP0 and OP1. */
3319
3320 static bool
3321 verify_gimple_comparison (tree type, tree op0, tree op1)
3322 {
3323 tree op0_type = TREE_TYPE (op0);
3324 tree op1_type = TREE_TYPE (op1);
3325
3326 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3327 {
3328 error ("invalid operands in gimple comparison");
3329 return true;
3330 }
3331
3332 /* For comparisons we do not have the operation's type as the
3333 effective type the comparison is carried out in. Instead
3334 we require that either the first operand is trivially
3335 convertible into the second, or the other way around.
3336 Because we special-case pointers to void we allow
3337 comparisons of pointers with the same mode as well. */
3338 if (!useless_type_conversion_p (op0_type, op1_type)
3339 && !useless_type_conversion_p (op1_type, op0_type)
3340 && (!POINTER_TYPE_P (op0_type)
3341 || !POINTER_TYPE_P (op1_type)
3342 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3343 {
3344 error ("mismatching comparison operand types");
3345 debug_generic_expr (op0_type);
3346 debug_generic_expr (op1_type);
3347 return true;
3348 }
3349
3350 /* The resulting type of a comparison may be an effective boolean type. */
3351 if (INTEGRAL_TYPE_P (type)
3352 && (TREE_CODE (type) == BOOLEAN_TYPE
3353 || TYPE_PRECISION (type) == 1))
3354 {
3355 if (TREE_CODE (op0_type) == VECTOR_TYPE
3356 || TREE_CODE (op1_type) == VECTOR_TYPE)
3357 {
3358 error ("vector comparison returning a boolean");
3359 debug_generic_expr (op0_type);
3360 debug_generic_expr (op1_type);
3361 return true;
3362 }
3363 }
3364 /* Or an integer vector type with the same size and element count
3365 as the comparison operand types. */
3366 else if (TREE_CODE (type) == VECTOR_TYPE
3367 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3368 {
3369 if (TREE_CODE (op0_type) != VECTOR_TYPE
3370 || TREE_CODE (op1_type) != VECTOR_TYPE)
3371 {
3372 error ("non-vector operands in vector comparison");
3373 debug_generic_expr (op0_type);
3374 debug_generic_expr (op1_type);
3375 return true;
3376 }
3377
3378 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3379 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3380 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3381 /* The result of a vector comparison is of signed
3382 integral type. */
3383 || TYPE_UNSIGNED (TREE_TYPE (type)))
3384 {
3385 error ("invalid vector comparison resulting type");
3386 debug_generic_expr (type);
3387 return true;
3388 }
3389 }
3390 else
3391 {
3392 error ("bogus comparison result type");
3393 debug_generic_expr (type);
3394 return true;
3395 }
3396
3397 return false;
3398 }
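/* Two well-formed shapes, as a GNU C sketch (typedef names are
   hypothetical):

     typedef int   v4si __attribute__ ((vector_size (16)));
     typedef float v4sf __attribute__ ((vector_size (16)));

     _Bool b;  v4sf x, y;  v4si m;

     b = x[0] < y[0];     scalar comparison, effective boolean result
     m = x < y;           vector comparison: signed integer vector
                          with matching element count and size
*/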
3399
3400 /* Verify a gimple assignment statement STMT with a unary rhs.
3401 Returns true if anything is wrong. */
3402
3403 static bool
3404 verify_gimple_assign_unary (gimple stmt)
3405 {
3406 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3407 tree lhs = gimple_assign_lhs (stmt);
3408 tree lhs_type = TREE_TYPE (lhs);
3409 tree rhs1 = gimple_assign_rhs1 (stmt);
3410 tree rhs1_type = TREE_TYPE (rhs1);
3411
3412 if (!is_gimple_reg (lhs))
3413 {
3414 error ("non-register as LHS of unary operation");
3415 return true;
3416 }
3417
3418 if (!is_gimple_val (rhs1))
3419 {
3420 error ("invalid operand in unary operation");
3421 return true;
3422 }
3423
3424 /* First handle conversions. */
3425 switch (rhs_code)
3426 {
3427 CASE_CONVERT:
3428 {
3429 /* Allow conversions from pointer type to integral type only if
3430 there is no sign or zero extension involved.
3431 For targets where the precision of ptrofftype doesn't match that
3432 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3433 if ((POINTER_TYPE_P (lhs_type)
3434 && INTEGRAL_TYPE_P (rhs1_type))
3435 || (POINTER_TYPE_P (rhs1_type)
3436 && INTEGRAL_TYPE_P (lhs_type)
3437 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3438 || ptrofftype_p (sizetype))))
3439 return false;
3440
3441 /* Allow conversion from integral to offset type and vice versa. */
3442 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3443 && INTEGRAL_TYPE_P (rhs1_type))
3444 || (INTEGRAL_TYPE_P (lhs_type)
3445 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3446 return false;
3447
3448 /* Otherwise assert we are converting between types of the
3449 same kind. */
3450 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3451 {
3452 error ("invalid types in nop conversion");
3453 debug_generic_expr (lhs_type);
3454 debug_generic_expr (rhs1_type);
3455 return true;
3456 }
3457
3458 return false;
3459 }
3460
3461 case ADDR_SPACE_CONVERT_EXPR:
3462 {
3463 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3464 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3465 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3466 {
3467 error ("invalid types in address space conversion");
3468 debug_generic_expr (lhs_type);
3469 debug_generic_expr (rhs1_type);
3470 return true;
3471 }
3472
3473 return false;
3474 }
3475
3476 case FIXED_CONVERT_EXPR:
3477 {
3478 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3479 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3480 {
3481 error ("invalid types in fixed-point conversion");
3482 debug_generic_expr (lhs_type);
3483 debug_generic_expr (rhs1_type);
3484 return true;
3485 }
3486
3487 return false;
3488 }
3489
3490 case FLOAT_EXPR:
3491 {
3492 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3493 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3494 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3495 {
3496 error ("invalid types in conversion to floating point");
3497 debug_generic_expr (lhs_type);
3498 debug_generic_expr (rhs1_type);
3499 return true;
3500 }
3501
3502 return false;
3503 }
3504
3505 case FIX_TRUNC_EXPR:
3506 {
3507 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3508 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3509 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3510 {
3511 error ("invalid types in conversion to integer");
3512 debug_generic_expr (lhs_type);
3513 debug_generic_expr (rhs1_type);
3514 return true;
3515 }
3516
3517 return false;
3518 }
3519
3520 case VEC_UNPACK_HI_EXPR:
3521 case VEC_UNPACK_LO_EXPR:
3522 case REDUC_MAX_EXPR:
3523 case REDUC_MIN_EXPR:
3524 case REDUC_PLUS_EXPR:
3525 case VEC_UNPACK_FLOAT_HI_EXPR:
3526 case VEC_UNPACK_FLOAT_LO_EXPR:
3527 /* FIXME. */
3528 return false;
3529
3530 case NEGATE_EXPR:
3531 case ABS_EXPR:
3532 case BIT_NOT_EXPR:
3533 case PAREN_EXPR:
3534 case NON_LVALUE_EXPR:
3535 case CONJ_EXPR:
3536 break;
3537
3538 default:
3539 gcc_unreachable ();
3540 }
3541
3542 /* For the remaining codes assert there is no conversion involved. */
3543 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3544 {
3545 error ("non-trivial conversion in unary operation");
3546 debug_generic_expr (lhs_type);
3547 debug_generic_expr (rhs1_type);
3548 return true;
3549 }
3550
3551 return false;
3552 }
3553
3554 /* Verify a gimple assignment statement STMT with a binary rhs.
3555 Returns true if anything is wrong. */
3556
3557 static bool
3558 verify_gimple_assign_binary (gimple stmt)
3559 {
3560 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3561 tree lhs = gimple_assign_lhs (stmt);
3562 tree lhs_type = TREE_TYPE (lhs);
3563 tree rhs1 = gimple_assign_rhs1 (stmt);
3564 tree rhs1_type = TREE_TYPE (rhs1);
3565 tree rhs2 = gimple_assign_rhs2 (stmt);
3566 tree rhs2_type = TREE_TYPE (rhs2);
3567
3568 if (!is_gimple_reg (lhs))
3569 {
3570 error ("non-register as LHS of binary operation");
3571 return true;
3572 }
3573
3574 if (!is_gimple_val (rhs1)
3575 || !is_gimple_val (rhs2))
3576 {
3577 error ("invalid operands in binary operation");
3578 return true;
3579 }
3580
3581 /* First handle operations that involve different types. */
3582 switch (rhs_code)
3583 {
3584 case COMPLEX_EXPR:
3585 {
3586 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3587 || !(INTEGRAL_TYPE_P (rhs1_type)
3588 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3589 || !(INTEGRAL_TYPE_P (rhs2_type)
3590 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3591 {
3592 error ("type mismatch in complex expression");
3593 debug_generic_expr (lhs_type);
3594 debug_generic_expr (rhs1_type);
3595 debug_generic_expr (rhs2_type);
3596 return true;
3597 }
3598
3599 return false;
3600 }
3601
3602 case LSHIFT_EXPR:
3603 case RSHIFT_EXPR:
3604 case LROTATE_EXPR:
3605 case RROTATE_EXPR:
3606 {
3607 /* Shifts and rotates are ok on integral types, fixed point
3608 types and integer vector types. */
3609 if ((!INTEGRAL_TYPE_P (rhs1_type)
3610 && !FIXED_POINT_TYPE_P (rhs1_type)
3611 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3612 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3613 || (!INTEGRAL_TYPE_P (rhs2_type)
3614 /* Vector shifts of vectors are also ok. */
3615 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3616 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3617 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3618 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3619 || !useless_type_conversion_p (lhs_type, rhs1_type))
3620 {
3621 error ("type mismatch in shift expression");
3622 debug_generic_expr (lhs_type);
3623 debug_generic_expr (rhs1_type);
3624 debug_generic_expr (rhs2_type);
3625 return true;
3626 }
3627
3628 return false;
3629 }
3630
3631 case VEC_LSHIFT_EXPR:
3632 case VEC_RSHIFT_EXPR:
3633 {
3634 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3635 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3636 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3637 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3638 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3639 || (!INTEGRAL_TYPE_P (rhs2_type)
3640 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3641 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3642 || !useless_type_conversion_p (lhs_type, rhs1_type))
3643 {
3644 error ("type mismatch in vector shift expression");
3645 debug_generic_expr (lhs_type);
3646 debug_generic_expr (rhs1_type);
3647 debug_generic_expr (rhs2_type);
3648 return true;
3649 }
3650 /* For shifting a vector of non-integral components we
3651 only allow shifting by a constant multiple of the element size. */
3652 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3653 && (TREE_CODE (rhs2) != INTEGER_CST
3654 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3655 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3656 {
3657 error ("non-element sized vector shift of floating point vector");
3658 return true;
3659 }
3660
3661 return false;
3662 }
3663
3664 case WIDEN_LSHIFT_EXPR:
3665 {
3666 if (!INTEGRAL_TYPE_P (lhs_type)
3667 || !INTEGRAL_TYPE_P (rhs1_type)
3668 || TREE_CODE (rhs2) != INTEGER_CST
3669 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3670 {
3671 error ("type mismatch in widening vector shift expression");
3672 debug_generic_expr (lhs_type);
3673 debug_generic_expr (rhs1_type);
3674 debug_generic_expr (rhs2_type);
3675 return true;
3676 }
3677
3678 return false;
3679 }
3680
3681 case VEC_WIDEN_LSHIFT_HI_EXPR:
3682 case VEC_WIDEN_LSHIFT_LO_EXPR:
3683 {
3684 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3685 || TREE_CODE (lhs_type) != VECTOR_TYPE
3686 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3687 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3688 || TREE_CODE (rhs2) != INTEGER_CST
3689 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3690 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3691 {
3692 error ("type mismatch in widening vector shift expression");
3693 debug_generic_expr (lhs_type);
3694 debug_generic_expr (rhs1_type);
3695 debug_generic_expr (rhs2_type);
3696 return true;
3697 }
3698
3699 return false;
3700 }
3701
3702 case PLUS_EXPR:
3703 case MINUS_EXPR:
3704 {
3705 tree lhs_etype = lhs_type;
3706 tree rhs1_etype = rhs1_type;
3707 tree rhs2_etype = rhs2_type;
3708 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3709 {
3710 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3711 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3712 {
3713 error ("invalid non-vector operands to vector valued plus");
3714 return true;
3715 }
3716 lhs_etype = TREE_TYPE (lhs_type);
3717 rhs1_etype = TREE_TYPE (rhs1_type);
3718 rhs2_etype = TREE_TYPE (rhs2_type);
3719 }
3720 if (POINTER_TYPE_P (lhs_etype)
3721 || POINTER_TYPE_P (rhs1_etype)
3722 || POINTER_TYPE_P (rhs2_etype))
3723 {
3724 error ("invalid (pointer) operands to plus/minus");
3725 return true;
3726 }
3727
3728 /* Continue with generic binary expression handling. */
3729 break;
3730 }
3731
3732 case POINTER_PLUS_EXPR:
3733 {
3734 if (!POINTER_TYPE_P (rhs1_type)
3735 || !useless_type_conversion_p (lhs_type, rhs1_type)
3736 || !ptrofftype_p (rhs2_type))
3737 {
3738 error ("type mismatch in pointer plus expression");
3739 debug_generic_stmt (lhs_type);
3740 debug_generic_stmt (rhs1_type);
3741 debug_generic_stmt (rhs2_type);
3742 return true;
3743 }
3744
3745 return false;
3746 }
3747
3748 case TRUTH_ANDIF_EXPR:
3749 case TRUTH_ORIF_EXPR:
3750 case TRUTH_AND_EXPR:
3751 case TRUTH_OR_EXPR:
3752 case TRUTH_XOR_EXPR:
3753
3754 gcc_unreachable ();
3755
3756 case LT_EXPR:
3757 case LE_EXPR:
3758 case GT_EXPR:
3759 case GE_EXPR:
3760 case EQ_EXPR:
3761 case NE_EXPR:
3762 case UNORDERED_EXPR:
3763 case ORDERED_EXPR:
3764 case UNLT_EXPR:
3765 case UNLE_EXPR:
3766 case UNGT_EXPR:
3767 case UNGE_EXPR:
3768 case UNEQ_EXPR:
3769 case LTGT_EXPR:
3770 /* Comparisons are also binary, but the result type is not
3771 connected to the operand types. */
3772 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3773
3774 case WIDEN_MULT_EXPR:
3775 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3776 return true;
3777 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3778 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3779
3780 case WIDEN_SUM_EXPR:
3781 case VEC_WIDEN_MULT_HI_EXPR:
3782 case VEC_WIDEN_MULT_LO_EXPR:
3783 case VEC_WIDEN_MULT_EVEN_EXPR:
3784 case VEC_WIDEN_MULT_ODD_EXPR:
3785 case VEC_PACK_TRUNC_EXPR:
3786 case VEC_PACK_SAT_EXPR:
3787 case VEC_PACK_FIX_TRUNC_EXPR:
3788 /* FIXME. */
3789 return false;
3790
3791 case MULT_EXPR:
3792 case MULT_HIGHPART_EXPR:
3793 case TRUNC_DIV_EXPR:
3794 case CEIL_DIV_EXPR:
3795 case FLOOR_DIV_EXPR:
3796 case ROUND_DIV_EXPR:
3797 case TRUNC_MOD_EXPR:
3798 case CEIL_MOD_EXPR:
3799 case FLOOR_MOD_EXPR:
3800 case ROUND_MOD_EXPR:
3801 case RDIV_EXPR:
3802 case EXACT_DIV_EXPR:
3803 case MIN_EXPR:
3804 case MAX_EXPR:
3805 case BIT_IOR_EXPR:
3806 case BIT_XOR_EXPR:
3807 case BIT_AND_EXPR:
3808 /* Continue with generic binary expression handling. */
3809 break;
3810
3811 default:
3812 gcc_unreachable ();
3813 }
3814
3815 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3816 || !useless_type_conversion_p (lhs_type, rhs2_type))
3817 {
3818 error ("type mismatch in binary expression");
3819 debug_generic_stmt (lhs_type);
3820 debug_generic_stmt (rhs1_type);
3821 debug_generic_stmt (rhs2_type);
3822 return true;
3823 }
3824
3825 return false;
3826 }
3827
3828 /* Verify a gimple assignment statement STMT with a ternary rhs.
3829 Returns true if anything is wrong. */
3830
3831 static bool
3832 verify_gimple_assign_ternary (gimple stmt)
3833 {
3834 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3835 tree lhs = gimple_assign_lhs (stmt);
3836 tree lhs_type = TREE_TYPE (lhs);
3837 tree rhs1 = gimple_assign_rhs1 (stmt);
3838 tree rhs1_type = TREE_TYPE (rhs1);
3839 tree rhs2 = gimple_assign_rhs2 (stmt);
3840 tree rhs2_type = TREE_TYPE (rhs2);
3841 tree rhs3 = gimple_assign_rhs3 (stmt);
3842 tree rhs3_type = TREE_TYPE (rhs3);
3843
3844 if (!is_gimple_reg (lhs))
3845 {
3846 error ("non-register as LHS of ternary operation");
3847 return true;
3848 }
3849
3850 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3851 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3852 || !is_gimple_val (rhs2)
3853 || !is_gimple_val (rhs3))
3854 {
3855 error ("invalid operands in ternary operation");
3856 return true;
3857 }
3858
3859 /* First handle operations that involve different types. */
3860 switch (rhs_code)
3861 {
3862 case WIDEN_MULT_PLUS_EXPR:
3863 case WIDEN_MULT_MINUS_EXPR:
3864 if ((!INTEGRAL_TYPE_P (rhs1_type)
3865 && !FIXED_POINT_TYPE_P (rhs1_type))
3866 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3867 || !useless_type_conversion_p (lhs_type, rhs3_type)
3868 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3869 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3870 {
3871 error ("type mismatch in widening multiply-accumulate expression");
3872 debug_generic_expr (lhs_type);
3873 debug_generic_expr (rhs1_type);
3874 debug_generic_expr (rhs2_type);
3875 debug_generic_expr (rhs3_type);
3876 return true;
3877 }
3878 break;
3879
3880 case FMA_EXPR:
3881 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3882 || !useless_type_conversion_p (lhs_type, rhs2_type)
3883 || !useless_type_conversion_p (lhs_type, rhs3_type))
3884 {
3885 error ("type mismatch in fused multiply-add expression");
3886 debug_generic_expr (lhs_type);
3887 debug_generic_expr (rhs1_type);
3888 debug_generic_expr (rhs2_type);
3889 debug_generic_expr (rhs3_type);
3890 return true;
3891 }
3892 break;
3893
3894 case COND_EXPR:
3895 case VEC_COND_EXPR:
3896 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3897 || !useless_type_conversion_p (lhs_type, rhs3_type))
3898 {
3899 error ("type mismatch in conditional expression");
3900 debug_generic_expr (lhs_type);
3901 debug_generic_expr (rhs2_type);
3902 debug_generic_expr (rhs3_type);
3903 return true;
3904 }
3905 break;
3906
3907 case VEC_PERM_EXPR:
3908 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3909 || !useless_type_conversion_p (lhs_type, rhs2_type))
3910 {
3911 error ("type mismatch in vector permute expression");
3912 debug_generic_expr (lhs_type);
3913 debug_generic_expr (rhs1_type);
3914 debug_generic_expr (rhs2_type);
3915 debug_generic_expr (rhs3_type);
3916 return true;
3917 }
3918
3919 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3920 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3921 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3922 {
3923 error ("vector types expected in vector permute expression");
3924 debug_generic_expr (lhs_type);
3925 debug_generic_expr (rhs1_type);
3926 debug_generic_expr (rhs2_type);
3927 debug_generic_expr (rhs3_type);
3928 return true;
3929 }
3930
3931 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3932 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3933 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3934 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3935 != TYPE_VECTOR_SUBPARTS (lhs_type))
3936 {
3937 error ("vectors with different element number found "
3938 "in vector permute expression");
3939 debug_generic_expr (lhs_type);
3940 debug_generic_expr (rhs1_type);
3941 debug_generic_expr (rhs2_type);
3942 debug_generic_expr (rhs3_type);
3943 return true;
3944 }
3945
3946 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3947 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3948 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3949 {
3950 error ("invalid mask type in vector permute expression");
3951 debug_generic_expr (lhs_type);
3952 debug_generic_expr (rhs1_type);
3953 debug_generic_expr (rhs2_type);
3954 debug_generic_expr (rhs3_type);
3955 return true;
3956 }
3957
3958 return false;
3959
3960 case DOT_PROD_EXPR:
3961 case REALIGN_LOAD_EXPR:
3962 /* FIXME. */
3963 return false;
3964
3965 default:
3966 gcc_unreachable ();
3967 }
3968 return false;
3969 }
3970
3971 /* Verify a gimple assignment statement STMT with a single rhs.
3972 Returns true if anything is wrong. */
3973
3974 static bool
3975 verify_gimple_assign_single (gimple stmt)
3976 {
3977 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3978 tree lhs = gimple_assign_lhs (stmt);
3979 tree lhs_type = TREE_TYPE (lhs);
3980 tree rhs1 = gimple_assign_rhs1 (stmt);
3981 tree rhs1_type = TREE_TYPE (rhs1);
3982 bool res = false;
3983
3984 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3985 {
3986 error ("non-trivial conversion at assignment");
3987 debug_generic_expr (lhs_type);
3988 debug_generic_expr (rhs1_type);
3989 return true;
3990 }
3991
3992 if (gimple_clobber_p (stmt)
3993 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3994 {
3995 error ("non-decl/MEM_REF LHS in clobber statement");
3996 debug_generic_expr (lhs);
3997 return true;
3998 }
3999
4000 if (handled_component_p (lhs)
4001 || TREE_CODE (lhs) == MEM_REF
4002 || TREE_CODE (lhs) == TARGET_MEM_REF)
4003 res |= verify_types_in_gimple_reference (lhs, true);
4004
4005 /* Special codes we cannot handle via their class. */
4006 switch (rhs_code)
4007 {
4008 case ADDR_EXPR:
4009 {
4010 tree op = TREE_OPERAND (rhs1, 0);
4011 if (!is_gimple_addressable (op))
4012 {
4013 error ("invalid operand in unary expression");
4014 return true;
4015 }
4016
4017 /* Technically there is no longer a need for matching types, but
4018 gimple hygiene asks for this check. In LTO we can end up
4019 combining incompatible units and thus with addresses of globals
4020 whose type changed to a common one. */
4021 if (!in_lto_p
4022 && !types_compatible_p (TREE_TYPE (op),
4023 TREE_TYPE (TREE_TYPE (rhs1)))
4024 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4025 TREE_TYPE (op)))
4026 {
4027 error ("type mismatch in address expression");
4028 debug_generic_stmt (TREE_TYPE (rhs1));
4029 debug_generic_stmt (TREE_TYPE (op));
4030 return true;
4031 }
4032
4033 return verify_types_in_gimple_reference (op, true);
4034 }
4035
4036 /* tcc_reference */
4037 case INDIRECT_REF:
4038 error ("INDIRECT_REF in gimple IL");
4039 return true;
4040
4041 case COMPONENT_REF:
4042 case BIT_FIELD_REF:
4043 case ARRAY_REF:
4044 case ARRAY_RANGE_REF:
4045 case VIEW_CONVERT_EXPR:
4046 case REALPART_EXPR:
4047 case IMAGPART_EXPR:
4048 case TARGET_MEM_REF:
4049 case MEM_REF:
4050 if (!is_gimple_reg (lhs)
4051 && is_gimple_reg_type (TREE_TYPE (lhs)))
4052 {
4053 error ("invalid rhs for gimple memory store");
4054 debug_generic_stmt (lhs);
4055 debug_generic_stmt (rhs1);
4056 return true;
4057 }
4058 return res || verify_types_in_gimple_reference (rhs1, false);
4059
4060 /* tcc_constant */
4061 case SSA_NAME:
4062 case INTEGER_CST:
4063 case REAL_CST:
4064 case FIXED_CST:
4065 case COMPLEX_CST:
4066 case VECTOR_CST:
4067 case STRING_CST:
4068 return res;
4069
4070 /* tcc_declaration */
4071 case CONST_DECL:
4072 return res;
4073 case VAR_DECL:
4074 case PARM_DECL:
4075 if (!is_gimple_reg (lhs)
4076 && !is_gimple_reg (rhs1)
4077 && is_gimple_reg_type (TREE_TYPE (lhs)))
4078 {
4079 error ("invalid rhs for gimple memory store");
4080 debug_generic_stmt (lhs);
4081 debug_generic_stmt (rhs1);
4082 return true;
4083 }
4084 return res;
4085
4086 case CONSTRUCTOR:
4087 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4088 {
4089 unsigned int i;
4090 tree elt_i, elt_v, elt_t = NULL_TREE;
4091
4092 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4093 return res;
4094 /* For vector CONSTRUCTORs we require that either it is an empty
4095 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4096 (then the element count must be correct to cover the whole
4097 outer vector and the index must be NULL on all elements), or it
4098 is a CONSTRUCTOR of scalar elements, where as an exception we
4099 allow a smaller number of elements (assuming zero filling) and
4100 consecutive indexes as compared to NULL indexes (such
4101 CONSTRUCTORs can appear in the IL from FEs). */
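/* For illustration (a sketch with made-up names): for a V4SI-typed
result, both
{ v2si_a, v2si_b } (two subvector elements, NULL indexes)
and
{ i_1, i_2 } (scalar elements, with zero filling of the tail)
would satisfy the rules checked below. */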
4102 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4103 {
4104 if (elt_t == NULL_TREE)
4105 {
4106 elt_t = TREE_TYPE (elt_v);
4107 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4108 {
4109 /* ELT_T already holds TREE_TYPE (elt_v) here. */
4110 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4111 TREE_TYPE (elt_t)))
4112 {
4113 error ("incorrect type of vector CONSTRUCTOR"
4114 " elements");
4115 debug_generic_stmt (rhs1);
4116 return true;
4117 }
4118 else if (CONSTRUCTOR_NELTS (rhs1)
4119 * TYPE_VECTOR_SUBPARTS (elt_t)
4120 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4121 {
4122 error ("incorrect number of vector CONSTRUCTOR"
4123 " elements");
4124 debug_generic_stmt (rhs1);
4125 return true;
4126 }
4127 }
4128 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4129 elt_t))
4130 {
4131 error ("incorrect type of vector CONSTRUCTOR elements");
4132 debug_generic_stmt (rhs1);
4133 return true;
4134 }
4135 else if (CONSTRUCTOR_NELTS (rhs1)
4136 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4137 {
4138 error ("incorrect number of vector CONSTRUCTOR elements");
4139 debug_generic_stmt (rhs1);
4140 return true;
4141 }
4142 }
4143 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4144 {
4145 error ("incorrect type of vector CONSTRUCTOR elements");
4146 debug_generic_stmt (rhs1);
4147 return true;
4148 }
4149 if (elt_i != NULL_TREE
4150 && (TREE_CODE (elt_t) == VECTOR_TYPE
4151 || TREE_CODE (elt_i) != INTEGER_CST
4152 || compare_tree_int (elt_i, i) != 0))
4153 {
4154 error ("vector CONSTRUCTOR with non-NULL element index");
4155 debug_generic_stmt (rhs1);
4156 return true;
4157 }
4158 }
4159 }
4160 return res;
4161 case OBJ_TYPE_REF:
4162 case ASSERT_EXPR:
4163 case WITH_SIZE_EXPR:
4164 /* FIXME. */
4165 return res;
4166
4167 default:;
4168 }
4169
4170 return res;
4171 }
4172
4173 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4174 is a problem, otherwise false. */
4175
4176 static bool
4177 verify_gimple_assign (gimple stmt)
4178 {
4179 switch (gimple_assign_rhs_class (stmt))
4180 {
4181 case GIMPLE_SINGLE_RHS:
4182 return verify_gimple_assign_single (stmt);
4183
4184 case GIMPLE_UNARY_RHS:
4185 return verify_gimple_assign_unary (stmt);
4186
4187 case GIMPLE_BINARY_RHS:
4188 return verify_gimple_assign_binary (stmt);
4189
4190 case GIMPLE_TERNARY_RHS:
4191 return verify_gimple_assign_ternary (stmt);
4192
4193 default:
4194 gcc_unreachable ();
4195 }
4196 }
4197
4198 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4199 is a problem, otherwise false. */
4200
4201 static bool
4202 verify_gimple_return (gimple stmt)
4203 {
4204 tree op = gimple_return_retval (stmt);
4205 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4206
4207 /* We cannot test for present return values as we do not fix up missing
4208 return values from the original source. */
4209 if (op == NULL)
4210 return false;
4211
4212 if (!is_gimple_val (op)
4213 && TREE_CODE (op) != RESULT_DECL)
4214 {
4215 error ("invalid operand in return statement");
4216 debug_generic_stmt (op);
4217 return true;
4218 }
4219
4220 if ((TREE_CODE (op) == RESULT_DECL
4221 && DECL_BY_REFERENCE (op))
4222 || (TREE_CODE (op) == SSA_NAME
4223 && SSA_NAME_VAR (op)
4224 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4225 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4226 op = TREE_TYPE (op);
4227
4228 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4229 {
4230 error ("invalid conversion in return statement");
4231 debug_generic_stmt (restype);
4232 debug_generic_stmt (TREE_TYPE (op));
4233 return true;
4234 }
4235
4236 return false;
4237 }
4238
4239
4240 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4241 is a problem, otherwise false. */
4242
4243 static bool
4244 verify_gimple_goto (gimple stmt)
4245 {
4246 tree dest = gimple_goto_dest (stmt);
4247
4248 /* ??? We have two canonical forms of direct goto destinations, a
4249 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
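/* For illustration: a direct "goto lab;" uses the LABEL_DECL (or its
ADDR_EXPR), while a computed "goto *ptr_5;" has a pointer-typed
gimple value as its destination; both forms pass the check below. */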
4250 if (TREE_CODE (dest) != LABEL_DECL
4251 && (!is_gimple_val (dest)
4252 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4253 {
4254 error ("goto destination is neither a label nor a pointer");
4255 return true;
4256 }
4257
4258 return false;
4259 }
4260
4261 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4262 is a problem, otherwise false. */
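/* For illustration (a sketch of what the checks below accept):
switch (i_2) <default: L0, case 1: L1, case 3 ... 7: L2>
The first label must be the default (NULL CASE_LOW and CASE_HIGH),
every other label needs a CASE_LOW of one common type, a range must
have CASE_LOW < CASE_HIGH, and all cases must be sorted and
non-overlapping. */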
4263
4264 static bool
4265 verify_gimple_switch (gimple stmt)
4266 {
4267 unsigned int i, n;
4268 tree elt, prev_upper_bound = NULL_TREE;
4269 tree index_type, elt_type = NULL_TREE;
4270
4271 if (!is_gimple_val (gimple_switch_index (stmt)))
4272 {
4273 error ("invalid operand to switch statement");
4274 debug_generic_stmt (gimple_switch_index (stmt));
4275 return true;
4276 }
4277
4278 index_type = TREE_TYPE (gimple_switch_index (stmt));
4279 if (! INTEGRAL_TYPE_P (index_type))
4280 {
4281 error ("non-integral type switch statement");
4282 debug_generic_expr (index_type);
4283 return true;
4284 }
4285
4286 elt = gimple_switch_label (stmt, 0);
4287 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4288 {
4289 error ("invalid default case label in switch statement");
4290 debug_generic_expr (elt);
4291 return true;
4292 }
4293
4294 n = gimple_switch_num_labels (stmt);
4295 for (i = 1; i < n; i++)
4296 {
4297 elt = gimple_switch_label (stmt, i);
4298
4299 if (! CASE_LOW (elt))
4300 {
4301 error ("invalid case label in switch statement");
4302 debug_generic_expr (elt);
4303 return true;
4304 }
4305 if (CASE_HIGH (elt)
4306 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4307 {
4308 error ("invalid case range in switch statement");
4309 debug_generic_expr (elt);
4310 return true;
4311 }
4312
4313 if (elt_type)
4314 {
4315 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4316 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4317 {
4318 error ("type mismatch for case label in switch statement");
4319 debug_generic_expr (elt);
4320 return true;
4321 }
4322 }
4323 else
4324 {
4325 elt_type = TREE_TYPE (CASE_LOW (elt));
4326 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4327 {
4328 error ("type precision mismatch in switch statement");
4329 return true;
4330 }
4331 }
4332
4333 if (prev_upper_bound)
4334 {
4335 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4336 {
4337 error ("case labels not sorted in switch statement");
4338 return true;
4339 }
4340 }
4341
4342 prev_upper_bound = CASE_HIGH (elt);
4343 if (! prev_upper_bound)
4344 prev_upper_bound = CASE_LOW (elt);
4345 }
4346
4347 return false;
4348 }
4349
4350 /* Verify a gimple debug statement STMT.
4351 Returns true if anything is wrong. */
4352
4353 static bool
4354 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4355 {
4356 /* There isn't much that could be wrong in a gimple debug stmt. A
4357 gimple debug bind stmt, for example, maps a tree, that's usually
4358 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4359 component or member of an aggregate type, to another tree, that
4360 can be an arbitrary expression. These stmts expand into debug
4361 insns, and are converted to debug notes by var-tracking.c. */
4362 return false;
4363 }
4364
4365 /* Verify a gimple label statement STMT.
4366 Returns true if anything is wrong. */
4367
4368 static bool
4369 verify_gimple_label (gimple stmt)
4370 {
4371 tree decl = gimple_label_label (stmt);
4372 int uid;
4373 bool err = false;
4374
4375 if (TREE_CODE (decl) != LABEL_DECL)
4376 return true;
4377 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4378 && DECL_CONTEXT (decl) != current_function_decl)
4379 {
4380 error ("label's context is not the current function decl");
4381 err |= true;
4382 }
4383
4384 uid = LABEL_DECL_UID (decl);
4385 if (cfun->cfg
4386 && (uid == -1
4387 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4388 {
4389 error ("incorrect entry in label_to_block_map");
4390 err |= true;
4391 }
4392
4393 uid = EH_LANDING_PAD_NR (decl);
4394 if (uid)
4395 {
4396 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4397 if (decl != lp->post_landing_pad)
4398 {
4399 error ("incorrect setting of landing pad number");
4400 err |= true;
4401 }
4402 }
4403
4404 return err;
4405 }
4406
4407 /* Verify the GIMPLE statement STMT. Returns true if there is an
4408 error, otherwise false. */
4409
4410 static bool
4411 verify_gimple_stmt (gimple stmt)
4412 {
4413 switch (gimple_code (stmt))
4414 {
4415 case GIMPLE_ASSIGN:
4416 return verify_gimple_assign (stmt);
4417
4418 case GIMPLE_LABEL:
4419 return verify_gimple_label (stmt);
4420
4421 case GIMPLE_CALL:
4422 return verify_gimple_call (stmt);
4423
4424 case GIMPLE_COND:
4425 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4426 {
4427 error ("invalid comparison code in gimple cond");
4428 return true;
4429 }
4430 if (!(!gimple_cond_true_label (stmt)
4431 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4432 || !(!gimple_cond_false_label (stmt)
4433 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4434 {
4435 error ("invalid labels in gimple cond");
4436 return true;
4437 }
4438
4439 return verify_gimple_comparison (boolean_type_node,
4440 gimple_cond_lhs (stmt),
4441 gimple_cond_rhs (stmt));
4442
4443 case GIMPLE_GOTO:
4444 return verify_gimple_goto (stmt);
4445
4446 case GIMPLE_SWITCH:
4447 return verify_gimple_switch (stmt);
4448
4449 case GIMPLE_RETURN:
4450 return verify_gimple_return (stmt);
4451
4452 case GIMPLE_ASM:
4453 return false;
4454
4455 case GIMPLE_TRANSACTION:
4456 return verify_gimple_transaction (stmt);
4457
4458 /* Tuples that do not have tree operands. */
4459 case GIMPLE_NOP:
4460 case GIMPLE_PREDICT:
4461 case GIMPLE_RESX:
4462 case GIMPLE_EH_DISPATCH:
4463 case GIMPLE_EH_MUST_NOT_THROW:
4464 return false;
4465
4466 CASE_GIMPLE_OMP:
4467 /* OpenMP directives are validated by the FE and never operated
4468 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4469 non-gimple expressions when the main index variable has had
4470 its address taken. This does not affect the loop itself
4471 because the header of a GIMPLE_OMP_FOR is merely used to determine
4472 how to set up the parallel iteration. */
4473 return false;
4474
4475 case GIMPLE_DEBUG:
4476 return verify_gimple_debug (stmt);
4477
4478 default:
4479 gcc_unreachable ();
4480 }
4481 }
4482
4483 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4484 and false otherwise. */
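/* For illustration (a sketch with made-up SSA names): a scalar PHI
such as
x_3 = PHI <x_1(2), x_2(4)>
must have an SSA_NAME result and gimple values as arguments, while a
virtual PHI such as .MEM_5 = PHI <.MEM_2(2), .MEM_4(4)> must use the
function's single virtual operand for its result and every argument. */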
4485
4486 static bool
4487 verify_gimple_phi (gimple phi)
4488 {
4489 bool err = false;
4490 unsigned i;
4491 tree phi_result = gimple_phi_result (phi);
4492 bool virtual_p;
4493
4494 if (!phi_result)
4495 {
4496 error ("invalid PHI result");
4497 return true;
4498 }
4499
4500 virtual_p = virtual_operand_p (phi_result);
4501 if (TREE_CODE (phi_result) != SSA_NAME
4502 || (virtual_p
4503 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4504 {
4505 error ("invalid PHI result");
4506 err = true;
4507 }
4508
4509 for (i = 0; i < gimple_phi_num_args (phi); i++)
4510 {
4511 tree t = gimple_phi_arg_def (phi, i);
4512
4513 if (!t)
4514 {
4515 error ("missing PHI def");
4516 err |= true;
4517 continue;
4518 }
4519 /* Addressable variables do have SSA_NAMEs but they
4520 are not considered gimple values. */
4521 else if ((TREE_CODE (t) == SSA_NAME
4522 && virtual_p != virtual_operand_p (t))
4523 || (virtual_p
4524 && (TREE_CODE (t) != SSA_NAME
4525 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4526 || (!virtual_p
4527 && !is_gimple_val (t)))
4528 {
4529 error ("invalid PHI argument");
4530 debug_generic_expr (t);
4531 err |= true;
4532 }
4533 #ifdef ENABLE_TYPES_CHECKING
4534 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4535 {
4536 error ("incompatible types in PHI argument %u", i);
4537 debug_generic_stmt (TREE_TYPE (phi_result));
4538 debug_generic_stmt (TREE_TYPE (t));
4539 err |= true;
4540 }
4541 #endif
4542 }
4543
4544 return err;
4545 }
4546
4547 /* Verify the GIMPLE statements inside the sequence STMTS. */
4548
4549 static bool
4550 verify_gimple_in_seq_2 (gimple_seq stmts)
4551 {
4552 gimple_stmt_iterator ittr;
4553 bool err = false;
4554
4555 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4556 {
4557 gimple stmt = gsi_stmt (ittr);
4558
4559 switch (gimple_code (stmt))
4560 {
4561 case GIMPLE_BIND:
4562 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4563 break;
4564
4565 case GIMPLE_TRY:
4566 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4567 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4568 break;
4569
4570 case GIMPLE_EH_FILTER:
4571 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4572 break;
4573
4574 case GIMPLE_EH_ELSE:
4575 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4576 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4577 break;
4578
4579 case GIMPLE_CATCH:
4580 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4581 break;
4582
4583 case GIMPLE_TRANSACTION:
4584 err |= verify_gimple_transaction (stmt);
4585 break;
4586
4587 default:
4588 {
4589 bool err2 = verify_gimple_stmt (stmt);
4590 if (err2)
4591 debug_gimple_stmt (stmt);
4592 err |= err2;
4593 }
4594 }
4595 }
4596
4597 return err;
4598 }
4599
4600 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4601 is a problem, otherwise false. */
4602
4603 static bool
4604 verify_gimple_transaction (gimple stmt)
4605 {
4606 tree lab = gimple_transaction_label (stmt);
4607 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4608 return true;
4609 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4610 }
4611
4612
4613 /* Verify the GIMPLE statements inside the statement list STMTS. */
4614
4615 DEBUG_FUNCTION void
4616 verify_gimple_in_seq (gimple_seq stmts)
4617 {
4618 timevar_push (TV_TREE_STMT_VERIFY);
4619 if (verify_gimple_in_seq_2 (stmts))
4620 internal_error ("verify_gimple failed");
4621 timevar_pop (TV_TREE_STMT_VERIFY);
4622 }
4623
4624 /* Return true when T can be shared. */
4625
4626 static bool
4627 tree_node_can_be_shared (tree t)
4628 {
4629 if (IS_TYPE_OR_DECL_P (t)
4630 || is_gimple_min_invariant (t)
4631 || TREE_CODE (t) == SSA_NAME
4632 || t == error_mark_node
4633 || TREE_CODE (t) == IDENTIFIER_NODE)
4634 return true;
4635
4636 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4637 return true;
4638
4639 /* DECL_P nodes are already covered by the IS_TYPE_OR_DECL_P
4640 check above. */
4641
4642 return false;
4643 }
4644
4645 /* Called via walk_tree. Verify tree sharing. */
4646
4647 static tree
4648 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4649 {
4650 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4651
4652 if (tree_node_can_be_shared (*tp))
4653 {
4654 *walk_subtrees = false;
4655 return NULL;
4656 }
4657
4658 if (pointer_set_insert (visited, *tp))
4659 return *tp;
4660
4661 return NULL;
4662 }
4663
4664 /* Called via walk_gimple_stmt. Verify tree sharing. */
4665
4666 static tree
4667 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4668 {
4669 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4670 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4671 }
4672
4673 static bool eh_error_found;
4674 static int
4675 verify_eh_throw_stmt_node (void **slot, void *data)
4676 {
4677 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4678 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4679
4680 if (!pointer_set_contains (visited, node->stmt))
4681 {
4682 error ("dead STMT in EH table");
4683 debug_gimple_stmt (node->stmt);
4684 eh_error_found = true;
4685 }
4686 return 1;
4687 }
4688
4689 /* Verify that the block of location LOC is in BLOCKS. */
4690
4691 static bool
4692 verify_location (pointer_set_t *blocks, location_t loc)
4693 {
4694 tree block = LOCATION_BLOCK (loc);
4695 if (block != NULL_TREE
4696 && !pointer_set_contains (blocks, block))
4697 {
4698 error ("location references block not in block tree");
4699 return true;
4700 }
4701 if (block != NULL_TREE)
4702 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4703 return false;
4704 }
4705
4706 /* Called via walk_tree. Verify that expressions have no blocks. */
4707
4708 static tree
4709 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4710 {
4711 if (!EXPR_P (*tp))
4712 {
4713 *walk_subtrees = false;
4714 return NULL;
4715 }
4716
4717 location_t loc = EXPR_LOCATION (*tp);
4718 if (LOCATION_BLOCK (loc) != NULL)
4719 return *tp;
4720
4721 return NULL;
4722 }
4723
4724 /* Called via walk_tree. Verify locations of expressions. */
4725
4726 static tree
4727 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4728 {
4729 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4730
4731 if (TREE_CODE (*tp) == VAR_DECL
4732 && DECL_HAS_DEBUG_EXPR_P (*tp))
4733 {
4734 tree t = DECL_DEBUG_EXPR (*tp);
4735 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4736 if (addr)
4737 return addr;
4738 }
4739 if ((TREE_CODE (*tp) == VAR_DECL
4740 || TREE_CODE (*tp) == PARM_DECL
4741 || TREE_CODE (*tp) == RESULT_DECL)
4742 && DECL_HAS_VALUE_EXPR_P (*tp))
4743 {
4744 tree t = DECL_VALUE_EXPR (*tp);
4745 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4746 if (addr)
4747 return addr;
4748 }
4749
4750 if (!EXPR_P (*tp))
4751 {
4752 *walk_subtrees = false;
4753 return NULL;
4754 }
4755
4756 location_t loc = EXPR_LOCATION (*tp);
4757 if (verify_location (blocks, loc))
4758 return *tp;
4759
4760 return NULL;
4761 }
4762
4763 /* Called via walk_gimple_op. Verify locations of expressions. */
4764
4765 static tree
4766 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4767 {
4768 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4769 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4770 }
4771
4772 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4773
4774 static void
4775 collect_subblocks (pointer_set_t *blocks, tree block)
4776 {
4777 tree t;
4778 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4779 {
4780 pointer_set_insert (blocks, t);
4781 collect_subblocks (blocks, t);
4782 }
4783 }
4784
4785 /* Verify the GIMPLE statements in the CFG of FN. */
4786
4787 DEBUG_FUNCTION void
4788 verify_gimple_in_cfg (struct function *fn)
4789 {
4790 basic_block bb;
4791 bool err = false;
4792 struct pointer_set_t *visited, *visited_stmts, *blocks;
4793
4794 timevar_push (TV_TREE_STMT_VERIFY);
4795 visited = pointer_set_create ();
4796 visited_stmts = pointer_set_create ();
4797
4798 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4799 blocks = pointer_set_create ();
4800 if (DECL_INITIAL (fn->decl))
4801 {
4802 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4803 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4804 }
4805
4806 FOR_EACH_BB_FN (bb, fn)
4807 {
4808 gimple_stmt_iterator gsi;
4809
4810 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4811 {
4812 gimple phi = gsi_stmt (gsi);
4813 bool err2 = false;
4814 unsigned i;
4815
4816 pointer_set_insert (visited_stmts, phi);
4817
4818 if (gimple_bb (phi) != bb)
4819 {
4820 error ("gimple_bb (phi) is set to a wrong basic block");
4821 err2 = true;
4822 }
4823
4824 err2 |= verify_gimple_phi (phi);
4825
4826 /* Only PHI arguments have locations. */
4827 if (gimple_location (phi) != UNKNOWN_LOCATION)
4828 {
4829 error ("PHI node with location");
4830 err2 = true;
4831 }
4832
4833 for (i = 0; i < gimple_phi_num_args (phi); i++)
4834 {
4835 tree arg = gimple_phi_arg_def (phi, i);
4836 tree addr = walk_tree (&arg, verify_node_sharing_1,
4837 visited, NULL);
4838 if (addr)
4839 {
4840 error ("incorrect sharing of tree nodes");
4841 debug_generic_expr (addr);
4842 err2 |= true;
4843 }
4844 location_t loc = gimple_phi_arg_location (phi, i);
4845 if (virtual_operand_p (gimple_phi_result (phi))
4846 && loc != UNKNOWN_LOCATION)
4847 {
4848 error ("virtual PHI with argument locations");
4849 err2 = true;
4850 }
4851 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4852 if (addr)
4853 {
4854 debug_generic_expr (addr);
4855 err2 = true;
4856 }
4857 err2 |= verify_location (blocks, loc);
4858 }
4859
4860 if (err2)
4861 debug_gimple_stmt (phi);
4862 err |= err2;
4863 }
4864
4865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4866 {
4867 gimple stmt = gsi_stmt (gsi);
4868 bool err2 = false;
4869 struct walk_stmt_info wi;
4870 tree addr;
4871 int lp_nr;
4872
4873 pointer_set_insert (visited_stmts, stmt);
4874
4875 if (gimple_bb (stmt) != bb)
4876 {
4877 error ("gimple_bb (stmt) is set to a wrong basic block");
4878 err2 = true;
4879 }
4880
4881 err2 |= verify_gimple_stmt (stmt);
4882 err2 |= verify_location (blocks, gimple_location (stmt));
4883
4884 memset (&wi, 0, sizeof (wi));
4885 wi.info = (void *) visited;
4886 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4887 if (addr)
4888 {
4889 error ("incorrect sharing of tree nodes");
4890 debug_generic_expr (addr);
4891 err2 |= true;
4892 }
4893
4894 memset (&wi, 0, sizeof (wi));
4895 wi.info = (void *) blocks;
4896 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4897 if (addr)
4898 {
4899 debug_generic_expr (addr);
4900 err2 |= true;
4901 }
4902
4903 /* ??? Instead of not checking these stmts at all the walker
4904 should know its context via wi. */
4905 if (!is_gimple_debug (stmt)
4906 && !is_gimple_omp (stmt))
4907 {
4908 memset (&wi, 0, sizeof (wi));
4909 addr = walk_gimple_op (stmt, verify_expr, &wi);
4910 if (addr)
4911 {
4912 debug_generic_expr (addr);
4913 inform (gimple_location (stmt), "in statement");
4914 err2 |= true;
4915 }
4916 }
4917
4918 /* If the statement is marked as part of an EH region, then it is
4919 expected that the statement could throw. Verify that when we
4920 have optimizations that simplify statements such that we prove
4921 that they cannot throw, that we update other data structures
4922 to match. */
4923 lp_nr = lookup_stmt_eh_lp (stmt);
4924 if (lp_nr != 0)
4925 {
4926 if (!stmt_could_throw_p (stmt))
4927 {
4928 error ("statement marked for throw, but doesn%'t");
4929 err2 |= true;
4930 }
4931 else if (lp_nr > 0
4932 && !gsi_one_before_end_p (gsi)
4933 && stmt_can_throw_internal (stmt))
4934 {
4935 error ("statement marked for throw in middle of block");
4936 err2 |= true;
4937 }
4938 }
4939
4940 if (err2)
4941 debug_gimple_stmt (stmt);
4942 err |= err2;
4943 }
4944 }
4945
4946 eh_error_found = false;
4947 if (get_eh_throw_stmt_table (cfun))
4948 htab_traverse (get_eh_throw_stmt_table (cfun),
4949 verify_eh_throw_stmt_node,
4950 visited_stmts);
4951
4952 if (err || eh_error_found)
4953 internal_error ("verify_gimple failed");
4954
4955 pointer_set_destroy (visited);
4956 pointer_set_destroy (visited_stmts);
4957 pointer_set_destroy (blocks);
4958 verify_histograms ();
4959 timevar_pop (TV_TREE_STMT_VERIFY);
4960 }
4961
4962
4963 /* Verifies that the flow information is OK. */
4964
4965 static int
4966 gimple_verify_flow_info (void)
4967 {
4968 int err = 0;
4969 basic_block bb;
4970 gimple_stmt_iterator gsi;
4971 gimple stmt;
4972 edge e;
4973 edge_iterator ei;
4974
4975 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4976 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4977 {
4978 error ("ENTRY_BLOCK has IL associated with it");
4979 err = 1;
4980 }
4981
4982 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4983 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4984 {
4985 error ("EXIT_BLOCK has IL associated with it");
4986 err = 1;
4987 }
4988
4989 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4990 if (e->flags & EDGE_FALLTHRU)
4991 {
4992 error ("fallthru to exit from bb %d", e->src->index);
4993 err = 1;
4994 }
4995
4996 FOR_EACH_BB_FN (bb, cfun)
4997 {
4998 bool found_ctrl_stmt = false;
4999
5000 stmt = NULL;
5001
5002 /* Skip labels on the start of basic block. */
5003 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5004 {
5005 tree label;
5006 gimple prev_stmt = stmt;
5007
5008 stmt = gsi_stmt (gsi);
5009
5010 if (gimple_code (stmt) != GIMPLE_LABEL)
5011 break;
5012
5013 label = gimple_label_label (stmt);
5014 if (prev_stmt && DECL_NONLOCAL (label))
5015 {
5016 error ("nonlocal label ");
5017 print_generic_expr (stderr, label, 0);
5018 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5019 bb->index);
5020 err = 1;
5021 }
5022
5023 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5024 {
5025 error ("EH landing pad label ");
5026 print_generic_expr (stderr, label, 0);
5027 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5028 bb->index);
5029 err = 1;
5030 }
5031
5032 if (label_to_block (label) != bb)
5033 {
5034 error ("label ");
5035 print_generic_expr (stderr, label, 0);
5036 fprintf (stderr, " to block does not match in bb %d",
5037 bb->index);
5038 err = 1;
5039 }
5040
5041 if (decl_function_context (label) != current_function_decl)
5042 {
5043 error ("label ");
5044 print_generic_expr (stderr, label, 0);
5045 fprintf (stderr, " has incorrect context in bb %d",
5046 bb->index);
5047 err = 1;
5048 }
5049 }
5050
5051 /* Verify that the body of basic block BB is free of control flow. */
5052 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5053 {
5054 gimple stmt = gsi_stmt (gsi);
5055
5056 if (found_ctrl_stmt)
5057 {
5058 error ("control flow in the middle of basic block %d",
5059 bb->index);
5060 err = 1;
5061 }
5062
5063 if (stmt_ends_bb_p (stmt))
5064 found_ctrl_stmt = true;
5065
5066 if (gimple_code (stmt) == GIMPLE_LABEL)
5067 {
5068 error ("label ");
5069 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5070 fprintf (stderr, " in the middle of basic block %d", bb->index);
5071 err = 1;
5072 }
5073 }
5074
5075 gsi = gsi_last_bb (bb);
5076 if (gsi_end_p (gsi))
5077 continue;
5078
5079 stmt = gsi_stmt (gsi);
5080
5081 if (gimple_code (stmt) == GIMPLE_LABEL)
5082 continue;
5083
5084 err |= verify_eh_edges (stmt);
5085
5086 if (is_ctrl_stmt (stmt))
5087 {
5088 FOR_EACH_EDGE (e, ei, bb->succs)
5089 if (e->flags & EDGE_FALLTHRU)
5090 {
5091 error ("fallthru edge after a control statement in bb %d",
5092 bb->index);
5093 err = 1;
5094 }
5095 }
5096
5097 if (gimple_code (stmt) != GIMPLE_COND)
5098 {
5099 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5100 after anything other than an if statement. */
5101 FOR_EACH_EDGE (e, ei, bb->succs)
5102 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5103 {
5104 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5105 bb->index);
5106 err = 1;
5107 }
5108 }
5109
5110 switch (gimple_code (stmt))
5111 {
5112 case GIMPLE_COND:
5113 {
5114 edge true_edge;
5115 edge false_edge;
5116
5117 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5118
5119 if (!true_edge
5120 || !false_edge
5121 || !(true_edge->flags & EDGE_TRUE_VALUE)
5122 || !(false_edge->flags & EDGE_FALSE_VALUE)
5123 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5124 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5125 || EDGE_COUNT (bb->succs) >= 3)
5126 {
5127 error ("wrong outgoing edge flags at end of bb %d",
5128 bb->index);
5129 err = 1;
5130 }
5131 }
5132 break;
5133
5134 case GIMPLE_GOTO:
5135 if (simple_goto_p (stmt))
5136 {
5137 error ("explicit goto at end of bb %d", bb->index);
5138 err = 1;
5139 }
5140 else
5141 {
5142 /* FIXME. We should double check that the labels in the
5143 destination blocks have their address taken. */
5144 FOR_EACH_EDGE (e, ei, bb->succs)
5145 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5146 | EDGE_FALSE_VALUE))
5147 || !(e->flags & EDGE_ABNORMAL))
5148 {
5149 error ("wrong outgoing edge flags at end of bb %d",
5150 bb->index);
5151 err = 1;
5152 }
5153 }
5154 break;
5155
5156 case GIMPLE_CALL:
5157 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5158 break;
5159 /* ... fallthru ... */
5160 case GIMPLE_RETURN:
5161 if (!single_succ_p (bb)
5162 || (single_succ_edge (bb)->flags
5163 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5164 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5165 {
5166 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5167 err = 1;
5168 }
5169 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5170 {
5171 error ("return edge does not point to exit in bb %d",
5172 bb->index);
5173 err = 1;
5174 }
5175 break;
5176
5177 case GIMPLE_SWITCH:
5178 {
5179 tree prev;
5180 edge e;
5181 size_t i, n;
5182
5183 n = gimple_switch_num_labels (stmt);
5184
5185 /* Mark all the destination basic blocks. */
5186 for (i = 0; i < n; ++i)
5187 {
5188 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5189 basic_block label_bb = label_to_block (lab);
5190 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5191 label_bb->aux = (void *)1;
5192 }
5193
5194 /* Verify that the case labels are sorted. */
5195 prev = gimple_switch_label (stmt, 0);
5196 for (i = 1; i < n; ++i)
5197 {
5198 tree c = gimple_switch_label (stmt, i);
5199 if (!CASE_LOW (c))
5200 {
5201 error ("found default case not at the start of "
5202 "case vector");
5203 err = 1;
5204 continue;
5205 }
5206 if (CASE_LOW (prev)
5207 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5208 {
5209 error ("case labels not sorted: ");
5210 print_generic_expr (stderr, prev, 0);
5211 fprintf (stderr, " is greater than ");
5212 print_generic_expr (stderr, c, 0);
5213 fprintf (stderr, " but comes before it.\n");
5214 err = 1;
5215 }
5216 prev = c;
5217 }
5218 /* VRP will remove the default case if it can prove it will
5219 never be executed. So do not verify there always exists
5220 a default case here. */
5221
5222 FOR_EACH_EDGE (e, ei, bb->succs)
5223 {
5224 if (!e->dest->aux)
5225 {
5226 error ("extra outgoing edge %d->%d",
5227 bb->index, e->dest->index);
5228 err = 1;
5229 }
5230
5231 e->dest->aux = (void *)2;
5232 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5233 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5234 {
5235 error ("wrong outgoing edge flags at end of bb %d",
5236 bb->index);
5237 err = 1;
5238 }
5239 }
5240
5241 /* Check that we have all of them. */
5242 for (i = 0; i < n; ++i)
5243 {
5244 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5245 basic_block label_bb = label_to_block (lab);
5246
5247 if (label_bb->aux != (void *)2)
5248 {
5249 error ("missing edge %i->%i", bb->index, label_bb->index);
5250 err = 1;
5251 }
5252 }
5253
5254 FOR_EACH_EDGE (e, ei, bb->succs)
5255 e->dest->aux = (void *)0;
5256 }
5257 break;
5258
5259 case GIMPLE_EH_DISPATCH:
5260 err |= verify_eh_dispatch_edge (stmt);
5261 break;
5262
5263 default:
5264 break;
5265 }
5266 }
5267
5268 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5269 verify_dominators (CDI_DOMINATORS);
5270
5271 return err;
5272 }
5273
5274
5275 /* Updates phi nodes after creating a forwarder block joined
5276 by edge FALLTHRU. */
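/* For illustration (a sketch with made-up names): if DUMMY ends in
x_1 = PHI <a_0(e1), b_0(e2)>, the PHI in DUMMY is given a fresh
result x_2 and a new PHI x_1 = PHI <x_2(FALLTHRU), ...> is created
in BB; the remaining arguments are then filled in from the stmts
pending on the other predecessor edges. */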
5277
5278 static void
5279 gimple_make_forwarder_block (edge fallthru)
5280 {
5281 edge e;
5282 edge_iterator ei;
5283 basic_block dummy, bb;
5284 tree var;
5285 gimple_stmt_iterator gsi;
5286
5287 dummy = fallthru->src;
5288 bb = fallthru->dest;
5289
5290 if (single_pred_p (bb))
5291 return;
5292
5293 /* If we redirected a branch we must create new PHI nodes at the
5294 start of BB. */
5295 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5296 {
5297 gimple phi, new_phi;
5298
5299 phi = gsi_stmt (gsi);
5300 var = gimple_phi_result (phi);
5301 new_phi = create_phi_node (var, bb);
5302 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5303 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5304 UNKNOWN_LOCATION);
5305 }
5306
5307 /* Add the arguments we have stored on edges. */
5308 FOR_EACH_EDGE (e, ei, bb->preds)
5309 {
5310 if (e == fallthru)
5311 continue;
5312
5313 flush_pending_stmts (e);
5314 }
5315 }
5316
5317
5318 /* Return a non-special label in the head of basic block BB.
5319 Create one if it doesn't exist. */
5320
5321 tree
5322 gimple_block_label (basic_block bb)
5323 {
5324 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5325 bool first = true;
5326 tree label;
5327 gimple stmt;
5328
5329 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5330 {
5331 stmt = gsi_stmt (i);
5332 if (gimple_code (stmt) != GIMPLE_LABEL)
5333 break;
5334 label = gimple_label_label (stmt);
5335 if (!DECL_NONLOCAL (label))
5336 {
5337 if (!first)
5338 gsi_move_before (&i, &s);
5339 return label;
5340 }
5341 }
5342
5343 label = create_artificial_label (UNKNOWN_LOCATION);
5344 stmt = gimple_build_label (label);
5345 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5346 return label;
5347 }
5348
5349
5350 /* Attempt to perform edge redirection by replacing a possibly complex
5351 jump instruction by a goto or by removing the jump completely.
5352 This can apply only if all edges now point to the same block. The
5353 parameters and return values are equivalent to
5354 redirect_edge_and_branch. */
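/* For illustration: if SRC ends in "if (cond) goto L1; else goto L2;"
and both L1 and L2 now start TARGET, the GIMPLE_COND is removed and
E is turned into a plain fallthru edge to TARGET. */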
5355
5356 static edge
5357 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5358 {
5359 basic_block src = e->src;
5360 gimple_stmt_iterator i;
5361 gimple stmt;
5362
5363 /* We can replace or remove a complex jump only when we have exactly
5364 two edges. */
5365 if (EDGE_COUNT (src->succs) != 2
5366 /* Verify that all targets will be TARGET. Specifically, the
5367 edge that is not E must also go to TARGET. */
5368 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5369 return NULL;
5370
5371 i = gsi_last_bb (src);
5372 if (gsi_end_p (i))
5373 return NULL;
5374
5375 stmt = gsi_stmt (i);
5376
5377 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5378 {
5379 gsi_remove (&i, true);
5380 e = ssa_redirect_edge (e, target);
5381 e->flags = EDGE_FALLTHRU;
5382 return e;
5383 }
5384
5385 return NULL;
5386 }
5387
5388
5389 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5390 edge representing the redirected branch. */
5391
5392 static edge
5393 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5394 {
5395 basic_block bb = e->src;
5396 gimple_stmt_iterator gsi;
5397 edge ret;
5398 gimple stmt;
5399
5400 if (e->flags & EDGE_ABNORMAL)
5401 return NULL;
5402
5403 if (e->dest == dest)
5404 return NULL;
5405
5406 if (e->flags & EDGE_EH)
5407 return redirect_eh_edge (e, dest);
5408
5409 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5410 {
5411 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5412 if (ret)
5413 return ret;
5414 }
5415
5416 gsi = gsi_last_bb (bb);
5417 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5418
5419 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5420 {
5421 case GIMPLE_COND:
5422 /* For COND_EXPR, we only need to redirect the edge. */
5423 break;
5424
5425 case GIMPLE_GOTO:
5426 /* No non-abnormal edges should lead from a non-simple goto, and
5427 simple ones should be represented implicitly. */
5428 gcc_unreachable ();
5429
5430 case GIMPLE_SWITCH:
5431 {
5432 tree label = gimple_block_label (dest);
5433 tree cases = get_cases_for_edge (e, stmt);
5434
5435 /* If we have a list of cases associated with E, then use it
5436 as it's a lot faster than walking the entire case vector. */
5437 if (cases)
5438 {
5439 edge e2 = find_edge (e->src, dest);
5440 tree last, first;
5441
5442 first = cases;
5443 while (cases)
5444 {
5445 last = cases;
5446 CASE_LABEL (cases) = label;
5447 cases = CASE_CHAIN (cases);
5448 }
5449
5450 /* If there was already an edge in the CFG, then we need
5451 to move all the cases associated with E to E2. */
5452 if (e2)
5453 {
5454 tree cases2 = get_cases_for_edge (e2, stmt);
5455
5456 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5457 CASE_CHAIN (cases2) = first;
5458 }
5459 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5460 }
5461 else
5462 {
5463 size_t i, n = gimple_switch_num_labels (stmt);
5464
5465 for (i = 0; i < n; i++)
5466 {
5467 tree elt = gimple_switch_label (stmt, i);
5468 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5469 CASE_LABEL (elt) = label;
5470 }
5471 }
5472 }
5473 break;
5474
5475 case GIMPLE_ASM:
5476 {
5477 int i, n = gimple_asm_nlabels (stmt);
5478 tree label = NULL;
5479
5480 for (i = 0; i < n; ++i)
5481 {
5482 tree cons = gimple_asm_label_op (stmt, i);
5483 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5484 {
5485 if (!label)
5486 label = gimple_block_label (dest);
5487 TREE_VALUE (cons) = label;
5488 }
5489 }
5490
5491 /* If we didn't find any label matching the former edge in the
5492 asm labels, we must be redirecting the fallthrough
5493 edge. */
5494 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5495 }
5496 break;
5497
5498 case GIMPLE_RETURN:
5499 gsi_remove (&gsi, true);
5500 e->flags |= EDGE_FALLTHRU;
5501 break;
5502
5503 case GIMPLE_OMP_RETURN:
5504 case GIMPLE_OMP_CONTINUE:
5505 case GIMPLE_OMP_SECTIONS_SWITCH:
5506 case GIMPLE_OMP_FOR:
5507 /* The edges from OMP constructs can be simply redirected. */
5508 break;
5509
5510 case GIMPLE_EH_DISPATCH:
5511 if (!(e->flags & EDGE_FALLTHRU))
5512 redirect_eh_dispatch_edge (stmt, e, dest);
5513 break;
5514
5515 case GIMPLE_TRANSACTION:
5516 /* The ABORT edge has a stored label associated with it, otherwise
5517 the edges are simply redirectable. */
5518 if (e->flags == 0)
5519 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5520 break;
5521
5522 default:
5523 /* Otherwise it must be a fallthru edge, and we don't need to
5524 do anything besides redirecting it. */
5525 gcc_assert (e->flags & EDGE_FALLTHRU);
5526 break;
5527 }
5528
5529 /* Update/insert PHI nodes as necessary. */
5530
5531 /* Now update the edges in the CFG. */
5532 e = ssa_redirect_edge (e, dest);
5533
5534 return e;
5535 }
5536
5537 /* Returns true if it is possible to remove edge E by redirecting
5538 it to the destination of the other edge from E->src. */
5539
5540 static bool
5541 gimple_can_remove_branch_p (const_edge e)
5542 {
5543 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5544 return false;
5545
5546 return true;
5547 }
5548
5549 /* Simple wrapper, as we can always redirect fallthru edges. */
5550
5551 static basic_block
5552 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5553 {
5554 e = gimple_redirect_edge_and_branch (e, dest);
5555 gcc_assert (e);
5556
5557 return NULL;
5558 }
5559
5560
5561 /* Splits basic block BB after statement STMT (but at least after the
5562 labels). If STMT is NULL, BB is split just after the labels. */
5563
5564 static basic_block
5565 gimple_split_block (basic_block bb, void *stmt)
5566 {
5567 gimple_stmt_iterator gsi;
5568 gimple_stmt_iterator gsi_tgt;
5569 gimple act;
5570 gimple_seq list;
5571 basic_block new_bb;
5572 edge e;
5573 edge_iterator ei;
5574
5575 new_bb = create_empty_bb (bb);
5576
5577 /* Redirect the outgoing edges. */
5578 new_bb->succs = bb->succs;
5579 bb->succs = NULL;
5580 FOR_EACH_EDGE (e, ei, new_bb->succs)
5581 e->src = new_bb;
5582
5583 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5584 stmt = NULL;
5585
5586 /* Move everything from GSI to the new basic block. */
5587 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5588 {
5589 act = gsi_stmt (gsi);
5590 if (gimple_code (act) == GIMPLE_LABEL)
5591 continue;
5592
5593 if (!stmt)
5594 break;
5595
5596 if (stmt == act)
5597 {
5598 gsi_next (&gsi);
5599 break;
5600 }
5601 }
5602
5603 if (gsi_end_p (gsi))
5604 return new_bb;
5605
5606 /* Split the statement list - avoid re-creating new containers as this
5607 brings ugly quadratic memory consumption in the inliner.
5608 (We are still quadratic since we need to update stmt BB pointers,
5609 sadly.) */
5610 gsi_split_seq_before (&gsi, &list);
5611 set_bb_seq (new_bb, list);
5612 for (gsi_tgt = gsi_start (list);
5613 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5614 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5615
5616 return new_bb;
5617 }
5618
5619
5620 /* Moves basic block BB after block AFTER. */
5621
5622 static bool
5623 gimple_move_block_after (basic_block bb, basic_block after)
5624 {
5625 if (bb->prev_bb == after)
5626 return true;
5627
5628 unlink_block (bb);
5629 link_block (bb, after);
5630
5631 return true;
5632 }
5633
5634
5635 /* Return TRUE if block BB has no executable statements, otherwise return
5636 FALSE. */
5637
5638 static bool
5639 gimple_empty_block_p (basic_block bb)
5640 {
5641 /* BB must have no executable statements. */
5642 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5643 if (phi_nodes (bb))
5644 return false;
5645 if (gsi_end_p (gsi))
5646 return true;
5647 if (is_gimple_debug (gsi_stmt (gsi)))
5648 gsi_next_nondebug (&gsi);
5649 return gsi_end_p (gsi);
5650 }
5651
5652
5653 /* Split a basic block if it ends with a conditional branch and if the
5654 other part of the block is not empty. */
5655
5656 static basic_block
5657 gimple_split_block_before_cond_jump (basic_block bb)
5658 {
5659 gimple last, split_point;
5660 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5661 if (gsi_end_p (gsi))
5662 return NULL;
5663 last = gsi_stmt (gsi);
5664 if (gimple_code (last) != GIMPLE_COND
5665 && gimple_code (last) != GIMPLE_SWITCH)
5666 return NULL;
5667 gsi_prev_nondebug (&gsi);
5668 split_point = gsi_stmt (gsi);
5669 return split_block (bb, split_point)->dest;
5670 }
5671
5672
5673 /* Return true if basic_block can be duplicated. */
5674
5675 static bool
5676 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5677 {
5678 return true;
5679 }
5680
5681 /* Create a duplicate of the basic block BB. NOTE: This does not
5682 preserve SSA form. */
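/* For illustration (a sketch with made-up SSA names): duplicating a
block that contains
a_2 = b_1 + 1;
emits the copy a_3 = b_1 + 1 in the new block and records the
replacement mapping a_2 -> a_3 via create_new_def_for, so callers
are expected to trigger an SSA update afterwards. */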
5683
5684 static basic_block
5685 gimple_duplicate_bb (basic_block bb)
5686 {
5687 basic_block new_bb;
5688 gimple_stmt_iterator gsi, gsi_tgt;
5689 gimple_seq phis = phi_nodes (bb);
5690 gimple phi, stmt, copy;
5691
5692 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5693
5694 /* Copy the PHI nodes. We ignore PHI node arguments here because
5695 the incoming edges have not been set up yet. */
5696 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5697 {
5698 phi = gsi_stmt (gsi);
5699 copy = create_phi_node (NULL_TREE, new_bb);
5700 create_new_def_for (gimple_phi_result (phi), copy,
5701 gimple_phi_result_ptr (copy));
5702 gimple_set_uid (copy, gimple_uid (phi));
5703 }
5704
5705 gsi_tgt = gsi_start_bb (new_bb);
5706 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5707 {
5708 def_operand_p def_p;
5709 ssa_op_iter op_iter;
5710 tree lhs;
5711
5712 stmt = gsi_stmt (gsi);
5713 if (gimple_code (stmt) == GIMPLE_LABEL)
5714 continue;
5715
5716 /* Don't duplicate label debug stmts. */
5717 if (gimple_debug_bind_p (stmt)
5718 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5719 == LABEL_DECL)
5720 continue;
5721
5722 /* Create a new copy of STMT and duplicate STMT's virtual
5723 operands. */
5724 copy = gimple_copy (stmt);
5725 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5726
5727 maybe_duplicate_eh_stmt (copy, stmt);
5728 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5729
5730 /* When copying around a stmt writing into a local non-user
5731 aggregate, make sure it won't share stack slot with other
5732 vars. */
5733 lhs = gimple_get_lhs (stmt);
5734 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5735 {
5736 tree base = get_base_address (lhs);
5737 if (base
5738 && (TREE_CODE (base) == VAR_DECL
5739 || TREE_CODE (base) == RESULT_DECL)
5740 && DECL_IGNORED_P (base)
5741 && !TREE_STATIC (base)
5742 && !DECL_EXTERNAL (base)
5743 && (TREE_CODE (base) != VAR_DECL
5744 || !DECL_HAS_VALUE_EXPR_P (base)))
5745 DECL_NONSHAREABLE (base) = 1;
5746 }
5747
5748 /* Create new names for all the definitions created by COPY and
5749 add replacement mappings for each new name. */
5750 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5751 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5752 }
5753
5754 return new_bb;
5755 }
5756
5757 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5758
5759 static void
5760 add_phi_args_after_copy_edge (edge e_copy)
5761 {
5762 basic_block bb, bb_copy = e_copy->src, dest;
5763 edge e;
5764 edge_iterator ei;
5765 gimple phi, phi_copy;
5766 tree def;
5767 gimple_stmt_iterator psi, psi_copy;
5768
5769 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5770 return;
5771
5772 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5773
5774 if (e_copy->dest->flags & BB_DUPLICATED)
5775 dest = get_bb_original (e_copy->dest);
5776 else
5777 dest = e_copy->dest;
5778
5779 e = find_edge (bb, dest);
5780 if (!e)
5781 {
5782 /* During loop unrolling the target of the latch edge is copied.
5783 In this case we are not looking for the edge to DEST, but for the
5784 edge to the duplicated block whose original was DEST. */
5785 FOR_EACH_EDGE (e, ei, bb->succs)
5786 {
5787 if ((e->dest->flags & BB_DUPLICATED)
5788 && get_bb_original (e->dest) == dest)
5789 break;
5790 }
5791
5792 gcc_assert (e != NULL);
5793 }
5794
5795 for (psi = gsi_start_phis (e->dest),
5796 psi_copy = gsi_start_phis (e_copy->dest);
5797 !gsi_end_p (psi);
5798 gsi_next (&psi), gsi_next (&psi_copy))
5799 {
5800 phi = gsi_stmt (psi);
5801 phi_copy = gsi_stmt (psi_copy);
5802 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5803 add_phi_arg (phi_copy, def, e_copy,
5804 gimple_phi_arg_location_from_edge (phi, e));
5805 }
5806 }
5807
5808
5809 /* Basic block BB_COPY was created by code duplication. Add phi node
5810 arguments for edges going out of BB_COPY. The blocks that were
5811 duplicated have BB_DUPLICATED set. */
5812
5813 void
5814 add_phi_args_after_copy_bb (basic_block bb_copy)
5815 {
5816 edge e_copy;
5817 edge_iterator ei;
5818
5819 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5820 {
5821 add_phi_args_after_copy_edge (e_copy);
5822 }
5823 }
5824
5825 /* Blocks in REGION_COPY array of length N_REGION were created by
5826 duplication of basic blocks. Add phi node arguments for edges
5827 going from these blocks. If E_COPY is not NULL, also add
5828 phi node arguments for its destination. */
5829
5830 void
5831 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5832 edge e_copy)
5833 {
5834 unsigned i;
5835
5836 for (i = 0; i < n_region; i++)
5837 region_copy[i]->flags |= BB_DUPLICATED;
5838
5839 for (i = 0; i < n_region; i++)
5840 add_phi_args_after_copy_bb (region_copy[i]);
5841 if (e_copy)
5842 add_phi_args_after_copy_edge (e_copy);
5843
5844 for (i = 0; i < n_region; i++)
5845 region_copy[i]->flags &= ~BB_DUPLICATED;
5846 }
5847
5848 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5849 important exit edge EXIT. By important we mean that no SSA name defined
5850 inside region is live over the other exit edges of the region. All entry
5851 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5852 to the duplicate of the region. Dominance and loop information is
5853 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5854 UPDATE_DOMINANCE is false then we assume that the caller will update the
5855 dominance information after calling this function. The new basic
5856 blocks are stored to REGION_COPY in the same order as they had in REGION,
5857 provided that REGION_COPY is not NULL.
5858 The function returns false if it is unable to copy the region,
5859 true otherwise. */
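/* For the primary use, loop header copying, the effect is roughly:

while (cond)
body;

is transformed to

if (cond)
{
body;
while (cond)
body;
}
*/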
5860
5861 bool
5862 gimple_duplicate_sese_region (edge entry, edge exit,
5863 basic_block *region, unsigned n_region,
5864 basic_block *region_copy,
5865 bool update_dominance)
5866 {
5867 unsigned i;
5868 bool free_region_copy = false, copying_header = false;
5869 struct loop *loop = entry->dest->loop_father;
5870 edge exit_copy;
5871 vec<basic_block> doms;
5872 edge redirected;
5873 int total_freq = 0, entry_freq = 0;
5874 gcov_type total_count = 0, entry_count = 0;
5875
5876 if (!can_copy_bbs_p (region, n_region))
5877 return false;
5878
5879 /* Some sanity checking. Note that we do not check for all possible
5880 misuses of this function. I.e. if you ask to copy something weird,
5881 it will work, but the state of the structures probably will not be
5882 correct. */
5883 for (i = 0; i < n_region; i++)
5884 {
5885 /* We do not handle subloops, i.e. all the blocks must belong to the
5886 same loop. */
5887 if (region[i]->loop_father != loop)
5888 return false;
5889
5890 if (region[i] != entry->dest
5891 && region[i] == loop->header)
5892 return false;
5893 }
5894
5895 /* In case the function is used for loop header copying (which is the primary
5896 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5897 if (loop->header == entry->dest)
5898 {
5899 copying_header = true;
5900
5901 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5902 return false;
5903
5904 for (i = 0; i < n_region; i++)
5905 if (region[i] != exit->src
5906 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5907 return false;
5908 }
5909
5910 initialize_original_copy_tables ();
5911
5912 if (copying_header)
5913 set_loop_copy (loop, loop_outer (loop));
5914 else
5915 set_loop_copy (loop, loop);
5916
5917 if (!region_copy)
5918 {
5919 region_copy = XNEWVEC (basic_block, n_region);
5920 free_region_copy = true;
5921 }
5922
5923 /* Record blocks outside the region that are dominated by something
5924 inside. */
5925 if (update_dominance)
5926 {
5927 doms.create (0);
5928 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5929 }
5930
5931 if (entry->dest->count)
5932 {
5933 total_count = entry->dest->count;
5934 entry_count = entry->count;
5935 /* Fix up corner cases, to avoid division by zero or creation of negative
5936 frequencies. */
5937 if (entry_count > total_count)
5938 entry_count = total_count;
5939 }
5940 else
5941 {
5942 total_freq = entry->dest->frequency;
5943 entry_freq = EDGE_FREQUENCY (entry);
5944 /* Fix up corner cases, to avoid division by zero or creation of negative
5945 frequencies. */
5946 if (total_freq == 0)
5947 total_freq = 1;
5948 else if (entry_freq > total_freq)
5949 entry_freq = total_freq;
5950 }
5951
5952 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5953 split_edge_bb_loc (entry), update_dominance);
5954 if (total_count)
5955 {
5956 scale_bbs_frequencies_gcov_type (region, n_region,
5957 total_count - entry_count,
5958 total_count);
5959 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5960 total_count);
5961 }
5962 else
5963 {
5964 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5965 total_freq);
5966 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5967 }
5968
5969 if (copying_header)
5970 {
5971 loop->header = exit->dest;
5972 loop->latch = exit->src;
5973 }
5974
5975 /* Redirect the entry and add the phi node arguments. */
5976 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5977 gcc_assert (redirected != NULL);
5978 flush_pending_stmts (entry);
5979
5980 /* Concerning updating of dominators: We must recount dominators
5981 for entry block and its copy. Anything that is outside of the
5982 region, but was dominated by something inside needs recounting as
5983 well. */
5984 if (update_dominance)
5985 {
5986 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5987 doms.safe_push (get_bb_original (entry->dest));
5988 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5989 doms.release ();
5990 }
5991
5992 /* Add the other PHI node arguments. */
5993 add_phi_args_after_copy (region_copy, n_region, NULL);
5994
5995 if (free_region_copy)
5996 free (region_copy);
5997
5998 free_original_copy_tables ();
5999 return true;
6000 }
6001
6002 /* Checks if BB is part of the region defined by N_REGION BBS. */
6003 static bool
6004 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6005 {
6006 unsigned int n;
6007
6008 for (n = 0; n < n_region; n++)
6009 {
6010 if (bb == bbs[n])
6011 return true;
6012 }
6013 return false;
6014 }
6015
6016 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6017 are stored to REGION_COPY in the same order as they appear
6018 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6019 the region, EXIT an exit from it. The condition guarding EXIT
6020 is moved to ENTRY. Returns true if duplication succeeds, false
6021 otherwise.
6022
6023 For example,
6024
6025 some_code;
6026 if (cond)
6027 A;
6028 else
6029 B;
6030
6031 is transformed to
6032
6033 if (cond)
6034 {
6035 some_code;
6036 A;
6037 }
6038 else
6039 {
6040 some_code;
6041 B;
6042 }
6043 */
6044
6045 bool
6046 gimple_duplicate_sese_tail (edge entry, edge exit,
6047 basic_block *region, unsigned n_region,
6048 basic_block *region_copy)
6049 {
6050 unsigned i;
6051 bool free_region_copy = false;
6052 struct loop *loop = exit->dest->loop_father;
6053 struct loop *orig_loop = entry->dest->loop_father;
6054 basic_block switch_bb, entry_bb, nentry_bb;
6055 vec<basic_block> doms;
6056 int total_freq = 0, exit_freq = 0;
6057 gcov_type total_count = 0, exit_count = 0;
6058 edge exits[2], nexits[2], e;
6059 gimple_stmt_iterator gsi;
6060 gimple cond_stmt;
6061 edge sorig, snew;
6062 basic_block exit_bb;
6063 gimple_stmt_iterator psi;
6064 gimple phi;
6065 tree def;
6066 struct loop *target, *aloop, *cloop;
6067
6068 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6069 exits[0] = exit;
6070 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6071
6072 if (!can_copy_bbs_p (region, n_region))
6073 return false;
6074
6075 initialize_original_copy_tables ();
6076 set_loop_copy (orig_loop, loop);
6077
6078 target = loop;
6079 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6080 {
6081 if (bb_part_of_region_p (aloop->header, region, n_region))
6082 {
6083 cloop = duplicate_loop (aloop, target);
6084 duplicate_subloops (aloop, cloop);
6085 }
6086 }
6087
6088 if (!region_copy)
6089 {
6090 region_copy = XNEWVEC (basic_block, n_region);
6091 free_region_copy = true;
6092 }
6093
6094 gcc_assert (!need_ssa_update_p (cfun));
6095
6096 /* Record blocks outside the region that are dominated by something
6097 inside. */
6098 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6099
6100 if (exit->src->count)
6101 {
6102 total_count = exit->src->count;
6103 exit_count = exit->count;
6104 /* Fix up corner cases, to avoid division by zero or creation of negative
6105 frequencies. */
6106 if (exit_count > total_count)
6107 exit_count = total_count;
6108 }
6109 else
6110 {
6111 total_freq = exit->src->frequency;
6112 exit_freq = EDGE_FREQUENCY (exit);
6113 /* Fix up corner cases, to avoid division by zero or creation of negative
6114 frequencies. */
6115 if (total_freq == 0)
6116 total_freq = 1;
6117 if (exit_freq > total_freq)
6118 exit_freq = total_freq;
6119 }
6120
6121 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6122 split_edge_bb_loc (exit), true);
6123 if (total_count)
6124 {
6125 scale_bbs_frequencies_gcov_type (region, n_region,
6126 total_count - exit_count,
6127 total_count);
6128 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6129 total_count);
6130 }
6131 else
6132 {
6133 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6134 total_freq);
6135 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6136 }
6137
6138 /* Create the switch block, and put the exit condition to it. */
6139 entry_bb = entry->dest;
6140 nentry_bb = get_bb_copy (entry_bb);
6141 if (!last_stmt (entry->src)
6142 || !stmt_ends_bb_p (last_stmt (entry->src)))
6143 switch_bb = entry->src;
6144 else
6145 switch_bb = split_edge (entry);
6146 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6147
6148 gsi = gsi_last_bb (switch_bb);
6149 cond_stmt = last_stmt (exit->src);
6150 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6151 cond_stmt = gimple_copy (cond_stmt);
6152
6153 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6154
6155 sorig = single_succ_edge (switch_bb);
6156 sorig->flags = exits[1]->flags;
6157 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6158
6159 /* Register the new edge from SWITCH_BB in loop exit lists. */
6160 rescan_loop_exit (snew, true, false);
6161
6162 /* Add the PHI node arguments. */
6163 add_phi_args_after_copy (region_copy, n_region, snew);
6164
6165 /* Get rid of now superfluous conditions and associated edges (and phi node
6166 arguments). */
6167 exit_bb = exit->dest;
6168
6169 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6170 PENDING_STMT (e) = NULL;
6171
6172 /* The latch of ORIG_LOOP was copied, and so was the backedge
6173 to the original header. We redirect this backedge to EXIT_BB. */
6174 for (i = 0; i < n_region; i++)
6175 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6176 {
6177 gcc_assert (single_succ_edge (region_copy[i]));
6178 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6179 PENDING_STMT (e) = NULL;
6180 for (psi = gsi_start_phis (exit_bb);
6181 !gsi_end_p (psi);
6182 gsi_next (&psi))
6183 {
6184 phi = gsi_stmt (psi);
6185 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6186 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6187 }
6188 }
6189 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6190 PENDING_STMT (e) = NULL;
6191
6192 /* Anything that is outside of the region, but was dominated by something
6193 inside needs to update dominance info. */
6194 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6195 doms.release ();
6196 /* Update the SSA web. */
6197 update_ssa (TODO_update_ssa);
6198
6199 if (free_region_copy)
6200 free (region_copy);
6201
6202 free_original_copy_tables ();
6203 return true;
6204 }
6205
6206 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6207 adding blocks when the dominator traversal reaches EXIT. This
6208 function silently assumes that ENTRY strictly dominates EXIT. */
6209
6210 void
6211 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6212 vec<basic_block> *bbs_p)
6213 {
6214 basic_block son;
6215
6216 for (son = first_dom_son (CDI_DOMINATORS, entry);
6217 son;
6218 son = next_dom_son (CDI_DOMINATORS, son))
6219 {
6220 bbs_p->safe_push (son);
6221 if (son != exit)
6222 gather_blocks_in_sese_region (son, exit, bbs_p);
6223 }
6224 }
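
/* A minimal usage sketch (assuming dominance info is available and
   ENTRY_BB strictly dominates EXIT_BB); this mirrors the way
   move_sese_region_to_fn below collects its region:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ... use the blocks in BBS ...
     bbs.release ();

   ENTRY_BB must be pushed by hand because the dominator walk only
   collects its children.  */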
6225
6226 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6227 The duplicates are recorded in VARS_MAP. */
6228
6229 static void
6230 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6231 tree to_context)
6232 {
6233 tree t = *tp, new_t;
6234 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6235 void **loc;
6236
6237 if (DECL_CONTEXT (t) == to_context)
6238 return;
6239
6240 loc = pointer_map_contains (vars_map, t);
6241
6242 if (!loc)
6243 {
6244 loc = pointer_map_insert (vars_map, t);
6245
6246 if (SSA_VAR_P (t))
6247 {
6248 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6249 add_local_decl (f, new_t);
6250 }
6251 else
6252 {
6253 gcc_assert (TREE_CODE (t) == CONST_DECL);
6254 new_t = copy_node (t);
6255 }
6256 DECL_CONTEXT (new_t) = to_context;
6257
6258 *loc = new_t;
6259 }
6260 else
6261 new_t = (tree) *loc;
6262
6263 *tp = new_t;
6264 }
6265
6266
6267 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6268 VARS_MAP maps old ssa names and var_decls to the new ones. */
6269
6270 static tree
6271 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6272 tree to_context)
6273 {
6274 void **loc;
6275 tree new_name;
6276
6277 gcc_assert (!virtual_operand_p (name));
6278
6279 loc = pointer_map_contains (vars_map, name);
6280
6281 if (!loc)
6282 {
6283 tree decl = SSA_NAME_VAR (name);
6284 if (decl)
6285 {
6286 replace_by_duplicate_decl (&decl, vars_map, to_context);
6287 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6288 decl, SSA_NAME_DEF_STMT (name));
6289 if (SSA_NAME_IS_DEFAULT_DEF (name))
6290 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6291 decl, new_name);
6292 }
6293 else
6294 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6295 name, SSA_NAME_DEF_STMT (name));
6296
6297 loc = pointer_map_insert (vars_map, name);
6298 *loc = new_name;
6299 }
6300 else
6301 new_name = (tree) *loc;
6302
6303 return new_name;
6304 }
6305
6306 struct move_stmt_d
6307 {
6308 tree orig_block;
6309 tree new_block;
6310 tree from_context;
6311 tree to_context;
6312 struct pointer_map_t *vars_map;
6313 htab_t new_label_map;
6314 struct pointer_map_t *eh_map;
6315 bool remap_decls_p;
6316 };
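
/* A sketch of how a move_stmt_d is typically filled in before walking
   statements (this mirrors move_sese_region_to_fn below; the concrete
   values depend on the caller, and eh_map/new_label_map are set up
   similarly when EH regions or labels must be remapped):

     struct move_stmt_d d;
     memset (&d, 0, sizeof (d));
     d.orig_block = orig_block;
     d.new_block = DECL_INITIAL (dest_cfun->decl);
     d.from_context = cfun->decl;
     d.to_context = dest_cfun->decl;
     d.vars_map = vars_map;
     d.remap_decls_p = true;  */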
6317
6318 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6319 contained in *TP if it was previously set to ORIG_BLOCK, and change the
6320 DECL_CONTEXT of every local variable referenced in *TP. */
6321
6322 static tree
6323 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6324 {
6325 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6326 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6327 tree t = *tp;
6328
6329 if (EXPR_P (t))
6330 {
6331 tree block = TREE_BLOCK (t);
6332 if (block == p->orig_block
6333 || (p->orig_block == NULL_TREE
6334 && block != NULL_TREE))
6335 TREE_SET_BLOCK (t, p->new_block);
6336 #ifdef ENABLE_CHECKING
6337 else if (block != NULL_TREE)
6338 {
6339 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6340 block = BLOCK_SUPERCONTEXT (block);
6341 gcc_assert (block == p->orig_block);
6342 }
6343 #endif
6344 }
6345 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6346 {
6347 if (TREE_CODE (t) == SSA_NAME)
6348 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6349 else if (TREE_CODE (t) == LABEL_DECL)
6350 {
6351 if (p->new_label_map)
6352 {
6353 struct tree_map in, *out;
6354 in.base.from = t;
6355 out = (struct tree_map *)
6356 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6357 if (out)
6358 *tp = t = out->to;
6359 }
6360
6361 DECL_CONTEXT (t) = p->to_context;
6362 }
6363 else if (p->remap_decls_p)
6364 {
6365 /* Replace T with its duplicate. T should no longer appear in the
6366 parent function, so this looks wasteful; however, it may appear
6367 in referenced_vars, and more importantly, as virtual operands of
6368 statements, and in alias lists of other variables. It would be
6369 quite difficult to expunge it from all those places. ??? It might
6370 suffice to do this for addressable variables. */
6371 if ((TREE_CODE (t) == VAR_DECL
6372 && !is_global_var (t))
6373 || TREE_CODE (t) == CONST_DECL)
6374 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6375 }
6376 *walk_subtrees = 0;
6377 }
6378 else if (TYPE_P (t))
6379 *walk_subtrees = 0;
6380
6381 return NULL_TREE;
6382 }
6383
6384 /* Helper for move_stmt_r. Given an EH region number for the source
6385 function, map that to the duplicate EH region number in the dest. */
6386
6387 static int
6388 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6389 {
6390 eh_region old_r, new_r;
6391 void **slot;
6392
6393 old_r = get_eh_region_from_number (old_nr);
6394 slot = pointer_map_contains (p->eh_map, old_r);
6395 new_r = (eh_region) *slot;
6396
6397 return new_r->index;
6398 }
6399
6400 /* Similar, but operate on INTEGER_CSTs. */
6401
6402 static tree
6403 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6404 {
6405 int old_nr, new_nr;
6406
6407 old_nr = tree_to_shwi (old_t_nr);
6408 new_nr = move_stmt_eh_region_nr (old_nr, p);
6409
6410 return build_int_cst (integer_type_node, new_nr);
6411 }
6412
6413 /* Like move_stmt_op, but for gimple statements.
6414
6415 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6416 contained in the current statement in *GSI_P and change the
6417 DECL_CONTEXT of every local variable referenced in the current
6418 statement. */
6419
6420 static tree
6421 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6422 struct walk_stmt_info *wi)
6423 {
6424 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6425 gimple stmt = gsi_stmt (*gsi_p);
6426 tree block = gimple_block (stmt);
6427
6428 if (block == p->orig_block
6429 || (p->orig_block == NULL_TREE
6430 && block != NULL_TREE))
6431 gimple_set_block (stmt, p->new_block);
6432
6433 switch (gimple_code (stmt))
6434 {
6435 case GIMPLE_CALL:
6436 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6437 {
6438 tree r, fndecl = gimple_call_fndecl (stmt);
6439 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6440 switch (DECL_FUNCTION_CODE (fndecl))
6441 {
6442 case BUILT_IN_EH_COPY_VALUES:
6443 r = gimple_call_arg (stmt, 1);
6444 r = move_stmt_eh_region_tree_nr (r, p);
6445 gimple_call_set_arg (stmt, 1, r);
6446 /* FALLTHRU */
6447
6448 case BUILT_IN_EH_POINTER:
6449 case BUILT_IN_EH_FILTER:
6450 r = gimple_call_arg (stmt, 0);
6451 r = move_stmt_eh_region_tree_nr (r, p);
6452 gimple_call_set_arg (stmt, 0, r);
6453 break;
6454
6455 default:
6456 break;
6457 }
6458 }
6459 break;
6460
6461 case GIMPLE_RESX:
6462 {
6463 int r = gimple_resx_region (stmt);
6464 r = move_stmt_eh_region_nr (r, p);
6465 gimple_resx_set_region (stmt, r);
6466 }
6467 break;
6468
6469 case GIMPLE_EH_DISPATCH:
6470 {
6471 int r = gimple_eh_dispatch_region (stmt);
6472 r = move_stmt_eh_region_nr (r, p);
6473 gimple_eh_dispatch_set_region (stmt, r);
6474 }
6475 break;
6476
6477 case GIMPLE_OMP_RETURN:
6478 case GIMPLE_OMP_CONTINUE:
6479 break;
6480 default:
6481 if (is_gimple_omp (stmt))
6482 {
6483 /* Do not remap variables inside OMP directives. Variables
6484 referenced in clauses and directive header belong to the
6485 parent function and should not be moved into the child
6486 function. */
6487 bool save_remap_decls_p = p->remap_decls_p;
6488 p->remap_decls_p = false;
6489 *handled_ops_p = true;
6490
6491 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6492 move_stmt_op, wi);
6493
6494 p->remap_decls_p = save_remap_decls_p;
6495 }
6496 break;
6497 }
6498
6499 return NULL_TREE;
6500 }
6501
6502 /* Move basic block BB from function CFUN to function DEST_FN. The
6503 block is moved out of the original linked list and placed after
6504 block AFTER in the new list. Also, the block is removed from the
6505 original array of blocks and placed in DEST_FN's array of blocks.
6506 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6507 updated to reflect the moved edges.
6508
6509 The local variables are remapped to new instances, VARS_MAP is used
6510 to record the mapping. */
6511
6512 static void
6513 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6514 basic_block after, bool update_edge_count_p,
6515 struct move_stmt_d *d)
6516 {
6517 struct control_flow_graph *cfg;
6518 edge_iterator ei;
6519 edge e;
6520 gimple_stmt_iterator si;
6521 unsigned old_len, new_len;
6522
6523 /* Remove BB from dominance structures. */
6524 delete_from_dominance_info (CDI_DOMINATORS, bb);
6525
6526 /* Move BB from its current loop to the copy in the new function. */
6527 if (current_loops)
6528 {
6529 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6530 if (new_loop)
6531 bb->loop_father = new_loop;
6532 }
6533
6534 /* Link BB to the new linked list. */
6535 move_block_after (bb, after);
6536
6537 /* Update the edge count in the corresponding flowgraphs. */
6538 if (update_edge_count_p)
6539 FOR_EACH_EDGE (e, ei, bb->succs)
6540 {
6541 cfun->cfg->x_n_edges--;
6542 dest_cfun->cfg->x_n_edges++;
6543 }
6544
6545 /* Remove BB from the original basic block array. */
6546 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6547 cfun->cfg->x_n_basic_blocks--;
6548
6549 /* Grow DEST_CFUN's basic block array if needed. */
6550 cfg = dest_cfun->cfg;
6551 cfg->x_n_basic_blocks++;
6552 if (bb->index >= cfg->x_last_basic_block)
6553 cfg->x_last_basic_block = bb->index + 1;
6554
6555 old_len = vec_safe_length (cfg->x_basic_block_info);
6556 if ((unsigned) cfg->x_last_basic_block >= old_len)
6557 {
6558 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6559 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6560 }
6561
6562 (*cfg->x_basic_block_info)[bb->index] = bb;
6563
6564 /* Remap the variables in phi nodes. */
6565 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6566 {
6567 gimple phi = gsi_stmt (si);
6568 use_operand_p use;
6569 tree op = PHI_RESULT (phi);
6570 ssa_op_iter oi;
6571 unsigned i;
6572
6573 if (virtual_operand_p (op))
6574 {
6575 /* Remove the phi nodes for virtual operands (alias analysis will be
6576 run for the new function, anyway). */
6577 remove_phi_node (&si, true);
6578 continue;
6579 }
6580
6581 SET_PHI_RESULT (phi,
6582 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6583 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6584 {
6585 op = USE_FROM_PTR (use);
6586 if (TREE_CODE (op) == SSA_NAME)
6587 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6588 }
6589
6590 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6591 {
6592 location_t locus = gimple_phi_arg_location (phi, i);
6593 tree block = LOCATION_BLOCK (locus);
6594
6595 if (locus == UNKNOWN_LOCATION)
6596 continue;
6597 if (d->orig_block == NULL_TREE || block == d->orig_block)
6598 {
6599 if (d->new_block == NULL_TREE)
6600 locus = LOCATION_LOCUS (locus);
6601 else
6602 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6603 gimple_phi_arg_set_location (phi, i, locus);
6604 }
6605 }
6606
6607 gsi_next (&si);
6608 }
6609
6610 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6611 {
6612 gimple stmt = gsi_stmt (si);
6613 struct walk_stmt_info wi;
6614
6615 memset (&wi, 0, sizeof (wi));
6616 wi.info = d;
6617 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6618
6619 if (gimple_code (stmt) == GIMPLE_LABEL)
6620 {
6621 tree label = gimple_label_label (stmt);
6622 int uid = LABEL_DECL_UID (label);
6623
6624 gcc_assert (uid > -1);
6625
6626 old_len = vec_safe_length (cfg->x_label_to_block_map);
6627 if (old_len <= (unsigned) uid)
6628 {
6629 new_len = 3 * uid / 2 + 1;
6630 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6631 }
6632
6633 (*cfg->x_label_to_block_map)[uid] = bb;
6634 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6635
6636 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6637
6638 if (uid >= dest_cfun->cfg->last_label_uid)
6639 dest_cfun->cfg->last_label_uid = uid + 1;
6640 }
6641
6642 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6643 remove_stmt_from_eh_lp_fn (cfun, stmt);
6644
6645 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6646 gimple_remove_stmt_histograms (cfun, stmt);
6647
6648 /* We cannot leave any operands allocated from the operand caches of
6649 the current function. */
6650 free_stmt_operands (cfun, stmt);
6651 push_cfun (dest_cfun);
6652 update_stmt (stmt);
6653 pop_cfun ();
6654 }
6655
6656 FOR_EACH_EDGE (e, ei, bb->succs)
6657 if (e->goto_locus != UNKNOWN_LOCATION)
6658 {
6659 tree block = LOCATION_BLOCK (e->goto_locus);
6660 if (d->orig_block == NULL_TREE
6661 || block == d->orig_block)
6662 e->goto_locus = d->new_block ?
6663 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6664 LOCATION_LOCUS (e->goto_locus);
6665 }
6666 }
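
/* Typical driver loop (taken from move_sese_region_to_fn below):
   blocks are moved in region order, each placed after the one moved
   before it, starting right after DEST_CFUN's entry block:

     after = dest_cfun->cfg->x_entry_block_ptr;
     FOR_EACH_VEC_ELT (bbs, i, bb)
       {
         move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
         after = bb;
       }  */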
6667
6668 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6669 the outermost EH region. Use REGION as the incoming base EH region. */
6670
6671 static eh_region
6672 find_outermost_region_in_block (struct function *src_cfun,
6673 basic_block bb, eh_region region)
6674 {
6675 gimple_stmt_iterator si;
6676
6677 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6678 {
6679 gimple stmt = gsi_stmt (si);
6680 eh_region stmt_region;
6681 int lp_nr;
6682
6683 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6684 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6685 if (stmt_region)
6686 {
6687 if (region == NULL)
6688 region = stmt_region;
6689 else if (stmt_region != region)
6690 {
6691 region = eh_region_outermost (src_cfun, stmt_region, region);
6692 gcc_assert (region != NULL);
6693 }
6694 }
6695 }
6696
6697 return region;
6698 }
6699
6700 static tree
6701 new_label_mapper (tree decl, void *data)
6702 {
6703 htab_t hash = (htab_t) data;
6704 struct tree_map *m;
6705 void **slot;
6706
6707 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6708
6709 m = XNEW (struct tree_map);
6710 m->hash = DECL_UID (decl);
6711 m->base.from = decl;
6712 m->to = create_artificial_label (UNKNOWN_LOCATION);
6713 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6714 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6715 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6716
6717 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6718 gcc_assert (*slot == NULL);
6719
6720 *slot = m;
6721
6722 return m->to;
6723 }
6724
6725 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6726 subblocks. */
6727
6728 static void
6729 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6730 tree to_context)
6731 {
6732 tree *tp, t;
6733
6734 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6735 {
6736 t = *tp;
6737 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6738 continue;
6739 replace_by_duplicate_decl (&t, vars_map, to_context);
6740 if (t != *tp)
6741 {
6742 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6743 {
6744 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6745 DECL_HAS_VALUE_EXPR_P (t) = 1;
6746 }
6747 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6748 *tp = t;
6749 }
6750 }
6751
6752 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6753 replace_block_vars_by_duplicates (block, vars_map, to_context);
6754 }
6755
6756 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6757 from FN1 to FN2. */
6758
6759 static void
6760 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6761 struct loop *loop)
6762 {
6763 /* Discard it from the old loop array. */
6764 (*get_loops (fn1))[loop->num] = NULL;
6765
6766 /* Place it in the new loop array, assigning it a new number. */
6767 loop->num = number_of_loops (fn2);
6768 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6769
6770 /* Recurse to children. */
6771 for (loop = loop->inner; loop; loop = loop->next)
6772 fixup_loop_arrays_after_move (fn1, fn2, loop);
6773 }
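
/* For example, if FN2 currently has loops numbered 0 .. N-1
   (i.e. number_of_loops (fn2) == N), the moved loop receives number N
   and is appended to FN2's loop array; its old slot in FN1's array is
   left NULL rather than compacted.  */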
6774
6775 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6776 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6777 single basic block in the original CFG and the new basic block is
6778 returned. DEST_CFUN must not have a CFG yet.
6779
6780 Note that the region need not be a pure SESE region. Blocks inside
6781 the region may contain calls to abort/exit. The only restriction
6782 is that ENTRY_BB should be the only entry point and it must
6783 dominate EXIT_BB.
6784
6785 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6786 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6787 to the new function.
6788
6789 All local variables referenced in the region are assumed to be in
6790 the corresponding BLOCK_VARS and unexpanded variable lists
6791 associated with DEST_CFUN. */
6792
6793 basic_block
6794 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6795 basic_block exit_bb, tree orig_block)
6796 {
6797 vec<basic_block> bbs, dom_bbs;
6798 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6799 basic_block after, bb, *entry_pred, *exit_succ, abb;
6800 struct function *saved_cfun = cfun;
6801 int *entry_flag, *exit_flag;
6802 unsigned *entry_prob, *exit_prob;
6803 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6804 edge e;
6805 edge_iterator ei;
6806 htab_t new_label_map;
6807 struct pointer_map_t *vars_map, *eh_map;
6808 struct loop *loop = entry_bb->loop_father;
6809 struct loop *loop0 = get_loop (saved_cfun, 0);
6810 struct move_stmt_d d;
6811
6812 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6813 region. */
6814 gcc_assert (entry_bb != exit_bb
6815 && (!exit_bb
6816 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6817
6818 /* Collect all the blocks in the region. Manually add ENTRY_BB
6819 because it won't be added by dfs_enumerate_from. */
6820 bbs.create (0);
6821 bbs.safe_push (entry_bb);
6822 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6823
6824 /* The blocks that used to be dominated by something in BBS will now be
6825 dominated by the new block. */
6826 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6827 bbs.address (),
6828 bbs.length ());
6829
6830 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6831 the predecessor edges to ENTRY_BB and the successor edges to
6832 EXIT_BB so that we can re-attach them to the new basic block that
6833 will replace the region. */
6834 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6835 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6836 entry_flag = XNEWVEC (int, num_entry_edges);
6837 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6838 i = 0;
6839 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6840 {
6841 entry_prob[i] = e->probability;
6842 entry_flag[i] = e->flags;
6843 entry_pred[i++] = e->src;
6844 remove_edge (e);
6845 }
6846
6847 if (exit_bb)
6848 {
6849 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6850 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6851 exit_flag = XNEWVEC (int, num_exit_edges);
6852 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6853 i = 0;
6854 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6855 {
6856 exit_prob[i] = e->probability;
6857 exit_flag[i] = e->flags;
6858 exit_succ[i++] = e->dest;
6859 remove_edge (e);
6860 }
6861 }
6862 else
6863 {
6864 num_exit_edges = 0;
6865 exit_succ = NULL;
6866 exit_flag = NULL;
6867 exit_prob = NULL;
6868 }
6869
6870 /* Switch context to the child function to initialize DEST_FN's CFG. */
6871 gcc_assert (dest_cfun->cfg == NULL);
6872 push_cfun (dest_cfun);
6873
6874 init_empty_tree_cfg ();
6875
6876 /* Initialize EH information for the new function. */
6877 eh_map = NULL;
6878 new_label_map = NULL;
6879 if (saved_cfun->eh)
6880 {
6881 eh_region region = NULL;
6882
6883 FOR_EACH_VEC_ELT (bbs, i, bb)
6884 region = find_outermost_region_in_block (saved_cfun, bb, region);
6885
6886 init_eh_for_function ();
6887 if (region != NULL)
6888 {
6889 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6890 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6891 new_label_mapper, new_label_map);
6892 }
6893 }
6894
6895 /* Initialize an empty loop tree. */
6896 struct loops *loops = ggc_alloc_cleared_loops ();
6897 init_loops_structure (dest_cfun, loops, 1);
6898 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6899 set_loops_for_fn (dest_cfun, loops);
6900
6901 /* Move the outlined loop tree part. */
6902 num_nodes = bbs.length ();
6903 FOR_EACH_VEC_ELT (bbs, i, bb)
6904 {
6905 if (bb->loop_father->header == bb)
6906 {
6907 struct loop *this_loop = bb->loop_father;
6908 struct loop *outer = loop_outer (this_loop);
6909 if (outer == loop
6910 /* If the SESE region contains some bbs ending with
6911 a noreturn call, those are considered to belong
6912 to the outermost loop in saved_cfun, rather than
6913 the entry_bb's loop_father. */
6914 || outer == loop0)
6915 {
6916 if (outer != loop)
6917 num_nodes -= this_loop->num_nodes;
6918 flow_loop_tree_node_remove (bb->loop_father);
6919 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6920 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6921 }
6922 }
6923 else if (bb->loop_father == loop0 && loop0 != loop)
6924 num_nodes--;
6925
6926 /* Remove loop exits from the outlined region. */
6927 if (loops_for_fn (saved_cfun)->exits)
6928 FOR_EACH_EDGE (e, ei, bb->succs)
6929 {
6930 void **slot = htab_find_slot_with_hash
6931 (loops_for_fn (saved_cfun)->exits, e,
6932 htab_hash_pointer (e), NO_INSERT);
6933 if (slot)
6934 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6935 }
6936 }
6937
6938
6939 /* Adjust the number of blocks in the tree root of the outlined part. */
6940 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6941
6942 /* Setup a mapping to be used by move_block_to_fn. */
6943 loop->aux = current_loops->tree_root;
6944 loop0->aux = current_loops->tree_root;
6945
6946 pop_cfun ();
6947
6948 /* Move blocks from BBS into DEST_CFUN. */
6949 gcc_assert (bbs.length () >= 2);
6950 after = dest_cfun->cfg->x_entry_block_ptr;
6951 vars_map = pointer_map_create ();
6952
6953 memset (&d, 0, sizeof (d));
6954 d.orig_block = orig_block;
6955 d.new_block = DECL_INITIAL (dest_cfun->decl);
6956 d.from_context = cfun->decl;
6957 d.to_context = dest_cfun->decl;
6958 d.vars_map = vars_map;
6959 d.new_label_map = new_label_map;
6960 d.eh_map = eh_map;
6961 d.remap_decls_p = true;
6962
6963 FOR_EACH_VEC_ELT (bbs, i, bb)
6964 {
6965 /* No need to update edge counts on the last block. It has
6966 already been updated earlier when we detached the region from
6967 the original CFG. */
6968 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6969 after = bb;
6970 }
6971
6972 loop->aux = NULL;
6973 loop0->aux = NULL;
6974 /* Loop sizes are no longer correct, fix them up. */
6975 loop->num_nodes -= num_nodes;
6976 for (struct loop *outer = loop_outer (loop);
6977 outer; outer = loop_outer (outer))
6978 outer->num_nodes -= num_nodes;
6979 loop0->num_nodes -= bbs.length () - num_nodes;
6980
6981 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
6982 {
6983 struct loop *aloop;
6984 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6985 if (aloop != NULL)
6986 {
6987 if (aloop->simduid)
6988 {
6989 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6990 d.to_context);
6991 dest_cfun->has_simduid_loops = true;
6992 }
6993 if (aloop->force_vectorize)
6994 dest_cfun->has_force_vectorize_loops = true;
6995 }
6996 }
6997
6998 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6999 if (orig_block)
7000 {
7001 tree block;
7002 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7003 == NULL_TREE);
7004 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7005 = BLOCK_SUBBLOCKS (orig_block);
7006 for (block = BLOCK_SUBBLOCKS (orig_block);
7007 block; block = BLOCK_CHAIN (block))
7008 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7009 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7010 }
7011
7012 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7013 vars_map, dest_cfun->decl);
7014
7015 if (new_label_map)
7016 htab_delete (new_label_map);
7017 if (eh_map)
7018 pointer_map_destroy (eh_map);
7019 pointer_map_destroy (vars_map);
7020
7021 /* Rewire the entry and exit blocks. The successor to the entry
7022 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7023 the child function. Similarly, the predecessor of DEST_FN's
7024 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7025 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7026 various CFG manipulation functions get to the right CFG.
7027
7028 FIXME, this is silly. The CFG ought to become a parameter to
7029 these helpers. */
7030 push_cfun (dest_cfun);
7031 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7032 if (exit_bb)
7033 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7034 pop_cfun ();
7035
7036 /* Back in the original function, the SESE region has disappeared,
7037 create a new basic block in its place. */
7038 bb = create_empty_bb (entry_pred[0]);
7039 if (current_loops)
7040 add_bb_to_loop (bb, loop);
7041 for (i = 0; i < num_entry_edges; i++)
7042 {
7043 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7044 e->probability = entry_prob[i];
7045 }
7046
7047 for (i = 0; i < num_exit_edges; i++)
7048 {
7049 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7050 e->probability = exit_prob[i];
7051 }
7052
7053 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7054 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7055 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7056 dom_bbs.release ();
7057
7058 if (exit_bb)
7059 {
7060 free (exit_prob);
7061 free (exit_flag);
7062 free (exit_succ);
7063 }
7064 free (entry_prob);
7065 free (entry_flag);
7066 free (entry_pred);
7067 bbs.release ();
7068
7069 return bb;
7070 }
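
/* A hedged usage sketch: given a child FUNCTION_DECL CHILD_FN whose
   struct function does not have a CFG yet, the region delimited by
   ENTRY_BB and EXIT_BB can be outlined with

     basic_block replacement_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 entry_bb, exit_bb, block);

   where BLOCK is the scope whose statements and subblocks should move
   into the child.  This is the pattern the OMP expansion code uses to
   outline parallel regions.  */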
7071
7072
7073 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7074 dumpfile.h). */
7075
7076 void
7077 dump_function_to_file (tree fndecl, FILE *file, int flags)
7078 {
7079 tree arg, var, old_current_fndecl = current_function_decl;
7080 struct function *dsf;
7081 bool ignore_topmost_bind = false, any_var = false;
7082 basic_block bb;
7083 tree chain;
7084 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7085 && decl_is_tm_clone (fndecl));
7086 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7087
7088 current_function_decl = fndecl;
7089 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7090
7091 arg = DECL_ARGUMENTS (fndecl);
7092 while (arg)
7093 {
7094 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7095 fprintf (file, " ");
7096 print_generic_expr (file, arg, dump_flags);
7097 if (flags & TDF_VERBOSE)
7098 print_node (file, "", arg, 4);
7099 if (DECL_CHAIN (arg))
7100 fprintf (file, ", ");
7101 arg = DECL_CHAIN (arg);
7102 }
7103 fprintf (file, ")\n");
7104
7105 if (flags & TDF_VERBOSE)
7106 print_node (file, "", fndecl, 2);
7107
7108 dsf = DECL_STRUCT_FUNCTION (fndecl);
7109 if (dsf && (flags & TDF_EH))
7110 dump_eh_tree (file, dsf);
7111
7112 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7113 {
7114 dump_node (fndecl, TDF_SLIM | flags, file);
7115 current_function_decl = old_current_fndecl;
7116 return;
7117 }
7118
7119 /* When GIMPLE is lowered, the variables are no longer available in
7120 BIND_EXPRs, so display them separately. */
7121 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7122 {
7123 unsigned ix;
7124 ignore_topmost_bind = true;
7125
7126 fprintf (file, "{\n");
7127 if (!vec_safe_is_empty (fun->local_decls))
7128 FOR_EACH_LOCAL_DECL (fun, ix, var)
7129 {
7130 print_generic_decl (file, var, flags);
7131 if (flags & TDF_VERBOSE)
7132 print_node (file, "", var, 4);
7133 fprintf (file, "\n");
7134
7135 any_var = true;
7136 }
7137 if (gimple_in_ssa_p (cfun))
7138 for (ix = 1; ix < num_ssa_names; ++ix)
7139 {
7140 tree name = ssa_name (ix);
7141 if (name && !SSA_NAME_VAR (name))
7142 {
7143 fprintf (file, " ");
7144 print_generic_expr (file, TREE_TYPE (name), flags);
7145 fprintf (file, " ");
7146 print_generic_expr (file, name, flags);
7147 fprintf (file, ";\n");
7148
7149 any_var = true;
7150 }
7151 }
7152 }
7153
7154 if (fun && fun->decl == fndecl
7155 && fun->cfg
7156 && basic_block_info_for_fn (fun))
7157 {
7158 /* If the CFG has been built, emit a CFG-based dump. */
7159 if (!ignore_topmost_bind)
7160 fprintf (file, "{\n");
7161
7162 if (any_var && n_basic_blocks_for_fn (fun))
7163 fprintf (file, "\n");
7164
7165 FOR_EACH_BB_FN (bb, fun)
7166 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7167
7168 fprintf (file, "}\n");
7169 }
7170 else if (DECL_SAVED_TREE (fndecl) == NULL)
7171 {
7172 /* The function is now in GIMPLE form but the CFG has not been
7173 built yet. Emit the single sequence of GIMPLE statements
7174 that make up its body. */
7175 gimple_seq body = gimple_body (fndecl);
7176
7177 if (gimple_seq_first_stmt (body)
7178 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7179 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7180 print_gimple_seq (file, body, 0, flags);
7181 else
7182 {
7183 if (!ignore_topmost_bind)
7184 fprintf (file, "{\n");
7185
7186 if (any_var)
7187 fprintf (file, "\n");
7188
7189 print_gimple_seq (file, body, 2, flags);
7190 fprintf (file, "}\n");
7191 }
7192 }
7193 else
7194 {
7195 int indent;
7196
7197 /* Make a tree based dump. */
7198 chain = DECL_SAVED_TREE (fndecl);
7199 if (chain && TREE_CODE (chain) == BIND_EXPR)
7200 {
7201 if (ignore_topmost_bind)
7202 {
7203 chain = BIND_EXPR_BODY (chain);
7204 indent = 2;
7205 }
7206 else
7207 indent = 0;
7208 }
7209 else
7210 {
7211 if (!ignore_topmost_bind)
7212 fprintf (file, "{\n");
7213 indent = 2;
7214 }
7215
7216 if (any_var)
7217 fprintf (file, "\n");
7218
7219 print_generic_stmt_indented (file, chain, flags, indent);
7220 if (ignore_topmost_bind)
7221 fprintf (file, "}\n");
7222 }
7223
7224 if (flags & TDF_ENUMERATE_LOCALS)
7225 dump_enumerated_decls (file, flags);
7226 fprintf (file, "\n\n");
7227
7228 current_function_decl = old_current_fndecl;
7229 }
7230
7231 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7232
7233 DEBUG_FUNCTION void
7234 debug_function (tree fn, int flags)
7235 {
7236 dump_function_to_file (fn, stderr, flags);
7237 }
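
/* Example, from a debugger session (FLAGS takes the TDF_* bits, 0 for
   the default dump):

     (gdb) call debug_function (cfun->decl, 0)  */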
7238
7239
7240 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7241
7242 static void
7243 print_pred_bbs (FILE *file, basic_block bb)
7244 {
7245 edge e;
7246 edge_iterator ei;
7247
7248 FOR_EACH_EDGE (e, ei, bb->preds)
7249 fprintf (file, "bb_%d ", e->src->index);
7250 }
7251
7252
7253 /* Print on FILE the indexes for the successors of basic_block BB. */
7254
7255 static void
7256 print_succ_bbs (FILE *file, basic_block bb)
7257 {
7258 edge e;
7259 edge_iterator ei;
7260
7261 FOR_EACH_EDGE (e, ei, bb->succs)
7262 fprintf (file, "bb_%d ", e->dest->index);
7263 }
7264
7265 /* Print to FILE the basic block BB, with detail depending on VERBOSITY. */
7266
7267 void
7268 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7269 {
7270 char *s_indent = (char *) alloca ((size_t) indent + 1);
7271 memset ((void *) s_indent, ' ', (size_t) indent);
7272 s_indent[indent] = '\0';
7273
7274 /* Print basic_block's header. */
7275 if (verbosity >= 2)
7276 {
7277 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7278 print_pred_bbs (file, bb);
7279 fprintf (file, "}, succs = {");
7280 print_succ_bbs (file, bb);
7281 fprintf (file, "})\n");
7282 }
7283
7284 /* Print basic_block's body. */
7285 if (verbosity >= 3)
7286 {
7287 fprintf (file, "%s {\n", s_indent);
7288 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7289 fprintf (file, "%s }\n", s_indent);
7290 }
7291 }
7292
7293 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7294
7295 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7296 the VERBOSITY level, this outputs the contents of the loop, or just
7297 its structure. */
7298
7299 static void
7300 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7301 {
7302 char *s_indent;
7303 basic_block bb;
7304
7305 if (loop == NULL)
7306 return;
7307
7308 s_indent = (char *) alloca ((size_t) indent + 1);
7309 memset ((void *) s_indent, ' ', (size_t) indent);
7310 s_indent[indent] = '\0';
7311
7312 /* Print loop's header. */
7313 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7314 if (loop->header)
7315 fprintf (file, "header = %d", loop->header->index);
7316 else
7317 {
7318 fprintf (file, "deleted)\n");
7319 return;
7320 }
7321 if (loop->latch)
7322 fprintf (file, ", latch = %d", loop->latch->index);
7323 else
7324 fprintf (file, ", multiple latches");
7325 fprintf (file, ", niter = ");
7326 print_generic_expr (file, loop->nb_iterations, 0);
7327
7328 if (loop->any_upper_bound)
7329 {
7330 fprintf (file, ", upper_bound = ");
7331 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7332 }
7333
7334 if (loop->any_estimate)
7335 {
7336 fprintf (file, ", estimate = ");
7337 dump_double_int (file, loop->nb_iterations_estimate, true);
7338 }
7339 fprintf (file, ")\n");
7340
7341 /* Print loop's body. */
7342 if (verbosity >= 1)
7343 {
7344 fprintf (file, "%s{\n", s_indent);
7345 FOR_EACH_BB_FN (bb, cfun)
7346 if (bb->loop_father == loop)
7347 print_loops_bb (file, bb, indent, verbosity);
7348
7349 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7350 fprintf (file, "%s}\n", s_indent);
7351 }
7352 }
7353
7354 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7355 spaces. Depending on the VERBOSITY level, this outputs the contents
7356 of the loop, or just its structure. */
7357
7358 static void
7359 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7360 int verbosity)
7361 {
7362 if (loop == NULL)
7363 return;
7364
7365 print_loop (file, loop, indent, verbosity);
7366 print_loop_and_siblings (file, loop->next, indent, verbosity);
7367 }
7368
7369 /* Walk the CFG from the entry point of the current function and, on
7370 entry of a loop, pretty print the loop structure on FILE. */
7371
7372 void
7373 print_loops (FILE *file, int verbosity)
7374 {
7375 basic_block bb;
7376
7377 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7378 if (bb && bb->loop_father)
7379 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7380 }
7381
7382 /* Dump a loop. */
7383
7384 DEBUG_FUNCTION void
7385 debug (struct loop &ref)
7386 {
7387 print_loop (stderr, &ref, 0, /*verbosity*/0);
7388 }
7389
7390 DEBUG_FUNCTION void
7391 debug (struct loop *ptr)
7392 {
7393 if (ptr)
7394 debug (*ptr);
7395 else
7396 fprintf (stderr, "<nil>\n");
7397 }
7398
7399 /* Dump a loop verbosely. */
7400
7401 DEBUG_FUNCTION void
7402 debug_verbose (struct loop &ref)
7403 {
7404 print_loop (stderr, &ref, 0, /*verbosity*/3);
7405 }
7406
7407 DEBUG_FUNCTION void
7408 debug_verbose (struct loop *ptr)
7409 {
7410 if (ptr)
7411 debug_verbose (*ptr);
7412 else
7413 fprintf (stderr, "<nil>\n");
7414 }
7415
7416
7417 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7418
7419 DEBUG_FUNCTION void
7420 debug_loops (int verbosity)
7421 {
7422 print_loops (stderr, verbosity);
7423 }
7424
7425 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7426
7427 DEBUG_FUNCTION void
7428 debug_loop (struct loop *loop, int verbosity)
7429 {
7430 print_loop (stderr, loop, 0, verbosity);
7431 }
7432
7433 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7434 level. */
7435
7436 DEBUG_FUNCTION void
7437 debug_loop_num (unsigned num, int verbosity)
7438 {
7439 debug_loop (get_loop (cfun, num), verbosity);
7440 }
7441
7442 /* Return true if BB ends with a call, possibly followed by some
7443 instructions that must stay with the call. Return false
7444 otherwise. */
7445
7446 static bool
7447 gimple_block_ends_with_call_p (basic_block bb)
7448 {
7449 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7450 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7451 }
7452
7453
7454 /* Return true if BB ends with a conditional branch. Return false
7455 otherwise. */
7456
7457 static bool
7458 gimple_block_ends_with_condjump_p (const_basic_block bb)
7459 {
7460 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7461 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7462 }
7463
7464
7465 /* Return true if we need to add fake edge to exit at statement T.
7466 Helper function for gimple_flow_call_edges_add. */
7467
7468 static bool
7469 need_fake_edge_p (gimple t)
7470 {
7471 tree fndecl = NULL_TREE;
7472 int call_flags = 0;
7473
7474 /* NORETURN and LONGJMP calls already have an edge to exit.
7475 CONST and PURE calls do not need one.
7476 We don't currently check for CONST and PURE here, although
7477 it would be a good idea, because those attributes are
7478 figured out from the RTL in mark_constant_function, and
7479 the counter incrementation code from -fprofile-arcs
7480 leads to different results from -fbranch-probabilities. */
7481 if (is_gimple_call (t))
7482 {
7483 fndecl = gimple_call_fndecl (t);
7484 call_flags = gimple_call_flags (t);
7485 }
7486
7487 if (is_gimple_call (t)
7488 && fndecl
7489 && DECL_BUILT_IN (fndecl)
7490 && (call_flags & ECF_NOTHROW)
7491 && !(call_flags & ECF_RETURNS_TWICE)
7492 /* fork() doesn't really return twice, but the effect of
7493 wrapping it in __gcov_fork() which calls __gcov_flush()
7494 and clears the counters before forking has the same
7495 effect as returning twice. Force a fake edge. */
7496 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7497 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7498 return false;
7499
7500 if (is_gimple_call (t))
7501 {
7502 edge_iterator ei;
7503 edge e;
7504 basic_block bb;
7505
7506 if (!(call_flags & ECF_NORETURN))
7507 return true;
7508
7509 bb = gimple_bb (t);
7510 FOR_EACH_EDGE (e, ei, bb->succs)
7511 if ((e->flags & EDGE_FAKE) == 0)
7512 return true;
7513 }
7514
7515 if (gimple_code (t) == GIMPLE_ASM
7516 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7517 return true;
7518
7519 return false;
7520 }
7521
7522
7523 /* Add fake edges to the function exit for any non-constant and
7524 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7525 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7526 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7527 that were split.
7528
7529 The goal is to expose cases in which entering a basic block does
7530 not imply that all subsequent instructions must be executed. */
7531
7532 static int
7533 gimple_flow_call_edges_add (sbitmap blocks)
7534 {
7535 int i;
7536 int blocks_split = 0;
7537 int last_bb = last_basic_block_for_fn (cfun);
7538 bool check_last_block = false;
7539
7540 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7541 return 0;
7542
7543 if (! blocks)
7544 check_last_block = true;
7545 else
7546 check_last_block = bitmap_bit_p (blocks,
7547 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7548
7549 /* In the last basic block, before epilogue generation, there will be
7550 a fallthru edge to EXIT. Special care is required if the last insn
7551 of the last basic block is a call because make_edge folds duplicate
7552 edges, which would result in the fallthru edge also being marked
7553 fake, which would result in the fallthru edge being removed by
7554 remove_fake_edges, which would result in an invalid CFG.
7555
7556 Moreover, we can't elide the outgoing fake edge, since the block
7557 profiler needs to take this into account in order to solve the minimal
7558 spanning tree in the case that the call doesn't return.
7559
7560 Handle this by adding a dummy instruction in a new last basic block. */
7561 if (check_last_block)
7562 {
7563 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7564 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7565 gimple t = NULL;
7566
7567 if (!gsi_end_p (gsi))
7568 t = gsi_stmt (gsi);
7569
7570 if (t && need_fake_edge_p (t))
7571 {
7572 edge e;
7573
7574 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7575 if (e)
7576 {
7577 gsi_insert_on_edge (e, gimple_build_nop ());
7578 gsi_commit_edge_inserts ();
7579 }
7580 }
7581 }
7582
7583 /* Now add fake edges to the function exit for any non-constant
7584 calls since there is no way that we can determine if they will
7585 return or not... */
7586 for (i = 0; i < last_bb; i++)
7587 {
7588 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7589 gimple_stmt_iterator gsi;
7590 gimple stmt, last_stmt;
7591
7592 if (!bb)
7593 continue;
7594
7595 if (blocks && !bitmap_bit_p (blocks, i))
7596 continue;
7597
7598 gsi = gsi_last_nondebug_bb (bb);
7599 if (!gsi_end_p (gsi))
7600 {
7601 last_stmt = gsi_stmt (gsi);
7602 do
7603 {
7604 stmt = gsi_stmt (gsi);
7605 if (need_fake_edge_p (stmt))
7606 {
7607 edge e;
7608
7609 /* The handling above of the final block before the
7610 epilogue should be enough to verify that there is
7611 no edge to the exit block in CFG already.
7612 Calling make_edge in such case would cause us to
7613 mark that edge as fake and remove it later. */
7614 #ifdef ENABLE_CHECKING
7615 if (stmt == last_stmt)
7616 {
7617 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7618 gcc_assert (e == NULL);
7619 }
7620 #endif
7621
7622 /* Note that the following may create a new basic block
7623 and renumber the existing basic blocks. */
7624 if (stmt != last_stmt)
7625 {
7626 e = split_block (bb, stmt);
7627 if (e)
7628 blocks_split++;
7629 }
7630 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7631 }
7632 gsi_prev (&gsi);
7633 }
7634 while (!gsi_end_p (gsi));
7635 }
7636 }
7637
7638 if (blocks_split)
7639 verify_flow_info ();
7640
7641 return blocks_split;
7642 }
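
/* Schematically, a block whose middle statement is a non-const call

     bb:  stmt1; foo (); stmt2;

   is split right after the call, and the half ending in the call gets
   a fake edge to EXIT:

     bb:   stmt1; foo ();   --(EDGE_FAKE)-->  EXIT
     bb':  stmt2;                                     */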
7643
7644 /* Removes edge E and all the blocks dominated by it, and updates dominance
7645 information. The IL in E->src needs to be updated separately.
7646 If dominance info is not available, only the edge E is removed. */
7647
7648 void
7649 remove_edge_and_dominated_blocks (edge e)
7650 {
7651 vec<basic_block> bbs_to_remove = vNULL;
7652 vec<basic_block> bbs_to_fix_dom = vNULL;
7653 bitmap df, df_idom;
7654 edge f;
7655 edge_iterator ei;
7656 bool none_removed = false;
7657 unsigned i;
7658 basic_block bb, dbb;
7659 bitmap_iterator bi;
7660
7661 if (!dom_info_available_p (CDI_DOMINATORS))
7662 {
7663 remove_edge (e);
7664 return;
7665 }
7666
7667 /* No updating is needed for edges to exit. */
7668 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7669 {
7670 if (cfgcleanup_altered_bbs)
7671 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7672 remove_edge (e);
7673 return;
7674 }
7675
7676 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7677 that is not dominated by E->dest, then this set is empty. Otherwise,
7678 all the basic blocks dominated by E->dest are removed.
7679
7680 Also, to DF_IDOM we store the immediate dominators of the blocks in
7681 the dominance frontier of E (i.e., of the successors of the
7682 removed blocks, if there are any, and of E->dest otherwise). */
7683 FOR_EACH_EDGE (f, ei, e->dest->preds)
7684 {
7685 if (f == e)
7686 continue;
7687
7688 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7689 {
7690 none_removed = true;
7691 break;
7692 }
7693 }
7694
7695 df = BITMAP_ALLOC (NULL);
7696 df_idom = BITMAP_ALLOC (NULL);
7697
7698 if (none_removed)
7699 bitmap_set_bit (df_idom,
7700 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7701 else
7702 {
7703 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7704 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7705 {
7706 FOR_EACH_EDGE (f, ei, bb->succs)
7707 {
7708 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7709 bitmap_set_bit (df, f->dest->index);
7710 }
7711 }
7712 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7713 bitmap_clear_bit (df, bb->index);
7714
7715 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7716 {
7717 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7718 bitmap_set_bit (df_idom,
7719 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7720 }
7721 }
7722
7723 if (cfgcleanup_altered_bbs)
7724 {
7725 /* Record the set of the altered basic blocks. */
7726 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7727 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7728 }
7729
7730 /* Remove E and the cancelled blocks. */
7731 if (none_removed)
7732 remove_edge (e);
7733 else
7734 {
7735 /* Walk backwards so as to get a chance to substitute all
7736 released DEFs into debug stmts. See
7737 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7738 details. */
7739 for (i = bbs_to_remove.length (); i-- > 0; )
7740 delete_basic_block (bbs_to_remove[i]);
7741 }
7742
7743 /* Update the dominance information. The immediate dominator may change only
7744 for blocks whose immediate dominator belongs to DF_IDOM:
7745
7746 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7747 removal. Let Z the arbitrary block such that idom(Z) = Y and
7748 Z dominates X after the removal. Before removal, there exists a path P
7749 from Y to X that avoids Z. Let F be the last edge on P that is
7750 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7751 dominates W, and because of P, Z does not dominate W), and W belongs to
7752 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7753 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7754 {
7755 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7756 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7757 dbb;
7758 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7759 bbs_to_fix_dom.safe_push (dbb);
7760 }
7761
7762 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7763
7764 BITMAP_FREE (df);
7765 BITMAP_FREE (df_idom);
7766 bbs_to_remove.release ();
7767 bbs_to_fix_dom.release ();
7768 }
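
/* For instance, removing edge A->B in

          A
         / \
        B   C
         \ /
          D

   where A is B's only predecessor deletes B (and everything B
   dominated).  D's immediate dominator was A; after the removal D is
   in the dominance frontier of the removed blocks, A lands in DF_IDOM,
   and iterate_fix_dominators recomputes idom (D) = C from the
   remaining path A->C->D.  */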
7769
7770 /* Purge dead EH edges from basic block BB. */
7771
7772 bool
7773 gimple_purge_dead_eh_edges (basic_block bb)
7774 {
7775 bool changed = false;
7776 edge e;
7777 edge_iterator ei;
7778 gimple stmt = last_stmt (bb);
7779
7780 if (stmt && stmt_can_throw_internal (stmt))
7781 return false;
7782
7783 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7784 {
7785 if (e->flags & EDGE_EH)
7786 {
7787 remove_edge_and_dominated_blocks (e);
7788 changed = true;
7789 }
7790 else
7791 ei_next (&ei);
7792 }
7793
7794 return changed;
7795 }
7796
7797 /* Purge dead EH edges from basic block listed in BLOCKS. */
7798
7799 bool
7800 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7801 {
7802 bool changed = false;
7803 unsigned i;
7804 bitmap_iterator bi;
7805
7806 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7807 {
7808 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7809
7810 /* Earlier gimple_purge_dead_eh_edges could have removed
7811 this basic block already. */
7812 gcc_assert (bb || changed);
7813 if (bb != NULL)
7814 changed |= gimple_purge_dead_eh_edges (bb);
7815 }
7816
7817 return changed;
7818 }
7819
7820 /* Purge dead abnormal call edges from basic block BB. */
7821
7822 bool
7823 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7824 {
7825 bool changed = false;
7826 edge e;
7827 edge_iterator ei;
7828 gimple stmt = last_stmt (bb);
7829
7830 if (!cfun->has_nonlocal_label
7831 && !cfun->calls_setjmp)
7832 return false;
7833
7834 if (stmt && stmt_can_make_abnormal_goto (stmt))
7835 return false;
7836
7837 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7838 {
7839 if (e->flags & EDGE_ABNORMAL)
7840 {
7841 if (e->flags & EDGE_FALLTHRU)
7842 e->flags &= ~EDGE_ABNORMAL;
7843 else
7844 remove_edge_and_dominated_blocks (e);
7845 changed = true;
7846 }
7847 else
7848 ei_next (&ei);
7849 }
7850
7851 return changed;
7852 }
7853
7854 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7855
7856 bool
7857 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7858 {
7859 bool changed = false;
7860 unsigned i;
7861 bitmap_iterator bi;
7862
7863 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7864 {
7865 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7866
7867 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7868 this basic block already. */
7869 gcc_assert (bb || changed);
7870 if (bb != NULL)
7871 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7872 }
7873
7874 return changed;
7875 }
7876
7877 /* This function is called whenever a new edge is created or
7878 redirected. */
7879
7880 static void
7881 gimple_execute_on_growing_pred (edge e)
7882 {
7883 basic_block bb = e->dest;
7884
7885 if (!gimple_seq_empty_p (phi_nodes (bb)))
7886 reserve_phi_args_for_new_edge (bb);
7887 }
7888
7889 /* This function is called immediately before edge E is removed from
7890 the edge vector E->dest->preds. */
7891
7892 static void
7893 gimple_execute_on_shrinking_pred (edge e)
7894 {
7895 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7896 remove_phi_args (e);
7897 }
7898
7899 /*---------------------------------------------------------------------------
7900 Helper functions for Loop versioning
7901 ---------------------------------------------------------------------------*/
7902
7903 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7904 of 'first'. Both of them are dominated by 'new_head' basic block. When
7905 'new_head' was created by splitting 'second's incoming edge, the edge from
7906 'new_head' to 'second' received phi arguments via split_edge (). Later, an
7907 additional edge 'e' was created to connect 'new_head' and 'first'. This
7908 routine adds, on edge 'e', the phi args that the 'new_head' to 'second'
7909 edge received as part of that edge splitting. */
7910
7911 static void
7912 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7913 basic_block new_head, edge e)
7914 {
7915 gimple phi1, phi2;
7916 gimple_stmt_iterator psi1, psi2;
7917 tree def;
7918 edge e2 = find_edge (new_head, second);
7919
7920 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7921 edge, we should always have an edge from NEW_HEAD to SECOND. */
7922 gcc_assert (e2 != NULL);
7923
7924 /* Browse all 'second' basic block phi nodes and add phi args to
7925 edge 'e' for 'first' head. PHI args are always in correct order. */
7926
7927 for (psi2 = gsi_start_phis (second),
7928 psi1 = gsi_start_phis (first);
7929 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7930 gsi_next (&psi2), gsi_next (&psi1))
7931 {
7932 phi1 = gsi_stmt (psi1);
7933 phi2 = gsi_stmt (psi2);
7934 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7935 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7936 }
7937 }
7938
7939
7940 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7941 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7942 the destination of the ELSE part. */
7943
7944 static void
7945 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7946 basic_block second_head ATTRIBUTE_UNUSED,
7947 basic_block cond_bb, void *cond_e)
7948 {
7949 gimple_stmt_iterator gsi;
7950 gimple new_cond_expr;
7951 tree cond_expr = (tree) cond_e;
7952 edge e0;
7953
7954 /* Build new conditional expr. */
7955 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7956 NULL_TREE, NULL_TREE);
7957
7958 /* Add new cond in cond_bb. */
7959 gsi = gsi_last_bb (cond_bb);
7960 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7961
7962 /* Adjust edges appropriately to connect new head with first head
7963 as well as second head. */
7964 e0 = single_succ_edge (cond_bb);
7965 e0->flags &= ~EDGE_FALLTHRU;
7966 e0->flags |= EDGE_FALSE_VALUE;
7967 }
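
/* Source-level picture of what the two loop-versioning helpers above
   construct (a rough sketch, not from the original source):

     if (cond)        <- GIMPLE_COND placed in COND_BB by
       loop;             gimple_lv_add_condition_to_bb
     else
       loop';         <- copied loop; PHIs on its header are fixed
                         up by gimple_lv_adjust_loop_header_phi

   The pre-existing single successor edge of COND_BB is turned into the
   FALSE edge here; wiring up the TRUE edge is left to the generic loop
   versioning driver. */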
7968
7969
7970 /* Do book-keeping of basic block BB for the profile consistency checker.
7971 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7972 do post-pass accounting. Store the results in RECORD. */
7973 static void
7974 gimple_account_profile_record (basic_block bb, int after_pass,
7975 struct profile_record *record)
7976 {
7977 gimple_stmt_iterator i;
7978 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7979 {
7980 record->size[after_pass]
7981 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7982 if (profile_status_for_fn (cfun) == PROFILE_READ)
7983 record->time[after_pass]
7984 += estimate_num_insns (gsi_stmt (i),
7985 &eni_time_weights) * bb->count;
7986 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
7987 record->time[after_pass]
7988 += estimate_num_insns (gsi_stmt (i),
7989 &eni_time_weights) * bb->frequency;
7990 }
7991 }
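
/* Worked example for the accounting above (illustrative numbers): with
   real profile data (PROFILE_READ), a block with bb->count == 1000
   containing two statements of size estimate 2 and 3 adds 5 to
   record->size[], and if their time estimates are also 2 and 3 it adds
   (2 + 3) * 1000 == 5000 to record->time[]. With a guessed profile
   (PROFILE_GUESSED) the multiplier is bb->frequency instead. The
   checker compares pre-pass and post-pass totals to flag passes that
   degrade profile consistency. */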
7992
7993 struct cfg_hooks gimple_cfg_hooks = {
7994 "gimple",
7995 gimple_verify_flow_info,
7996 gimple_dump_bb, /* dump_bb */
7997 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7998 create_bb, /* create_basic_block */
7999 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8000 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8001 gimple_can_remove_branch_p, /* can_remove_branch_p */
8002 remove_bb, /* delete_basic_block */
8003 gimple_split_block, /* split_block */
8004 gimple_move_block_after, /* move_block_after */
8005 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8006 gimple_merge_blocks, /* merge_blocks */
8007 gimple_predict_edge, /* predict_edge */
8008 gimple_predicted_by_p, /* predicted_by_p */
8009 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8010 gimple_duplicate_bb, /* duplicate_block */
8011 gimple_split_edge, /* split_edge */
8012 gimple_make_forwarder_block, /* make_forwarder_block */
8013 NULL, /* tidy_fallthru_edge */
8014 NULL, /* force_nonfallthru */
8015 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8016 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8017 gimple_flow_call_edges_add, /* flow_call_edges_add */
8018 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8019 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8020 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8021 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8022 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8023 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8024 flush_pending_stmts, /* flush_pending_stmts */
8025 gimple_empty_block_p, /* block_empty_p */
8026 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8027 gimple_account_profile_record,
8028 };
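
/* The IR-independent CFG routines in cfghooks.c dispatch through this
   table, so generic helpers work on GIMPLE unchanged. A simplified
   sketch of the dispatch (paraphrasing cfghooks.c, not a verbatim
   copy):

     basic_block
     split_edge (edge e)
     {
       ...
       ret = cfg_hooks->split_edge (e);   <- gimple_split_edge here
       ...
     }
*/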
8029
8030
8031 /* Split all critical edges. */
8032
8033 unsigned int
8034 split_critical_edges (void)
8035 {
8036 basic_block bb;
8037 edge e;
8038 edge_iterator ei;
8039
8040 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8041 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8042 mappings around the calls to split_edge. */
8043 start_recording_case_labels ();
8044 FOR_ALL_BB_FN (bb, cfun)
8045 {
8046 FOR_EACH_EDGE (e, ei, bb->succs)
8047 {
8048 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8049 split_edge (e);
8050 /* PRE inserts statements on edges and expects that,
8051 since split_critical_edges was done beforehand, committing edge
8052 insertions will not split more edges. In addition to critical
8053 edges we must therefore split edges whose source block ends in a
8054 control flow statement, such as RESX, when the destination has
8055 phi nodes, multiple predecessors, or is the exit block.
8056 This matches the logic in gimple_find_edge_insert_loc. */
8057 else if ((!single_pred_p (e->dest)
8058 || !gimple_seq_empty_p (phi_nodes (e->dest))
8059 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8060 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8061 && !(e->flags & EDGE_ABNORMAL))
8062 {
8063 gimple_stmt_iterator gsi;
8064
8065 gsi = gsi_last_bb (e->src);
8066 if (!gsi_end_p (gsi)
8067 && stmt_ends_bb_p (gsi_stmt (gsi))
8068 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8069 && !gimple_call_builtin_p (gsi_stmt (gsi),
8070 BUILT_IN_RETURN)))
8071 split_edge (e);
8072 }
8073 }
8074 }
8075 end_recording_case_labels ();
8076 return 0;
8077 }
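
/* Recap (illustrative, not from the original source): an edge is
   critical when its source has multiple successors and its destination
   has multiple predecessors, so no statement can be inserted "on" it
   without affecting other paths. For

     if (p) x = 1;
     if (q) x = 2;

   the edge from the first test's false arm to the second test is
   critical. split_edge () interposes a fresh empty block on such an
   edge, giving passes like PRE a safe insertion point. */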
8078
8079 namespace {
8080
8081 const pass_data pass_data_split_crit_edges =
8082 {
8083 GIMPLE_PASS, /* type */
8084 "crited", /* name */
8085 OPTGROUP_NONE, /* optinfo_flags */
8086 true, /* has_execute */
8087 TV_TREE_SPLIT_EDGES, /* tv_id */
8088 PROP_cfg, /* properties_required */
8089 PROP_no_crit_edges, /* properties_provided */
8090 0, /* properties_destroyed */
8091 0, /* todo_flags_start */
8092 TODO_verify_flow, /* todo_flags_finish */
8093 };
8094
8095 class pass_split_crit_edges : public gimple_opt_pass
8096 {
8097 public:
8098 pass_split_crit_edges (gcc::context *ctxt)
8099 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8100 {}
8101
8102 /* opt_pass methods: */
8103 virtual unsigned int execute (function *) { return split_critical_edges (); }
8104
8105 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8106 }; // class pass_split_crit_edges
8107
8108 } // anon namespace
8109
8110 gimple_opt_pass *
8111 make_pass_split_crit_edges (gcc::context *ctxt)
8112 {
8113 return new pass_split_crit_edges (ctxt);
8114 }
8115
8116
8117 /* Build a ternary operation and gimplify it. Emit code before GSI.
8118 Return the gimple_val holding the result. */
8119
8120 tree
8121 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8122 tree type, tree a, tree b, tree c)
8123 {
8124 tree ret;
8125 location_t loc = gimple_location (gsi_stmt (*gsi));
8126
8127 ret = fold_build3_loc (loc, code, type, a, b, c);
8128 STRIP_NOPS (ret);
8129
8130 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8131 GSI_SAME_STMT);
8132 }
8133
8134 /* Build a binary operation and gimplify it. Emit code before GSI.
8135 Return the gimple_val holding the result. */
8136
8137 tree
8138 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8139 tree type, tree a, tree b)
8140 {
8141 tree ret;
8142
8143 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8144 STRIP_NOPS (ret);
8145
8146 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8147 GSI_SAME_STMT);
8148 }
8149
8150 /* Build a unary operation and gimplify it. Emit code before GSI.
8151 Return the gimple_val holding the result. */
8152
8153 tree
8154 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8155 tree a)
8156 {
8157 tree ret;
8158
8159 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8160 STRIP_NOPS (ret);
8161
8162 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8163 GSI_SAME_STMT);
8164 }
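
/* Minimal usage sketch for the three helpers above (hypothetical
   caller; TYPE, A, B and C stand for pre-existing trees). Each call
   folds, strips conversions, and emits any needed statements before
   *GSI, returning a gimple_val that can feed the next call:

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
     tree neg  = gimplify_build1 (gsi, NEGATE_EXPR, type, prod);
*/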
8165
8166
8167 \f
8168 /* Given a basic block B which ends with a conditional and has
8169 precisely two successors, determine which of the edges is taken if
8170 the conditional is true and which is taken if the conditional is
8171 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8172
8173 void
8174 extract_true_false_edges_from_block (basic_block b,
8175 edge *true_edge,
8176 edge *false_edge)
8177 {
8178 edge e = EDGE_SUCC (b, 0);
8179
8180 if (e->flags & EDGE_TRUE_VALUE)
8181 {
8182 *true_edge = e;
8183 *false_edge = EDGE_SUCC (b, 1);
8184 }
8185 else
8186 {
8187 *false_edge = e;
8188 *true_edge = EDGE_SUCC (b, 1);
8189 }
8190 }
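
/* Typical use (illustrative): once a pass knows BB ends in a
   GIMPLE_COND, it can name the two outgoing edges directly:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   This relies on the invariant, checked by gimple_verify_flow_info,
   that exactly one successor edge carries EDGE_TRUE_VALUE. */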
8191
8192 /* Emit return warnings. */
8193
8194 namespace {
8195
8196 const pass_data pass_data_warn_function_return =
8197 {
8198 GIMPLE_PASS, /* type */
8199 "*warn_function_return", /* name */
8200 OPTGROUP_NONE, /* optinfo_flags */
8201 true, /* has_execute */
8202 TV_NONE, /* tv_id */
8203 PROP_cfg, /* properties_required */
8204 0, /* properties_provided */
8205 0, /* properties_destroyed */
8206 0, /* todo_flags_start */
8207 0, /* todo_flags_finish */
8208 };
8209
8210 class pass_warn_function_return : public gimple_opt_pass
8211 {
8212 public:
8213 pass_warn_function_return (gcc::context *ctxt)
8214 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8215 {}
8216
8217 /* opt_pass methods: */
8218 virtual unsigned int execute (function *);
8219
8220 }; // class pass_warn_function_return
8221
8222 unsigned int
8223 pass_warn_function_return::execute (function *fun)
8224 {
8225 source_location location;
8226 gimple last;
8227 edge e;
8228 edge_iterator ei;
8229
8230 if (!targetm.warn_func_return (fun->decl))
8231 return 0;
8232
8233 /* If we have a path to EXIT, then we do return. */
8234 if (TREE_THIS_VOLATILE (fun->decl)
8235 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8236 {
8237 location = UNKNOWN_LOCATION;
8238 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8239 {
8240 last = last_stmt (e->src);
8241 if ((gimple_code (last) == GIMPLE_RETURN
8242 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8243 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8244 break;
8245 }
8246 if (location == UNKNOWN_LOCATION)
8247 location = cfun->function_end_locus;
8248 warning_at (location, 0, "%<noreturn%> function does return");
8249 }
8250
8251 /* If we see "return;" in some basic block, then we do reach the end
8252 without returning a value. */
8253 else if (warn_return_type
8254 && !TREE_NO_WARNING (fun->decl)
8255 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8256 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8257 {
8258 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8259 {
8260 gimple last = last_stmt (e->src);
8261 if (gimple_code (last) == GIMPLE_RETURN
8262 && gimple_return_retval (last) == NULL
8263 && !gimple_no_warning_p (last))
8264 {
8265 location = gimple_location (last);
8266 if (location == UNKNOWN_LOCATION)
8267 location = fun->function_end_locus;
8268 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8269 TREE_NO_WARNING (fun->decl) = 1;
8270 break;
8271 }
8272 }
8273 }
8274 return 0;
8275 }
8276
8277 } // anon namespace
8278
8279 gimple_opt_pass *
8280 make_pass_warn_function_return (gcc::context *ctxt)
8281 {
8282 return new pass_warn_function_return (ctxt);
8283 }
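
/* User-level triggers for the two diagnostics above (an illustrative
   sketch; exit () is from <stdlib.h>):

     __attribute__ ((noreturn)) void t (int i)
     {
       if (i)
         exit (0);
     }                <- "'noreturn' function does return"

     int u (int i)
     {
       if (i)
         return 1;
     }                <- -Wreturn-type: "control reaches end of
                         non-void function"
*/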
8284
8285 /* Walk a gimplified function and warn about calls whose return value is
8286 ignored when the callee is declared with attribute((warn_unused_result)).
8287 This is done before inlining, so we don't have to worry about that. */
8288
8289 static void
8290 do_warn_unused_result (gimple_seq seq)
8291 {
8292 tree fdecl, ftype;
8293 gimple_stmt_iterator i;
8294
8295 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8296 {
8297 gimple g = gsi_stmt (i);
8298
8299 switch (gimple_code (g))
8300 {
8301 case GIMPLE_BIND:
8302 do_warn_unused_result (gimple_bind_body (g));
8303 break;
8304 case GIMPLE_TRY:
8305 do_warn_unused_result (gimple_try_eval (g));
8306 do_warn_unused_result (gimple_try_cleanup (g));
8307 break;
8308 case GIMPLE_CATCH:
8309 do_warn_unused_result (gimple_catch_handler (g));
8310 break;
8311 case GIMPLE_EH_FILTER:
8312 do_warn_unused_result (gimple_eh_filter_failure (g));
8313 break;
8314
8315 case GIMPLE_CALL:
8316 if (gimple_call_lhs (g))
8317 break;
8318 if (gimple_call_internal_p (g))
8319 break;
8320
8321 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8322 LHS. All calls whose value is ignored should be
8323 represented like this. Look for the attribute. */
8324 fdecl = gimple_call_fndecl (g);
8325 ftype = gimple_call_fntype (g);
8326
8327 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8328 {
8329 location_t loc = gimple_location (g);
8330
8331 if (fdecl)
8332 warning_at (loc, OPT_Wunused_result,
8333 "ignoring return value of %qD, "
8334 "declared with attribute warn_unused_result",
8335 fdecl);
8336 else
8337 warning_at (loc, OPT_Wunused_result,
8338 "ignoring return value of function "
8339 "declared with attribute warn_unused_result");
8340 }
8341 break;
8342
8343 default:
8344 /* Not a container, not a call, or a call whose value is used. */
8345 break;
8346 }
8347 }
8348 }
8349
8350 namespace {
8351
8352 const pass_data pass_data_warn_unused_result =
8353 {
8354 GIMPLE_PASS, /* type */
8355 "*warn_unused_result", /* name */
8356 OPTGROUP_NONE, /* optinfo_flags */
8357 true, /* has_execute */
8358 TV_NONE, /* tv_id */
8359 PROP_gimple_any, /* properties_required */
8360 0, /* properties_provided */
8361 0, /* properties_destroyed */
8362 0, /* todo_flags_start */
8363 0, /* todo_flags_finish */
8364 };
8365
8366 class pass_warn_unused_result : public gimple_opt_pass
8367 {
8368 public:
8369 pass_warn_unused_result (gcc::context *ctxt)
8370 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8371 {}
8372
8373 /* opt_pass methods: */
8374 virtual bool gate (function *) { return flag_warn_unused_result; }
8375 virtual unsigned int execute (function *)
8376 {
8377 do_warn_unused_result (gimple_body (current_function_decl));
8378 return 0;
8379 }
8380
8381 }; // class pass_warn_unused_result
8382
8383 } // anon namespace
8384
8385 gimple_opt_pass *
8386 make_pass_warn_unused_result (gcc::context *ctxt)
8387 {
8388 return new pass_warn_unused_result (ctxt);
8389 }
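
/* User-level trigger for the diagnostic above (illustrative; the
   function name is made up):

     __attribute__ ((warn_unused_result)) int reserve (int n);

     void f (void)
     {
       reserve (8);   <- "ignoring return value of 'reserve',
                          declared with attribute warn_unused_result"
     }

   Note that GCC does not silence this warning for a cast to void; the
   gimplifier drops such casts, so the call still appears naked to the
   walk above. */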
8390
8391 /* IPA passes, compilation of earlier functions or inlining
8392 might have changed some properties, such as marking functions
8393 nothrow, pure, const or noreturn.
8394 Remove redundant edges and basic blocks, and create new ones if necessary.
8395
8396 This pass can't be executed as a standalone pass from the pass manager,
8397 because between inlining and this fixup verify_flow_info would fail. */
8398
8399 unsigned int
8400 execute_fixup_cfg (void)
8401 {
8402 basic_block bb;
8403 gimple_stmt_iterator gsi;
8404 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8405 gcov_type count_scale;
8406 edge e;
8407 edge_iterator ei;
8408
8409 count_scale
8410 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8411 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8412
8413 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8414 cgraph_get_node (current_function_decl)->count;
8415 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8416 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8417 count_scale);
8418
8419 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8420 e->count = apply_scale (e->count, count_scale);
8421
8422 FOR_EACH_BB_FN (bb, cfun)
8423 {
8424 bb->count = apply_scale (bb->count, count_scale);
8425 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8426 {
8427 gimple stmt = gsi_stmt (gsi);
8428 tree decl = is_gimple_call (stmt)
8429 ? gimple_call_fndecl (stmt)
8430 : NULL;
8431 if (decl)
8432 {
8433 int flags = gimple_call_flags (stmt);
8434 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8435 {
8436 if (gimple_purge_dead_abnormal_call_edges (bb))
8437 todo |= TODO_cleanup_cfg;
8438
8439 if (gimple_in_ssa_p (cfun))
8440 {
8441 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8442 update_stmt (stmt);
8443 }
8444 }
8445
8446 if (flags & ECF_NORETURN
8447 && fixup_noreturn_call (stmt))
8448 todo |= TODO_cleanup_cfg;
8449 }
8450
8451 if (maybe_clean_eh_stmt (stmt)
8452 && gimple_purge_dead_eh_edges (bb))
8453 todo |= TODO_cleanup_cfg;
8454 }
8455
8456 FOR_EACH_EDGE (e, ei, bb->succs)
8457 e->count = apply_scale (e->count, count_scale);
8458
8459 /* If we have a basic block with no successors that does not
8460 end with a control statement or a noreturn call, end it with
8461 a call to __builtin_unreachable. This situation can occur
8462 when inlining a noreturn call that does in fact return. */
8463 if (EDGE_COUNT (bb->succs) == 0)
8464 {
8465 gimple stmt = last_stmt (bb);
8466 if (!stmt
8467 || (!is_ctrl_stmt (stmt)
8468 && (!is_gimple_call (stmt)
8469 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8470 {
8471 stmt = gimple_build_call
8472 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8473 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8474 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8475 }
8476 }
8477 }
8478 if (count_scale != REG_BR_PROB_BASE)
8479 compute_function_frequency ();
8480
8481 /* We just processed all calls. */
8482 if (cfun->gimple_df)
8483 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8484
8485 /* Dump a textual representation of the flowgraph. */
8486 if (dump_file)
8487 gimple_dump_cfg (dump_file, dump_flags);
8488
8489 if (current_loops
8490 && (todo & TODO_cleanup_cfg))
8491 loops_state_set (LOOPS_NEED_FIXUP);
8492
8493 return todo;
8494 }
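
/* Illustrative source-level trigger for the __builtin_unreachable
   fixup above: inlining a buggy noreturn function that does return,
   e.g.

     __attribute__ ((noreturn)) static inline void stop (void) { }

   leaves the inlined body's final block with no successors but with no
   control statement or noreturn call either, so a call to
   __builtin_unreachable () is appended to terminate it cleanly. */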
8495
8496 namespace {
8497
8498 const pass_data pass_data_fixup_cfg =
8499 {
8500 GIMPLE_PASS, /* type */
8501 "*free_cfg_annotations", /* name */
8502 OPTGROUP_NONE, /* optinfo_flags */
8503 true, /* has_execute */
8504 TV_NONE, /* tv_id */
8505 PROP_cfg, /* properties_required */
8506 0, /* properties_provided */
8507 0, /* properties_destroyed */
8508 0, /* todo_flags_start */
8509 0, /* todo_flags_finish */
8510 };
8511
8512 class pass_fixup_cfg : public gimple_opt_pass
8513 {
8514 public:
8515 pass_fixup_cfg (gcc::context *ctxt)
8516 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8517 {}
8518
8519 /* opt_pass methods: */
8520 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8521 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8522
8523 }; // class pass_fixup_cfg
8524
8525 } // anon namespace
8526
8527 gimple_opt_pass *
8528 make_pass_fixup_cfg (gcc::context *ctxt)
8529 {
8530 return new pass_fixup_cfg (ctxt);
8531 }
8532
8533 /* Garbage collection support for edge_def. */
8534
8535 extern void gt_ggc_mx (tree&);
8536 extern void gt_ggc_mx (gimple&);
8537 extern void gt_ggc_mx (rtx&);
8538 extern void gt_ggc_mx (basic_block&);
8539
8540 void
8541 gt_ggc_mx (edge_def *e)
8542 {
8543 tree block = LOCATION_BLOCK (e->goto_locus);
8544 gt_ggc_mx (e->src);
8545 gt_ggc_mx (e->dest);
8546 if (current_ir_type () == IR_GIMPLE)
8547 gt_ggc_mx (e->insns.g);
8548 else
8549 gt_ggc_mx (e->insns.r);
8550 gt_ggc_mx (block);
8551 }
8552
8553 /* PCH support for edge_def. */
8554
8555 extern void gt_pch_nx (tree&);
8556 extern void gt_pch_nx (gimple&);
8557 extern void gt_pch_nx (rtx&);
8558 extern void gt_pch_nx (basic_block&);
8559
8560 void
8561 gt_pch_nx (edge_def *e)
8562 {
8563 tree block = LOCATION_BLOCK (e->goto_locus);
8564 gt_pch_nx (e->src);
8565 gt_pch_nx (e->dest);
8566 if (current_ir_type () == IR_GIMPLE)
8567 gt_pch_nx (e->insns.g);
8568 else
8569 gt_pch_nx (e->insns.r);
8570 gt_pch_nx (block);
8571 }
8572
8573 void
8574 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8575 {
8576 tree block = LOCATION_BLOCK (e->goto_locus);
8577 op (&(e->src), cookie);
8578 op (&(e->dest), cookie);
8579 if (current_ir_type () == IR_GIMPLE)
8580 op (&(e->insns.g), cookie);
8581 else
8582 op (&(e->insns.r), cookie);
8583 op (&(block), cookie);
8584 }
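
/* Illustrative note (not from the original source): edge_def is marked
   by hand because its 'insns' member is a union whose active field
   depends on the current IR, and because the BLOCK tree hanging off
   goto_locus is reachable only through LOCATION_BLOCK rather than a
   plain pointer field that gengtype could discover. The same pattern
   repeats across GC marking (gt_ggc_mx), PCH walking (gt_pch_nx) and
   the pointer-rewriting PCH variant above. */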