1 /* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "alloc-pool.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "fold-const.h"
36 #include "cfganal.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "tree-ssa-loop.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-ssa.h"
46 #include "cfgloop.h"
47 #include "tree-ssa-sccvn.h"
48 #include "tree-scalar-evolution.h"
49 #include "params.h"
50 #include "dbgcnt.h"
51 #include "domwalk.h"
52 #include "tree-ssa-propagate.h"
53 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
55 #include "langhooks.h"
56 #include "alias.h"
57
58 /* Even though this file is called tree-ssa-pre.c, we actually
59       implement a bit more than just PRE here.  All of these piggy-back
60       on GVN, which is implemented in tree-ssa-sccvn.c.
61
62 1. Full Redundancy Elimination (FRE)
63 This is the elimination phase of GVN.
64
65 2. Partial Redundancy Elimination (PRE)
66          This adds computation of AVAIL_OUT and ANTIC_IN and
67          performs expression insertion to form GVN-PRE.
68
69 3. Code hoisting
70 This optimization uses the ANTIC_IN sets computed for PRE
71 to move expressions further up than PRE would do, to make
72 multiple computations of the same value fully redundant.
73 This pass is explained below (after the explanation of the
74 basic algorithm for PRE).
75 */
76
77 /* TODO:
78
79 1. Avail sets can be shared by making an avail_find_leader that
80 walks up the dominator tree and looks in those avail sets.
81          This might affect code optimality; it's unclear right now.
82          Currently the AVAIL_OUT sets are the remaining source of quadratic
83          memory use in GVN-PRE.
84 2. Strength reduction can be performed by anticipating expressions
85 we can repair later on.
86 3. We can do back-substitution or smarter value numbering to catch
87 commutative expressions split up over multiple statements.
88 */
89
90    /* For ease of terminology, "expression node" below refers to
91 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
92 represent the actual statement containing the expressions we care about,
93 and we cache the value number by putting it in the expression. */
94
95 /* Basic algorithm for Partial Redundancy Elimination:
96
97 First we walk the statements to generate the AVAIL sets, the
98       EXP_GEN sets, and the TMP_GEN sets.  EXP_GEN sets represent the
99 generation of values/expressions by a given block. We use them
100 when computing the ANTIC sets. The AVAIL sets consist of
101 SSA_NAME's that represent values, so we know what values are
102 available in what blocks. AVAIL is a forward dataflow problem. In
103 SSA, values are never killed, so we don't need a kill set, or a
104 fixpoint iteration, in order to calculate the AVAIL sets. In
105      traditional parlance, AVAIL sets tell us the up-safety of the
106 expressions/values.
107
108 Next, we generate the ANTIC sets. These sets represent the
109 anticipatable expressions. ANTIC is a backwards dataflow
110 problem. An expression is anticipatable in a given block if it could
111      be generated in that block.  This means that if we had to insert
112      the value of that expression in that block, we
113 could. Calculating the ANTIC sets requires phi translation of
114 expressions, because the flow goes backwards through phis. We must
115 iterate to a fixpoint of the ANTIC sets, because we have a kill
116 set. Even in SSA form, values are not live over the entire
117 function, only from their definition point onwards. So we have to
118 remove values from the ANTIC set once we go past the definition
119 point of the leaders that make them up.
120 compute_antic/compute_antic_aux performs this computation.
121
122 Third, we perform insertions to make partially redundant
123 expressions fully redundant.
124
125 An expression is partially redundant (excluding partial
126 anticipation) if:
127
128 1. It is AVAIL in some, but not all, of the predecessors of a
129 given block.
130 2. It is ANTIC in all the predecessors.
131
132 In order to make it fully redundant, we insert the expression into
133 the predecessors where it is not available, but is ANTIC.
134
135 When optimizing for size, we only eliminate the partial redundancy
136      if we need to insert in only one predecessor.  This almost
137      completely avoids the code size increase that PRE usually causes.
138
139 For the partial anticipation case, we only perform insertion if it
140 is partially anticipated in some block, and fully available in all
141 of the predecessors.
142
143 do_pre_regular_insertion/do_pre_partial_partial_insertion
144 performs these steps, driven by insert/insert_aux.
145
146 Fourth, we eliminate fully redundant expressions.
147 This is a simple statement walk that replaces redundant
148 calculations with the now available values. */
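
/* An illustrative sketch of the above in C-like pseudo code (the
   names are made up for this example):

       if (cond)
         x = a + b;
       y = a + b;

   Here "a + b" is AVAIL in one predecessor of the block computing y
   and ANTIC in both, so an insertion into the other predecessor
   makes it fully redundant:

       if (cond)
         x = a + b;
       else
         tmp = a + b;
       y = PHI <x, tmp>;

   The elimination walk then replaces the redundant computation of
   "a + b" with the now available PHI value.  */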
149
150 /* Basic algorithm for Code Hoisting:
151
152      Code hoisting is moving value computations up in the control flow
153      graph to make multiple copies redundant.  Typically this is a size
154      optimization, but there are cases where it is also helpful for speed.
155
156 A simple code hoisting algorithm is implemented that piggy-backs on
157 the PRE infrastructure. For code hoisting, we have to know ANTIC_OUT
158 which is effectively ANTIC_IN - AVAIL_OUT. The latter two have to be
159 computed for PRE, and we can use them to perform a limited version of
160 code hoisting, too.
161
162 For the purpose of this implementation, a value is hoistable to a basic
163 block B if the following properties are met:
164
165 1. The value is in ANTIC_IN(B) -- the value will be computed on all
166         paths from B to function exit and it can be computed in B;
167
168 2. The value is not in AVAIL_OUT(B) -- there would be no need to
169 compute the value again and make it available twice;
170
171 3. All successors of B are dominated by B -- makes sure that inserting
172 a computation of the value in B will make the remaining
173 computations fully redundant;
174
175 4. At least one successor has the value in AVAIL_OUT -- to avoid
176 hoisting values up too far;
177
178 5. There are at least two successors of B -- hoisting in straight
179 line code is pointless.
180
181 The third condition is not strictly necessary, but it would complicate
182 the hoisting pass a lot. In fact, I don't know of any code hoisting
183 algorithm that does not have this requirement. Fortunately, experiments
184      have shown that most candidate hoistable values are in regions that meet
185 this condition (e.g. diamond-shape regions).
186
187      The fourth condition is necessary to avoid hoisting things up too far
188      away from the uses of the value.  Nothing else stops the algorithm
189      from hoisting everything up as far as ANTIC_IN allows.  Experiments
190      with SPEC and CSiBE have shown that hoisting up too far results in more
191      spilling, smaller code size benefits, and worse benchmark scores.
192 Fortunately, in practice most of the interesting hoisting opportunities
193 are caught despite this limitation.
194
195 For hoistable values that meet all conditions, expressions are inserted
196 to make the calculation of the hoistable value fully redundant. We
197 perform code hoisting insertions after each round of PRE insertions,
198 because code hoisting never exposes new PRE opportunities, but PRE can
199 create new code hoisting opportunities.
200
201 The code hoisting algorithm is implemented in do_hoist_insert, driven
202 by insert/insert_aux. */
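
/* An illustrative sketch in C-like pseudo code (the names are made
   up for this example):

       if (cond)
         x = a + b;
       else
         y = a + b;

   The value of "a + b" is in ANTIC_IN of the condition block B but
   not in AVAIL_OUT (B), B dominates both successors, each successor
   has the value in its AVAIL_OUT, and B has two successors.  Code
   hoisting therefore inserts the computation in B:

       t = a + b;
       if (cond)
         x = t;
       else
         y = t;

   making both of the original computations fully redundant.  */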
203
204 /* Representations of value numbers:
205
206 Value numbers are represented by a representative SSA_NAME. We
207 will create fake SSA_NAME's in situations where we need a
208 representative but do not have one (because it is a complex
209 expression). In order to facilitate storing the value numbers in
210 bitmaps, and keep the number of wasted SSA_NAME's down, we also
211 associate a value_id with each value number, and create full blown
212      SSA_NAME's only where we actually need them (IE in operands of
213 existing expressions).
214
215 Theoretically you could replace all the value_id's with
216 SSA_NAME_VERSION, but this would allocate a large number of
217 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
218 It would also require an additional indirection at each point we
219 use the value id. */
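
/* For instance (an illustrative sketch): if GVN proves that a_2 and
   b_5 compute the same value, both share a single value_id and either
   name can act as the representative.  A value that has never been
   set to an SSA_NAME in the program only gets a fake "pretmp" name
   created for it when a representative is actually required (see
   get_representative_for).  */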
220
221 /* Representation of expressions on value numbers:
222
223 Expressions consisting of value numbers are represented the same
224 way as our VN internally represents them, with an additional
225 "pre_expr" wrapping around them in order to facilitate storing all
226 of the expressions in the same sets. */
227
228 /* Representation of sets:
229
230 The dataflow sets do not need to be sorted in any particular order
231      for the majority of their lifetime, so they are simply represented as two
232 bitmaps, one that keeps track of values present in the set, and one
233 that keeps track of expressions present in the set.
234
235 When we need them in topological order, we produce it on demand by
236 transforming the bitmap into an array and sorting it into topo
237 order. */
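
/* For instance (an illustrative sketch): a set holding expressions
   with ids 5 and 9 that share value_id 3, plus expression 7 with
   value_id 4, is represented as expressions = {5, 7, 9} and
   values = {3, 4}.  */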
238
239 /* Type of expression, used to know which member of the PRE_EXPR union
240 is valid. */
241
242 enum pre_expr_kind
243 {
244 NAME,
245 NARY,
246 REFERENCE,
247 CONSTANT
248 };
249
250 union pre_expr_union
251 {
252 tree name;
253 tree constant;
254 vn_nary_op_t nary;
255 vn_reference_t reference;
256 };
257
258 typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
259 {
260 enum pre_expr_kind kind;
261 unsigned int id;
262 pre_expr_union u;
263
264 /* hash_table support. */
265 static inline hashval_t hash (const pre_expr_d *);
266 static inline int equal (const pre_expr_d *, const pre_expr_d *);
267 } *pre_expr;
268
269 #define PRE_EXPR_NAME(e) (e)->u.name
270 #define PRE_EXPR_NARY(e) (e)->u.nary
271 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
272 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
273
274   /* Compare E1 and E2 for equality.  */
275
276 inline int
277 pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
278 {
279 if (e1->kind != e2->kind)
280 return false;
281
282 switch (e1->kind)
283 {
284 case CONSTANT:
285 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
286 PRE_EXPR_CONSTANT (e2));
287 case NAME:
288 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
289 case NARY:
290 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
291 case REFERENCE:
292 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
293 PRE_EXPR_REFERENCE (e2));
294 default:
295 gcc_unreachable ();
296 }
297 }
298
299 /* Hash E. */
300
301 inline hashval_t
302 pre_expr_d::hash (const pre_expr_d *e)
303 {
304 switch (e->kind)
305 {
306 case CONSTANT:
307 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
308 case NAME:
309 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
310 case NARY:
311 return PRE_EXPR_NARY (e)->hashcode;
312 case REFERENCE:
313 return PRE_EXPR_REFERENCE (e)->hashcode;
314 default:
315 gcc_unreachable ();
316 }
317 }
318
319 /* Next global expression id number. */
320 static unsigned int next_expression_id;
321
322 /* Mapping from expression to id number we can use in bitmap sets. */
323 static vec<pre_expr> expressions;
324 static hash_table<pre_expr_d> *expression_to_id;
325 static vec<unsigned> name_to_id;
326
327 /* Allocate an expression id for EXPR. */
328
329 static inline unsigned int
330 alloc_expression_id (pre_expr expr)
331 {
332 struct pre_expr_d **slot;
333 /* Make sure we won't overflow. */
334 gcc_assert (next_expression_id + 1 > next_expression_id);
335 expr->id = next_expression_id++;
336 expressions.safe_push (expr);
337 if (expr->kind == NAME)
338 {
339 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
340 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
341 re-allocations by using vec::reserve upfront. */
342 unsigned old_len = name_to_id.length ();
343 name_to_id.reserve (num_ssa_names - old_len);
344 name_to_id.quick_grow_cleared (num_ssa_names);
345 gcc_assert (name_to_id[version] == 0);
346 name_to_id[version] = expr->id;
347 }
348 else
349 {
350 slot = expression_to_id->find_slot (expr, INSERT);
351 gcc_assert (!*slot);
352 *slot = expr;
353 }
354 return next_expression_id - 1;
355 }
356
357   /* Return the expression id of PRE expression EXPR.  */
358
359 static inline unsigned int
360 get_expression_id (const pre_expr expr)
361 {
362 return expr->id;
363 }
364
365 static inline unsigned int
366 lookup_expression_id (const pre_expr expr)
367 {
368 struct pre_expr_d **slot;
369
370 if (expr->kind == NAME)
371 {
372 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
373 if (name_to_id.length () <= version)
374 return 0;
375 return name_to_id[version];
376 }
377 else
378 {
379 slot = expression_to_id->find_slot (expr, NO_INSERT);
380 if (!slot)
381 return 0;
382 return ((pre_expr)*slot)->id;
383 }
384 }
385
386 /* Return the existing expression id for EXPR, or create one if one
387 does not exist yet. */
388
389 static inline unsigned int
390 get_or_alloc_expression_id (pre_expr expr)
391 {
392 unsigned int id = lookup_expression_id (expr);
393 if (id == 0)
394 return alloc_expression_id (expr);
395 return expr->id = id;
396 }
397
398   /* Return the expression that has expression id ID.  */
399
400 static inline pre_expr
401 expression_for_id (unsigned int id)
402 {
403 return expressions[id];
404 }
405
406   /* Destroy the expressions array, invalidating all expression ids.  */
408
409 static void
410 clear_expression_ids (void)
411 {
412 expressions.release ();
413 }
414
415 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
416
417 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
418
419 static pre_expr
420 get_or_alloc_expr_for_name (tree name)
421 {
422 struct pre_expr_d expr;
423 pre_expr result;
424 unsigned int result_id;
425
426 expr.kind = NAME;
427 expr.id = 0;
428 PRE_EXPR_NAME (&expr) = name;
429 result_id = lookup_expression_id (&expr);
430 if (result_id != 0)
431 return expression_for_id (result_id);
432
433 result = pre_expr_pool.allocate ();
434 result->kind = NAME;
435 PRE_EXPR_NAME (result) = name;
436 alloc_expression_id (result);
437 return result;
438 }
439
440 /* An unordered bitmap set. One bitmap tracks values, the other,
441 expressions. */
442 typedef struct bitmap_set
443 {
444 bitmap_head expressions;
445 bitmap_head values;
446 } *bitmap_set_t;
447
448 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
449 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
450
451 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
452 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
453
454 /* Mapping from value id to expressions with that value_id. */
455 static vec<bitmap> value_expressions;
456
457 /* Sets that we need to keep track of. */
458 typedef struct bb_bitmap_sets
459 {
460 /* The EXP_GEN set, which represents expressions/values generated in
461 a basic block. */
462 bitmap_set_t exp_gen;
463
464 /* The PHI_GEN set, which represents PHI results generated in a
465 basic block. */
466 bitmap_set_t phi_gen;
467
468 /* The TMP_GEN set, which represents results/temporaries generated
469 in a basic block. IE the LHS of an expression. */
470 bitmap_set_t tmp_gen;
471
472 /* The AVAIL_OUT set, which represents which values are available in
473 a given basic block. */
474 bitmap_set_t avail_out;
475
476 /* The ANTIC_IN set, which represents which values are anticipatable
477 in a given basic block. */
478 bitmap_set_t antic_in;
479
480 /* The PA_IN set, which represents which values are
481 partially anticipatable in a given basic block. */
482 bitmap_set_t pa_in;
483
484 /* The NEW_SETS set, which is used during insertion to augment the
485 AVAIL_OUT set of blocks with the new insertions performed during
486 the current iteration. */
487 bitmap_set_t new_sets;
488
489 /* A cache for value_dies_in_block_x. */
490 bitmap expr_dies;
491
492 /* The live virtual operand on successor edges. */
493 tree vop_on_exit;
494
495 /* True if we have visited this block during ANTIC calculation. */
496 unsigned int visited : 1;
497
498 /* True when the block contains a call that might not return. */
499 unsigned int contains_may_not_return_call : 1;
500 } *bb_value_sets_t;
501
502 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
503 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
504 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
505 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
506 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
507 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
508 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
509 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
510 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
511 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
512 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
513
514
515 /* This structure is used to keep track of statistics on what
516      optimizations PRE was able to perform.  */
517 static struct
518 {
519 /* The number of RHS computations eliminated by PRE. */
520 int eliminations;
521
522 /* The number of new expressions/temporaries generated by PRE. */
523 int insertions;
524
525     /* The number of inserts found due to partial anticipation.  */
526 int pa_insert;
527
528 /* The number of inserts made for code hoisting. */
529 int hoist_insert;
530
531 /* The number of new PHI nodes added by PRE. */
532 int phis;
533 } pre_stats;
534
535 static bool do_partial_partial;
536 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
537 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
538 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
539 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
540 static void bitmap_set_and (bitmap_set_t, bitmap_set_t);
541 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
542 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
543 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
544 unsigned int, bool);
545 static bitmap_set_t bitmap_set_new (void);
546 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
547 tree);
548 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
549 static unsigned int get_expr_value_id (pre_expr);
550
551   /* We frequently add elements to and remove elements from sets
552      and hash tables, so we use alloc pools for them.  */
553
554 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
555 static bitmap_obstack grand_bitmap_obstack;
556
557 /* Set of blocks with statements that have had their EH properties changed. */
558 static bitmap need_eh_cleanup;
559
560 /* Set of blocks with statements that have had their AB properties changed. */
561 static bitmap need_ab_cleanup;
562
563   /* A three-tuple {e, pred, v} used to cache phi translations in the
564 phi_translate_table. */
565
566 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
567 {
568 /* The expression. */
569 pre_expr e;
570
571 /* The predecessor block along which we translated the expression. */
572 basic_block pred;
573
574 /* The value that resulted from the translation. */
575 pre_expr v;
576
577 /* The hashcode for the expression, pred pair. This is cached for
578 speed reasons. */
579 hashval_t hashcode;
580
581 /* hash_table support. */
582 static inline hashval_t hash (const expr_pred_trans_d *);
583 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
584 } *expr_pred_trans_t;
585 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
586
587 inline hashval_t
588 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
589 {
590 return e->hashcode;
591 }
592
593 inline int
594 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
595 const expr_pred_trans_d *ve2)
596 {
597 basic_block b1 = ve1->pred;
598 basic_block b2 = ve2->pred;
599
600 /* If they are not translations for the same basic block, they can't
601 be equal. */
602 if (b1 != b2)
603 return false;
604 return pre_expr_d::equal (ve1->e, ve2->e);
605 }
606
607 /* The phi_translate_table caches phi translations for a given
608 expression and predecessor. */
609 static hash_table<expr_pred_trans_d> *phi_translate_table;
610
611 /* Add the tuple mapping from {expression E, basic block PRED} to
612 the phi translation table and return whether it pre-existed. */
613
614 static inline bool
615 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
616 {
617 expr_pred_trans_t *slot;
618 expr_pred_trans_d tem;
619 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
620 pred->index);
621 tem.e = e;
622 tem.pred = pred;
623 tem.hashcode = hash;
624 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
625 if (*slot)
626 {
627 *entry = *slot;
628 return true;
629 }
630
631 *entry = *slot = XNEW (struct expr_pred_trans_d);
632 (*entry)->e = e;
633 (*entry)->pred = pred;
634 (*entry)->hashcode = hash;
635 return false;
636 }
637
638
639 /* Add expression E to the expression set of value id V. */
640
641 static void
642 add_to_value (unsigned int v, pre_expr e)
643 {
644 bitmap set;
645
646 gcc_checking_assert (get_expr_value_id (e) == v);
647
648 if (v >= value_expressions.length ())
649 {
650 value_expressions.safe_grow_cleared (v + 1);
651 }
652
653 set = value_expressions[v];
654 if (!set)
655 {
656 set = BITMAP_ALLOC (&grand_bitmap_obstack);
657 value_expressions[v] = set;
658 }
659
660 bitmap_set_bit (set, get_or_alloc_expression_id (e));
661 }
662
663 /* Create a new bitmap set and return it. */
664
665 static bitmap_set_t
666 bitmap_set_new (void)
667 {
668 bitmap_set_t ret = bitmap_set_pool.allocate ();
669 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
670 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
671 return ret;
672 }
673
674 /* Return the value id for a PRE expression EXPR. */
675
676 static unsigned int
677 get_expr_value_id (pre_expr expr)
678 {
679 unsigned int id;
680 switch (expr->kind)
681 {
682 case CONSTANT:
683 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
684 break;
685 case NAME:
686 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
687 break;
688 case NARY:
689 id = PRE_EXPR_NARY (expr)->value_id;
690 break;
691 case REFERENCE:
692 id = PRE_EXPR_REFERENCE (expr)->value_id;
693 break;
694 default:
695 gcc_unreachable ();
696 }
697 /* ??? We cannot assert that expr has a value-id (it can be 0), because
698 we assign value-ids only to expressions that have a result
699 in set_hashtable_value_ids. */
700 return id;
701 }
702
703 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
704
705 static tree
706 sccvn_valnum_from_value_id (unsigned int val)
707 {
708 bitmap_iterator bi;
709 unsigned int i;
710 bitmap exprset = value_expressions[val];
711 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
712 {
713 pre_expr vexpr = expression_for_id (i);
714 if (vexpr->kind == NAME)
715 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
716 else if (vexpr->kind == CONSTANT)
717 return PRE_EXPR_CONSTANT (vexpr);
718 }
719 return NULL_TREE;
720 }
721
722 /* Remove an expression EXPR from a bitmapped set. */
723
724 static void
725 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
726 {
727 unsigned int val = get_expr_value_id (expr);
728 if (!value_id_constant_p (val))
729 {
730 bitmap_clear_bit (&set->values, val);
731 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
732 }
733 }
734
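/* Insert EXPR with value id VAL into SET.  Constant values are only
   inserted if ALLOW_CONSTANTS is true.  */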
735 static void
736 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
737 unsigned int val, bool allow_constants)
738 {
739 if (allow_constants || !value_id_constant_p (val))
740 {
741 /* We specifically expect this and only this function to be able to
742 insert constants into a set. */
743 bitmap_set_bit (&set->values, val);
744 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
745 }
746 }
747
748 /* Insert an expression EXPR into a bitmapped set. */
749
750 static void
751 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
752 {
753 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
754 }
755
756   /* Copy bitmapped set ORIG into bitmapped set DEST.  */
757
758 static void
759 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
760 {
761 bitmap_copy (&dest->expressions, &orig->expressions);
762 bitmap_copy (&dest->values, &orig->values);
763 }
764
765
766 /* Free memory used up by SET. */
767 static void
768 bitmap_set_free (bitmap_set_t set)
769 {
770 bitmap_clear (&set->expressions);
771 bitmap_clear (&set->values);
772 }
773
774
775   /* Generate a topologically ordered array of bitmap set SET.  */
776
777 static vec<pre_expr>
778 sorted_array_from_bitmap_set (bitmap_set_t set)
779 {
780 unsigned int i, j;
781 bitmap_iterator bi, bj;
782 vec<pre_expr> result;
783
784 /* Pre-allocate enough space for the array. */
785 result.create (bitmap_count_bits (&set->expressions));
786
787 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
788 {
789 /* The number of expressions having a given value is usually
790 relatively small. Thus, rather than making a vector of all
791 the expressions and sorting it by value-id, we walk the values
792 and check in the reverse mapping that tells us what expressions
793 have a given value, to filter those in our set. As a result,
794 the expressions are inserted in value-id order, which means
795 topological order.
796
797          If this is somehow a significant loss for some cases, we can
798 choose which set to walk based on the set size. */
799 bitmap exprset = value_expressions[i];
800 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
801 {
802 if (bitmap_bit_p (&set->expressions, j))
803 result.quick_push (expression_for_id (j));
804 }
805 }
806
807 return result;
808 }
809
810 /* Perform bitmapped set operation DEST &= ORIG. */
811
812 static void
813 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
814 {
815 bitmap_iterator bi;
816 unsigned int i;
817
818 if (dest != orig)
819 {
820 bitmap_head temp;
821 bitmap_initialize (&temp, &grand_bitmap_obstack);
822
823 bitmap_and_into (&dest->values, &orig->values);
824 bitmap_copy (&temp, &dest->expressions);
825 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
826 {
827 pre_expr expr = expression_for_id (i);
828 unsigned int value_id = get_expr_value_id (expr);
829 if (!bitmap_bit_p (&dest->values, value_id))
830 bitmap_clear_bit (&dest->expressions, i);
831 }
832 bitmap_clear (&temp);
833 }
834 }
835
836   /* Return a new set of DEST's values and expressions minus those of ORIG.  */
837
838 static bitmap_set_t
839 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
840 {
841 bitmap_set_t result = bitmap_set_new ();
842 bitmap_iterator bi;
843 unsigned int i;
844
845 bitmap_and_compl (&result->expressions, &dest->expressions,
846 &orig->expressions);
847
848 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
849 {
850 pre_expr expr = expression_for_id (i);
851 unsigned int value_id = get_expr_value_id (expr);
852 bitmap_set_bit (&result->values, value_id);
853 }
854
855 return result;
856 }
857
858 /* Subtract all the values in bitmap set B from bitmap set A. */
859
860 static void
861 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
862 {
863 unsigned int i;
864 bitmap_iterator bi;
865 bitmap_head temp;
866
867 bitmap_initialize (&temp, &grand_bitmap_obstack);
868
869 bitmap_copy (&temp, &a->expressions);
870 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
871 {
872 pre_expr expr = expression_for_id (i);
873 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
874 bitmap_remove_from_set (a, expr);
875 }
876 bitmap_clear (&temp);
877 }
878
879
880 /* Return true if bitmapped set SET contains the value VALUE_ID. */
881
882 static bool
883 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
884 {
885 if (value_id_constant_p (value_id))
886 return true;
887
888 if (!set || bitmap_empty_p (&set->expressions))
889 return false;
890
891 return bitmap_bit_p (&set->values, value_id);
892 }
893
894 static inline bool
895 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
896 {
897 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
898 }
899
900 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
901
902 static void
903 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
904 const pre_expr expr)
905 {
906 bitmap exprset;
907 unsigned int i;
908 bitmap_iterator bi;
909
910 if (value_id_constant_p (lookfor))
911 return;
912
913 if (!bitmap_set_contains_value (set, lookfor))
914 return;
915
916 /* The number of expressions having a given value is usually
917 significantly less than the total number of expressions in SET.
918 Thus, rather than check, for each expression in SET, whether it
919 has the value LOOKFOR, we walk the reverse mapping that tells us
920 what expressions have a given value, and see if any of those
921 expressions are in our set. For large testcases, this is about
922 5-10x faster than walking the bitmap. If this is somehow a
923      significant loss for some cases, we can choose which set to walk
924 based on the set size. */
925 exprset = value_expressions[lookfor];
926 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
927 {
928 if (bitmap_clear_bit (&set->expressions, i))
929 {
930 bitmap_set_bit (&set->expressions, get_expression_id (expr));
931 return;
932 }
933 }
934
935 gcc_unreachable ();
936 }
937
938 /* Return true if two bitmap sets are equal. */
939
940 static bool
941 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
942 {
943 return bitmap_equal_p (&a->values, &b->values);
944 }
945
946 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
947 and add it otherwise. */
948
949 static void
950 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
951 {
952 unsigned int val = get_expr_value_id (expr);
953
954 if (bitmap_set_contains_value (set, val))
955 bitmap_set_replace_value (set, val, expr);
956 else
957 bitmap_insert_into_set (set, expr);
958 }
959
960 /* Insert EXPR into SET if EXPR's value is not already present in
961 SET. */
962
963 static void
964 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
965 {
966 unsigned int val = get_expr_value_id (expr);
967
968 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
969
970 /* Constant values are always considered to be part of the set. */
971 if (value_id_constant_p (val))
972 return;
973
974 /* If the value membership changed, add the expression. */
975 if (bitmap_set_bit (&set->values, val))
976 bitmap_set_bit (&set->expressions, expr->id);
977 }
978
979   /* Print out EXPR to OUTFILE.  */
980
981 static void
982 print_pre_expr (FILE *outfile, const pre_expr expr)
983 {
984 switch (expr->kind)
985 {
986 case CONSTANT:
987 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
988 break;
989 case NAME:
990 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
991 break;
992 case NARY:
993 {
994 unsigned int i;
995 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
996 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
997 for (i = 0; i < nary->length; i++)
998 {
999 print_generic_expr (outfile, nary->op[i], 0);
1000 if (i != (unsigned) nary->length - 1)
1001 fprintf (outfile, ",");
1002 }
1003 fprintf (outfile, "}");
1004 }
1005 break;
1006
1007 case REFERENCE:
1008 {
1009 vn_reference_op_t vro;
1010 unsigned int i;
1011 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1012 fprintf (outfile, "{");
1013 for (i = 0;
1014 ref->operands.iterate (i, &vro);
1015 i++)
1016 {
1017 bool closebrace = false;
1018 if (vro->opcode != SSA_NAME
1019 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
1020 {
1021 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
1022 if (vro->op0)
1023 {
1024 fprintf (outfile, "<");
1025 closebrace = true;
1026 }
1027 }
1028 if (vro->op0)
1029 {
1030 print_generic_expr (outfile, vro->op0, 0);
1031 if (vro->op1)
1032 {
1033 fprintf (outfile, ",");
1034 print_generic_expr (outfile, vro->op1, 0);
1035 }
1036 if (vro->op2)
1037 {
1038 fprintf (outfile, ",");
1039 print_generic_expr (outfile, vro->op2, 0);
1040 }
1041 }
1042 if (closebrace)
1043 fprintf (outfile, ">");
1044 if (i != ref->operands.length () - 1)
1045 fprintf (outfile, ",");
1046 }
1047 fprintf (outfile, "}");
1048 if (ref->vuse)
1049 {
1050 fprintf (outfile, "@");
1051 print_generic_expr (outfile, ref->vuse, 0);
1052 }
1053 }
1054 break;
1055 }
1056 }
1057 void debug_pre_expr (pre_expr);
1058
1059 /* Like print_pre_expr but always prints to stderr. */
1060 DEBUG_FUNCTION void
1061 debug_pre_expr (pre_expr e)
1062 {
1063 print_pre_expr (stderr, e);
1064 fprintf (stderr, "\n");
1065 }
1066
1067 /* Print out SET to OUTFILE. */
1068
1069 static void
1070 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1071 const char *setname, int blockindex)
1072 {
1073 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1074 if (set)
1075 {
1076 bool first = true;
1077 unsigned i;
1078 bitmap_iterator bi;
1079
1080 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1081 {
1082 const pre_expr expr = expression_for_id (i);
1083
1084 if (!first)
1085 fprintf (outfile, ", ");
1086 first = false;
1087 print_pre_expr (outfile, expr);
1088
1089 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1090 }
1091 }
1092 fprintf (outfile, " }\n");
1093 }
1094
1095 void debug_bitmap_set (bitmap_set_t);
1096
1097 DEBUG_FUNCTION void
1098 debug_bitmap_set (bitmap_set_t set)
1099 {
1100 print_bitmap_set (stderr, set, "debug", 0);
1101 }
1102
1103 void debug_bitmap_sets_for (basic_block);
1104
1105 DEBUG_FUNCTION void
1106 debug_bitmap_sets_for (basic_block bb)
1107 {
1108 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1109 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1110 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1111 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1112 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1113 if (do_partial_partial)
1114 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1115 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1116 }
1117
1118 /* Print out the expressions that have VAL to OUTFILE. */
1119
1120 static void
1121 print_value_expressions (FILE *outfile, unsigned int val)
1122 {
1123 bitmap set = value_expressions[val];
1124 if (set)
1125 {
1126 bitmap_set x;
1127 char s[10];
1128 sprintf (s, "%04d", val);
1129 x.expressions = *set;
1130 print_bitmap_set (outfile, &x, s, 0);
1131 }
1132 }
1133
1134
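/* Like print_value_expressions but always prints to stderr.  */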
1135 DEBUG_FUNCTION void
1136 debug_value_expressions (unsigned int val)
1137 {
1138 print_value_expressions (stderr, val);
1139 }
1140
1141 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1142 represent it. */
1143
1144 static pre_expr
1145 get_or_alloc_expr_for_constant (tree constant)
1146 {
1147 unsigned int result_id;
1148 unsigned int value_id;
1149 struct pre_expr_d expr;
1150 pre_expr newexpr;
1151
1152 expr.kind = CONSTANT;
1153 PRE_EXPR_CONSTANT (&expr) = constant;
1154 result_id = lookup_expression_id (&expr);
1155 if (result_id != 0)
1156 return expression_for_id (result_id);
1157
1158 newexpr = pre_expr_pool.allocate ();
1159 newexpr->kind = CONSTANT;
1160 PRE_EXPR_CONSTANT (newexpr) = constant;
1161 alloc_expression_id (newexpr);
1162 value_id = get_or_alloc_constant_value_id (constant);
1163 add_to_value (value_id, newexpr);
1164 return newexpr;
1165 }
1166
1167 /* Given a value id V, find the actual tree representing the constant
1168 value if there is one, and return it. Return NULL if we can't find
1169 a constant. */
1170
1171 static tree
1172 get_constant_for_value_id (unsigned int v)
1173 {
1174 if (value_id_constant_p (v))
1175 {
1176 unsigned int i;
1177 bitmap_iterator bi;
1178 bitmap exprset = value_expressions[v];
1179
1180 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1181 {
1182 pre_expr expr = expression_for_id (i);
1183 if (expr->kind == CONSTANT)
1184 return PRE_EXPR_CONSTANT (expr);
1185 }
1186 }
1187 return NULL;
1188 }
1189
1190 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1191     Currently supports constants, SSA_NAMEs and NARYs known to SCCVN.  */
1192 static pre_expr
1193 get_or_alloc_expr_for (tree t)
1194 {
1195 if (TREE_CODE (t) == SSA_NAME)
1196 return get_or_alloc_expr_for_name (t);
1197 else if (is_gimple_min_invariant (t))
1198 return get_or_alloc_expr_for_constant (t);
1199 else
1200 {
1201 /* More complex expressions can result from SCCVN expression
1202 simplification that inserts values for them. As they all
1203 do not have VOPs the get handled by the nary ops struct. */
1204 vn_nary_op_t result;
1205 unsigned int result_id;
1206 vn_nary_op_lookup (t, &result);
1207 if (result != NULL)
1208 {
1209 pre_expr e = pre_expr_pool.allocate ();
1210 e->kind = NARY;
1211 PRE_EXPR_NARY (e) = result;
1212 result_id = lookup_expression_id (e);
1213 if (result_id != 0)
1214 {
1215 pre_expr_pool.remove (e);
1216 e = expression_for_id (result_id);
1217 return e;
1218 }
1219 alloc_expression_id (e);
1220 return e;
1221 }
1222 }
1223 return NULL;
1224 }
1225
1226  /* Return a constant pre_expr for E if E, when folded, is a gimple
1227     min_invariant.  Otherwise, return E unchanged.  */
1228
1229 static pre_expr
1230 fully_constant_expression (pre_expr e)
1231 {
1232 switch (e->kind)
1233 {
1234 case CONSTANT:
1235 return e;
1236 case NARY:
1237 {
1238 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1239 switch (TREE_CODE_CLASS (nary->opcode))
1240 {
1241 case tcc_binary:
1242 case tcc_comparison:
1243 {
1244 /* We have to go from trees to pre exprs to value ids to
1245 constants. */
1246 tree naryop0 = nary->op[0];
1247 tree naryop1 = nary->op[1];
1248 tree result;
1249 if (!is_gimple_min_invariant (naryop0))
1250 {
1251 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1252 unsigned int vrep0 = get_expr_value_id (rep0);
1253 tree const0 = get_constant_for_value_id (vrep0);
1254 if (const0)
1255 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1256 }
1257 if (!is_gimple_min_invariant (naryop1))
1258 {
1259 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1260 unsigned int vrep1 = get_expr_value_id (rep1);
1261 tree const1 = get_constant_for_value_id (vrep1);
1262 if (const1)
1263 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1264 }
1265 result = fold_binary (nary->opcode, nary->type,
1266 naryop0, naryop1);
1267 if (result && is_gimple_min_invariant (result))
1268 return get_or_alloc_expr_for_constant (result);
1269            /* We might have simplified the expression to an
1270               SSA_NAME, for example from x_1 * 1.  But we cannot
1271 insert a PHI for x_1 unconditionally as x_1 might
1272 not be available readily. */
1273 return e;
1274 }
1275 case tcc_reference:
1276 if (nary->opcode != REALPART_EXPR
1277 && nary->opcode != IMAGPART_EXPR
1278 && nary->opcode != VIEW_CONVERT_EXPR)
1279 return e;
1280 /* Fallthrough. */
1281 case tcc_unary:
1282 {
1283 /* We have to go from trees to pre exprs to value ids to
1284 constants. */
1285 tree naryop0 = nary->op[0];
1286 tree const0, result;
1287 if (is_gimple_min_invariant (naryop0))
1288 const0 = naryop0;
1289 else
1290 {
1291 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1292 unsigned int vrep0 = get_expr_value_id (rep0);
1293 const0 = get_constant_for_value_id (vrep0);
1294 }
1295 result = NULL;
1296 if (const0)
1297 {
1298 tree type1 = TREE_TYPE (nary->op[0]);
1299 const0 = fold_convert (type1, const0);
1300 result = fold_unary (nary->opcode, nary->type, const0);
1301 }
1302 if (result && is_gimple_min_invariant (result))
1303 return get_or_alloc_expr_for_constant (result);
1304 return e;
1305 }
1306 default:
1307 return e;
1308 }
1309 }
1310 case REFERENCE:
1311 {
1312 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1313 tree folded;
1314 if ((folded = fully_constant_vn_reference_p (ref)))
1315 return get_or_alloc_expr_for_constant (folded);
1316 return e;
1317 }
1318 default:
1319 return e;
1320 }
1321 return e;
1322 }
1323
1324 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1325 it has the value it would have in BLOCK. Set *SAME_VALID to true
1326 in case the new vuse doesn't change the value id of the OPERANDS. */
1327
1328 static tree
1329 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1330 alias_set_type set, tree type, tree vuse,
1331 basic_block phiblock,
1332 basic_block block, bool *same_valid)
1333 {
1334 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1335 ao_ref ref;
1336 edge e = NULL;
1337 bool use_oracle;
1338
1339 *same_valid = true;
1340
1341 if (gimple_bb (phi) != phiblock)
1342 return vuse;
1343
1344 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1345
1346 /* Use the alias-oracle to find either the PHI node in this block,
1347 the first VUSE used in this block that is equivalent to vuse or
1348     the first VUSE whose definition in this block kills the value.  */
1349 if (gimple_code (phi) == GIMPLE_PHI)
1350 e = find_edge (block, phiblock);
1351 else if (use_oracle)
1352 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1353 {
1354 vuse = gimple_vuse (phi);
1355 phi = SSA_NAME_DEF_STMT (vuse);
1356 if (gimple_bb (phi) != phiblock)
1357 return vuse;
1358 if (gimple_code (phi) == GIMPLE_PHI)
1359 {
1360 e = find_edge (block, phiblock);
1361 break;
1362 }
1363 }
1364 else
1365 return NULL_TREE;
1366
1367 if (e)
1368 {
1369 if (use_oracle)
1370 {
1371 bitmap visited = NULL;
1372 unsigned int cnt;
1373 /* Try to find a vuse that dominates this phi node by skipping
1374 non-clobbering statements. */
1375 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1376 NULL, NULL);
1377 if (visited)
1378 BITMAP_FREE (visited);
1379 }
1380 else
1381 vuse = NULL_TREE;
1382 if (!vuse)
1383 {
1384 /* If we didn't find any, the value ID can't stay the same,
1385 but return the translated vuse. */
1386 *same_valid = false;
1387 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1388 }
1389 /* ??? We would like to return vuse here as this is the canonical
1390 upmost vdef that this reference is associated with. But during
1391 insertion of the references into the hash tables we only ever
1392 directly insert with their direct gimple_vuse, hence returning
1393 something else would make us not find the other expression. */
1394 return PHI_ARG_DEF (phi, e->dest_idx);
1395 }
1396
1397 return NULL_TREE;
1398 }
1399
1400 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1401 SET2. This is used to avoid making a set consisting of the union
1402 of PA_IN and ANTIC_IN during insert. */
1403
1404 static inline pre_expr
1405 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1406 {
1407 pre_expr result;
1408
1409 result = bitmap_find_leader (set1, val);
1410 if (!result && set2)
1411 result = bitmap_find_leader (set2, val);
1412 return result;
1413 }
1414
1415  /* Get the tree type for our PRE expression E.  */
1416
1417 static tree
1418 get_expr_type (const pre_expr e)
1419 {
1420 switch (e->kind)
1421 {
1422 case NAME:
1423 return TREE_TYPE (PRE_EXPR_NAME (e));
1424 case CONSTANT:
1425 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1426 case REFERENCE:
1427 return PRE_EXPR_REFERENCE (e)->type;
1428 case NARY:
1429 return PRE_EXPR_NARY (e)->type;
1430 }
1431 gcc_unreachable ();
1432 }
1433
1434 /* Get a representative SSA_NAME for a given expression.
1435 Since all of our sub-expressions are treated as values, we require
1436 them to be SSA_NAME's for simplicity.
1437 Prior versions of GVNPRE used to use "value handles" here, so that
1438 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1439 either case, the operands are really values (IE we do not expect
1440 them to be usable without finding leaders). */
1441
1442 static tree
1443 get_representative_for (const pre_expr e)
1444 {
1445 tree name;
1446 unsigned int value_id = get_expr_value_id (e);
1447
1448 switch (e->kind)
1449 {
1450 case NAME:
1451 return PRE_EXPR_NAME (e);
1452 case CONSTANT:
1453 return PRE_EXPR_CONSTANT (e);
1454 case NARY:
1455 case REFERENCE:
1456 {
1457 /* Go through all of the expressions representing this value
1458 and pick out an SSA_NAME. */
1459 unsigned int i;
1460 bitmap_iterator bi;
1461 bitmap exprs = value_expressions[value_id];
1462 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1463 {
1464 pre_expr rep = expression_for_id (i);
1465 if (rep->kind == NAME)
1466 return PRE_EXPR_NAME (rep);
1467 else if (rep->kind == CONSTANT)
1468 return PRE_EXPR_CONSTANT (rep);
1469 }
1470 }
1471 break;
1472 }
1473
1474 /* If we reached here we couldn't find an SSA_NAME. This can
1475 happen when we've discovered a value that has never appeared in
1476 the program as set to an SSA_NAME, as the result of phi translation.
1477 Create one here.
1478 ??? We should be able to re-use this when we insert the statement
1479 to compute it. */
1480 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1481 VN_INFO_GET (name)->value_id = value_id;
1482 VN_INFO (name)->valnum = name;
1483 /* ??? For now mark this SSA name for release by SCCVN. */
1484 VN_INFO (name)->needs_insertion = true;
1485 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1486 if (dump_file && (dump_flags & TDF_DETAILS))
1487 {
1488 fprintf (dump_file, "Created SSA_NAME representative ");
1489 print_generic_expr (dump_file, name, 0);
1490 fprintf (dump_file, " for expression:");
1491 print_pre_expr (dump_file, e);
1492 fprintf (dump_file, " (%04d)\n", value_id);
1493 }
1494
1495 return name;
1496 }
1497
1498
1499
1500 static pre_expr
1501 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1502 basic_block pred, basic_block phiblock);
1503
1504 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1505 the phis in PRED. Return NULL if we can't find a leader for each part
1506 of the translated expression. */
1507
1508 static pre_expr
1509 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1510 basic_block pred, basic_block phiblock)
1511 {
1512 switch (expr->kind)
1513 {
1514 case NARY:
1515 {
1516 unsigned int i;
1517 bool changed = false;
1518 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1519 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1520 sizeof_vn_nary_op (nary->length));
1521 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1522
1523 for (i = 0; i < newnary->length; i++)
1524 {
1525 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1526 continue;
1527 else
1528 {
1529 pre_expr leader, result;
1530 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1531 leader = find_leader_in_sets (op_val_id, set1, set2);
1532 result = phi_translate (leader, set1, set2, pred, phiblock);
1533 if (result && result != leader)
1534 {
1535 tree name = get_representative_for (result);
1536 if (!name)
1537 return NULL;
1538 newnary->op[i] = name;
1539 }
1540 else if (!result)
1541 return NULL;
1542
1543 changed |= newnary->op[i] != nary->op[i];
1544 }
1545 }
1546 if (changed)
1547 {
1548 pre_expr constant;
1549 unsigned int new_val_id;
1550
1551 PRE_EXPR_NARY (expr) = newnary;
1552 constant = fully_constant_expression (expr);
1553 PRE_EXPR_NARY (expr) = nary;
1554 if (constant != expr)
1555 return constant;
1556
1557 tree result = vn_nary_op_lookup_pieces (newnary->length,
1558 newnary->opcode,
1559 newnary->type,
1560 &newnary->op[0],
1561 &nary);
1562 if (result && is_gimple_min_invariant (result))
1563 return get_or_alloc_expr_for_constant (result);
1564
1565 expr = pre_expr_pool.allocate ();
1566 expr->kind = NARY;
1567 expr->id = 0;
1568 if (nary)
1569 {
1570 PRE_EXPR_NARY (expr) = nary;
1571 new_val_id = nary->value_id;
1572 get_or_alloc_expression_id (expr);
1573 }
1574 else
1575 {
1576 new_val_id = get_next_value_id ();
1577 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1578 nary = vn_nary_op_insert_pieces (newnary->length,
1579 newnary->opcode,
1580 newnary->type,
1581 &newnary->op[0],
1582 result, new_val_id);
1583 PRE_EXPR_NARY (expr) = nary;
1584 get_or_alloc_expression_id (expr);
1585 }
1586 add_to_value (new_val_id, expr);
1587 }
1588 return expr;
1589 }
1590 break;
1591
1592 case REFERENCE:
1593 {
1594 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1595 vec<vn_reference_op_s> operands = ref->operands;
1596 tree vuse = ref->vuse;
1597 tree newvuse = vuse;
1598 vec<vn_reference_op_s> newoperands = vNULL;
1599 bool changed = false, same_valid = true;
1600 unsigned int i, n;
1601 vn_reference_op_t operand;
1602 vn_reference_t newref;
1603
1604 for (i = 0; operands.iterate (i, &operand); i++)
1605 {
1606 pre_expr opresult;
1607 pre_expr leader;
1608 tree op[3];
1609 tree type = operand->type;
1610 vn_reference_op_s newop = *operand;
1611 op[0] = operand->op0;
1612 op[1] = operand->op1;
1613 op[2] = operand->op2;
1614 for (n = 0; n < 3; ++n)
1615 {
1616 unsigned int op_val_id;
1617 if (!op[n])
1618 continue;
1619 if (TREE_CODE (op[n]) != SSA_NAME)
1620 {
1621 /* We can't possibly insert these. */
1622 if (n != 0
1623 && !is_gimple_min_invariant (op[n]))
1624 break;
1625 continue;
1626 }
1627 op_val_id = VN_INFO (op[n])->value_id;
1628 leader = find_leader_in_sets (op_val_id, set1, set2);
1629 if (!leader)
1630 break;
1631 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1632 if (!opresult)
1633 break;
1634 if (opresult != leader)
1635 {
1636 tree name = get_representative_for (opresult);
1637 if (!name)
1638 break;
1639 changed |= name != op[n];
1640 op[n] = name;
1641 }
1642 }
1643 if (n != 3)
1644 {
1645 newoperands.release ();
1646 return NULL;
1647 }
1648 if (!changed)
1649 continue;
1650 if (!newoperands.exists ())
1651 newoperands = operands.copy ();
1652          /* We may have changed from an SSA_NAME to a constant.  */
1653 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1654 newop.opcode = TREE_CODE (op[0]);
1655 newop.type = type;
1656 newop.op0 = op[0];
1657 newop.op1 = op[1];
1658 newop.op2 = op[2];
1659 newoperands[i] = newop;
1660 }
1661 gcc_checking_assert (i == operands.length ());
1662
1663 if (vuse)
1664 {
1665 newvuse = translate_vuse_through_block (newoperands.exists ()
1666 ? newoperands : operands,
1667 ref->set, ref->type,
1668 vuse, phiblock, pred,
1669 &same_valid);
1670 if (newvuse == NULL_TREE)
1671 {
1672 newoperands.release ();
1673 return NULL;
1674 }
1675 }
1676
1677 if (changed || newvuse != vuse)
1678 {
1679 unsigned int new_val_id;
1680 pre_expr constant;
1681
1682 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1683 ref->type,
1684 newoperands.exists ()
1685 ? newoperands : operands,
1686 &newref, VN_WALK);
1687 if (result)
1688 newoperands.release ();
1689
1690          /* We can always insert constants, so if we have a partially
1691             redundant constant load of another type, try to translate it
1692 to a constant of appropriate type. */
1693 if (result && is_gimple_min_invariant (result))
1694 {
1695 tree tem = result;
1696 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1697 {
1698 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1699 if (tem && !is_gimple_min_invariant (tem))
1700 tem = NULL_TREE;
1701 }
1702 if (tem)
1703 return get_or_alloc_expr_for_constant (tem);
1704 }
1705
1706 /* If we'd have to convert things we would need to validate
1707 if we can insert the translated expression. So fail
1708 here for now - we cannot insert an alias with a different
1709 type in the VN tables either, as that would assert. */
1710 if (result
1711 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1712 return NULL;
1713 else if (!result && newref
1714 && !useless_type_conversion_p (ref->type, newref->type))
1715 {
1716 newoperands.release ();
1717 return NULL;
1718 }
1719
1720 expr = pre_expr_pool.allocate ();
1721 expr->kind = REFERENCE;
1722 expr->id = 0;
1723
1724 if (newref)
1725 {
1726 PRE_EXPR_REFERENCE (expr) = newref;
1727 constant = fully_constant_expression (expr);
1728 if (constant != expr)
1729 return constant;
1730
1731 new_val_id = newref->value_id;
1732 get_or_alloc_expression_id (expr);
1733 }
1734 else
1735 {
1736 if (changed || !same_valid)
1737 {
1738 new_val_id = get_next_value_id ();
1739 value_expressions.safe_grow_cleared
1740 (get_max_value_id () + 1);
1741 }
1742 else
1743 new_val_id = ref->value_id;
1744 if (!newoperands.exists ())
1745 newoperands = operands.copy ();
1746 newref = vn_reference_insert_pieces (newvuse, ref->set,
1747 ref->type,
1748 newoperands,
1749 result, new_val_id);
1750 newoperands = vNULL;
1751 PRE_EXPR_REFERENCE (expr) = newref;
1752 constant = fully_constant_expression (expr);
1753 if (constant != expr)
1754 return constant;
1755 get_or_alloc_expression_id (expr);
1756 }
1757 add_to_value (new_val_id, expr);
1758 }
1759 newoperands.release ();
1760 return expr;
1761 }
1762 break;
1763
1764 case NAME:
1765 {
1766 tree name = PRE_EXPR_NAME (expr);
1767 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1768 /* If the SSA name is defined by a PHI node in this block,
1769 translate it. */
1770 if (gimple_code (def_stmt) == GIMPLE_PHI
1771 && gimple_bb (def_stmt) == phiblock)
1772 {
1773 edge e = find_edge (pred, gimple_bb (def_stmt));
1774 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1775
1776 /* Handle constant. */
1777 if (is_gimple_min_invariant (def))
1778 return get_or_alloc_expr_for_constant (def);
1779
1780 return get_or_alloc_expr_for_name (def);
1781 }
1782          /* Otherwise return it unchanged -- it will get removed if its
1783             value is not available in PRED's AVAIL_OUT set of expressions
1784 by the subtraction of TMP_GEN. */
1785 return expr;
1786 }
1787
1788 default:
1789 gcc_unreachable ();
1790 }
1791 }
1792
1793 /* Wrapper around phi_translate_1 providing caching functionality. */
1794
1795 static pre_expr
1796 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1797 basic_block pred, basic_block phiblock)
1798 {
1799 expr_pred_trans_t slot = NULL;
1800 pre_expr phitrans;
1801
1802 if (!expr)
1803 return NULL;
1804
1805 /* Constants contain no values that need translation. */
1806 if (expr->kind == CONSTANT)
1807 return expr;
1808
1809 if (value_id_constant_p (get_expr_value_id (expr)))
1810 return expr;
1811
1812 /* Don't add translations of NAMEs as those are cheap to translate. */
1813 if (expr->kind != NAME)
1814 {
1815 if (phi_trans_add (&slot, expr, pred))
1816 return slot->v;
1817 /* Store NULL for the value we want to return in the case of
1818 recursing. */
1819 slot->v = NULL;
1820 }
1821
1822 /* Translate. */
1823 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1824
1825 if (slot)
1826 {
1827 if (phitrans)
1828 slot->v = phitrans;
1829 else
1830 /* Remove failed translations again, they cause insert
1831 iteration to not pick up new opportunities reliably. */
1832 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1833 }
1834
1835 return phitrans;
1836 }
1837
1838
1839 /* For each expression in SET, translate the values through phi nodes
1840 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1841 expressions in DEST. */
1842
1843 static void
1844 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1845 basic_block phiblock)
1846 {
1847 vec<pre_expr> exprs;
1848 pre_expr expr;
1849 int i;
1850
1851 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1852 {
1853 bitmap_set_copy (dest, set);
1854 return;
1855 }
1856
1857 exprs = sorted_array_from_bitmap_set (set);
1858 FOR_EACH_VEC_ELT (exprs, i, expr)
1859 {
1860 pre_expr translated;
1861 translated = phi_translate (expr, set, NULL, pred, phiblock);
1862 if (!translated)
1863 continue;
1864
1865 /* We might end up with multiple expressions from SET being
1866 translated to the same value. In this case we do not want
1867 to retain the NARY or REFERENCE expression but prefer a NAME
1868 which would be the leader. */
1869 if (translated->kind == NAME)
1870 bitmap_value_replace_in_set (dest, translated);
1871 else
1872 bitmap_value_insert_into_set (dest, translated);
1873 }
1874 exprs.release ();
1875 }
1876
1877 /* Find the leader for a value (i.e., the name representing that
1878 value) in a given set, and return it. Return NULL if no leader
1879 is found. */
1880
1881 static pre_expr
1882 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1883 {
1884 if (value_id_constant_p (val))
1885 {
1886 unsigned int i;
1887 bitmap_iterator bi;
1888 bitmap exprset = value_expressions[val];
1889
1890 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1891 {
1892 pre_expr expr = expression_for_id (i);
1893 if (expr->kind == CONSTANT)
1894 return expr;
1895 }
1896 }
1897 if (bitmap_set_contains_value (set, val))
1898 {
1899 /* Rather than walk the entire bitmap of expressions, and see
1900 whether any of them has the value we are looking for, we look
1901 at the reverse mapping, which tells us the set of expressions
1902 that have a given value (IE value->expressions with that
1903 value) and see if any of those expressions are in our set.
1904 The number of expressions per value is usually significantly
1905 less than the number of expressions in the set. In fact, for
1906 large testcases, doing it this way is roughly 5-10x faster
1907 than walking the bitmap.
1908 If this is somehow a significant loss for some cases, we can
1909 choose which set to walk based on which set is smaller. */
1910 unsigned int i;
1911 bitmap_iterator bi;
1912 bitmap exprset = value_expressions[val];
1913
1914 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1915 return expression_for_id (i);
1916 }
1917 return NULL;
1918 }
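
/* For example (hypothetical numbers): if value-id 7 is represented by
   the expressions { x_1, y_2, a_3 + b_4 }, the lookup above only ANDs
   those three expression bits with SET->expressions instead of
   scanning every expression contained in SET.  */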
1919
1920 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1921 BLOCK by seeing if it is not killed in the block. Note that we are
1922 only determining whether there is a store that kills it. Because
1923 of the order in which clean iterates over values, we are guaranteed
1924 that altered operands will have caused us to be eliminated from the
1925 ANTIC_IN set already. */
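
/* Illustrative sketch (made-up names): for the memory expression
   {*p_2, VUSE .MEM_3}, a statement in BLOCK like

     *q_4 = 0;

   may clobber *p_2 and thus kills the expression, whereas hitting a
   plain load with the same VUSE .MEM_3 lets us stop the walk as
   described above.  */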
1926
1927 static bool
1928 value_dies_in_block_x (pre_expr expr, basic_block block)
1929 {
1930 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1931 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1932 gimple *def;
1933 gimple_stmt_iterator gsi;
1934 unsigned id = get_expression_id (expr);
1935 bool res = false;
1936 ao_ref ref;
1937
1938 if (!vuse)
1939 return false;
1940
1941 /* Lookup a previously calculated result. */
1942 if (EXPR_DIES (block)
1943 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1944 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1945
1946 /* A memory expression {e, VUSE} dies in the block if there is a
1947 statement that may clobber e. If, starting the statement walk from
1948 the top of the basic block, a statement uses VUSE, there can be no
1949 kill in between that use and the original statement that loaded {e, VUSE},
1950 so we can stop walking. */
1951 ref.base = NULL_TREE;
1952 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1953 {
1954 tree def_vuse, def_vdef;
1955 def = gsi_stmt (gsi);
1956 def_vuse = gimple_vuse (def);
1957 def_vdef = gimple_vdef (def);
1958
1959 /* Not a memory statement. */
1960 if (!def_vuse)
1961 continue;
1962
1963 /* Not a may-def. */
1964 if (!def_vdef)
1965 {
1966 /* A load with the same VUSE, we're done. */
1967 if (def_vuse == vuse)
1968 break;
1969
1970 continue;
1971 }
1972
1973 /* Init ref only if we really need it. */
1974 if (ref.base == NULL_TREE
1975 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1976 refx->operands))
1977 {
1978 res = true;
1979 break;
1980 }
1981 /* If the statement may clobber expr, it dies. */
1982 if (stmt_may_clobber_ref_p_1 (def, &ref))
1983 {
1984 res = true;
1985 break;
1986 }
1987 }
1988
1989 /* Remember the result. */
1990 if (!EXPR_DIES (block))
1991 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1992 bitmap_set_bit (EXPR_DIES (block), id * 2);
1993 if (res)
1994 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1995
1996 return res;
1997 }
1998
1999
2000 /* Determine if OP is valid in SET1 U SET2, which it is when the union
2001 contains its value-id. */
2002
2003 static bool
2004 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
2005 {
2006 if (op && TREE_CODE (op) == SSA_NAME)
2007 {
2008 unsigned int value_id = VN_INFO (op)->value_id;
2009 if (!(bitmap_set_contains_value (set1, value_id)
2010 || (set2 && bitmap_set_contains_value (set2, value_id))))
2011 return false;
2012 }
2013 return true;
2014 }
2015
2016 /* Determine if the expression EXPR is valid in SET1 U SET2.
2017 ONLY SET2 CAN BE NULL.
2018 This means that we have a leader for each part of the expression
2019 (if it consists of values), or the expression is an SSA_NAME.
2020 For loads/calls, we also see if the vuse is killed in this block. */
2021
2022 static bool
2023 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
2024 {
2025 switch (expr->kind)
2026 {
2027 case NAME:
2028 /* By construction all NAMEs are available. Non-available
2029 NAMEs are removed by subtracting TMP_GEN from the sets. */
2030 return true;
2031 case NARY:
2032 {
2033 unsigned int i;
2034 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2035 for (i = 0; i < nary->length; i++)
2036 if (!op_valid_in_sets (set1, set2, nary->op[i]))
2037 return false;
2038 return true;
2039 }
2040 break;
2041 case REFERENCE:
2042 {
2043 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2044 vn_reference_op_t vro;
2045 unsigned int i;
2046
2047 FOR_EACH_VEC_ELT (ref->operands, i, vro)
2048 {
2049 if (!op_valid_in_sets (set1, set2, vro->op0)
2050 || !op_valid_in_sets (set1, set2, vro->op1)
2051 || !op_valid_in_sets (set1, set2, vro->op2))
2052 return false;
2053 }
2054 return true;
2055 }
2056 default:
2057 gcc_unreachable ();
2058 }
2059 }
2060
2061 /* Clean the set of expressions that are no longer valid in SET1 or
2062 SET2. This means expressions that are made up of values we have no
2063 leaders for in SET1 or SET2. This version is used for partial
2064 anticipation, where an expression remains valid if its operands
2065 have leaders in either ANTIC_IN or PA_IN. */
2066
2067 static void
2068 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
2069 {
2070 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2071 pre_expr expr;
2072 int i;
2073
2074 FOR_EACH_VEC_ELT (exprs, i, expr)
2075 {
2076 if (!valid_in_sets (set1, set2, expr))
2077 bitmap_remove_from_set (set1, expr);
2078 }
2079 exprs.release ();
2080 }
2081
2082 /* Clean the set of expressions that are no longer valid in SET. This
2083 means expressions that are made up of values we have no leaders for
2084 in SET. */
2085
2086 static void
2087 clean (bitmap_set_t set)
2088 {
2089 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2090 pre_expr expr;
2091 int i;
2092
2093 FOR_EACH_VEC_ELT (exprs, i, expr)
2094 {
2095 if (!valid_in_sets (set, NULL, expr))
2096 bitmap_remove_from_set (set, expr);
2097 }
2098 exprs.release ();
2099 }
2100
2101 /* Clean the set of expressions that are no longer valid in SET because
2102 they are clobbered in BLOCK or because they trap and may not be executed. */
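
/* For instance (hypothetical): if BLOCK contains a call that may not
   return,

     foo ();
     tem_1 = x_2 / y_3;

   a possibly trapping division like x_2 / y_3 is pruned, since keeping
   it anticipatable could move the trap before the call and execute it
   on paths where the original program never would.  */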
2103
2104 static void
2105 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2106 {
2107 bitmap_iterator bi;
2108 unsigned i;
2109
2110 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2111 {
2112 pre_expr expr = expression_for_id (i);
2113 if (expr->kind == REFERENCE)
2114 {
2115 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2116 if (ref->vuse)
2117 {
2118 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2119 if (!gimple_nop_p (def_stmt)
2120 && ((gimple_bb (def_stmt) != block
2121 && !dominated_by_p (CDI_DOMINATORS,
2122 block, gimple_bb (def_stmt)))
2123 || (gimple_bb (def_stmt) == block
2124 && value_dies_in_block_x (expr, block))))
2125 bitmap_remove_from_set (set, expr);
2126 }
2127 }
2128 else if (expr->kind == NARY)
2129 {
2130 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2131 /* If the NARY may trap, make sure the block does not contain
2132 a possible exit point.
2133 ??? This is overly conservative if we translate AVAIL_OUT
2134 as the available expression might be after the exit point. */
2135 if (BB_MAY_NOTRETURN (block)
2136 && vn_nary_may_trap (nary))
2137 bitmap_remove_from_set (set, expr);
2138 }
2139 }
2140 }
2141
2142 static sbitmap has_abnormal_preds;
2143
2144 /* Compute the ANTIC set for BLOCK.
2145
2146 If succs(BLOCK) > 1 then
2147 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2148 else if succs(BLOCK) == 1 then
2149 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2150
2151 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2152 */
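
/* As a small worked example (made-up names), consider the diamond

     BB1: if (c_1) ...
     BB2: x_2 = a_1 + b_1;      BB3: (empty)
     BB4: y_3 = a_1 + b_1;

   where BB1 branches to BB2/BB3 and both fall through to BB4.
   a_1 + b_1 is in ANTIC_IN (BB4) via EXP_GEN, is translated into
   ANTIC_OUT of BB2 and BB3 through their single successor, and
   survives the intersection at BB1 because it is anticipatable
   along both paths.  */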
2153
2154 static bool
2155 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2156 {
2157 bool changed = false;
2158 bitmap_set_t S, old, ANTIC_OUT;
2159 bitmap_iterator bi;
2160 unsigned int bii;
2161 edge e;
2162 edge_iterator ei;
2163 bool was_visited = BB_VISITED (block);
2164
2165 old = ANTIC_OUT = S = NULL;
2166 BB_VISITED (block) = 1;
2167
2168 /* If any edges from predecessors are abnormal, antic_in is empty,
2169 so do nothing. */
2170 if (block_has_abnormal_pred_edge)
2171 goto maybe_dump_sets;
2172
2173 old = ANTIC_IN (block);
2174 ANTIC_OUT = bitmap_set_new ();
2175
2176 /* If the block has no successors, ANTIC_OUT is empty. */
2177 if (EDGE_COUNT (block->succs) == 0)
2178 ;
2179 /* If we have one successor, we could have some phi nodes to
2180 translate through. */
2181 else if (single_succ_p (block))
2182 {
2183 basic_block succ_bb = single_succ (block);
2184 gcc_assert (BB_VISITED (succ_bb));
2185 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2186 }
2187 /* If we have multiple successors, we take the intersection of all of
2188 them. Note that in the case of loop exit phi nodes, we may have
2189 phis to translate through. */
2190 else
2191 {
2192 size_t i;
2193 basic_block bprime, first = NULL;
2194
2195 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2196 FOR_EACH_EDGE (e, ei, block->succs)
2197 {
2198 if (!first
2199 && BB_VISITED (e->dest))
2200 first = e->dest;
2201 else if (BB_VISITED (e->dest))
2202 worklist.quick_push (e->dest);
2203 else
2204 {
2205 /* Unvisited successors get their ANTIC_IN replaced by the
2206 maximal set to arrive at a maximum ANTIC_IN solution.
2207 We can ignore them in the intersection operation and thus
2208 need not explicitly represent that maximum solution. */
2209 if (dump_file && (dump_flags & TDF_DETAILS))
2210 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2211 e->src->index, e->dest->index);
2212 }
2213 }
2214
2215 /* With multiple successors we must already have visited at least
2216 one, which is guaranteed by the iteration order. */
2217 gcc_assert (first != NULL);
2218
2219 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2220
2221 FOR_EACH_VEC_ELT (worklist, i, bprime)
2222 {
2223 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2224 {
2225 bitmap_set_t tmp = bitmap_set_new ();
2226 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2227 bitmap_set_and (ANTIC_OUT, tmp);
2228 bitmap_set_free (tmp);
2229 }
2230 else
2231 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2232 }
2233 }
2234
2235 /* Prune expressions that are clobbered in block and thus become
2236 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2237 prune_clobbered_mems (ANTIC_OUT, block);
2238
2239 /* Generate ANTIC_OUT - TMP_GEN. */
2240 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2241
2242 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2243 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2244 TMP_GEN (block));
2245
2246 /* Then union in the ANTIC_OUT - TMP_GEN values,
2247 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2248 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2249 bitmap_value_insert_into_set (ANTIC_IN (block),
2250 expression_for_id (bii));
2251
2252 clean (ANTIC_IN (block));
2253
2254 if (!was_visited || !bitmap_set_equal (old, ANTIC_IN (block)))
2255 changed = true;
2256
2257 maybe_dump_sets:
2258 if (dump_file && (dump_flags & TDF_DETAILS))
2259 {
2260 if (ANTIC_OUT)
2261 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2262
2263 if (changed)
2264 fprintf (dump_file, "[changed] ");
2265 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2266 block->index);
2267
2268 if (S)
2269 print_bitmap_set (dump_file, S, "S", block->index);
2270 }
2271 if (old)
2272 bitmap_set_free (old);
2273 if (S)
2274 bitmap_set_free (S);
2275 if (ANTIC_OUT)
2276 bitmap_set_free (ANTIC_OUT);
2277 return changed;
2278 }
2279
2280 /* Compute PARTIAL_ANTIC for BLOCK.
2281
2282 If succs(BLOCK) > 1 then
2283 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2284 in ANTIC_OUT for all succ(BLOCK)
2285 else if succs(BLOCK) == 1 then
2286 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2287
2288 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2289 - ANTIC_IN[BLOCK])
2290
2291 */
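
/* Illustrative example (made-up names): with BLOCK branching to BB2
   and BB3,

     BB2: y_2 = a_1 + b_1;
     BB3: (no computation of a_1 + b_1)

   a_1 + b_1 is dropped from ANTIC_OUT (BLOCK) by the intersection, but
   the value-wise union above keeps it in PA_OUT (BLOCK), so it stays a
   candidate for partial-partial insertion.  */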
2292 static void
2293 compute_partial_antic_aux (basic_block block,
2294 bool block_has_abnormal_pred_edge)
2295 {
2296 bitmap_set_t old_PA_IN;
2297 bitmap_set_t PA_OUT;
2298 edge e;
2299 edge_iterator ei;
2300 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2301
2302 old_PA_IN = PA_OUT = NULL;
2303
2304 /* If any edges from predecessors are abnormal, PA_IN is empty,
2305 so do nothing. */
2306 if (block_has_abnormal_pred_edge)
2307 goto maybe_dump_sets;
2308
2309 /* If there are too many partially anticipatable values in the
2310 block, phi_translate_set can take an exponential time: stop
2311 before the translation starts. */
2312 if (max_pa
2313 && single_succ_p (block)
2314 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2315 goto maybe_dump_sets;
2316
2317 old_PA_IN = PA_IN (block);
2318 PA_OUT = bitmap_set_new ();
2319
2320 /* If the block has no successors, PA_OUT is empty. */
2321 if (EDGE_COUNT (block->succs) == 0)
2322 ;
2323 /* If we have one successor, we could have some phi nodes to
2324 translate through. Note that we can't phi translate across DFS
2325 back edges in partial antic, because it uses a union operation on
2326 the successors. For recurrences like IV's, we will end up
2327 generating a new value in the set on each go around (i + 3 (VH.1)
2328 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2329 else if (single_succ_p (block))
2330 {
2331 basic_block succ = single_succ (block);
2332 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2333 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2334 }
2335 /* If we have multiple successors, we take the union of all of
2336 them. */
2337 else
2338 {
2339 size_t i;
2340 basic_block bprime;
2341
2342 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2343 FOR_EACH_EDGE (e, ei, block->succs)
2344 {
2345 if (e->flags & EDGE_DFS_BACK)
2346 continue;
2347 worklist.quick_push (e->dest);
2348 }
2349 if (worklist.length () > 0)
2350 {
2351 FOR_EACH_VEC_ELT (worklist, i, bprime)
2352 {
2353 unsigned int i;
2354 bitmap_iterator bi;
2355
2356 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2357 bitmap_value_insert_into_set (PA_OUT,
2358 expression_for_id (i));
2359 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2360 {
2361 bitmap_set_t pa_in = bitmap_set_new ();
2362 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2363 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2364 bitmap_value_insert_into_set (PA_OUT,
2365 expression_for_id (i));
2366 bitmap_set_free (pa_in);
2367 }
2368 else
2369 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2370 bitmap_value_insert_into_set (PA_OUT,
2371 expression_for_id (i));
2372 }
2373 }
2374 }
2375
2376 /* Prune expressions that are clobbered in block and thus become
2377 invalid if translated from PA_OUT to PA_IN. */
2378 prune_clobbered_mems (PA_OUT, block);
2379
2380 /* PA_IN starts with PA_OUT - TMP_GEN.
2381 Then we subtract things from ANTIC_IN. */
2382 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2383
2384 /* For partial antic, we want to put back in the phi results, since
2385 we will properly avoid making them partially antic over backedges. */
2386 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2387 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2388
2389 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2390 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2391
2392 dependent_clean (PA_IN (block), ANTIC_IN (block));
2393
2394 maybe_dump_sets:
2395 if (dump_file && (dump_flags & TDF_DETAILS))
2396 {
2397 if (PA_OUT)
2398 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2399
2400 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2401 }
2402 if (old_PA_IN)
2403 bitmap_set_free (old_PA_IN);
2404 if (PA_OUT)
2405 bitmap_set_free (PA_OUT);
2406 }
2407
2408 /* Compute ANTIC and partial ANTIC sets. */
2409
2410 static void
2411 compute_antic (void)
2412 {
2413 bool changed = true;
2414 int num_iterations = 0;
2415 basic_block block;
2416 int i;
2417 edge_iterator ei;
2418 edge e;
2419
2420 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2421 We pre-build the map of blocks with incoming abnormal edges here. */
2422 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2423 bitmap_clear (has_abnormal_preds);
2424
2425 FOR_ALL_BB_FN (block, cfun)
2426 {
2427 BB_VISITED (block) = 0;
2428
2429 FOR_EACH_EDGE (e, ei, block->preds)
2430 if (e->flags & EDGE_ABNORMAL)
2431 {
2432 bitmap_set_bit (has_abnormal_preds, block->index);
2433
2434 /* We also anticipate nothing. */
2435 BB_VISITED (block) = 1;
2436 break;
2437 }
2438
2439 /* While we are here, give empty ANTIC_IN sets to each block. */
2440 ANTIC_IN (block) = bitmap_set_new ();
2441 if (do_partial_partial)
2442 PA_IN (block) = bitmap_set_new ();
2443 }
2444
2445 /* At the exit block we anticipate nothing. */
2446 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2447
2448 /* For ANTIC computation we need a postorder that also guarantees that
2449 a block with a single successor is visited after its successor.
2450 RPO on the inverted CFG has this property. */
2451 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
2452 int postorder_num = inverted_post_order_compute (postorder);
2453
2454 sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2455 bitmap_ones (worklist);
2456 while (changed)
2457 {
2458 if (dump_file && (dump_flags & TDF_DETAILS))
2459 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2460 /* ??? We need to clear our PHI translation cache here as the
2461 ANTIC sets shrink and we restrict valid translations to
2462 those having operands with leaders in ANTIC. Same below
2463 for PA ANTIC computation. */
2464 num_iterations++;
2465 changed = false;
2466 for (i = postorder_num - 1; i >= 0; i--)
2467 {
2468 if (bitmap_bit_p (worklist, postorder[i]))
2469 {
2470 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2471 bitmap_clear_bit (worklist, block->index);
2472 if (compute_antic_aux (block,
2473 bitmap_bit_p (has_abnormal_preds,
2474 block->index)))
2475 {
2476 FOR_EACH_EDGE (e, ei, block->preds)
2477 bitmap_set_bit (worklist, e->src->index);
2478 changed = true;
2479 }
2480 }
2481 }
2482 /* Theoretically possible, but *highly* unlikely. */
2483 gcc_checking_assert (num_iterations < 500);
2484 }
2485
2486 statistics_histogram_event (cfun, "compute_antic iterations",
2487 num_iterations);
2488
2489 if (do_partial_partial)
2490 {
2491 /* For partial antic we ignore backedges and thus we do not need
2492 to perform any iteration when we process blocks in postorder. */
2493 postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
2494 for (i = postorder_num - 1 ; i >= 0; i--)
2495 {
2496 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2497 compute_partial_antic_aux (block,
2498 bitmap_bit_p (has_abnormal_preds,
2499 block->index));
2500 }
2501 }
2502
2503 sbitmap_free (has_abnormal_preds);
2504 sbitmap_free (worklist);
2505 free (postorder);
2506 }
2507
2508
2509 /* Inserted expressions are placed onto this worklist, which is used
2510 for performing quick dead code elimination of insertions we made
2511 that didn't turn out to be necessary. */
2512 static bitmap inserted_exprs;
2513
2514 /* The actual worker for create_component_ref_by_pieces. */
2515
2516 static tree
2517 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2518 unsigned int *operand, gimple_seq *stmts)
2519 {
2520 vn_reference_op_t currop = &ref->operands[*operand];
2521 tree genop;
2522 ++*operand;
2523 switch (currop->opcode)
2524 {
2525 case CALL_EXPR:
2526 gcc_unreachable ();
2527
2528 case MEM_REF:
2529 {
2530 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2531 stmts);
2532 if (!baseop)
2533 return NULL_TREE;
2534 tree offset = currop->op0;
2535 if (TREE_CODE (baseop) == ADDR_EXPR
2536 && handled_component_p (TREE_OPERAND (baseop, 0)))
2537 {
2538 HOST_WIDE_INT off;
2539 tree base;
2540 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2541 &off);
2542 gcc_assert (base);
2543 offset = int_const_binop (PLUS_EXPR, offset,
2544 build_int_cst (TREE_TYPE (offset),
2545 off));
2546 baseop = build_fold_addr_expr (base);
2547 }
2548 genop = build2 (MEM_REF, currop->type, baseop, offset);
2549 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2550 MR_DEPENDENCE_BASE (genop) = currop->base;
2551 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2552 return genop;
2553 }
2554
2555 case TARGET_MEM_REF:
2556 {
2557 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2558 vn_reference_op_t nextop = &ref->operands[++*operand];
2559 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2560 stmts);
2561 if (!baseop)
2562 return NULL_TREE;
2563 if (currop->op0)
2564 {
2565 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2566 if (!genop0)
2567 return NULL_TREE;
2568 }
2569 if (nextop->op0)
2570 {
2571 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2572 if (!genop1)
2573 return NULL_TREE;
2574 }
2575 genop = build5 (TARGET_MEM_REF, currop->type,
2576 baseop, currop->op2, genop0, currop->op1, genop1);
2577
2578 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2579 MR_DEPENDENCE_BASE (genop) = currop->base;
2580 return genop;
2581 }
2582
2583 case ADDR_EXPR:
2584 if (currop->op0)
2585 {
2586 gcc_assert (is_gimple_min_invariant (currop->op0));
2587 return currop->op0;
2588 }
2589 /* Fallthrough. */
2590 case REALPART_EXPR:
2591 case IMAGPART_EXPR:
2592 case VIEW_CONVERT_EXPR:
2593 {
2594 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2595 stmts);
2596 if (!genop0)
2597 return NULL_TREE;
2598 return fold_build1 (currop->opcode, currop->type, genop0);
2599 }
2600
2601 case WITH_SIZE_EXPR:
2602 {
2603 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2604 stmts);
2605 if (!genop0)
2606 return NULL_TREE;
2607 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2608 if (!genop1)
2609 return NULL_TREE;
2610 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2611 }
2612
2613 case BIT_FIELD_REF:
2614 {
2615 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2616 stmts);
2617 if (!genop0)
2618 return NULL_TREE;
2619 tree op1 = currop->op0;
2620 tree op2 = currop->op1;
2621 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2622 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2623 return fold (t);
2624 }
2625
2626 /* For array ref vn_reference_op's, operand 1 of the array ref
2627 is op0 of the reference op and operand 3 of the array ref is
2628 op1. */
2629 case ARRAY_RANGE_REF:
2630 case ARRAY_REF:
2631 {
2632 tree genop0;
2633 tree genop1 = currop->op0;
2634 tree genop2 = currop->op1;
2635 tree genop3 = currop->op2;
2636 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2637 stmts);
2638 if (!genop0)
2639 return NULL_TREE;
2640 genop1 = find_or_generate_expression (block, genop1, stmts);
2641 if (!genop1)
2642 return NULL_TREE;
2643 if (genop2)
2644 {
2645 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2646 /* Drop zero minimum index if redundant. */
2647 if (integer_zerop (genop2)
2648 && (!domain_type
2649 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2650 genop2 = NULL_TREE;
2651 else
2652 {
2653 genop2 = find_or_generate_expression (block, genop2, stmts);
2654 if (!genop2)
2655 return NULL_TREE;
2656 }
2657 }
2658 if (genop3)
2659 {
2660 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2661 /* We can't always put a size in units of the element alignment
2662 here as the element alignment may not be visible. See
2663 PR43783. Simply drop the element size for constant
2664 sizes. */
2665 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2666 genop3 = NULL_TREE;
2667 else
2668 {
2669 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2670 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2671 genop3 = find_or_generate_expression (block, genop3, stmts);
2672 if (!genop3)
2673 return NULL_TREE;
2674 }
2675 }
2676 return build4 (currop->opcode, currop->type, genop0, genop1,
2677 genop2, genop3);
2678 }
2679 case COMPONENT_REF:
2680 {
2681 tree op0;
2682 tree op1;
2683 tree genop2 = currop->op1;
2684 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2685 if (!op0)
2686 return NULL_TREE;
2687 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2688 op1 = currop->op0;
2689 if (genop2)
2690 {
2691 genop2 = find_or_generate_expression (block, genop2, stmts);
2692 if (!genop2)
2693 return NULL_TREE;
2694 }
2695 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2696 }
2697
2698 case SSA_NAME:
2699 {
2700 genop = find_or_generate_expression (block, currop->op0, stmts);
2701 return genop;
2702 }
2703 case STRING_CST:
2704 case INTEGER_CST:
2705 case COMPLEX_CST:
2706 case VECTOR_CST:
2707 case REAL_CST:
2708 case CONSTRUCTOR:
2709 case VAR_DECL:
2710 case PARM_DECL:
2711 case CONST_DECL:
2712 case RESULT_DECL:
2713 case FUNCTION_DECL:
2714 return currop->op0;
2715
2716 default:
2717 gcc_unreachable ();
2718 }
2719 }
2720
2721 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2722 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2723 trying to rename aggregates into SSA form directly, which is a no-no.
2724
2725 Thus, this routine doesn't create temporaries, it just builds a
2726 single access expression for the array, calling
2727 find_or_generate_expression to build the innermost pieces.
2728
2729 This function is a subroutine of create_expression_by_pieces, and
2730 should not be called on its own unless you really know what you
2731 are doing. */
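
/* For example (sketch only, made-up names): for a reference like
   a_1->f[i_2] the operand walk rebuilds the access tree

     MEM_REF -> COMPONENT_REF (.f) -> ARRAY_REF ([i_2])

   as a single expression, with find_or_generate_expression supplying
   SSA leaders for the innermost pieces such as a_1 and i_2.  */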
2732
2733 static tree
2734 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2735 gimple_seq *stmts)
2736 {
2737 unsigned int op = 0;
2738 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2739 }
2740
2741 /* Find a simple leader for an expression, or generate one using
2742 create_expression_by_pieces from a NARY expression for the value.
2743 BLOCK is the basic_block we are looking for leaders in.
2744 OP is the tree expression to find a leader for or generate.
2745 Returns the leader or NULL_TREE on failure. */
2746
2747 static tree
2748 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2749 {
2750 pre_expr expr = get_or_alloc_expr_for (op);
2751 unsigned int lookfor = get_expr_value_id (expr);
2752 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2753 if (leader)
2754 {
2755 if (leader->kind == NAME)
2756 return PRE_EXPR_NAME (leader);
2757 else if (leader->kind == CONSTANT)
2758 return PRE_EXPR_CONSTANT (leader);
2759
2760 /* Defer. */
2761 return NULL_TREE;
2762 }
2763
2764 /* It must be a complex expression, so generate it recursively. Note
2765 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2766 where the insert algorithm fails to insert a required expression. */
2767 bitmap exprset = value_expressions[lookfor];
2768 bitmap_iterator bi;
2769 unsigned int i;
2770 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2771 {
2772 pre_expr temp = expression_for_id (i);
2773 /* We cannot insert random REFERENCE expressions at arbitrary
2774 places. We can insert NARYs which eventually re-materializes
2775 its operand values. */
2776 if (temp->kind == NARY)
2777 return create_expression_by_pieces (block, temp, stmts,
2778 get_expr_type (expr));
2779 }
2780
2781 /* Defer. */
2782 return NULL_TREE;
2783 }
2784
2785 #define NECESSARY GF_PLF_1
2786
2787 /* Create an expression in pieces, so that we can handle very complex
2788 expressions that may be ANTIC, but not necessarily GIMPLE.
2789 BLOCK is the basic block the expression will be inserted into,
2790 EXPR is the expression to insert (in value form)
2791 STMTS is a statement list to append the necessary insertions into.
2792
2793 This function will die if we hit some value that shouldn't be
2794 ANTIC but is (IE there is no leader for it or for its components).
2795 The function returns NULL_TREE in case a different antic expression
2796 has to be inserted first.
2797 This function may also generate expressions that are themselves
2798 partially or fully redundant. Those that are will be either made
2799 fully redundant during the next iteration of insert (for partially
2800 redundant ones), or eliminated by eliminate (for fully redundant
2801 ones). */
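
/* For example (hypothetical names): to insert the value of
   (a_1 + b_2) * c_3 where a_1 + b_2 already has the leader t_4 in
   AVAIL_OUT, we generate

     pretmp_5 = t_4 * c_3;

   append it to STMTS, give pretmp_5 the value-id of EXPR and return
   pretmp_5.  */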
2802
2803 static tree
2804 create_expression_by_pieces (basic_block block, pre_expr expr,
2805 gimple_seq *stmts, tree type)
2806 {
2807 tree name;
2808 tree folded;
2809 gimple_seq forced_stmts = NULL;
2810 unsigned int value_id;
2811 gimple_stmt_iterator gsi;
2812 tree exprtype = type ? type : get_expr_type (expr);
2813 pre_expr nameexpr;
2814 gassign *newstmt;
2815
2816 switch (expr->kind)
2817 {
2818 /* We may hit the NAME/CONSTANT case if we have to convert types
2819 that value numbering saw through. */
2820 case NAME:
2821 folded = PRE_EXPR_NAME (expr);
2822 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2823 return folded;
2824 break;
2825 case CONSTANT:
2826 {
2827 folded = PRE_EXPR_CONSTANT (expr);
2828 tree tem = fold_convert (exprtype, folded);
2829 if (is_gimple_min_invariant (tem))
2830 return tem;
2831 break;
2832 }
2833 case REFERENCE:
2834 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2835 {
2836 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2837 unsigned int operand = 1;
2838 vn_reference_op_t currop = &ref->operands[0];
2839 tree sc = NULL_TREE;
2840 tree fn;
2841 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2842 fn = currop->op0;
2843 else
2844 fn = find_or_generate_expression (block, currop->op0, stmts);
2845 if (!fn)
2846 return NULL_TREE;
2847 if (currop->op1)
2848 {
2849 sc = find_or_generate_expression (block, currop->op1, stmts);
2850 if (!sc)
2851 return NULL_TREE;
2852 }
2853 auto_vec<tree> args (ref->operands.length () - 1);
2854 while (operand < ref->operands.length ())
2855 {
2856 tree arg = create_component_ref_by_pieces_1 (block, ref,
2857 &operand, stmts);
2858 if (!arg)
2859 return NULL_TREE;
2860 args.quick_push (arg);
2861 }
2862 gcall *call
2863 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2864 ? build_fold_addr_expr (fn) : fn), args);
2865 gimple_call_set_with_bounds (call, currop->with_bounds);
2866 if (sc)
2867 gimple_call_set_chain (call, sc);
2868 tree forcedname = make_ssa_name (currop->type);
2869 gimple_call_set_lhs (call, forcedname);
2870 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2871 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2872 folded = forcedname;
2873 }
2874 else
2875 {
2876 folded = create_component_ref_by_pieces (block,
2877 PRE_EXPR_REFERENCE (expr),
2878 stmts);
2879 if (!folded)
2880 return NULL_TREE;
2881 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2882 newstmt = gimple_build_assign (name, folded);
2883 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2884 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2885 folded = name;
2886 }
2887 break;
2888 case NARY:
2889 {
2890 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2891 tree *genop = XALLOCAVEC (tree, nary->length);
2892 unsigned i;
2893 for (i = 0; i < nary->length; ++i)
2894 {
2895 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2896 if (!genop[i])
2897 return NULL_TREE;
2898 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2899 may have conversions stripped. */
2900 if (nary->opcode == POINTER_PLUS_EXPR)
2901 {
2902 if (i == 0)
2903 genop[i] = gimple_convert (&forced_stmts,
2904 nary->type, genop[i]);
2905 else if (i == 1)
2906 genop[i] = gimple_convert (&forced_stmts,
2907 sizetype, genop[i]);
2908 }
2909 else
2910 genop[i] = gimple_convert (&forced_stmts,
2911 TREE_TYPE (nary->op[i]), genop[i]);
2912 }
2913 if (nary->opcode == CONSTRUCTOR)
2914 {
2915 vec<constructor_elt, va_gc> *elts = NULL;
2916 for (i = 0; i < nary->length; ++i)
2917 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2918 folded = build_constructor (nary->type, elts);
2919 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2920 newstmt = gimple_build_assign (name, folded);
2921 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2922 folded = name;
2923 }
2924 else
2925 {
2926 switch (nary->length)
2927 {
2928 case 1:
2929 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2930 genop[0]);
2931 break;
2932 case 2:
2933 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2934 genop[0], genop[1]);
2935 break;
2936 case 3:
2937 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2938 genop[0], genop[1], genop[2]);
2939 break;
2940 default:
2941 gcc_unreachable ();
2942 }
2943 }
2944 }
2945 break;
2946 default:
2947 gcc_unreachable ();
2948 }
2949
2950 folded = gimple_convert (&forced_stmts, exprtype, folded);
2951
2952 /* If there is nothing to insert, return the simplified result. */
2953 if (gimple_seq_empty_p (forced_stmts))
2954 return folded;
2955 /* If we simplified to a constant, return it and discard any
2956 stmts we built. */
2957 if (is_gimple_min_invariant (folded))
2958 {
2959 gimple_seq_discard (forced_stmts);
2960 return folded;
2961 }
2962
2963 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2964
2965 /* If we have any intermediate expressions to the value sets, add them
2966 to the value sets and chain them in the instruction stream. */
2967 if (forced_stmts)
2968 {
2969 gsi = gsi_start (forced_stmts);
2970 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2971 {
2972 gimple *stmt = gsi_stmt (gsi);
2973 tree forcedname = gimple_get_lhs (stmt);
2974 pre_expr nameexpr;
2975
2976 if (forcedname != folded)
2977 {
2978 VN_INFO_GET (forcedname)->valnum = forcedname;
2979 VN_INFO (forcedname)->value_id = get_next_value_id ();
2980 nameexpr = get_or_alloc_expr_for_name (forcedname);
2981 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2982 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2983 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2984 }
2985
2986 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2987 gimple_set_plf (stmt, NECESSARY, false);
2988 }
2989 gimple_seq_add_seq (stmts, forced_stmts);
2990 }
2991
2992 name = folded;
2993
2994 /* Fold the last statement. */
2995 gsi = gsi_last (*stmts);
2996 if (fold_stmt_inplace (&gsi))
2997 update_stmt (gsi_stmt (gsi));
2998
2999 /* Add a value number to the temporary.
3000 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3001 we are creating the expression by pieces, and this particular piece of
3002 the expression may have been represented. There is no harm in replacing
3003 here. */
3004 value_id = get_expr_value_id (expr);
3005 VN_INFO_GET (name)->value_id = value_id;
3006 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
3007 if (VN_INFO (name)->valnum == NULL_TREE)
3008 VN_INFO (name)->valnum = name;
3009 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
3010 nameexpr = get_or_alloc_expr_for_name (name);
3011 add_to_value (value_id, nameexpr);
3012 if (NEW_SETS (block))
3013 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3014 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3015
3016 pre_stats.insertions++;
3017 if (dump_file && (dump_flags & TDF_DETAILS))
3018 {
3019 fprintf (dump_file, "Inserted ");
3020 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0, 0);
3021 fprintf (dump_file, " in predecessor %d (%04d)\n",
3022 block->index, value_id);
3023 }
3024
3025 return name;
3026 }
3027
3028
3029 /* Insert the to-be-made-available values of expression EXPRNUM for each
3030 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3031 merge the result with a phi node, given the same value number as
3032 the expression EXPRNUM. Return true if we have inserted new stuff. */
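
/* For example (made-up names): with the value available as t_3 in one
   predecessor and freshly inserted as pretmp_4 in the other, we merge
   them with

     prephitmp_5 = PHI <t_3 (pred1), pretmp_4 (pred2)>

   and register prephitmp_5 under the expression's value number so
   elimination can later use it as the leader.  */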
3033
3034 static bool
3035 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3036 vec<pre_expr> avail)
3037 {
3038 pre_expr expr = expression_for_id (exprnum);
3039 pre_expr newphi;
3040 unsigned int val = get_expr_value_id (expr);
3041 edge pred;
3042 bool insertions = false;
3043 bool nophi = false;
3044 basic_block bprime;
3045 pre_expr eprime;
3046 edge_iterator ei;
3047 tree type = get_expr_type (expr);
3048 tree temp;
3049 gphi *phi;
3050
3051 /* Make sure we aren't creating an induction variable. */
3052 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3053 {
3054 bool firstinsideloop = false;
3055 bool secondinsideloop = false;
3056 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3057 EDGE_PRED (block, 0)->src);
3058 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3059 EDGE_PRED (block, 1)->src);
3060 /* Induction variables only have one edge inside the loop. */
3061 if ((firstinsideloop ^ secondinsideloop)
3062 && expr->kind != REFERENCE)
3063 {
3064 if (dump_file && (dump_flags & TDF_DETAILS))
3065 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3066 nophi = true;
3067 }
3068 }
3069
3070 /* Make the necessary insertions. */
3071 FOR_EACH_EDGE (pred, ei, block->preds)
3072 {
3073 gimple_seq stmts = NULL;
3074 tree builtexpr;
3075 bprime = pred->src;
3076 eprime = avail[pred->dest_idx];
3077 builtexpr = create_expression_by_pieces (bprime, eprime,
3078 &stmts, type);
3079 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3080 if (!gimple_seq_empty_p (stmts))
3081 {
3082 gsi_insert_seq_on_edge (pred, stmts);
3083 insertions = true;
3084 }
3085 if (!builtexpr)
3086 {
3087 /* We cannot insert a PHI node if we failed to insert
3088 on one edge. */
3089 nophi = true;
3090 continue;
3091 }
3092 if (is_gimple_min_invariant (builtexpr))
3093 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3094 else
3095 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3096 }
3097 /* If we didn't want a phi node, and we made insertions, we still have
3098 inserted new stuff, and thus return true. If we didn't want a phi node,
3099 and didn't make insertions, we haven't added anything new, so return
3100 false. */
3101 if (nophi && insertions)
3102 return true;
3103 else if (nophi && !insertions)
3104 return false;
3105
3106 /* Now build a phi for the new variable. */
3107 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3108 phi = create_phi_node (temp, block);
3109
3110 gimple_set_plf (phi, NECESSARY, false);
3111 VN_INFO_GET (temp)->value_id = val;
3112 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3113 if (VN_INFO (temp)->valnum == NULL_TREE)
3114 VN_INFO (temp)->valnum = temp;
3115 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3116 FOR_EACH_EDGE (pred, ei, block->preds)
3117 {
3118 pre_expr ae = avail[pred->dest_idx];
3119 gcc_assert (get_expr_type (ae) == type
3120 || useless_type_conversion_p (type, get_expr_type (ae)));
3121 if (ae->kind == CONSTANT)
3122 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3123 pred, UNKNOWN_LOCATION);
3124 else
3125 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3126 }
3127
3128 newphi = get_or_alloc_expr_for_name (temp);
3129 add_to_value (val, newphi);
3130
3131 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3132 this insertion, since we test for the existence of this value in PHI_GEN
3133 before proceeding with the partial redundancy checks in insert_aux.
3134
3135 The value may exist in AVAIL_OUT, in particular, it could be represented
3136 by the expression we are trying to eliminate, in which case we want the
3137 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3138 inserted there.
3139
3140 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3141 this block, because if it did, it would have existed in our dominator's
3142 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3143 */
3144
3145 bitmap_insert_into_set (PHI_GEN (block), newphi);
3146 bitmap_value_replace_in_set (AVAIL_OUT (block),
3147 newphi);
3148 bitmap_insert_into_set (NEW_SETS (block),
3149 newphi);
3150
3151 /* If we insert a PHI node for a conversion of another PHI node
3152 in the same basic-block try to preserve range information.
3153 This is important so that followup loop passes receive optimal
3154 number of iteration analysis results. See PR61743. */
3155 if (expr->kind == NARY
3156 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3157 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3158 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3159 && INTEGRAL_TYPE_P (type)
3160 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3161 && (TYPE_PRECISION (type)
3162 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3163 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3164 {
3165 wide_int min, max;
3166 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3167 && !wi::neg_p (min, SIGNED)
3168 && !wi::neg_p (max, SIGNED))
3169 /* Just handle extension and sign-changes of all-positive ranges. */
3170 set_range_info (temp,
3171 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3172 wide_int_storage::from (min, TYPE_PRECISION (type),
3173 TYPE_SIGN (type)),
3174 wide_int_storage::from (max, TYPE_PRECISION (type),
3175 TYPE_SIGN (type)));
3176 }
3177
3178 if (dump_file && (dump_flags & TDF_DETAILS))
3179 {
3180 fprintf (dump_file, "Created phi ");
3181 print_gimple_stmt (dump_file, phi, 0, 0);
3182 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3183 }
3184 pre_stats.phis++;
3185 return true;
3186 }
3187
3188
3189
3190 /* Perform insertion of partially redundant or hoistable values.
3191 For BLOCK, do the following:
3192 1. Propagate the NEW_SETS of the dominator into the current block.
3193 If the block has multiple predecessors,
3194 2a. Iterate over the ANTIC expressions for the block to see if
3195 any of them are partially redundant.
3196 2b. If so, insert them into the necessary predecessors to make
3197 the expression fully redundant.
3198 2c. Insert a new PHI merging the values of the predecessors.
3199 2d. Insert the new PHI, and the new expressions, into the
3200 NEW_SETS set.
3201 If the block has multiple successors,
3202 3a. Iterate over the ANTIC values for the block to see if
3203 any of them are good candidates for hoisting.
3204 3b. If so, insert expressions computing the values in BLOCK,
3205 and add the new expressions into the NEW_SETS set.
3206 4. Recursively call ourselves on the dominator children of BLOCK.
3207
3208 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3209 do_pre_regular_insertion and do_pre_partial_partial_insertion. 3a and 3b are
3210 done in do_hoist_insertion.
3211 */
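
/* As a small illustration of steps 2a-2d (made-up names): with BB2 and
   BB3 as the predecessors of BB4,

     BB2: x_2 = a_1 + b_1;
     BB3: (empty)
     BB4: y_3 = a_1 + b_1;

   a_1 + b_1 is partially redundant in BB4, so we insert
   pretmp_4 = a_1 + b_1 into BB3 and create
   prephitmp_5 = PHI <x_2 (BB2), pretmp_4 (BB3)> in BB4, after which
   the computation of y_3 is fully redundant and later eliminated.  */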
3212
3213 static bool
3214 do_pre_regular_insertion (basic_block block, basic_block dom)
3215 {
3216 bool new_stuff = false;
3217 vec<pre_expr> exprs;
3218 pre_expr expr;
3219 auto_vec<pre_expr> avail;
3220 int i;
3221
3222 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3223 avail.safe_grow (EDGE_COUNT (block->preds));
3224
3225 FOR_EACH_VEC_ELT (exprs, i, expr)
3226 {
3227 if (expr->kind == NARY
3228 || expr->kind == REFERENCE)
3229 {
3230 unsigned int val;
3231 bool by_some = false;
3232 bool cant_insert = false;
3233 bool all_same = true;
3234 pre_expr first_s = NULL;
3235 edge pred;
3236 basic_block bprime;
3237 pre_expr eprime = NULL;
3238 edge_iterator ei;
3239 pre_expr edoubleprime = NULL;
3240 bool do_insertion = false;
3241
3242 val = get_expr_value_id (expr);
3243 if (bitmap_set_contains_value (PHI_GEN (block), val))
3244 continue;
3245 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3246 {
3247 if (dump_file && (dump_flags & TDF_DETAILS))
3248 {
3249 fprintf (dump_file, "Found fully redundant value: ");
3250 print_pre_expr (dump_file, expr);
3251 fprintf (dump_file, "\n");
3252 }
3253 continue;
3254 }
3255
3256 FOR_EACH_EDGE (pred, ei, block->preds)
3257 {
3258 unsigned int vprime;
3259
3260 /* We should never run insertion for the exit block
3261 and so not come across fake pred edges. */
3262 gcc_assert (!(pred->flags & EDGE_FAKE));
3263 bprime = pred->src;
3264 /* We are looking at ANTIC_OUT of bprime. */
3265 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3266 bprime, block);
3267
3268 /* eprime will generally only be NULL if the
3269 value of the expression, translated
3270 through the PHI for this predecessor, is
3271 undefined. If that is the case, we can't
3272 make the expression fully redundant,
3273 because its value is undefined along a
3274 predecessor path. We can thus break out
3275 early because it doesn't matter what the
3276 rest of the results are. */
3277 if (eprime == NULL)
3278 {
3279 avail[pred->dest_idx] = NULL;
3280 cant_insert = true;
3281 break;
3282 }
3283
3284 eprime = fully_constant_expression (eprime);
3285 vprime = get_expr_value_id (eprime);
3286 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3287 vprime);
3288 if (edoubleprime == NULL)
3289 {
3290 avail[pred->dest_idx] = eprime;
3291 all_same = false;
3292 }
3293 else
3294 {
3295 avail[pred->dest_idx] = edoubleprime;
3296 by_some = true;
3297 /* We want to perform insertions to remove a redundancy on
3298 a path in the CFG we want to optimize for speed. */
3299 if (optimize_edge_for_speed_p (pred))
3300 do_insertion = true;
3301 if (first_s == NULL)
3302 first_s = edoubleprime;
3303 else if (!pre_expr_d::equal (first_s, edoubleprime))
3304 all_same = false;
3305 }
3306 }
3307 /* If we can insert it, it's not the same value
3308 already existing along every predecessor, and
3309 it's defined by some predecessor, it is
3310 partially redundant. */
3311 if (!cant_insert && !all_same && by_some)
3312 {
3313 if (!do_insertion)
3314 {
3315 if (dump_file && (dump_flags & TDF_DETAILS))
3316 {
3317 fprintf (dump_file, "Skipping partial redundancy for "
3318 "expression ");
3319 print_pre_expr (dump_file, expr);
3320 fprintf (dump_file, " (%04d), no redundancy on to be "
3321 "optimized for speed edge\n", val);
3322 }
3323 }
3324 else if (dbg_cnt (treepre_insert))
3325 {
3326 if (dump_file && (dump_flags & TDF_DETAILS))
3327 {
3328 fprintf (dump_file, "Found partial redundancy for "
3329 "expression ");
3330 print_pre_expr (dump_file, expr);
3331 fprintf (dump_file, " (%04d)\n",
3332 get_expr_value_id (expr));
3333 }
3334 if (insert_into_preds_of_block (block,
3335 get_expression_id (expr),
3336 avail))
3337 new_stuff = true;
3338 }
3339 }
3340 /* If all edges produce the same value and that value is
3341 an invariant, then the PHI has the same value on all
3342 edges. Note this. */
3343 else if (!cant_insert && all_same)
3344 {
3345 gcc_assert (edoubleprime->kind == CONSTANT
3346 || edoubleprime->kind == NAME);
3347
3348 tree temp = make_temp_ssa_name (get_expr_type (expr),
3349 NULL, "pretmp");
3350 gassign *assign
3351 = gimple_build_assign (temp,
3352 edoubleprime->kind == CONSTANT ?
3353 PRE_EXPR_CONSTANT (edoubleprime) :
3354 PRE_EXPR_NAME (edoubleprime));
3355 gimple_stmt_iterator gsi = gsi_after_labels (block);
3356 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3357
3358 gimple_set_plf (assign, NECESSARY, false);
3359 VN_INFO_GET (temp)->value_id = val;
3360 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3361 if (VN_INFO (temp)->valnum == NULL_TREE)
3362 VN_INFO (temp)->valnum = temp;
3363 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3364 pre_expr newe = get_or_alloc_expr_for_name (temp);
3365 add_to_value (val, newe);
3366 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3367 bitmap_insert_into_set (NEW_SETS (block), newe);
3368 }
3369 }
3370 }
3371
3372 exprs.release ();
3373 return new_stuff;
3374 }
3375
3376
3377 /* Perform insertion for partially anticipatable expressions. There
3378 is only one case we will perform insertion for these. This case is
3379 if the expression is partially anticipatable, and fully available.
3380 In this case, we know that putting it earlier will enable us to
3381 remove the later computation. */
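
/* Illustrative sketch (made-up names):

     if (c_1) x_2 = a_1 + b_1; else x_3 = a_1 + b_1;
     while (d_4)
       y_5 = a_1 + b_1;

   at the join a_1 + b_1 is only partially anticipatable (the loop may
   run zero times) yet fully available from both predecessors, so we
   merge x_2 and x_3 with a PHI there, making the computation of y_5
   fully redundant.  */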
3382
3383 static bool
3384 do_pre_partial_partial_insertion (basic_block block, basic_block dom)
3385 {
3386 bool new_stuff = false;
3387 vec<pre_expr> exprs;
3388 pre_expr expr;
3389 auto_vec<pre_expr> avail;
3390 int i;
3391
3392 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3393 avail.safe_grow (EDGE_COUNT (block->preds));
3394
3395 FOR_EACH_VEC_ELT (exprs, i, expr)
3396 {
3397 if (expr->kind == NARY
3398 || expr->kind == REFERENCE)
3399 {
3400 unsigned int val;
3401 bool by_all = true;
3402 bool cant_insert = false;
3403 edge pred;
3404 basic_block bprime;
3405 pre_expr eprime = NULL;
3406 edge_iterator ei;
3407
3408 val = get_expr_value_id (expr);
3409 if (bitmap_set_contains_value (PHI_GEN (block), val))
3410 continue;
3411 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3412 continue;
3413
3414 FOR_EACH_EDGE (pred, ei, block->preds)
3415 {
3416 unsigned int vprime;
3417 pre_expr edoubleprime;
3418
3419 /* We should never run insertion for the exit block
3420 and so not come across fake pred edges. */
3421 gcc_assert (!(pred->flags & EDGE_FAKE));
3422 bprime = pred->src;
3423 eprime = phi_translate (expr, ANTIC_IN (block),
3424 PA_IN (block),
3425 bprime, block);
3426
3427 /* eprime will generally only be NULL if the
3428 value of the expression, translated
3429 through the PHI for this predecessor, is
3430 undefined. If that is the case, we can't
3431 make the expression fully redundant,
3432 because its value is undefined along a
3433 predecessor path. We can thus break out
3434 early because it doesn't matter what the
3435 rest of the results are. */
3436 if (eprime == NULL)
3437 {
3438 avail[pred->dest_idx] = NULL;
3439 cant_insert = true;
3440 break;
3441 }
3442
3443 eprime = fully_constant_expression (eprime);
3444 vprime = get_expr_value_id (eprime);
3445 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3446 avail[pred->dest_idx] = edoubleprime;
3447 if (edoubleprime == NULL)
3448 {
3449 by_all = false;
3450 break;
3451 }
3452 }
3453
3454 /* If we can insert it and its value is available in
3455 every predecessor, the expression is fully available
3456 there while only partially anticipated here, so
3457 inserting it can remove the later computation. */
3458 if (!cant_insert && by_all)
3459 {
3460 edge succ;
3461 bool do_insertion = false;
3462
3463 /* Insert only if we can remove a later expression on a path
3464 that we want to optimize for speed.
3465 The phi node that we will be inserting in BLOCK is not free,
3466 and inserting it for the sake of a !optimize_for_speed successor
3467 may cause regressions on the speed path. */
3468 FOR_EACH_EDGE (succ, ei, block->succs)
3469 {
3470 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3471 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3472 {
3473 if (optimize_edge_for_speed_p (succ))
3474 do_insertion = true;
3475 }
3476 }
3477
3478 if (!do_insertion)
3479 {
3480 if (dump_file && (dump_flags & TDF_DETAILS))
3481 {
3482 fprintf (dump_file, "Skipping partial partial redundancy "
3483 "for expression ");
3484 print_pre_expr (dump_file, expr);
3485 fprintf (dump_file, " (%04d), not (partially) anticipated "
3486 "on any to be optimized for speed edges\n", val);
3487 }
3488 }
3489 else if (dbg_cnt (treepre_insert))
3490 {
3491 pre_stats.pa_insert++;
3492 if (dump_file && (dump_flags & TDF_DETAILS))
3493 {
3494 fprintf (dump_file, "Found partial partial redundancy "
3495 "for expression ");
3496 print_pre_expr (dump_file, expr);
3497 fprintf (dump_file, " (%04d)\n",
3498 get_expr_value_id (expr));
3499 }
3500 if (insert_into_preds_of_block (block,
3501 get_expression_id (expr),
3502 avail))
3503 new_stuff = true;
3504 }
3505 }
3506 }
3507 }
3508
3509 exprs.release ();
3510 return new_stuff;
3511 }
3512
3513 /* Insert expressions in BLOCK to compute hoistable values up.
3514 Return TRUE if something was inserted, otherwise return FALSE.
3515 The caller has to make sure that BLOCK has at least two successors. */
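
/* Illustrative sketch (made-up names):

     if (c_1)
       x_2 = a_1 + b_1;
     else
       y_3 = a_1 + b_1;

   a_1 + b_1 is in ANTIC_IN of the block ending in the branch but not
   in its AVAIL_OUT, so we insert pretmp_4 = a_1 + b_1 before the
   branch and both arm computations become fully redundant.  */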
3516
3517 static bool
3518 do_hoist_insertion (basic_block block)
3519 {
3520 edge e;
3521 edge_iterator ei;
3522 bool new_stuff = false;
3523 unsigned i;
3524 gimple_stmt_iterator last;
3525
3526 /* At least two successors, or else... */
3527 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3528
3529 /* Check that all successors of BLOCK are dominated by block.
3530 We could use dominated_by_p() for this, but actually there is a much
3531 quicker check: any successor that is dominated by BLOCK can't have
3532 more than one predecessor edge. */
3533 FOR_EACH_EDGE (e, ei, block->succs)
3534 if (! single_pred_p (e->dest))
3535 return false;
3536
3537 /* Determine the insertion point. If we cannot safely insert before
3538 the last stmt if we'd have to, bail out. */
3539 last = gsi_last_bb (block);
3540 if (!gsi_end_p (last)
3541 && !is_ctrl_stmt (gsi_stmt (last))
3542 && stmt_ends_bb_p (gsi_stmt (last)))
3543 return false;
3544
3545 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3546 hoistable values. */
3547 bitmap_set hoistable_set;
3548
3549 /* A hoistable value must be in ANTIC_IN(block)
3550 but not in AVAIL_OUT(BLOCK). */
3551 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3552 bitmap_and_compl (&hoistable_set.values,
3553 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3554
3555 /* Short-cut for a common case: hoistable_set is empty. */
3556 if (bitmap_empty_p (&hoistable_set.values))
3557 return false;
3558
3559 /* Compute which of the hoistable values is in AVAIL_OUT of
3560 at least one of the successors of BLOCK. */
3561 bitmap_head availout_in_some;
3562 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3563 FOR_EACH_EDGE (e, ei, block->succs)
3564 /* Do not consider expressions solely because of their availability
3565 on loop exits. They'd be ANTIC-IN throughout the whole loop
3566 and thus effectively hoisted across loops by combination of
3567 PRE and hoisting. */
3568 if (! loop_exit_edge_p (block->loop_father, e))
3569 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3570 &AVAIL_OUT (e->dest)->values);
3571 bitmap_clear (&hoistable_set.values);
3572
3573 /* Short-cut for a common case: availout_in_some is empty. */
3574 if (bitmap_empty_p (&availout_in_some))
3575 return false;
3576
3577 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3578 hoistable_set.values = availout_in_some;
3579 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3580
3581 /* Now finally construct the topological-ordered expression set. */
3582 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3583
3584 bitmap_clear (&hoistable_set.values);
3585
3586 /* If there are candidate values for hoisting, insert expressions
3587 strategically to make the hoistable expressions fully redundant. */
3588 pre_expr expr;
3589 FOR_EACH_VEC_ELT (exprs, i, expr)
3590 {
3591 /* While we try to sort expressions topologically above, the
3592 sorting doesn't work out perfectly. Catch expressions we
3593 already inserted. */
3594 unsigned int value_id = get_expr_value_id (expr);
3595 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3596 {
3597 if (dump_file && (dump_flags & TDF_DETAILS))
3598 {
3599 fprintf (dump_file,
3600 "Already inserted expression for ");
3601 print_pre_expr (dump_file, expr);
3602 fprintf (dump_file, " (%04d)\n", value_id);
3603 }
3604 continue;
3605 }
3606
3607 /* OK, we should hoist this value. Perform the transformation. */
3608 pre_stats.hoist_insert++;
3609 if (dump_file && (dump_flags & TDF_DETAILS))
3610 {
3611 fprintf (dump_file,
3612 "Inserting expression in block %d for code hoisting: ",
3613 block->index);
3614 print_pre_expr (dump_file, expr);
3615 fprintf (dump_file, " (%04d)\n", value_id);
3616 }
3617
3618 gimple_seq stmts = NULL;
3619 tree res = create_expression_by_pieces (block, expr, &stmts,
3620 get_expr_type (expr));
3621 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3622 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3623 else
3624 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3625
3626 /* Make sure not to return true if expression creation ultimately
3627 failed, but also make sure to insert any stmts produced, as they
3628 are tracked in inserted_exprs. */
3629 if (! res)
3630 continue;
3631
3632 new_stuff = true;
3633 }
3634
3635 exprs.release ();
3636
3637 return new_stuff;
3638 }
3639
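/* A minimal illustration of the transform (a hypothetical input, not
   taken from the testsuite): given a block with two single-predecessor
   successors that both compute the same value,

     if (flag)
       x = a + b;
     else
       y = a + b;

   a + b is in ANTIC_IN of the conditional block but not in its
   AVAIL_OUT, while it is in AVAIL_OUT of both successors, so
   do_hoist_insertion materializes it once before the branch:

     tem = a + b;
     if (flag)
       x = tem;
     else
       y = tem;

   which makes both original computations fully redundant and leaves
   them to the elimination phase.  */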
3640 /* Do a dominator walk on the control flow graph, and insert computations
3641 of values as necessary for PRE and hoisting. */
3642
3643 static bool
3644 insert_aux (basic_block block, bool do_pre, bool do_hoist)
3645 {
3646 basic_block son;
3647 bool new_stuff = false;
3648
3649 if (block)
3650 {
3651 basic_block dom;
3652 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3653 if (dom)
3654 {
3655 unsigned i;
3656 bitmap_iterator bi;
3657 bitmap_set_t newset;
3658
3659 /* First, update the AVAIL_OUT set with anything we may have
3660 inserted higher up in the dominator tree. */
3661 newset = NEW_SETS (dom);
3662 if (newset)
3663 {
3664 /* Note that we need to do value replacement in both NEW_SETS
3665 and AVAIL_OUT. In each set the value may currently be
3666 represented by some non-simple expression that we want to
3667 replace with the new one. */
3668 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3669 {
3670 pre_expr expr = expression_for_id (i);
3671 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3672 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3673 }
3674 }
3675
3676 /* Insert expressions for partial redundancies. */
3677 if (do_pre && !single_pred_p (block))
3678 {
3679 new_stuff |= do_pre_regular_insertion (block, dom);
3680 if (do_partial_partial)
3681 new_stuff |= do_pre_partial_partial_insertion (block, dom);
3682 }
3683
3684 /* Insert expressions for hoisting. */
3685 if (do_hoist && EDGE_COUNT (block->succs) >= 2)
3686 new_stuff |= do_hoist_insertion (block);
3687 }
3688 }
3689 for (son = first_dom_son (CDI_DOMINATORS, block);
3690 son;
3691 son = next_dom_son (CDI_DOMINATORS, son))
3692 {
3693 new_stuff |= insert_aux (son, do_pre, do_hoist);
3694 }
3695
3696 return new_stuff;
3697 }
3698
3699 /* Perform insertion of partially redundant and hoistable values. */
3700
3701 static void
3702 insert (void)
3703 {
3704 bool new_stuff = true;
3705 basic_block bb;
3706 int num_iterations = 0;
3707
3708 FOR_ALL_BB_FN (bb, cfun)
3709 NEW_SETS (bb) = bitmap_set_new ();
3710
3711 while (new_stuff)
3712 {
3713 num_iterations++;
3714 if (dump_file && (dump_flags & TDF_DETAILS))
3715 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3716 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun), flag_tree_pre,
3717 flag_code_hoisting);
3718
3719 /* Clear the NEW sets before the next iteration. We have already
3720 fully propagated their contents. */
3721 if (new_stuff)
3722 FOR_ALL_BB_FN (bb, cfun)
3723 bitmap_set_free (NEW_SETS (bb));
3724 }
3725 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3726 }
3727
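/* Iterating to a fixed point is required because one insertion can
   enable another: if, say, a + b is inserted into a dominator on one
   iteration, an expression like (a + b) * c may only become insertable
   on the next one (hypothetical example). The NEW_SETS propagation in
   insert_aux carries the freshly inserted leaders down the dominator
   tree between iterations. */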
3728
3729 /* Compute the AVAIL set for all basic blocks.
3730
3731 This function performs value numbering of the statements in each basic
3732 block. The AVAIL sets are built from information we glean while doing
3733 this value numbering, since the AVAIL sets contain only one entry per
3734 value.
3735
3736 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3737 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
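/* As a small worked example of these equations (hypothetical, for
   illustration only): with ENTRY dominating B1 and B1 dominating B2,

     B1: t1_ = a_ + b_;   so TMP_GEN (B1) = { t1_ }
     B2: t2_ = t1_ * c_;  so TMP_GEN (B2) = { t2_ }

   the walk below computes

     AVAIL_OUT (B1) = AVAIL_OUT (ENTRY) U { t1_ }
     AVAIL_OUT (B2) = AVAIL_OUT (B1) U { t2_ }

   keeping a single leader per value in each set. */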
3738
3739 static void
3740 compute_avail (void)
3741 {
3742
3743 basic_block block, son;
3744 basic_block *worklist;
3745 size_t sp = 0;
3746 unsigned i;
3747
3748 /* We pretend that default definitions are defined in the entry block.
3749 This includes function arguments and the static chain decl. */
3750 for (i = 1; i < num_ssa_names; ++i)
3751 {
3752 tree name = ssa_name (i);
3753 pre_expr e;
3754 if (!name
3755 || !SSA_NAME_IS_DEFAULT_DEF (name)
3756 || has_zero_uses (name)
3757 || virtual_operand_p (name))
3758 continue;
3759
3760 e = get_or_alloc_expr_for_name (name);
3761 add_to_value (get_expr_value_id (e), e);
3762 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3763 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3764 e);
3765 }
3766
3767 if (dump_file && (dump_flags & TDF_DETAILS))
3768 {
3769 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3770 "tmp_gen", ENTRY_BLOCK);
3771 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3772 "avail_out", ENTRY_BLOCK);
3773 }
3774
3775 /* Allocate the worklist. */
3776 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3777
3778 /* Seed the algorithm by putting the dominator children of the entry
3779 block on the worklist. */
3780 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3781 son;
3782 son = next_dom_son (CDI_DOMINATORS, son))
3783 worklist[sp++] = son;
3784
3785 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3786 = ssa_default_def (cfun, gimple_vop (cfun));
3787
3788 /* Loop until the worklist is empty. */
3789 while (sp)
3790 {
3791 gimple *stmt;
3792 basic_block dom;
3793
3794 /* Pick a block from the worklist. */
3795 block = worklist[--sp];
3796
3797 /* Initially, the set of available values in BLOCK is that of
3798 its immediate dominator. */
3799 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3800 if (dom)
3801 {
3802 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3803 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3804 }
3805
3806 /* Generate values for PHI nodes. */
3807 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3808 gsi_next (&gsi))
3809 {
3810 tree result = gimple_phi_result (gsi.phi ());
3811
3812 /* We have no need for virtual phis, as they don't represent
3813 actual computations. */
3814 if (virtual_operand_p (result))
3815 {
3816 BB_LIVE_VOP_ON_EXIT (block) = result;
3817 continue;
3818 }
3819
3820 pre_expr e = get_or_alloc_expr_for_name (result);
3821 add_to_value (get_expr_value_id (e), e);
3822 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3823 bitmap_insert_into_set (PHI_GEN (block), e);
3824 }
3825
3826 BB_MAY_NOTRETURN (block) = 0;
3827
3828 /* Now compute value numbers and populate value sets with all
3829 the expressions computed in BLOCK. */
3830 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3831 gsi_next (&gsi))
3832 {
3833 ssa_op_iter iter;
3834 tree op;
3835
3836 stmt = gsi_stmt (gsi);
3837
3838 /* Cache whether the basic-block has any non-visible side-effect
3839 or control flow.
3840 If this isn't a call or it is the last stmt in the
3841 basic-block then the CFG represents things correctly. */
3842 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3843 {
3844 /* Non-looping const functions always return normally.
3845 Otherwise the call might not return or might have side-effects
3846 that forbid hoisting possibly trapping expressions
3847 before it. */
3848 int flags = gimple_call_flags (stmt);
3849 if (!(flags & ECF_CONST)
3850 || (flags & ECF_LOOPING_CONST_OR_PURE))
3851 BB_MAY_NOTRETURN (block) = 1;
3852 }
3853
3854 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3855 {
3856 pre_expr e = get_or_alloc_expr_for_name (op);
3857
3858 add_to_value (get_expr_value_id (e), e);
3859 bitmap_insert_into_set (TMP_GEN (block), e);
3860 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3861 }
3862
3863 if (gimple_vdef (stmt))
3864 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3865
3866 if (gimple_has_side_effects (stmt)
3867 || stmt_could_throw_p (stmt)
3868 || is_gimple_debug (stmt))
3869 continue;
3870
3871 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3872 {
3873 if (ssa_undefined_value_p (op))
3874 continue;
3875 pre_expr e = get_or_alloc_expr_for_name (op);
3876 bitmap_value_insert_into_set (EXP_GEN (block), e);
3877 }
3878
3879 switch (gimple_code (stmt))
3880 {
3881 case GIMPLE_RETURN:
3882 continue;
3883
3884 case GIMPLE_CALL:
3885 {
3886 vn_reference_t ref;
3887 vn_reference_s ref1;
3888 pre_expr result = NULL;
3889
3890 /* We can value number only calls to real functions. */
3891 if (gimple_call_internal_p (stmt))
3892 continue;
3893
3894 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3895 if (!ref)
3896 continue;
3897
3898 /* If the value of the call is not invalidated in
3899 this block until it is computed, add the expression
3900 to EXP_GEN. */
3901 if (!gimple_vuse (stmt)
3902 || gimple_code
3903 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3904 || gimple_bb (SSA_NAME_DEF_STMT
3905 (gimple_vuse (stmt))) != block)
3906 {
3907 result = pre_expr_pool.allocate ();
3908 result->kind = REFERENCE;
3909 result->id = 0;
3910 PRE_EXPR_REFERENCE (result) = ref;
3911
3912 get_or_alloc_expression_id (result);
3913 add_to_value (get_expr_value_id (result), result);
3914 bitmap_value_insert_into_set (EXP_GEN (block), result);
3915 }
3916 continue;
3917 }
3918
3919 case GIMPLE_ASSIGN:
3920 {
3921 pre_expr result = NULL;
3922 switch (vn_get_stmt_kind (stmt))
3923 {
3924 case VN_NARY:
3925 {
3926 enum tree_code code = gimple_assign_rhs_code (stmt);
3927 vn_nary_op_t nary;
3928
3929 /* COND_EXPR and VEC_COND_EXPR are awkward in
3930 that they contain an embedded complex expression.
3931 Don't even try to shove those through PRE. */
3932 if (code == COND_EXPR
3933 || code == VEC_COND_EXPR)
3934 continue;
3935
3936 vn_nary_op_lookup_stmt (stmt, &nary);
3937 if (!nary)
3938 continue;
3939
3940 /* If the NARY traps and there was a preceding
3941 point in the block that might not return, avoid
3942 adding the nary to EXP_GEN. */
3943 if (BB_MAY_NOTRETURN (block)
3944 && vn_nary_may_trap (nary))
3945 continue;
3946
3947 result = pre_expr_pool.allocate ();
3948 result->kind = NARY;
3949 result->id = 0;
3950 PRE_EXPR_NARY (result) = nary;
3951 break;
3952 }
3953
3954 case VN_REFERENCE:
3955 {
3956 tree rhs1 = gimple_assign_rhs1 (stmt);
3957 alias_set_type set = get_alias_set (rhs1);
3958 vec<vn_reference_op_s> operands
3959 = vn_reference_operands_for_lookup (rhs1);
3960 vn_reference_t ref;
3961 vn_reference_lookup_pieces (gimple_vuse (stmt), set,
3962 TREE_TYPE (rhs1),
3963 operands, &ref, VN_WALK);
3964 if (!ref)
3965 {
3966 operands.release ();
3967 continue;
3968 }
3969
3970 /* If the value of the reference is not invalidated in
3971 this block until it is computed, add the expression
3972 to EXP_GEN. */
3973 if (gimple_vuse (stmt))
3974 {
3975 gimple *def_stmt;
3976 bool ok = true;
3977 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3978 while (!gimple_nop_p (def_stmt)
3979 && gimple_code (def_stmt) != GIMPLE_PHI
3980 && gimple_bb (def_stmt) == block)
3981 {
3982 if (stmt_may_clobber_ref_p
3983 (def_stmt, gimple_assign_rhs1 (stmt)))
3984 {
3985 ok = false;
3986 break;
3987 }
3988 def_stmt
3989 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3990 }
3991 if (!ok)
3992 {
3993 operands.release ();
3994 continue;
3995 }
3996 }
3997
3998 /* If the load was value-numbered to another
3999 load, make sure we do not use its expression
4000 for insertion if it wouldn't be a valid
4001 replacement. */
4002 /* At the moment we have a testcase
4003 for hoist insertion of aligned vs. misaligned
4004 variants in gcc.dg/torture/pr65270-1.c; thus,
4005 with just alignment to be considered, we can
4006 simply replace the expression in the hashtable
4007 with the most conservative one. */
4008 vn_reference_op_t ref1 = &ref->operands.last ();
4009 while (ref1->opcode != TARGET_MEM_REF
4010 && ref1->opcode != MEM_REF
4011 && ref1 != &ref->operands[0])
4012 --ref1;
4013 vn_reference_op_t ref2 = &operands.last ();
4014 while (ref2->opcode != TARGET_MEM_REF
4015 && ref2->opcode != MEM_REF
4016 && ref2 != &operands[0])
4017 --ref2;
4018 if ((ref1->opcode == TARGET_MEM_REF
4019 || ref1->opcode == MEM_REF)
4020 && (TYPE_ALIGN (ref1->type)
4021 > TYPE_ALIGN (ref2->type)))
4022 ref1->type
4023 = build_aligned_type (ref1->type,
4024 TYPE_ALIGN (ref2->type));
4025 /* TBAA behavior is an obvious part of the replacement
4026 semantics, so make sure that the hashtable one covers
4027 this as well by adjusting the ref alias set and its base. */
4028 if (ref->set == set
4029 || alias_set_subset_of (set, ref->set))
4030 ;
4031 else if (alias_set_subset_of (ref->set, set))
4032 {
4033 ref->set = set;
4034 if (ref1->opcode == MEM_REF)
4035 ref1->op0 = fold_convert (TREE_TYPE (ref2->op0),
4036 ref1->op0);
4037 else
4038 ref1->op2 = fold_convert (TREE_TYPE (ref2->op2),
4039 ref1->op2);
4040 }
4041 else
4042 {
4043 ref->set = 0;
4044 if (ref1->opcode == MEM_REF)
4045 ref1->op0 = fold_convert (ptr_type_node,
4046 ref1->op0);
4047 else
4048 ref1->op2 = fold_convert (ptr_type_node,
4049 ref1->op2);
4050 }
4051 operands.release ();
4052
4053 result = pre_expr_pool.allocate ();
4054 result->kind = REFERENCE;
4055 result->id = 0;
4056 PRE_EXPR_REFERENCE (result) = ref;
4057 break;
4058 }
4059
4060 default:
4061 continue;
4062 }
4063
4064 get_or_alloc_expression_id (result);
4065 add_to_value (get_expr_value_id (result), result);
4066 bitmap_value_insert_into_set (EXP_GEN (block), result);
4067 continue;
4068 }
4069 default:
4070 break;
4071 }
4072 }
4073
4074 if (dump_file && (dump_flags & TDF_DETAILS))
4075 {
4076 print_bitmap_set (dump_file, EXP_GEN (block),
4077 "exp_gen", block->index);
4078 print_bitmap_set (dump_file, PHI_GEN (block),
4079 "phi_gen", block->index);
4080 print_bitmap_set (dump_file, TMP_GEN (block),
4081 "tmp_gen", block->index);
4082 print_bitmap_set (dump_file, AVAIL_OUT (block),
4083 "avail_out", block->index);
4084 }
4085
4086 /* Put the dominator children of BLOCK on the worklist of blocks
4087 to compute available sets for. */
4088 for (son = first_dom_son (CDI_DOMINATORS, block);
4089 son;
4090 son = next_dom_son (CDI_DOMINATORS, son))
4091 worklist[sp++] = son;
4092 }
4093
4094 free (worklist);
4095 }
4096
4097
4098 /* Local state for the eliminate domwalk. */
4099 static vec<gimple *> el_to_remove;
4100 static vec<gimple *> el_to_fixup;
4101 static unsigned int el_todo;
4102 static vec<tree> el_avail;
4103 static vec<tree> el_avail_stack;
4104
4105 /* Return a leader for OP that is available at the current point of the
4106 eliminate domwalk. */
4107
4108 static tree
4109 eliminate_avail (tree op)
4110 {
4111 tree valnum = VN_INFO (op)->valnum;
4112 if (TREE_CODE (valnum) == SSA_NAME)
4113 {
4114 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4115 return valnum;
4116 if (el_avail.length () > SSA_NAME_VERSION (valnum))
4117 return el_avail[SSA_NAME_VERSION (valnum)];
4118 }
4119 else if (is_gimple_min_invariant (valnum))
4120 return valnum;
4121 return NULL_TREE;
4122 }
4123
4124 /* At the current point of the eliminate domwalk make OP available. */
4125
4126 static void
4127 eliminate_push_avail (tree op)
4128 {
4129 tree valnum = VN_INFO (op)->valnum;
4130 if (TREE_CODE (valnum) == SSA_NAME)
4131 {
4132 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
4133 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4134 tree pushop = op;
4135 if (el_avail[SSA_NAME_VERSION (valnum)])
4136 pushop = el_avail[SSA_NAME_VERSION (valnum)];
4137 el_avail_stack.safe_push (pushop);
4138 el_avail[SSA_NAME_VERSION (valnum)] = op;
4139 }
4140 }
4141
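/* Together these implement a dominator-scoped map from value number to
   leader, unwound in after_dom_children below. Sketch (illustrative
   only): before_dom_children pushes a NULL_TREE marker to open a
   block's scope; eliminate_push_avail saves the displaced leader (or
   OP itself if the slot was empty) on el_avail_stack before installing
   OP; the unwind pops until the marker, clearing a slot whose saved
   entry matches its current content and restoring the older leader
   otherwise. */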
4142 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4143 the leader for the expression if insertion was successful. */
4144
4145 static tree
4146 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
4147 {
4148 gimple *stmt = gimple_seq_first_stmt (VN_INFO (val)->expr);
4149 if (!is_gimple_assign (stmt)
4150 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4151 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4152 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF))
4153 return NULL_TREE;
4154
4155 tree op = gimple_assign_rhs1 (stmt);
4156 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4157 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4158 op = TREE_OPERAND (op, 0);
4159 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
4160 if (!leader)
4161 return NULL_TREE;
4162
4163 gimple_seq stmts = NULL;
4164 tree res;
4165 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4166 res = gimple_build (&stmts, BIT_FIELD_REF,
4167 TREE_TYPE (val), leader,
4168 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4169 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4170 else
4171 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4172 TREE_TYPE (val), leader);
4173 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4174 VN_INFO_GET (res)->valnum = val;
4175
4176 if (TREE_CODE (leader) == SSA_NAME)
4177 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
4178
4179 pre_stats.insertions++;
4180 if (dump_file && (dump_flags & TDF_DETAILS))
4181 {
4182 fprintf (dump_file, "Inserted ");
4183 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0, 0);
4184 }
4185
4186 return res;
4187 }
4188
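/* For example (hypothetical): if SCCVN recorded for a value the
   expression

     val = (int) x_1;

   and no computation of val is available at the use, but a leader for
   x_1 is, eliminate_insert re-materializes the conversion from that
   leader at *GSI and returns the new SSA name as leader for val. The
   same applies to VIEW_CONVERT_EXPR and BIT_FIELD_REF as handled
   above. */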
4189 class eliminate_dom_walker : public dom_walker
4190 {
4191 public:
4192 eliminate_dom_walker (cdi_direction direction, bool do_pre_)
4193 : dom_walker (direction), do_pre (do_pre_) {}
4194
4195 virtual edge before_dom_children (basic_block);
4196 virtual void after_dom_children (basic_block);
4197
4198 bool do_pre;
4199 };
4200
4201 /* Perform elimination for the basic-block B during the domwalk. */
4202
4203 edge
4204 eliminate_dom_walker::before_dom_children (basic_block b)
4205 {
4206 /* Mark new bb. */
4207 el_avail_stack.safe_push (NULL_TREE);
4208
4209 /* ??? If we do nothing for unreachable blocks then this will confuse
4210 tailmerging. Eventually we can reduce its reliance on SCCVN now
4211 that we fully copy/constant-propagate (most) things. */
4212
4213 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4214 {
4215 gphi *phi = gsi.phi ();
4216 tree res = PHI_RESULT (phi);
4217
4218 if (virtual_operand_p (res))
4219 {
4220 gsi_next (&gsi);
4221 continue;
4222 }
4223
4224 tree sprime = eliminate_avail (res);
4225 if (sprime
4226 && sprime != res)
4227 {
4228 if (dump_file && (dump_flags & TDF_DETAILS))
4229 {
4230 fprintf (dump_file, "Replaced redundant PHI node defining ");
4231 print_generic_expr (dump_file, res, 0);
4232 fprintf (dump_file, " with ");
4233 print_generic_expr (dump_file, sprime, 0);
4234 fprintf (dump_file, "\n");
4235 }
4236
4237 /* If we inserted this PHI node ourselves, it's not an elimination. */
4238 if (inserted_exprs
4239 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4240 pre_stats.phis--;
4241 else
4242 pre_stats.eliminations++;
4243
4244 /* If we will propagate into all uses, don't bother to do
4245 anything. */
4246 if (may_propagate_copy (res, sprime))
4247 {
4248 /* Mark the PHI for removal. */
4249 el_to_remove.safe_push (phi);
4250 gsi_next (&gsi);
4251 continue;
4252 }
4253
4254 remove_phi_node (&gsi, false);
4255
4256 if (inserted_exprs
4257 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4258 && TREE_CODE (sprime) == SSA_NAME)
4259 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4260
4261 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4262 sprime = fold_convert (TREE_TYPE (res), sprime);
4263 gimple *stmt = gimple_build_assign (res, sprime);
4264 /* ??? It cannot yet be necessary (DOM walk). */
4265 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4266
4267 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
4268 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4269 continue;
4270 }
4271
4272 eliminate_push_avail (res);
4273 gsi_next (&gsi);
4274 }
4275
4276 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4277 !gsi_end_p (gsi);
4278 gsi_next (&gsi))
4279 {
4280 tree sprime = NULL_TREE;
4281 gimple *stmt = gsi_stmt (gsi);
4282 tree lhs = gimple_get_lhs (stmt);
4283 if (lhs && TREE_CODE (lhs) == SSA_NAME
4284 && !gimple_has_volatile_ops (stmt)
4285 /* See PR43491. Do not replace a global register variable when
4286 it is the RHS of an assignment. Do replace local register
4287 variables since gcc does not guarantee a local variable will
4288 be allocated in a register.
4289 ??? The fix isn't effective here. This should instead
4290 be ensured by not value-numbering them the same but treating
4291 them like volatiles? */
4292 && !(gimple_assign_single_p (stmt)
4293 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4294 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4295 && is_global_var (gimple_assign_rhs1 (stmt)))))
4296 {
4297 sprime = eliminate_avail (lhs);
4298 if (!sprime)
4299 {
4300 /* If there is no existing usable leader but SCCVN thinks
4301 it has an expression it wants to use as replacement,
4302 insert that. */
4303 tree val = VN_INFO (lhs)->valnum;
4304 if (val != VN_TOP
4305 && TREE_CODE (val) == SSA_NAME
4306 && VN_INFO (val)->needs_insertion
4307 && VN_INFO (val)->expr != NULL
4308 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4309 eliminate_push_avail (sprime);
4310 }
4311
4312 /* If this now constitutes a copy, duplicate points-to
4313 and range info appropriately. This is especially
4314 important for inserted code. See tree-ssa-copy.c
4315 for similar code. */
4316 if (sprime
4317 && TREE_CODE (sprime) == SSA_NAME)
4318 {
4319 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4320 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4321 && VN_INFO_PTR_INFO (lhs)
4322 && ! VN_INFO_PTR_INFO (sprime))
4323 {
4324 duplicate_ssa_name_ptr_info (sprime,
4325 VN_INFO_PTR_INFO (lhs));
4326 if (b != sprime_b)
4327 mark_ptr_info_alignment_unknown
4328 (SSA_NAME_PTR_INFO (sprime));
4329 }
4330 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4331 && VN_INFO_RANGE_INFO (lhs)
4332 && ! VN_INFO_RANGE_INFO (sprime)
4333 && b == sprime_b)
4334 duplicate_ssa_name_range_info (sprime,
4335 VN_INFO_RANGE_TYPE (lhs),
4336 VN_INFO_RANGE_INFO (lhs));
4337 }
4338
4339 /* Inhibit the use of an inserted PHI on a loop header when
4340 the address of the memory reference is a simple induction
4341 variable. In other cases the vectorizer won't do anything
4342 anyway (either it's loop invariant or a complicated
4343 expression). */
4344 if (sprime
4345 && TREE_CODE (sprime) == SSA_NAME
4346 && do_pre
4347 && flag_tree_loop_vectorize
4348 && loop_outer (b->loop_father)
4349 && has_zero_uses (sprime)
4350 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4351 && gimple_assign_load_p (stmt))
4352 {
4353 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4354 basic_block def_bb = gimple_bb (def_stmt);
4355 if (gimple_code (def_stmt) == GIMPLE_PHI
4356 && def_bb->loop_father->header == def_bb)
4357 {
4358 loop_p loop = def_bb->loop_father;
4359 ssa_op_iter iter;
4360 tree op;
4361 bool found = false;
4362 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4363 {
4364 affine_iv iv;
4365 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4366 if (def_bb
4367 && flow_bb_inside_loop_p (loop, def_bb)
4368 && simple_iv (loop, loop, op, &iv, true))
4369 {
4370 found = true;
4371 break;
4372 }
4373 }
4374 if (found)
4375 {
4376 if (dump_file && (dump_flags & TDF_DETAILS))
4377 {
4378 fprintf (dump_file, "Not replacing ");
4379 print_gimple_expr (dump_file, stmt, 0, 0);
4380 fprintf (dump_file, " with ");
4381 print_generic_expr (dump_file, sprime, 0);
4382 fprintf (dump_file, " which would add a loop"
4383 " carried dependence to loop %d\n",
4384 loop->num);
4385 }
4386 /* Don't keep sprime available. */
4387 sprime = NULL_TREE;
4388 }
4389 }
4390 }
4391
4392 if (sprime)
4393 {
4394 /* If we can propagate the value computed for LHS into
4395 all uses, don't bother doing anything with this stmt. */
4396 if (may_propagate_copy (lhs, sprime))
4397 {
4398 /* Mark it for removal. */
4399 el_to_remove.safe_push (stmt);
4400
4401 /* ??? Don't count copy/constant propagations. */
4402 if (gimple_assign_single_p (stmt)
4403 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4404 || gimple_assign_rhs1 (stmt) == sprime))
4405 continue;
4406
4407 if (dump_file && (dump_flags & TDF_DETAILS))
4408 {
4409 fprintf (dump_file, "Replaced ");
4410 print_gimple_expr (dump_file, stmt, 0, 0);
4411 fprintf (dump_file, " with ");
4412 print_generic_expr (dump_file, sprime, 0);
4413 fprintf (dump_file, " in all uses of ");
4414 print_gimple_stmt (dump_file, stmt, 0, 0);
4415 }
4416
4417 pre_stats.eliminations++;
4418 continue;
4419 }
4420
4421 /* If this is an assignment from our leader (which
4422 happens in the case the value-number is a constant)
4423 then there is nothing to do. */
4424 if (gimple_assign_single_p (stmt)
4425 && sprime == gimple_assign_rhs1 (stmt))
4426 continue;
4427
4428 /* Else replace its RHS. */
4429 bool can_make_abnormal_goto
4430 = is_gimple_call (stmt)
4431 && stmt_can_make_abnormal_goto (stmt);
4432
4433 if (dump_file && (dump_flags & TDF_DETAILS))
4434 {
4435 fprintf (dump_file, "Replaced ");
4436 print_gimple_expr (dump_file, stmt, 0, 0);
4437 fprintf (dump_file, " with ");
4438 print_generic_expr (dump_file, sprime, 0);
4439 fprintf (dump_file, " in ");
4440 print_gimple_stmt (dump_file, stmt, 0, 0);
4441 }
4442
4443 if (TREE_CODE (sprime) == SSA_NAME)
4444 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4445 NECESSARY, true);
4446
4447 pre_stats.eliminations++;
4448 gimple *orig_stmt = stmt;
4449 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4450 TREE_TYPE (sprime)))
4451 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4452 tree vdef = gimple_vdef (stmt);
4453 tree vuse = gimple_vuse (stmt);
4454 propagate_tree_value_into_stmt (&gsi, sprime);
4455 stmt = gsi_stmt (gsi);
4456 update_stmt (stmt);
4457 if (vdef != gimple_vdef (stmt))
4458 VN_INFO (vdef)->valnum = vuse;
4459
4460 /* If we removed EH side-effects from the statement, clean
4461 its EH information. */
4462 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4463 {
4464 bitmap_set_bit (need_eh_cleanup,
4465 gimple_bb (stmt)->index);
4466 if (dump_file && (dump_flags & TDF_DETAILS))
4467 fprintf (dump_file, " Removed EH side-effects.\n");
4468 }
4469
4470 /* Likewise for AB side-effects. */
4471 if (can_make_abnormal_goto
4472 && !stmt_can_make_abnormal_goto (stmt))
4473 {
4474 bitmap_set_bit (need_ab_cleanup,
4475 gimple_bb (stmt)->index);
4476 if (dump_file && (dump_flags & TDF_DETAILS))
4477 fprintf (dump_file, " Removed AB side-effects.\n");
4478 }
4479
4480 continue;
4481 }
4482 }
4483
4484 /* If the statement is a scalar store, see if the expression
4485 has the same value number as its rhs. If so, the store is
4486 dead. */
4487 if (gimple_assign_single_p (stmt)
4488 && !gimple_has_volatile_ops (stmt)
4489 && !is_gimple_reg (gimple_assign_lhs (stmt))
4490 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4491 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4492 {
4493 tree val;
4494 tree rhs = gimple_assign_rhs1 (stmt);
4495 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4496 gimple_vuse (stmt), VN_WALK, NULL, false);
4497 if (TREE_CODE (rhs) == SSA_NAME)
4498 rhs = VN_INFO (rhs)->valnum;
4499 if (val
4500 && operand_equal_p (val, rhs, 0))
4501 {
4502 if (dump_file && (dump_flags & TDF_DETAILS))
4503 {
4504 fprintf (dump_file, "Deleted redundant store ");
4505 print_gimple_stmt (dump_file, stmt, 0, 0);
4506 }
4507
4508 /* Queue stmt for removal. */
4509 el_to_remove.safe_push (stmt);
4510 continue;
4511 }
4512 }
4513
4514 /* If this is a control statement for which value numbering left
4515 some edges unexecuted, force the condition in a way consistent
4516 with that. */
4517 if (gcond *cond = dyn_cast <gcond *> (stmt))
4518 {
4519 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
4520 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
4521 {
4522 if (dump_file && (dump_flags & TDF_DETAILS))
4523 {
4524 fprintf (dump_file, "Removing unexecutable edge from ");
4525 print_gimple_stmt (dump_file, stmt, 0, 0);
4526 }
4527 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
4528 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
4529 gimple_cond_make_true (cond);
4530 else
4531 gimple_cond_make_false (cond);
4532 update_stmt (cond);
4533 el_todo |= TODO_cleanup_cfg;
4534 continue;
4535 }
4536 }
4537
4538 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4539 bool was_noreturn = (is_gimple_call (stmt)
4540 && gimple_call_noreturn_p (stmt));
4541 tree vdef = gimple_vdef (stmt);
4542 tree vuse = gimple_vuse (stmt);
4543
4544 /* If we didn't replace the whole stmt (or propagate the result
4545 into all uses), replace all uses on this stmt with their
4546 leaders. */
4547 use_operand_p use_p;
4548 ssa_op_iter iter;
4549 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4550 {
4551 tree use = USE_FROM_PTR (use_p);
4552 /* ??? The call code above leaves stmt operands un-updated. */
4553 if (TREE_CODE (use) != SSA_NAME)
4554 continue;
4555 tree sprime = eliminate_avail (use);
4556 if (sprime && sprime != use
4557 && may_propagate_copy (use, sprime)
4558 /* We substitute into debug stmts to avoid excessive
4559 debug temporaries created by removed stmts, but we need
4560 to avoid doing so for inserted sprimes as we never want
4561 to create debug temporaries for them. */
4562 && (!inserted_exprs
4563 || TREE_CODE (sprime) != SSA_NAME
4564 || !is_gimple_debug (stmt)
4565 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4566 {
4567 propagate_value (use_p, sprime);
4568 gimple_set_modified (stmt, true);
4569 if (TREE_CODE (sprime) == SSA_NAME
4570 && !is_gimple_debug (stmt))
4571 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4572 NECESSARY, true);
4573 }
4574 }
4575
4576 /* Visit indirect calls and turn them into direct calls if
4577 possible using the devirtualization machinery. */
4578 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4579 {
4580 tree fn = gimple_call_fn (call_stmt);
4581 if (fn
4582 && flag_devirtualize
4583 && virtual_method_call_p (fn))
4584 {
4585 tree otr_type = obj_type_ref_class (fn);
4586 tree instance;
4587 ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
4588 bool final;
4589
4590 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
4591
4592 vec <cgraph_node *>targets
4593 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
4594 tree_to_uhwi
4595 (OBJ_TYPE_REF_TOKEN (fn)),
4596 context,
4597 &final);
4598 if (dump_file)
4599 dump_possible_polymorphic_call_targets (dump_file,
4600 obj_type_ref_class (fn),
4601 tree_to_uhwi
4602 (OBJ_TYPE_REF_TOKEN (fn)),
4603 context);
4604 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4605 {
4606 tree fn;
4607 if (targets.length () == 1)
4608 fn = targets[0]->decl;
4609 else
4610 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4611 if (dump_enabled_p ())
4612 {
4613 location_t loc = gimple_location_safe (stmt);
4614 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4615 "converting indirect call to "
4616 "function %s\n",
4617 lang_hooks.decl_printable_name (fn, 2));
4618 }
4619 gimple_call_set_fndecl (call_stmt, fn);
4620 maybe_remove_unused_call_args (cfun, call_stmt);
4621 gimple_set_modified (stmt, true);
4622 }
4623 }
4624 }
4625
4626 if (gimple_modified_p (stmt))
4627 {
4628 /* If a formerly non-invariant ADDR_EXPR is turned into an
4629 invariant one, it was on a separate stmt. */
4630 if (gimple_assign_single_p (stmt)
4631 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4632 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4633 gimple *old_stmt = stmt;
4634 if (is_gimple_call (stmt))
4635 {
4636 /* ??? Only fold calls in place for now, as folding otherwise
4637 may create new SSA names which in turn will confuse the
4638 free_scc_vn SSA name release code. */
4639 fold_stmt_inplace (&gsi);
4640 /* When changing a call into a noreturn call, cfg cleanup
4641 is needed to fix up the noreturn call. */
4642 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4643 el_to_fixup.safe_push (stmt);
4644 }
4645 else
4646 {
4647 fold_stmt (&gsi);
4648 stmt = gsi_stmt (gsi);
4649 if ((gimple_code (stmt) == GIMPLE_COND
4650 && (gimple_cond_true_p (as_a <gcond *> (stmt))
4651 || gimple_cond_false_p (as_a <gcond *> (stmt))))
4652 || (gimple_code (stmt) == GIMPLE_SWITCH
4653 && TREE_CODE (gimple_switch_index (
4654 as_a <gswitch *> (stmt)))
4655 == INTEGER_CST))
4656 el_todo |= TODO_cleanup_cfg;
4657 }
4658 /* If we removed EH side-effects from the statement, clean
4659 its EH information. */
4660 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4661 {
4662 bitmap_set_bit (need_eh_cleanup,
4663 gimple_bb (stmt)->index);
4664 if (dump_file && (dump_flags & TDF_DETAILS))
4665 fprintf (dump_file, " Removed EH side-effects.\n");
4666 }
4667 /* Likewise for AB side-effects. */
4668 if (can_make_abnormal_goto
4669 && !stmt_can_make_abnormal_goto (stmt))
4670 {
4671 bitmap_set_bit (need_ab_cleanup,
4672 gimple_bb (stmt)->index);
4673 if (dump_file && (dump_flags & TDF_DETAILS))
4674 fprintf (dump_file, " Removed AB side-effects.\n");
4675 }
4676 update_stmt (stmt);
4677 if (vdef != gimple_vdef (stmt))
4678 VN_INFO (vdef)->valnum = vuse;
4679 }
4680
4681 /* Make new values available - for fully redundant LHS we
4682 continue with the next stmt above and skip this. */
4683 def_operand_p defp;
4684 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4685 eliminate_push_avail (DEF_FROM_PTR (defp));
4686 }
4687
4688 /* Replace destination PHI arguments. */
4689 edge_iterator ei;
4690 edge e;
4691 FOR_EACH_EDGE (e, ei, b->succs)
4692 {
4693 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4694 !gsi_end_p (gsi);
4695 gsi_next (&gsi))
4696 {
4697 gphi *phi = gsi.phi ();
4698 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4699 tree arg = USE_FROM_PTR (use_p);
4700 if (TREE_CODE (arg) != SSA_NAME
4701 || virtual_operand_p (arg))
4702 continue;
4703 tree sprime = eliminate_avail (arg);
4704 if (sprime && may_propagate_copy (arg, sprime))
4705 {
4706 propagate_value (use_p, sprime);
4707 if (TREE_CODE (sprime) == SSA_NAME)
4708 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4709 }
4710 }
4711 }
4712 return NULL;
4713 }
4714
4715 /* Pop the block's scope, making leaders that went out of scope no longer available. */
4716
4717 void
4718 eliminate_dom_walker::after_dom_children (basic_block)
4719 {
4720 tree entry;
4721 while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4722 {
4723 tree valnum = VN_INFO (entry)->valnum;
4724 tree old = el_avail[SSA_NAME_VERSION (valnum)];
4725 if (old == entry)
4726 el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4727 else
4728 el_avail[SSA_NAME_VERSION (valnum)] = entry;
4729 }
4730 }
4731
4732 /* Eliminate fully redundant computations. */
4733
4734 static unsigned int
4735 eliminate (bool do_pre)
4736 {
4737 gimple_stmt_iterator gsi;
4738 gimple *stmt;
4739
4740 need_eh_cleanup = BITMAP_ALLOC (NULL);
4741 need_ab_cleanup = BITMAP_ALLOC (NULL);
4742
4743 el_to_remove.create (0);
4744 el_to_fixup.create (0);
4745 el_todo = 0;
4746 el_avail.create (num_ssa_names);
4747 el_avail_stack.create (0);
4748
4749 eliminate_dom_walker (CDI_DOMINATORS,
4750 do_pre).walk (cfun->cfg->x_entry_block_ptr);
4751
4752 el_avail.release ();
4753 el_avail_stack.release ();
4754
4755 /* We cannot remove stmts during the BB walk, and especially cannot
4756 release SSA names there, as this confuses the VN machinery. The
4757 stmts ending up in el_to_remove are either stores or simple copies.
4758 Remove stmts in reverse order to make debug stmt creation possible. */
4759 while (!el_to_remove.is_empty ())
4760 {
4761 stmt = el_to_remove.pop ();
4762
4763 if (dump_file && (dump_flags & TDF_DETAILS))
4764 {
4765 fprintf (dump_file, "Removing dead stmt ");
4766 print_gimple_stmt (dump_file, stmt, 0, 0);
4767 }
4768
4769 tree lhs;
4770 if (gimple_code (stmt) == GIMPLE_PHI)
4771 lhs = gimple_phi_result (stmt);
4772 else
4773 lhs = gimple_get_lhs (stmt);
4774
4775 if (inserted_exprs
4776 && TREE_CODE (lhs) == SSA_NAME)
4777 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4778
4779 gsi = gsi_for_stmt (stmt);
4780 if (gimple_code (stmt) == GIMPLE_PHI)
4781 remove_phi_node (&gsi, true);
4782 else
4783 {
4784 basic_block bb = gimple_bb (stmt);
4785 unlink_stmt_vdef (stmt);
4786 if (gsi_remove (&gsi, true))
4787 bitmap_set_bit (need_eh_cleanup, bb->index);
4788 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
4789 bitmap_set_bit (need_ab_cleanup, bb->index);
4790 release_defs (stmt);
4791 }
4792
4793 /* Removing a stmt may expose a forwarder block. */
4794 el_todo |= TODO_cleanup_cfg;
4795 }
4796 el_to_remove.release ();
4797
4798 /* Fixup stmts that became noreturn calls. This may require splitting
4799 blocks and thus isn't possible during the dominator walk. Do this
4800 in reverse order so we don't inadvertently remove a stmt we want to
4801 fix up by visiting a dominating now-noreturn call first. */
4802 while (!el_to_fixup.is_empty ())
4803 {
4804 stmt = el_to_fixup.pop ();
4805
4806 if (dump_file && (dump_flags & TDF_DETAILS))
4807 {
4808 fprintf (dump_file, "Fixing up noreturn call ");
4809 print_gimple_stmt (dump_file, stmt, 0, 0);
4810 }
4811
4812 if (fixup_noreturn_call (stmt))
4813 el_todo |= TODO_cleanup_cfg;
4814 }
4815 el_to_fixup.release ();
4816
4817 return el_todo;
4818 }
4819
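/* To illustrate the elimination above (hypothetical example): given

     a_1 = x_2 + y_3;
     ...
     b_4 = x_2 + y_3;
     use (b_4);

   SCCVN assigns a_1 and b_4 the same value number; the domwalk records
   a_1 as the leader when visiting its definition, so at b_4 it either
   propagates a_1 into all uses and queues the definition of b_4 in
   el_to_remove, or replaces the RHS of the statement with a_1. */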
4820 /* Perform CFG cleanups made necessary by elimination. */
4821
4822 static unsigned
4823 fini_eliminate (void)
4824 {
4825 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4826 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4827
4828 if (do_eh_cleanup)
4829 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4830
4831 if (do_ab_cleanup)
4832 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4833
4834 BITMAP_FREE (need_eh_cleanup);
4835 BITMAP_FREE (need_ab_cleanup);
4836
4837 if (do_eh_cleanup || do_ab_cleanup)
4838 return TODO_cleanup_cfg;
4839 return 0;
4840 }
4841
4842 /* Borrow a bit of tree-ssa-dce.c for the moment.
4843 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4844 this may be a bit faster, and we may want critical edges kept split. */
4845
4846 /* If OP's defining statement has not already been determined to be necessary,
4847 mark that statement necessary. Return the stmt, if it is newly
4848 necessary. */
4849
4850 static inline gimple *
4851 mark_operand_necessary (tree op)
4852 {
4853 gimple *stmt;
4854
4855 gcc_assert (op);
4856
4857 if (TREE_CODE (op) != SSA_NAME)
4858 return NULL;
4859
4860 stmt = SSA_NAME_DEF_STMT (op);
4861 gcc_assert (stmt);
4862
4863 if (gimple_plf (stmt, NECESSARY)
4864 || gimple_nop_p (stmt))
4865 return NULL;
4866
4867 gimple_set_plf (stmt, NECESSARY, true);
4868 return stmt;
4869 }
4870
4871 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4872 to insert PHI nodes sometimes, and because value numbering of casts isn't
4873 perfect, we sometimes end up inserting dead code. This simple DCE-like
4874 pass removes any insertions we made that weren't actually used. */
4875
4876 static void
4877 remove_dead_inserted_code (void)
4878 {
4879 bitmap worklist;
4880 unsigned i;
4881 bitmap_iterator bi;
4882 gimple *t;
4883
4884 worklist = BITMAP_ALLOC (NULL);
4885 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4886 {
4887 t = SSA_NAME_DEF_STMT (ssa_name (i));
4888 if (gimple_plf (t, NECESSARY))
4889 bitmap_set_bit (worklist, i);
4890 }
4891 while (!bitmap_empty_p (worklist))
4892 {
4893 i = bitmap_first_set_bit (worklist);
4894 bitmap_clear_bit (worklist, i);
4895 t = SSA_NAME_DEF_STMT (ssa_name (i));
4896
4897 /* PHI nodes are somewhat special in that each PHI alternative has
4898 data and control dependencies. All the statements feeding the
4899 PHI node's arguments are always necessary. */
4900 if (gimple_code (t) == GIMPLE_PHI)
4901 {
4902 unsigned k;
4903
4904 for (k = 0; k < gimple_phi_num_args (t); k++)
4905 {
4906 tree arg = PHI_ARG_DEF (t, k);
4907 if (TREE_CODE (arg) == SSA_NAME)
4908 {
4909 gimple *n = mark_operand_necessary (arg);
4910 if (n)
4911 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4912 }
4913 }
4914 }
4915 else
4916 {
4917 /* Propagate through the operands. Examine all the USE, VUSE and
4918 VDEF operands in this statement. Mark all the statements
4919 which feed this statement's uses as necessary. */
4920 ssa_op_iter iter;
4921 tree use;
4922
4923 /* The operands of VDEF expressions are also needed as they
4924 represent potential definitions that may reach this
4925 statement (VDEF operands allow us to follow def-def
4926 links). */
4927
4928 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4929 {
4930 gimple *n = mark_operand_necessary (use);
4931 if (n)
4932 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4933 }
4934 }
4935 }
4936
4937 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4938 {
4939 t = SSA_NAME_DEF_STMT (ssa_name (i));
4940 if (!gimple_plf (t, NECESSARY))
4941 {
4942 gimple_stmt_iterator gsi;
4943
4944 if (dump_file && (dump_flags & TDF_DETAILS))
4945 {
4946 fprintf (dump_file, "Removing unnecessary insertion:");
4947 print_gimple_stmt (dump_file, t, 0, 0);
4948 }
4949
4950 gsi = gsi_for_stmt (t);
4951 if (gimple_code (t) == GIMPLE_PHI)
4952 remove_phi_node (&gsi, true);
4953 else
4954 {
4955 gsi_remove (&gsi, true);
4956 release_defs (t);
4957 }
4958 }
4959 }
4960 BITMAP_FREE (worklist);
4961 }
4962
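/* For instance (hypothetical): PRE may insert tem_5 = a_1 + b_2 on an
   edge and a PHI tem_6 = PHI <tem_5, t_3> to make a + b fully
   redundant, only for elimination to later propagate t_3's value into
   every use; the insertions are then unused, never marked NECESSARY,
   and removed here. */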
4963
4964 /* Initialize data structures used by PRE. */
4965
4966 static void
4967 init_pre (void)
4968 {
4969 basic_block bb;
4970
4971 next_expression_id = 1;
4972 expressions.create (0);
4973 expressions.safe_push (NULL);
4974 value_expressions.create (get_max_value_id () + 1);
4975 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4976 name_to_id.create (0);
4977
4978 inserted_exprs = BITMAP_ALLOC (NULL);
4979
4980 connect_infinite_loops_to_exit ();
4981 memset (&pre_stats, 0, sizeof (pre_stats));
4982
4983 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4984
4985 calculate_dominance_info (CDI_DOMINATORS);
4986
4987 bitmap_obstack_initialize (&grand_bitmap_obstack);
4988 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4989 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4990 FOR_ALL_BB_FN (bb, cfun)
4991 {
4992 EXP_GEN (bb) = bitmap_set_new ();
4993 PHI_GEN (bb) = bitmap_set_new ();
4994 TMP_GEN (bb) = bitmap_set_new ();
4995 AVAIL_OUT (bb) = bitmap_set_new ();
4996 }
4997 }
4998
4999
5000 /* Deallocate data structures used by PRE. */
5001
5002 static void
5003 fini_pre ()
5004 {
5005 value_expressions.release ();
5006 BITMAP_FREE (inserted_exprs);
5007 bitmap_obstack_release (&grand_bitmap_obstack);
5008 bitmap_set_pool.release ();
5009 pre_expr_pool.release ();
5010 delete phi_translate_table;
5011 phi_translate_table = NULL;
5012 delete expression_to_id;
5013 expression_to_id = NULL;
5014 name_to_id.release ();
5015
5016 free_aux_for_blocks ();
5017 }
5018
5019 namespace {
5020
5021 const pass_data pass_data_pre =
5022 {
5023 GIMPLE_PASS, /* type */
5024 "pre", /* name */
5025 OPTGROUP_NONE, /* optinfo_flags */
5026 TV_TREE_PRE, /* tv_id */
5027 /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
5028 pass_pre. */
5029 ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
5030 0, /* properties_provided */
5031 PROP_no_crit_edges, /* properties_destroyed */
5032 TODO_rebuild_alias, /* todo_flags_start */
5033 0, /* todo_flags_finish */
5034 };
5035
5036 class pass_pre : public gimple_opt_pass
5037 {
5038 public:
5039 pass_pre (gcc::context *ctxt)
5040 : gimple_opt_pass (pass_data_pre, ctxt)
5041 {}
5042
5043 /* opt_pass methods: */
5044 virtual bool gate (function *)
5045 { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
5046 virtual unsigned int execute (function *);
5047
5048 }; // class pass_pre
5049
5050 unsigned int
5051 pass_pre::execute (function *fun)
5052 {
5053 unsigned int todo = 0;
5054
5055 do_partial_partial =
5056 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
5057
5058 /* This has to happen before SCCVN runs because
5059 loop_optimizer_init may create new phis, etc. */
5060 loop_optimizer_init (LOOPS_NORMAL);
5061
5062 if (!run_scc_vn (VN_WALK))
5063 {
5064 loop_optimizer_finalize ();
5065 return 0;
5066 }
5067
5068 init_pre ();
5069 scev_initialize ();
5070
5071 /* Collect and value number expressions computed in each basic block. */
5072 compute_avail ();
5073
5074 /* Insert can get quite slow on an incredibly large number of basic
5075 blocks due to some quadratic behavior. Until this behavior is
5076 fixed, don't run it when we have an incredibly large number of
5077 bb's. If we aren't going to run insert, there is no point in
5078 computing ANTIC, either, even though it's plenty fast. */
5079 if (n_basic_blocks_for_fn (fun) < 4000)
5080 {
5081 compute_antic ();
5082 insert ();
5083 }
5084
5085 /* Make sure to remove fake edges before committing our inserts.
5086 This makes sure we don't end up with extra critical edges that
5087 we would need to split. */
5088 remove_fake_exit_edges ();
5089 gsi_commit_edge_inserts ();
5090
5091 /* Elimination folds statements, which might (though it should
5092 not...) end up not keeping virtual operands up-to-date. */
5093 gcc_assert (!need_ssa_update_p (fun));
5094
5095 /* Remove all the redundant expressions. */
5096 todo |= eliminate (true);
5097
5098 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
5099 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
5100 statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
5101 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
5102 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
5103
5104 clear_expression_ids ();
5105 remove_dead_inserted_code ();
5106
5107 scev_finalize ();
5108 fini_pre ();
5109 todo |= fini_eliminate ();
5110 loop_optimizer_finalize ();
5111
5112 /* Restore SSA info before tail-merging as that resets it as well. */
5113 scc_vn_restore_ssa_info ();
5114
5115 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
5116 case we can merge the block with the remaining predecessor of the block.
5117 It should either:
5118 - call merge_blocks after each tail merge iteration
5119 - call merge_blocks after all tail merge iterations
5120 - mark TODO_cleanup_cfg when necessary
5121 - share the cfg cleanup with fini_pre. */
5122 todo |= tail_merge_optimize (todo);
5123
5124 free_scc_vn ();
5125
5126 /* Tail merging invalidates the virtual SSA web, together with
5127 cfg-cleanup opportunities exposed by PRE this will wreck the
5128 SSA updating machinery. So make sure to run update-ssa
5129 manually, before eventually scheduling cfg-cleanup as part of
5130 the todo. */
5131 update_ssa (TODO_update_ssa_only_virtuals);
5132
5133 return todo;
5134 }
5135
5136 } // anon namespace
5137
5138 gimple_opt_pass *
5139 make_pass_pre (gcc::context *ctxt)
5140 {
5141 return new pass_pre (ctxt);
5142 }
5143
5144 namespace {
5145
5146 const pass_data pass_data_fre =
5147 {
5148 GIMPLE_PASS, /* type */
5149 "fre", /* name */
5150 OPTGROUP_NONE, /* optinfo_flags */
5151 TV_TREE_FRE, /* tv_id */
5152 ( PROP_cfg | PROP_ssa ), /* properties_required */
5153 0, /* properties_provided */
5154 0, /* properties_destroyed */
5155 0, /* todo_flags_start */
5156 0, /* todo_flags_finish */
5157 };
5158
5159 class pass_fre : public gimple_opt_pass
5160 {
5161 public:
5162 pass_fre (gcc::context *ctxt)
5163 : gimple_opt_pass (pass_data_fre, ctxt)
5164 {}
5165
5166 /* opt_pass methods: */
5167 opt_pass * clone () { return new pass_fre (m_ctxt); }
5168 virtual bool gate (function *) { return flag_tree_fre != 0; }
5169 virtual unsigned int execute (function *);
5170
5171 }; // class pass_fre
5172
5173 unsigned int
5174 pass_fre::execute (function *fun)
5175 {
5176 unsigned int todo = 0;
5177
5178 if (!run_scc_vn (VN_WALKREWRITE))
5179 return 0;
5180
5181 memset (&pre_stats, 0, sizeof (pre_stats));
5182
5183 /* Remove all the redundant expressions. */
5184 todo |= eliminate (false);
5185
5186 todo |= fini_eliminate ();
5187
5188 scc_vn_restore_ssa_info ();
5189 free_scc_vn ();
5190
5191 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
5192 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
5193
5194 return todo;
5195 }
5196
5197 } // anon namespace
5198
5199 gimple_opt_pass *
5200 make_pass_fre (gcc::context *ctxt)
5201 {
5202 return new pass_fre (ctxt);
5203 }