/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety
   (availability) of the expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
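
/* As a small worked illustration (hypothetical user code and SSA
   names, not taken from any testcase), consider:

     if (cond)
       x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   The value of a_2 + b_3 is AVAIL on the path through the true arm
   but not on the path around it, while it is ANTIC at the join, so
   it is partially redundant.  Insertion computes a_2 + b_3 in the
   predecessor where it was missing and merges the results with a PHI:

     if (cond)
       x_1 = a_2 + b_3;
     else
       pretmp_5 = a_2 + b_3;
     # prephitmp_6 = PHI <x_1, pretmp_5>
     y_4 = prephitmp_6;

   after which the elimination walk replaces the now fully redundant
   computation of y_4 with prephitmp_6.  */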

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
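
/* A minimal sketch of the mapping (hypothetical numbers): if a_5 and
   the expression b_2 + c_3 compute the same value, both share one
   value number whose representative SSA_NAME is a_5, and both are
   keyed by the same small integer value_id, say 11, which is what
   the bitmap sets below actually store.  */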

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
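
/* Continuing the sketch above (hypothetical ids): a set holding a_5
   (expression id 2, value_id 11) and d_7 (expression id 9, value_id
   13) is stored as the expression bitmap {2, 9} paired with the
   value bitmap {11, 13}, so testing whether some value is in the set
   is a single bit test in the second bitmap.  */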

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare two pre_exprs P1 and P2 for equality; used by the
   expression hash table.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash the pre_expr P1; used by the expression hash table.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}


/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the expression id of EXPR if one has been assigned,
   or 0 if it has none yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* True if we are running FRE rather than full PRE.  */
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call


/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}


/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  free (*slot);
  *slot = (void *) new_pair;
}


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

/* Insert EXPR with value id VAL into SET.  If ALLOW_CONSTANTS is true,
   constant values may be inserted as well.  */

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
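
/* For instance (hypothetical value-ids): if SET contains a_1 with
   value-id 3 and a_1 + b_2 with value-id 5, walking the value bitmap
   upwards yields a_1 before a_1 + b_2.  Since an expression is
   generally value-numbered after its operands, the operands receive
   smaller value-ids, which is why value-id order is a topological
   order here.  */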

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

/* Like print_bitmap_set but always prints to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

/* Print the dataflow sets computed for basic block BB to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  if (!in_fre)
    {
      print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
      print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
      print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
      print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
      if (do_partial_partial)
        print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
      print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
    }
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}


/* Like print_value_expressions but always prints to stderr.  */
DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of them
         have virtual operands, they get handled by the nary ops
         struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}
/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      add_referenced_var (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
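
/* A short sketch of the fallback above (hypothetical names): if phi
   translation produces the value of a_2 + 1 along an edge where no
   statement computes it, there is no existing SSA_NAME carrying that
   value.  We then create a fresh name such as pretmp_9, defined by a
   GIMPLE_NOP, record the value_id on it, and use pretmp_9 as the
   operand wherever larger expressions refer to this value.  */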


static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                /* Make sure we do not recursively translate ourselves
                   like for translating a[n_1] with the leader for
                   n_1 being a[n_1].  */
                if (get_expression_id (leader) != get_expression_id (expr))
                  {
                    opresult = phi_translate (leader, set1, set2,
                                              pred, phiblock);
                    if (!opresult)
                      break;
                    if (opresult != leader)
                      {
                        tree name = get_representative_for (opresult);
                        if (!name)
                          break;
                        changed |= name != op[n];
                        op[n] = name;
                      }
                  }
              }
            if (n != 3)
              {
                if (newoperands)
                  VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op[0]) == INTEGER_CST
                && TREE_CODE (op[1]) == INTEGER_CST
                && TREE_CODE (op[2]) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op[0]);
                off = double_int_add (off,
                                      double_int_neg
                                        (tree_to_double_int (op[1])));
                off = double_int_mul (off, tree_to_double_int (op[2]));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, VN_WALK);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            /* We can always insert constants, so if we have a partial
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id() + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
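
/* A worked example of the translation above (hypothetical SSA names):
   given in PHIBLOCK

     a_1 = PHI <a_5 (PRED), a_7 (other)>
     b_2 = PHI <b_6 (PRED), b_8 (other)>

   translating the NARY a_1 + b_2 through the edge from PRED replaces
   each operand by its phi argument, yielding a_5 + b_6.  If value
   numbering already knows a_5 + b_6 we reuse its value_id, otherwise
   a fresh value_id is allocated.  This translation is what lets the
   ANTIC sets flow backwards through PHI nodes.  */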
1816
1817
1818 /* For each expression in SET, translate the values through phi nodes
1819 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1820 expressions in DEST. */
1821
1822 static void
1823 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1824 basic_block phiblock)
1825 {
1826 VEC (pre_expr, heap) *exprs;
1827 pre_expr expr;
1828 int i;
1829
1830 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1831 {
1832 bitmap_set_copy (dest, set);
1833 return;
1834 }
1835
1836 exprs = sorted_array_from_bitmap_set (set);
1837 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
1838 {
1839 pre_expr translated;
1840 translated = phi_translate (expr, set, NULL, pred, phiblock);
1841 if (!translated)
1842 continue;
1843
1844 /* We might end up with multiple expressions from SET being
1845 translated to the same value. In this case we do not want
1846 to retain the NARY or REFERENCE expression but prefer a NAME
1847 which would be the leader. */
1848 if (translated->kind == NAME)
1849 bitmap_value_replace_in_set (dest, translated);
1850 else
1851 bitmap_value_insert_into_set (dest, translated);
1852 }
1853 VEC_free (pre_expr, heap, exprs);
1854 }
1855
1856 /* Find the leader for a value (i.e., the name representing that
1857 value) in a given set, and return it. If STMT is non-NULL it
1858 makes sure the defining statement for the leader dominates it.
1859 Return NULL if no leader is found. */
1860
1861 static pre_expr
1862 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
1863 {
1864 if (value_id_constant_p (val))
1865 {
1866 unsigned int i;
1867 bitmap_iterator bi;
1868 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1869
1870 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1871 {
1872 pre_expr expr = expression_for_id (i);
1873 if (expr->kind == CONSTANT)
1874 return expr;
1875 }
1876 }
1877 if (bitmap_set_contains_value (set, val))
1878 {
1879 /* Rather than walk the entire bitmap of expressions, and see
1880 whether any of them has the value we are looking for, we look
1881 at the reverse mapping, which tells us the set of expressions
1882 that have a given value (IE value->expressions with that
1883 value) and see if any of those expressions are in our set.
1884 The number of expressions per value is usually significantly
1885 less than the number of expressions in the set. In fact, for
1886 large testcases, doing it this way is roughly 5-10x faster
1887 than walking the bitmap.
1888 If this is somehow a significant loss for some cases, we can
1889 choose which set to walk based on which set is smaller. */
1890 unsigned int i;
1891 bitmap_iterator bi;
1892 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1893
1894 EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
1895 &set->expressions, 0, i, bi)
1896 {
1897 pre_expr val = expression_for_id (i);
1898 /* At the point where stmt is not null, there should always
1899 be an SSA_NAME first in the list of expressions. */
1900 if (stmt)
1901 {
1902 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
1903 if (gimple_code (def_stmt) != GIMPLE_PHI
1904 && gimple_bb (def_stmt) == gimple_bb (stmt)
1905 /* PRE insertions are at the end of the basic-block
1906 and have UID 0. */
1907 && (gimple_uid (def_stmt) == 0
1908 || gimple_uid (def_stmt) >= gimple_uid (stmt)))
1909 continue;
1910 }
1911 return val;
1912 }
1913 }
1914 return NULL;
1915 }
1916
1917 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1918 BLOCK by seeing if it is not killed in the block. Note that we are
1919 only determining whether there is a store that kills it. Because
1920 of the order in which clean iterates over values, we are guaranteed
1921 that altered operands will have caused us to be eliminated from the
1922 ANTIC_IN set already. */
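
/* A hypothetical example: for the expression {*p_1, VUSE .MEM_3},
   a store "*q_2 = x_5" in BLOCK kills it if the store may clobber
   *p_1; conversely, if the first memory statement in BLOCK is
   another load with the same VUSE .MEM_3, no kill can have happened
   before it, which is why the walk below can stop there.  */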
1923
1924 static bool
1925 value_dies_in_block_x (pre_expr expr, basic_block block)
1926 {
1927 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1928 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1929 gimple def;
1930 gimple_stmt_iterator gsi;
1931 unsigned id = get_expression_id (expr);
1932 bool res = false;
1933 ao_ref ref;
1934
1935 if (!vuse)
1936 return false;
1937
1938 /* Lookup a previously calculated result. */
1939 if (EXPR_DIES (block)
1940 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1941 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1942
1943 /* A memory expression {e, VUSE} dies in the block if there is a
1944 statement that may clobber e.  If, walking statements from the
1945 top of the basic block, we find a statement that uses VUSE, there
1946 can be no kill in between that use and the original statement that
1947 loaded {e, VUSE}, so we can stop walking.  */
1948 ref.base = NULL_TREE;
1949 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1950 {
1951 tree def_vuse, def_vdef;
1952 def = gsi_stmt (gsi);
1953 def_vuse = gimple_vuse (def);
1954 def_vdef = gimple_vdef (def);
1955
1956 /* Not a memory statement. */
1957 if (!def_vuse)
1958 continue;
1959
1960 /* Not a may-def. */
1961 if (!def_vdef)
1962 {
1963 /* A load with the same VUSE, we're done. */
1964 if (def_vuse == vuse)
1965 break;
1966
1967 continue;
1968 }
1969
1970 /* Init ref only if we really need it. */
1971 if (ref.base == NULL_TREE
1972 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1973 refx->operands))
1974 {
1975 res = true;
1976 break;
1977 }
1978 /* If the statement may clobber expr, it dies. */
1979 if (stmt_may_clobber_ref_p_1 (def, &ref))
1980 {
1981 res = true;
1982 break;
1983 }
1984 }
1985
1986 /* Remember the result. */
1987 if (!EXPR_DIES (block))
1988 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1989 bitmap_set_bit (EXPR_DIES (block), id * 2);
1990 if (res)
1991 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1992
1993 return res;
1994 }
1995
1996
1997 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1998 contains its value-id. */
1999
2000 static bool
2001 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
2002 {
2003 if (op && TREE_CODE (op) == SSA_NAME)
2004 {
2005 unsigned int value_id = VN_INFO (op)->value_id;
2006 if (!(bitmap_set_contains_value (set1, value_id)
2007 || (set2 && bitmap_set_contains_value (set2, value_id))))
2008 return false;
2009 }
2010 return true;
2011 }
2012
2013 /* Determine if the expression EXPR is valid in SET1 U SET2.
2014 ONLY SET2 CAN BE NULL.
2015 This means that we have a leader for each part of the expression
2016 (if it consists of values), or the expression is an SSA_NAME.
2017 For loads/calls, we also see if the vuse is killed in this block. */
2018
2019 static bool
2020 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2021 basic_block block)
2022 {
2023 switch (expr->kind)
2024 {
2025 case NAME:
2026 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2027 case NARY:
2028 {
2029 unsigned int i;
2030 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2031 for (i = 0; i < nary->length; i++)
2032 if (!op_valid_in_sets (set1, set2, nary->op[i]))
2033 return false;
2034 return true;
2035 }
2036 break;
2037 case REFERENCE:
2038 {
2039 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2040 vn_reference_op_t vro;
2041 unsigned int i;
2042
2043 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
2044 {
2045 if (!op_valid_in_sets (set1, set2, vro->op0)
2046 || !op_valid_in_sets (set1, set2, vro->op1)
2047 || !op_valid_in_sets (set1, set2, vro->op2))
2048 return false;
2049 }
2050 return true;
2051 }
2052 default:
2053 gcc_unreachable ();
2054 }
2055 }
2056
2057 /* Clean the set of expressions that are no longer valid in SET1 or
2058 SET2. This means expressions that are made up of values we have no
2059 leaders for in SET1 or SET2. This version is used for partial
2060 anticipation, which means it is not valid in either ANTIC_IN or
2061 PA_IN. */
2062
2063 static void
2064 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2065 {
2066 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2067 pre_expr expr;
2068 int i;
2069
2070 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2071 {
2072 if (!valid_in_sets (set1, set2, expr, block))
2073 bitmap_remove_from_set (set1, expr);
2074 }
2075 VEC_free (pre_expr, heap, exprs);
2076 }
2077
2078 /* Clean the set of expressions that are no longer valid in SET. This
2079 means expressions that are made up of values we have no leaders for
2080 in SET. */
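
/* E.g., with made-up names: if the set holds {plus, a_1, b_2} but no
   leader for the value of b_2 remains in it, the plus expression can
   no longer be generated from the set and is removed.  */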
2081
2082 static void
2083 clean (bitmap_set_t set, basic_block block)
2084 {
2085 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2086 pre_expr expr;
2087 int i;
2088
2089 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2090 {
2091 if (!valid_in_sets (set, NULL, expr, block))
2092 bitmap_remove_from_set (set, expr);
2093 }
2094 VEC_free (pre_expr, heap, exprs);
2095 }
2096
2097 /* Clean the set of expressions that are no longer valid in SET because
2098 they are clobbered in BLOCK or because they trap and may not be executed. */
2099
2100 static void
2101 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2102 {
2103 bitmap_iterator bi;
2104 unsigned i;
2105
2106 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2107 {
2108 pre_expr expr = expression_for_id (i);
2109 if (expr->kind == REFERENCE)
2110 {
2111 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2112 if (ref->vuse)
2113 {
2114 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2115 if (!gimple_nop_p (def_stmt)
2116 && ((gimple_bb (def_stmt) != block
2117 && !dominated_by_p (CDI_DOMINATORS,
2118 block, gimple_bb (def_stmt)))
2119 || (gimple_bb (def_stmt) == block
2120 && value_dies_in_block_x (expr, block))))
2121 bitmap_remove_from_set (set, expr);
2122 }
2123 }
2124 else if (expr->kind == NARY)
2125 {
2126 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2127 /* If the NARY may trap, make sure the block does not contain
2128 a possible exit point.
2129 ??? This is overly conservative if we translate AVAIL_OUT
2130 as the available expression might be after the exit point. */
2131 if (BB_MAY_NOTRETURN (block)
2132 && vn_nary_may_trap (nary))
2133 bitmap_remove_from_set (set, expr);
2134 }
2135 }
2136 }
2137
2138 static sbitmap has_abnormal_preds;
2139
2140 /* List of blocks that may have changed during ANTIC computation and
2141 thus need to be iterated over. */
2142
2143 static sbitmap changed_blocks;
2144
2145 /* Decide whether to defer a block for a later iteration, or PHI
2146 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2147 should defer the block, and true if we processed it. */
2148
2149 static bool
2150 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2151 basic_block block, basic_block phiblock)
2152 {
2153 if (!BB_VISITED (phiblock))
2154 {
2155 SET_BIT (changed_blocks, block->index);
2156 BB_VISITED (block) = 0;
2157 BB_DEFERRED (block) = 1;
2158 return false;
2159 }
2160 else
2161 phi_translate_set (dest, source, block, phiblock);
2162 return true;
2163 }
2164
2165 /* Compute the ANTIC set for BLOCK.
2166
2167 If succs(BLOCK) > 1 then
2168 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2169 else if succs(BLOCK) == 1 then
2170 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2171
2172 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2173 */
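
/* A small worked example with made-up names: in the diamond

     bb1: if (c_1) goto bb2; else goto bb3;
     bb2: x_2 = a_4 + b_5;  goto bb4;
     bb3: y_3 = a_4 + b_5;  goto bb4;

   a_4 + b_5 is in EXP_GEN, and hence ANTIC_IN, of both bb2 and bb3;
   since ANTIC_OUT (bb1) is the intersection of the ANTIC_IN of the
   successors, the expression is anticipatable at the end of bb1,
   i.e. inserting it there would be safe on every outgoing path.  */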
2174
2175 static bool
2176 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2177 {
2178 bool changed = false;
2179 bitmap_set_t S, old, ANTIC_OUT;
2180 bitmap_iterator bi;
2181 unsigned int bii;
2182 edge e;
2183 edge_iterator ei;
2184
2185 old = ANTIC_OUT = S = NULL;
2186 BB_VISITED (block) = 1;
2187
2188 /* If any edges from predecessors are abnormal, antic_in is empty,
2189 so do nothing. */
2190 if (block_has_abnormal_pred_edge)
2191 goto maybe_dump_sets;
2192
2193 old = ANTIC_IN (block);
2194 ANTIC_OUT = bitmap_set_new ();
2195
2196 /* If the block has no successors, ANTIC_OUT is empty. */
2197 if (EDGE_COUNT (block->succs) == 0)
2198 ;
2199 /* If we have one successor, we could have some phi nodes to
2200 translate through. */
2201 else if (single_succ_p (block))
2202 {
2203 basic_block succ_bb = single_succ (block);
2204
2205 /* We trade iterations of the dataflow equations for having to
2206 phi translate the maximal set, which is incredibly slow
2207 (since the maximal set often has 300+ members, even when you
2208 have a small number of blocks).
2209 Basically, we defer the computation of ANTIC for this block
2210 until we have processed its successor, which will inevitably
2211 have a *much* smaller set of values to phi translate once
2212 clean has been run on it.
2213 The cost of doing this is that we technically perform more
2214 iterations; however, they are lower-cost iterations.
2215
2216 Timings for PRE on tramp3d-v4:
2217 without maximal set fix: 11 seconds
2218 with maximal set fix/without deferring: 26 seconds
2219 with maximal set fix/with deferring: 11 seconds
2220 */
2221
2222 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2223 block, succ_bb))
2224 {
2225 changed = true;
2226 goto maybe_dump_sets;
2227 }
2228 }
2229 /* If we have multiple successors, we take the intersection of all of
2230 them. Note that in the case of loop exit phi nodes, we may have
2231 phis to translate through. */
2232 else
2233 {
2234 VEC(basic_block, heap) * worklist;
2235 size_t i;
2236 basic_block bprime, first = NULL;
2237
2238 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2239 FOR_EACH_EDGE (e, ei, block->succs)
2240 {
2241 if (!first
2242 && BB_VISITED (e->dest))
2243 first = e->dest;
2244 else if (BB_VISITED (e->dest))
2245 VEC_quick_push (basic_block, worklist, e->dest);
2246 }
2247
2248 /* Of multiple successors we have to have visited one already. */
2249 if (!first)
2250 {
2251 SET_BIT (changed_blocks, block->index);
2252 BB_VISITED (block) = 0;
2253 BB_DEFERRED (block) = 1;
2254 changed = true;
2255 VEC_free (basic_block, heap, worklist);
2256 goto maybe_dump_sets;
2257 }
2258
2259 if (!gimple_seq_empty_p (phi_nodes (first)))
2260 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2261 else
2262 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2263
2264 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2265 {
2266 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2267 {
2268 bitmap_set_t tmp = bitmap_set_new ();
2269 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2270 bitmap_set_and (ANTIC_OUT, tmp);
2271 bitmap_set_free (tmp);
2272 }
2273 else
2274 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2275 }
2276 VEC_free (basic_block, heap, worklist);
2277 }
2278
2279 /* Prune expressions that are clobbered in block and thus become
2280 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2281 prune_clobbered_mems (ANTIC_OUT, block);
2282
2283 /* Generate ANTIC_OUT - TMP_GEN. */
2284 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2285
2286 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2287 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2288 TMP_GEN (block));
2289
2290 /* Then union in the ANTIC_OUT - TMP_GEN values,
2291 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2292 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2293 bitmap_value_insert_into_set (ANTIC_IN (block),
2294 expression_for_id (bii));
2295
2296 clean (ANTIC_IN (block), block);
2297
2298 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2299 {
2300 changed = true;
2301 SET_BIT (changed_blocks, block->index);
2302 FOR_EACH_EDGE (e, ei, block->preds)
2303 SET_BIT (changed_blocks, e->src->index);
2304 }
2305 else
2306 RESET_BIT (changed_blocks, block->index);
2307
2308 maybe_dump_sets:
2309 if (dump_file && (dump_flags & TDF_DETAILS))
2310 {
2311 if (!BB_DEFERRED (block) || BB_VISITED (block))
2312 {
2313 if (ANTIC_OUT)
2314 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2315
2316 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2317 block->index);
2318
2319 if (S)
2320 print_bitmap_set (dump_file, S, "S", block->index);
2321 }
2322 else
2323 {
2324 fprintf (dump_file,
2325 "Block %d was deferred for a future iteration.\n",
2326 block->index);
2327 }
2328 }
2329 if (old)
2330 bitmap_set_free (old);
2331 if (S)
2332 bitmap_set_free (S);
2333 if (ANTIC_OUT)
2334 bitmap_set_free (ANTIC_OUT);
2335 return changed;
2336 }
2337
2338 /* Compute PARTIAL_ANTIC for BLOCK.
2339
2340 If succs(BLOCK) > 1 then
2341 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2342 in ANTIC_OUT for all succ(BLOCK)
2343 else if succs(BLOCK) == 1 then
2344 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2345
2346 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2347 - ANTIC_IN[BLOCK])
2348
2349 */
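
/* Rough intuition with made-up names: if bb1 branches to bb2 and
   bb3 but only bb2 computes a_4 + b_5, the expression drops out of
   the intersection forming ANTIC_OUT (bb1) yet stays in the union
   forming PA_OUT (bb1).  Partial anticipation thus tracks
   expressions occurring on some but not all paths, which is what
   partial-partial insertion exploits.  */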
2350 static bool
2351 compute_partial_antic_aux (basic_block block,
2352 bool block_has_abnormal_pred_edge)
2353 {
2354 bool changed = false;
2355 bitmap_set_t old_PA_IN;
2356 bitmap_set_t PA_OUT;
2357 edge e;
2358 edge_iterator ei;
2359 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2360
2361 old_PA_IN = PA_OUT = NULL;
2362
2363 /* If any edges from predecessors are abnormal, antic_in is empty,
2364 so do nothing. */
2365 if (block_has_abnormal_pred_edge)
2366 goto maybe_dump_sets;
2367
2368 /* If there are too many partially anticipatable values in the
2369 block, phi_translate_set can take an exponential time: stop
2370 before the translation starts. */
2371 if (max_pa
2372 && single_succ_p (block)
2373 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2374 goto maybe_dump_sets;
2375
2376 old_PA_IN = PA_IN (block);
2377 PA_OUT = bitmap_set_new ();
2378
2379 /* If the block has no successors, ANTIC_OUT is empty. */
2380 if (EDGE_COUNT (block->succs) == 0)
2381 ;
2382 /* If we have one successor, we could have some phi nodes to
2383 translate through. Note that we can't phi translate across DFS
2384 back edges in partial antic, because it uses a union operation on
2385 the successors. For recurrences like IV's, we will end up
2386 generating a new value in the set on each go around (i + 3 (VH.1),
2387 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2388 else if (single_succ_p (block))
2389 {
2390 basic_block succ = single_succ (block);
2391 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2392 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2393 }
2394 /* If we have multiple successors, we take the union of all of
2395 them. */
2396 else
2397 {
2398 VEC(basic_block, heap) * worklist;
2399 size_t i;
2400 basic_block bprime;
2401
2402 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2403 FOR_EACH_EDGE (e, ei, block->succs)
2404 {
2405 if (e->flags & EDGE_DFS_BACK)
2406 continue;
2407 VEC_quick_push (basic_block, worklist, e->dest);
2408 }
2409 if (VEC_length (basic_block, worklist) > 0)
2410 {
2411 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2412 {
2413 unsigned int i;
2414 bitmap_iterator bi;
2415
2416 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2417 bitmap_value_insert_into_set (PA_OUT,
2418 expression_for_id (i));
2419 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2420 {
2421 bitmap_set_t pa_in = bitmap_set_new ();
2422 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2423 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2424 bitmap_value_insert_into_set (PA_OUT,
2425 expression_for_id (i));
2426 bitmap_set_free (pa_in);
2427 }
2428 else
2429 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2430 bitmap_value_insert_into_set (PA_OUT,
2431 expression_for_id (i));
2432 }
2433 }
2434 VEC_free (basic_block, heap, worklist);
2435 }
2436
2437 /* Prune expressions that are clobbered in block and thus become
2438 invalid if translated from PA_OUT to PA_IN. */
2439 prune_clobbered_mems (PA_OUT, block);
2440
2441 /* PA_IN starts with PA_OUT - TMP_GEN.
2442 Then we subtract things from ANTIC_IN. */
2443 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2444
2445 /* For partial antic, we want to put back in the phi results, since
2446 we will properly avoid making them partially antic over backedges. */
2447 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2448 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2449
2450 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2451 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2452
2453 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2454
2455 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2456 {
2457 changed = true;
2458 SET_BIT (changed_blocks, block->index);
2459 FOR_EACH_EDGE (e, ei, block->preds)
2460 SET_BIT (changed_blocks, e->src->index);
2461 }
2462 else
2463 RESET_BIT (changed_blocks, block->index);
2464
2465 maybe_dump_sets:
2466 if (dump_file && (dump_flags & TDF_DETAILS))
2467 {
2468 if (PA_OUT)
2469 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2470
2471 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2472 }
2473 if (old_PA_IN)
2474 bitmap_set_free (old_PA_IN);
2475 if (PA_OUT)
2476 bitmap_set_free (PA_OUT);
2477 return changed;
2478 }
2479
2480 /* Compute ANTIC and partial ANTIC sets. */
2481
2482 static void
2483 compute_antic (void)
2484 {
2485 bool changed = true;
2486 int num_iterations = 0;
2487 basic_block block;
2488 int i;
2489
2490 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2491 We pre-build the map of blocks with incoming abnormal edges here. */
2492 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2493 sbitmap_zero (has_abnormal_preds);
2494
2495 FOR_EACH_BB (block)
2496 {
2497 edge_iterator ei;
2498 edge e;
2499
2500 FOR_EACH_EDGE (e, ei, block->preds)
2501 {
2502 e->flags &= ~EDGE_DFS_BACK;
2503 if (e->flags & EDGE_ABNORMAL)
2504 {
2505 SET_BIT (has_abnormal_preds, block->index);
2506 break;
2507 }
2508 }
2509
2510 BB_VISITED (block) = 0;
2511 BB_DEFERRED (block) = 0;
2512
2513 /* While we are here, give empty ANTIC_IN sets to each block. */
2514 ANTIC_IN (block) = bitmap_set_new ();
2515 PA_IN (block) = bitmap_set_new ();
2516 }
2517
2518 /* At the exit block we anticipate nothing. */
2519 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2520 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2521 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2522
2523 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2524 sbitmap_ones (changed_blocks);
2525 while (changed)
2526 {
2527 if (dump_file && (dump_flags & TDF_DETAILS))
2528 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2529 /* ??? We need to clear our PHI translation cache here as the
2530 ANTIC sets shrink and we restrict valid translations to
2531 those having operands with leaders in ANTIC. Same below
2532 for PA ANTIC computation. */
2533 num_iterations++;
2534 changed = false;
2535 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2536 {
2537 if (TEST_BIT (changed_blocks, postorder[i]))
2538 {
2539 basic_block block = BASIC_BLOCK (postorder[i]);
2540 changed |= compute_antic_aux (block,
2541 TEST_BIT (has_abnormal_preds,
2542 block->index));
2543 }
2544 }
2545 /* Theoretically possible, but *highly* unlikely. */
2546 gcc_checking_assert (num_iterations < 500);
2547 }
2548
2549 statistics_histogram_event (cfun, "compute_antic iterations",
2550 num_iterations);
2551
2552 if (do_partial_partial)
2553 {
2554 sbitmap_ones (changed_blocks);
2555 mark_dfs_back_edges ();
2556 num_iterations = 0;
2557 changed = true;
2558 while (changed)
2559 {
2560 if (dump_file && (dump_flags & TDF_DETAILS))
2561 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2562 num_iterations++;
2563 changed = false;
2564 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2565 {
2566 if (TEST_BIT (changed_blocks, postorder[i]))
2567 {
2568 basic_block block = BASIC_BLOCK (postorder[i]);
2569 changed
2570 |= compute_partial_antic_aux (block,
2571 TEST_BIT (has_abnormal_preds,
2572 block->index));
2573 }
2574 }
2575 /* Theoretically possible, but *highly* unlikely. */
2576 gcc_checking_assert (num_iterations < 500);
2577 }
2578 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2579 num_iterations);
2580 }
2581 sbitmap_free (has_abnormal_preds);
2582 sbitmap_free (changed_blocks);
2583 }
2584
2585 /* Return true if OP is a tree which we can perform PRE on.
2586 This may not match the operations we can value number, but in
2587 a perfect world would. */
2588
2589 static bool
2590 can_PRE_operation (tree op)
2591 {
2592 return UNARY_CLASS_P (op)
2593 || BINARY_CLASS_P (op)
2594 || COMPARISON_CLASS_P (op)
2595 || TREE_CODE (op) == MEM_REF
2596 || TREE_CODE (op) == COMPONENT_REF
2597 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2598 || TREE_CODE (op) == CALL_EXPR
2599 || TREE_CODE (op) == ARRAY_REF;
2600 }
2601
2602
2603 /* Inserted expressions are placed onto this worklist, which is used
2604 for performing quick dead code elimination of insertions we made
2605 that didn't turn out to be necessary. */
2606 static bitmap inserted_exprs;
2607
2608 /* Pool-allocated fake store expressions are placed onto this
2609 worklist, which, after performing dead code elimination, is walked
2610 to see which expressions need to be put into GC'able memory.  */
2611 static VEC(gimple, heap) *need_creation;
2612
2613 /* The actual worker for create_component_ref_by_pieces. */
2614
2615 static tree
2616 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2617 unsigned int *operand, gimple_seq *stmts,
2618 gimple domstmt)
2619 {
2620 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2621 *operand);
2622 tree genop;
2623 ++*operand;
2624 switch (currop->opcode)
2625 {
2626 case CALL_EXPR:
2627 {
2628 tree folded, sc = NULL_TREE;
2629 unsigned int nargs = 0;
2630 tree fn, *args;
2631 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2632 fn = currop->op0;
2633 else
2634 {
2635 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2636 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2637 if (!fn)
2638 return NULL_TREE;
2639 }
2640 if (currop->op1)
2641 {
2642 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2643 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2644 if (!sc)
2645 return NULL_TREE;
2646 }
2647 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2648 ref->operands) - 1);
2649 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2650 {
2651 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2652 operand, stmts,
2653 domstmt);
2654 if (!args[nargs])
2655 {
2656 free (args);
2657 return NULL_TREE;
2658 }
2659 nargs++;
2660 }
2661 folded = build_call_array (currop->type,
2662 (TREE_CODE (fn) == FUNCTION_DECL
2663 ? build_fold_addr_expr (fn) : fn),
2664 nargs, args);
2665 free (args);
2666 if (sc)
2667 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2668 return folded;
2669 }
2670
2671 case MEM_REF:
2672 {
2673 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2674 stmts, domstmt);
2675 tree offset = currop->op0;
2676 if (!baseop)
2677 return NULL_TREE;
2678 if (TREE_CODE (baseop) == ADDR_EXPR
2679 && handled_component_p (TREE_OPERAND (baseop, 0)))
2680 {
2681 HOST_WIDE_INT off;
2682 tree base;
2683 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2684 &off);
2685 gcc_assert (base);
2686 offset = int_const_binop (PLUS_EXPR, offset,
2687 build_int_cst (TREE_TYPE (offset),
2688 off));
2689 baseop = build_fold_addr_expr (base);
2690 }
2691 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2692 }
2693
2694 case TARGET_MEM_REF:
2695 {
2696 pre_expr op0expr, op1expr;
2697 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2698 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2699 ++*operand);
2700 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2701 stmts, domstmt);
2702 if (!baseop)
2703 return NULL_TREE;
2704 if (currop->op0)
2705 {
2706 op0expr = get_or_alloc_expr_for (currop->op0);
2707 genop0 = find_or_generate_expression (block, op0expr,
2708 stmts, domstmt);
2709 if (!genop0)
2710 return NULL_TREE;
2711 }
2712 if (nextop->op0)
2713 {
2714 op1expr = get_or_alloc_expr_for (nextop->op0);
2715 genop1 = find_or_generate_expression (block, op1expr,
2716 stmts, domstmt);
2717 if (!genop1)
2718 return NULL_TREE;
2719 }
2720 return build5 (TARGET_MEM_REF, currop->type,
2721 baseop, currop->op2, genop0, currop->op1, genop1);
2722 }
2723
2724 case ADDR_EXPR:
2725 if (currop->op0)
2726 {
2727 gcc_assert (is_gimple_min_invariant (currop->op0));
2728 return currop->op0;
2729 }
2730 /* Fallthrough. */
2731 case REALPART_EXPR:
2732 case IMAGPART_EXPR:
2733 case VIEW_CONVERT_EXPR:
2734 {
2735 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2736 operand,
2737 stmts, domstmt);
2738 if (!genop0)
2739 return NULL_TREE;
2740
2741 return fold_build1 (currop->opcode, currop->type, genop0);
2742 }
2743
2744 case WITH_SIZE_EXPR:
2745 {
2746 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2747 stmts, domstmt);
2748 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2749 tree genop1;
2750
2751 if (!genop0)
2752 return NULL_TREE;
2753
2754 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2755 if (!genop1)
2756 return NULL_TREE;
2757
2758 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2759 }
2760
2761 case BIT_FIELD_REF:
2762 {
2763 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2764 stmts, domstmt);
2765 tree op1 = currop->op0;
2766 tree op2 = currop->op1;
2767
2768 if (!genop0)
2769 return NULL_TREE;
2770
2771 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2772 }
2773
2774 /* For array ref vn_reference_op's, operand 1 of the array ref
2775 is op0 of the reference op, operand 2 is op1, and operand 3
2776 is op2. */
2777 case ARRAY_RANGE_REF:
2778 case ARRAY_REF:
2779 {
2780 tree genop0;
2781 tree genop1 = currop->op0;
2782 pre_expr op1expr;
2783 tree genop2 = currop->op1;
2784 pre_expr op2expr;
2785 tree genop3 = currop->op2;
2786 pre_expr op3expr;
2787 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2788 stmts, domstmt);
2789 if (!genop0)
2790 return NULL_TREE;
2791 op1expr = get_or_alloc_expr_for (genop1);
2792 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2793 if (!genop1)
2794 return NULL_TREE;
2795 if (genop2)
2796 {
2797 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2798 /* Drop zero minimum index if redundant. */
2799 if (integer_zerop (genop2)
2800 && (!domain_type
2801 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2802 genop2 = NULL_TREE;
2803 else
2804 {
2805 op2expr = get_or_alloc_expr_for (genop2);
2806 genop2 = find_or_generate_expression (block, op2expr, stmts,
2807 domstmt);
2808 if (!genop2)
2809 return NULL_TREE;
2810 }
2811 }
2812 if (genop3)
2813 {
2814 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2815 /* We can't always put a size in units of the element alignment
2816 here as the element alignment may be not visible. See
2817 PR43783. Simply drop the element size for constant
2818 sizes. */
2819 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2820 genop3 = NULL_TREE;
2821 else
2822 {
2823 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2824 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2825 op3expr = get_or_alloc_expr_for (genop3);
2826 genop3 = find_or_generate_expression (block, op3expr, stmts,
2827 domstmt);
2828 if (!genop3)
2829 return NULL_TREE;
2830 }
2831 }
2832 return build4 (currop->opcode, currop->type, genop0, genop1,
2833 genop2, genop3);
2834 }
2835 case COMPONENT_REF:
2836 {
2837 tree op0;
2838 tree op1;
2839 tree genop2 = currop->op1;
2840 pre_expr op2expr;
2841 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2842 stmts, domstmt);
2843 if (!op0)
2844 return NULL_TREE;
2845 /* op1 should be a FIELD_DECL, which are represented by
2846 themselves. */
2847 op1 = currop->op0;
2848 if (genop2)
2849 {
2850 op2expr = get_or_alloc_expr_for (genop2);
2851 genop2 = find_or_generate_expression (block, op2expr, stmts,
2852 domstmt);
2853 if (!genop2)
2854 return NULL_TREE;
2855 }
2856
2857 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2858 }
2859
2860 case SSA_NAME:
2861 {
2862 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2863 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2864 return genop;
2865 }
2866 case STRING_CST:
2867 case INTEGER_CST:
2868 case COMPLEX_CST:
2869 case VECTOR_CST:
2870 case REAL_CST:
2871 case CONSTRUCTOR:
2872 case VAR_DECL:
2873 case PARM_DECL:
2874 case CONST_DECL:
2875 case RESULT_DECL:
2876 case FUNCTION_DECL:
2877 return currop->op0;
2878
2879 default:
2880 gcc_unreachable ();
2881 }
2882 }
2883
2884 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2885 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2886 trying to rename aggregates into SSA form directly, which is a no-no.
2887
2888 Thus, this routine doesn't create temporaries, it just builds a
2889 single access expression for the array, calling
2890 find_or_generate_expression to build the innermost pieces.
2891
2892 This function is a subroutine of create_expression_by_pieces, and
2893 should not be called on its own unless you really know what you
2894 are doing. */
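
/* A rough example: for a reference representing a.b[i_1], this
   builds the whole tree ARRAY_REF <COMPONENT_REF <a, b>, i_1> in
   one piece, while find_or_generate_expression is used only for
   scalar parts such as the index i_1, which may itself require
   insertion.  */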
2895
2896 static tree
2897 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2898 gimple_seq *stmts, gimple domstmt)
2899 {
2900 unsigned int op = 0;
2901 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2902 }
2903
2904 /* Find a leader for an expression, or generate one using
2905 create_expression_by_pieces if it's ANTIC but
2906 complex.
2907 BLOCK is the basic_block we are looking for leaders in.
2908 EXPR is the expression to find a leader or generate for.
2909 STMTS is the statement list to put the inserted expressions on.
2910 Returns the SSA_NAME of the LHS of the generated expression or the
2911 leader.
2912 DOMSTMT if non-NULL is a statement that should be dominated by
2913 all uses in the generated expression. If DOMSTMT is non-NULL this
2914 routine can fail and return NULL_TREE. Otherwise it will assert
2915 on failure. */
2916
2917 static tree
2918 find_or_generate_expression (basic_block block, pre_expr expr,
2919 gimple_seq *stmts, gimple domstmt)
2920 {
2921 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2922 get_expr_value_id (expr), domstmt);
2923 tree genop = NULL;
2924 if (leader)
2925 {
2926 if (leader->kind == NAME)
2927 genop = PRE_EXPR_NAME (leader);
2928 else if (leader->kind == CONSTANT)
2929 genop = PRE_EXPR_CONSTANT (leader);
2930 }
2931
2932 /* If it's still NULL, it must be a complex expression, so generate
2933 it recursively. Not so if inserting expressions for values generated
2934 by SCCVN. */
2935 if (genop == NULL
2936 && !domstmt)
2937 {
2938 bitmap_set_t exprset;
2939 unsigned int lookfor = get_expr_value_id (expr);
2940 bool handled = false;
2941 bitmap_iterator bi;
2942 unsigned int i;
2943
2944 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2945 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2946 {
2947 pre_expr temp = expression_for_id (i);
2948 if (temp->kind != NAME)
2949 {
2950 handled = true;
2951 genop = create_expression_by_pieces (block, temp, stmts,
2952 domstmt,
2953 get_expr_type (expr));
2954 break;
2955 }
2956 }
2957 if (!handled && domstmt)
2958 return NULL_TREE;
2959
2960 gcc_assert (handled);
2961 }
2962 return genop;
2963 }
2964
2965 #define NECESSARY GF_PLF_1
2966
2967 /* Create an expression in pieces, so that we can handle very complex
2968 expressions that may be ANTIC, but not necessarily GIMPLE.
2969 BLOCK is the basic block the expression will be inserted into,
2970 EXPR is the expression to insert (in value form)
2971 STMTS is a statement list to append the necessary insertions into.
2972
2973 This function will die if we hit some value that shouldn't be
2974 ANTIC but is (IE there is no leader for it, or its components).
2975 This function may also generate expressions that are themselves
2976 partially or fully redundant. Those that are will be either made
2977 fully redundant during the next iteration of insert (for partially
2978 redundant ones), or eliminated by eliminate (for fully redundant
2979 ones).
2980
2981 If DOMSTMT is non-NULL then we make sure that all uses in the
2982 expressions dominate that statement. In this case the function
2983 can return NULL_TREE to signal failure. */
2984
2985 static tree
2986 create_expression_by_pieces (basic_block block, pre_expr expr,
2987 gimple_seq *stmts, gimple domstmt, tree type)
2988 {
2989 tree temp, name;
2990 tree folded;
2991 gimple_seq forced_stmts = NULL;
2992 unsigned int value_id;
2993 gimple_stmt_iterator gsi;
2994 tree exprtype = type ? type : get_expr_type (expr);
2995 pre_expr nameexpr;
2996 gimple newstmt;
2997
2998 switch (expr->kind)
2999 {
3000 /* We may hit the NAME/CONSTANT case if we have to convert types
3001 that value numbering saw through. */
3002 case NAME:
3003 folded = PRE_EXPR_NAME (expr);
3004 break;
3005 case CONSTANT:
3006 folded = PRE_EXPR_CONSTANT (expr);
3007 break;
3008 case REFERENCE:
3009 {
3010 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3011 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3012 }
3013 break;
3014 case NARY:
3015 {
3016 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3017 tree genop[4];
3018 unsigned i;
3019 for (i = 0; i < nary->length; ++i)
3020 {
3021 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3022 genop[i] = find_or_generate_expression (block, op,
3023 stmts, domstmt);
3024 if (!genop[i])
3025 return NULL_TREE;
3026 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3027 may have conversions stripped. */
3028 if (nary->opcode == POINTER_PLUS_EXPR)
3029 {
3030 if (i == 0)
3031 genop[i] = fold_convert (nary->type, genop[i]);
3032 else if (i == 1)
3033 genop[i] = convert_to_ptrofftype (genop[i]);
3034 }
3035 else
3036 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3037 }
3038 if (nary->opcode == CONSTRUCTOR)
3039 {
3040 VEC(constructor_elt,gc) *elts = NULL;
3041 for (i = 0; i < nary->length; ++i)
3042 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3043 folded = build_constructor (nary->type, elts);
3044 }
3045 else
3046 {
3047 switch (nary->length)
3048 {
3049 case 1:
3050 folded = fold_build1 (nary->opcode, nary->type,
3051 genop[0]);
3052 break;
3053 case 2:
3054 folded = fold_build2 (nary->opcode, nary->type,
3055 genop[0], genop[1]);
3056 break;
3057 case 3:
3058 folded = fold_build3 (nary->opcode, nary->type,
3059 genop[0], genop[1], genop[2]);
3060 break;
3061 default:
3062 gcc_unreachable ();
3063 }
3064 }
3065 }
3066 break;
3067 default:
3068 return NULL_TREE;
3069 }
3070
3071 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3072 folded = fold_convert (exprtype, folded);
3073
3074 /* Force the generated expression to be a sequence of GIMPLE
3075 statements.
3076 We have to call unshare_expr because force_gimple_operand may
3077 modify the tree we pass to it. */
3078 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3079 false, NULL);
3080
3081 /* If forcing created any intermediate expressions, add them to
3082 the value sets and chain them into the instruction stream. */
3083 if (forced_stmts)
3084 {
3085 gsi = gsi_start (forced_stmts);
3086 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3087 {
3088 gimple stmt = gsi_stmt (gsi);
3089 tree forcedname = gimple_get_lhs (stmt);
3090 pre_expr nameexpr;
3091
3092 if (TREE_CODE (forcedname) == SSA_NAME)
3093 {
3094 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3095 VN_INFO_GET (forcedname)->valnum = forcedname;
3096 VN_INFO (forcedname)->value_id = get_next_value_id ();
3097 nameexpr = get_or_alloc_expr_for_name (forcedname);
3098 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3099 if (!in_fre)
3100 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3101 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3102 }
3103 }
3104 gimple_seq_add_seq (stmts, forced_stmts);
3105 }
3106
3107 /* Build and insert the assignment of the end result to the temporary
3108 that we will return. */
3109 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3110 pretemp = create_tmp_reg (exprtype, "pretmp");
3111
3112 temp = pretemp;
3113 add_referenced_var (temp);
3114
3115 newstmt = gimple_build_assign (temp, folded);
3116 name = make_ssa_name (temp, newstmt);
3117 gimple_assign_set_lhs (newstmt, name);
3118 gimple_set_plf (newstmt, NECESSARY, false);
3119
3120 gimple_seq_add_stmt (stmts, newstmt);
3121 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3122
3123 /* Fold the last statement. */
3124 gsi = gsi_last (*stmts);
3125 if (fold_stmt_inplace (&gsi))
3126 update_stmt (gsi_stmt (gsi));
3127
3128 /* Add a value number to the temporary.
3129 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3130 we are creating the expression by pieces, and this particular piece of
3131 the expression may have been represented. There is no harm in replacing
3132 here. */
3133 VN_INFO_GET (name)->valnum = name;
3134 value_id = get_expr_value_id (expr);
3135 VN_INFO (name)->value_id = value_id;
3136 nameexpr = get_or_alloc_expr_for_name (name);
3137 add_to_value (value_id, nameexpr);
3138 if (NEW_SETS (block))
3139 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3140 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3141
3142 pre_stats.insertions++;
3143 if (dump_file && (dump_flags & TDF_DETAILS))
3144 {
3145 fprintf (dump_file, "Inserted ");
3146 print_gimple_stmt (dump_file, newstmt, 0, 0);
3147 fprintf (dump_file, " in predecessor %d\n", block->index);
3148 }
3149
3150 return name;
3151 }
3152
3153
3154 /* Returns true if we want to inhibit the insertion of PHI nodes
3155 for the given EXPR for basic block BB (a member of a loop).
3156 We want to do this when we fear that the induction variable we
3157 create might inhibit vectorization. */
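
/* For instance (hypothetical GIMPLE): for a load a[i_1] in a loop
   where i_1 is a simple induction variable, inserting a PHI for the
   loaded value would create a scalar recurrence carried around the
   loop, which could keep the vectorizer from handling the access;
   we prefer to leave such a redundancy alone.  */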
3158
3159 static bool
3160 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3161 {
3162 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3163 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3164 vn_reference_op_t op;
3165 unsigned i;
3166
3167 /* If we aren't going to vectorize we don't inhibit anything. */
3168 if (!flag_tree_vectorize)
3169 return false;
3170
3171 /* Otherwise we inhibit the insertion when the address of the
3172 memory reference is a simple induction variable. In other
3173 cases the vectorizer won't do anything anyway (either it's
3174 loop invariant or a complicated expression). */
3175 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3176 {
3177 switch (op->opcode)
3178 {
3179 case CALL_EXPR:
3180 /* Calls are not a problem. */
3181 return false;
3182
3183 case ARRAY_REF:
3184 case ARRAY_RANGE_REF:
3185 if (TREE_CODE (op->op0) != SSA_NAME)
3186 break;
3187 /* Fallthru. */
3188 case SSA_NAME:
3189 {
3190 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3191 affine_iv iv;
3192 /* Default defs are loop invariant. */
3193 if (!defbb)
3194 break;
3195 /* Defined outside this loop, also loop invariant. */
3196 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3197 break;
3198 /* If it's a simple induction variable inhibit insertion,
3199 the vectorizer might be interested in this one. */
3200 if (simple_iv (bb->loop_father, bb->loop_father,
3201 op->op0, &iv, true))
3202 return true;
3203 /* No simple IV, vectorizer can't do anything, hence no
3204 reason to inhibit the transformation for this operand. */
3205 break;
3206 }
3207 default:
3208 break;
3209 }
3210 }
3211 return false;
3212 }
3213
3214 /* Insert the to-be-made-available values of expression EXPRNUM for each
3215 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3216 merge the result with a phi node, given the same value number as
3217 the expression. Return true if we have inserted new stuff. */
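
/* A sketch with made-up names: if BLOCK has predecessors bb2 and
   bb3, AVAIL maps bb2 to the leader x_5 but holds only the
   non-leader expression {plus, a_1, b_2} for bb3, then
   "pretmp_6 = a_1 + b_2" is generated on the bb3 edge and
   "prephitmp_7 = PHI <x_5 (bb2), pretmp_6 (bb3)>" is created in
   BLOCK, with the phi result getting the expression's value
   number.  */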
3218
3219 static bool
3220 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3221 pre_expr *avail)
3222 {
3223 pre_expr expr = expression_for_id (exprnum);
3224 pre_expr newphi;
3225 unsigned int val = get_expr_value_id (expr);
3226 edge pred;
3227 bool insertions = false;
3228 bool nophi = false;
3229 basic_block bprime;
3230 pre_expr eprime;
3231 edge_iterator ei;
3232 tree type = get_expr_type (expr);
3233 tree temp;
3234 gimple phi;
3235
3236 /* Make sure we aren't creating an induction variable. */
3237 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3238 {
3239 bool firstinsideloop = false;
3240 bool secondinsideloop = false;
3241 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3242 EDGE_PRED (block, 0)->src);
3243 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3244 EDGE_PRED (block, 1)->src);
3245 /* Induction variables only have one edge inside the loop. */
3246 if ((firstinsideloop ^ secondinsideloop)
3247 && (expr->kind != REFERENCE
3248 || inhibit_phi_insertion (block, expr)))
3249 {
3250 if (dump_file && (dump_flags & TDF_DETAILS))
3251 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3252 nophi = true;
3253 }
3254 }
3255
3256 /* Make the necessary insertions. */
3257 FOR_EACH_EDGE (pred, ei, block->preds)
3258 {
3259 gimple_seq stmts = NULL;
3260 tree builtexpr;
3261 bprime = pred->src;
3262 eprime = avail[bprime->index];
3263
3264 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3265 {
3266 builtexpr = create_expression_by_pieces (bprime,
3267 eprime,
3268 &stmts, NULL,
3269 type);
3270 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3271 gsi_insert_seq_on_edge (pred, stmts);
3272 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3273 insertions = true;
3274 }
3275 else if (eprime->kind == CONSTANT)
3276 {
3277 /* Constants may not have the right type; fold_convert
3278 should give us back a constant with the right type.
3279 */
3280 tree constant = PRE_EXPR_CONSTANT (eprime);
3281 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3282 {
3283 tree builtexpr = fold_convert (type, constant);
3284 if (!is_gimple_min_invariant (builtexpr))
3285 {
3286 tree forcedexpr = force_gimple_operand (builtexpr,
3287 &stmts, true,
3288 NULL);
3289 if (!is_gimple_min_invariant (forcedexpr))
3290 {
3291 if (forcedexpr != builtexpr)
3292 {
3293 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3294 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3295 }
3296 if (stmts)
3297 {
3298 gimple_stmt_iterator gsi;
3299 gsi = gsi_start (stmts);
3300 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3301 {
3302 gimple stmt = gsi_stmt (gsi);
3303 tree lhs = gimple_get_lhs (stmt);
3304 if (TREE_CODE (lhs) == SSA_NAME)
3305 bitmap_set_bit (inserted_exprs,
3306 SSA_NAME_VERSION (lhs));
3307 gimple_set_plf (stmt, NECESSARY, false);
3308 }
3309 gsi_insert_seq_on_edge (pred, stmts);
3310 }
3311 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3312 }
3313 }
3314 else
3315 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3316 }
3317 }
3318 else if (eprime->kind == NAME)
3319 {
3320 /* We may have to do a conversion because our value
3321 numbering can look through types in certain cases, but
3322 our IL requires all operands of a phi node have the same
3323 type. */
3324 tree name = PRE_EXPR_NAME (eprime);
3325 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3326 {
3327 tree builtexpr;
3328 tree forcedexpr;
3329 builtexpr = fold_convert (type, name);
3330 forcedexpr = force_gimple_operand (builtexpr,
3331 &stmts, true,
3332 NULL);
3333
3334 if (forcedexpr != name)
3335 {
3336 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3337 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3338 }
3339
3340 if (stmts)
3341 {
3342 gimple_stmt_iterator gsi;
3343 gsi = gsi_start (stmts);
3344 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3345 {
3346 gimple stmt = gsi_stmt (gsi);
3347 tree lhs = gimple_get_lhs (stmt);
3348 if (TREE_CODE (lhs) == SSA_NAME)
3349 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3350 gimple_set_plf (stmt, NECESSARY, false);
3351 }
3352 gsi_insert_seq_on_edge (pred, stmts);
3353 }
3354 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3355 }
3356 }
3357 }
3358 /* If we didn't want a phi node, and we made insertions, we still have
3359 inserted new stuff, and thus return true. If we didn't want a phi node,
3360 and didn't make insertions, we haven't added anything new, so return
3361 false. */
3362 if (nophi && insertions)
3363 return true;
3364 else if (nophi && !insertions)
3365 return false;
3366
3367 /* Now build a phi for the new variable. */
3368 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3369 prephitemp = create_tmp_var (type, "prephitmp");
3370
3371 temp = prephitemp;
3372 add_referenced_var (temp);
3373
3374 if (TREE_CODE (type) == COMPLEX_TYPE
3375 || TREE_CODE (type) == VECTOR_TYPE)
3376 DECL_GIMPLE_REG_P (temp) = 1;
3377 phi = create_phi_node (temp, block);
3378
3379 gimple_set_plf (phi, NECESSARY, false);
3380 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3381 VN_INFO (gimple_phi_result (phi))->value_id = val;
3382 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3383 FOR_EACH_EDGE (pred, ei, block->preds)
3384 {
3385 pre_expr ae = avail[pred->src->index];
3386 gcc_assert (get_expr_type (ae) == type
3387 || useless_type_conversion_p (type, get_expr_type (ae)));
3388 if (ae->kind == CONSTANT)
3389 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3390 else
3391 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3392 UNKNOWN_LOCATION);
3393 }
3394
3395 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3396 add_to_value (val, newphi);
3397
3398 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3399 this insertion, since we test for the existence of this value in PHI_GEN
3400 before proceeding with the partial redundancy checks in insert_aux.
3401
3402 The value may exist in AVAIL_OUT, in particular, it could be represented
3403 by the expression we are trying to eliminate, in which case we want the
3404 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3405 inserted there.
3406
3407 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3408 this block, because if it did, it would have existed in our dominator's
3409 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3410 */
3411
3412 bitmap_insert_into_set (PHI_GEN (block), newphi);
3413 bitmap_value_replace_in_set (AVAIL_OUT (block),
3414 newphi);
3415 bitmap_insert_into_set (NEW_SETS (block),
3416 newphi);
3417
3418 if (dump_file && (dump_flags & TDF_DETAILS))
3419 {
3420 fprintf (dump_file, "Created phi ");
3421 print_gimple_stmt (dump_file, phi, 0, 0);
3422 fprintf (dump_file, " in block %d\n", block->index);
3423 }
3424 pre_stats.phis++;
3425 return true;
3426 }
3427
3428
3429
3430 /* Perform insertion of partially redundant values.
3431 For BLOCK, do the following:
3432 1. Propagate the NEW_SETS of the dominator into the current block.
3433 If the block has multiple predecessors,
3434 2a. Iterate over the ANTIC expressions for the block to see if
3435 any of them are partially redundant.
3436 2b. If so, insert them into the necessary predecessors to make
3437 the expression fully redundant.
3438 2c. Insert a new PHI merging the values of the predecessors.
3439 2d. Insert the new PHI, and the new expressions, into the
3440 NEW_SETS set.
3441 3. Recursively call ourselves on the dominator children of BLOCK.
3442
3443 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3444 do_regular_insertion and do_partial_insertion.
3445
3446 */
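
/* The classic motivating example, roughly:

     if (c_1)
       x_2 = a_3 + b_4;
     y_5 = a_3 + b_4;

   a_3 + b_4 is partially redundant; do_regular_insertion computes it
   on the edge where it was missing and merges the two copies with a
   PHI, after which elimination can replace the second computation by
   the PHI result.  */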
3447
3448 static bool
3449 do_regular_insertion (basic_block block, basic_block dom)
3450 {
3451 bool new_stuff = false;
3452 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3453 pre_expr expr;
3454 int i;
3455
3456 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3457 {
3458 if (expr->kind != NAME)
3459 {
3460 pre_expr *avail;
3461 unsigned int val;
3462 bool by_some = false;
3463 bool cant_insert = false;
3464 bool all_same = true;
3465 pre_expr first_s = NULL;
3466 edge pred;
3467 basic_block bprime;
3468 pre_expr eprime = NULL;
3469 edge_iterator ei;
3470 pre_expr edoubleprime = NULL;
3471 bool do_insertion = false;
3472
3473 val = get_expr_value_id (expr);
3474 if (bitmap_set_contains_value (PHI_GEN (block), val))
3475 continue;
3476 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3477 {
3478 if (dump_file && (dump_flags & TDF_DETAILS))
3479 fprintf (dump_file, "Found fully redundant value\n");
3480 continue;
3481 }
3482
3483 avail = XCNEWVEC (pre_expr, last_basic_block);
3484 FOR_EACH_EDGE (pred, ei, block->preds)
3485 {
3486 unsigned int vprime;
3487
3488 /* We should never run insertion for the exit block
3489 and so not come across fake pred edges. */
3490 gcc_assert (!(pred->flags & EDGE_FAKE));
3491 bprime = pred->src;
3492 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3493 bprime, block);
3494
3495 /* eprime will generally only be NULL if the
3496 value of the expression, translated
3497 through the PHI for this predecessor, is
3498 undefined. If that is the case, we can't
3499 make the expression fully redundant,
3500 because its value is undefined along a
3501 predecessor path. We can thus break out
3502 early because it doesn't matter what the
3503 rest of the results are. */
3504 if (eprime == NULL)
3505 {
3506 cant_insert = true;
3507 break;
3508 }
3509
3510 eprime = fully_constant_expression (eprime);
3511 vprime = get_expr_value_id (eprime);
3512 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3513 vprime, NULL);
3514 if (edoubleprime == NULL)
3515 {
3516 avail[bprime->index] = eprime;
3517 all_same = false;
3518 }
3519 else
3520 {
3521 avail[bprime->index] = edoubleprime;
3522 by_some = true;
3523 /* We want to perform insertions to remove a redundancy on
3524 a path in the CFG we want to optimize for speed. */
3525 if (optimize_edge_for_speed_p (pred))
3526 do_insertion = true;
3527 if (first_s == NULL)
3528 first_s = edoubleprime;
3529 else if (!pre_expr_eq (first_s, edoubleprime))
3530 all_same = false;
3531 }
3532 }
3533 /* If we can insert it, its value is not already
3534 the same along every predecessor, and it is
3535 defined by some predecessor, then it is
3536 partially redundant. */
3537 if (!cant_insert && !all_same && by_some)
3538 {
3539 if (!do_insertion)
3540 {
3541 if (dump_file && (dump_flags & TDF_DETAILS))
3542 {
3543 fprintf (dump_file, "Skipping partial redundancy for "
3544 "expression ");
3545 print_pre_expr (dump_file, expr);
3546 fprintf (dump_file, " (%04d), no redundancy on to be "
3547 "optimized for speed edge\n", val);
3548 }
3549 }
3550 else if (dbg_cnt (treepre_insert))
3551 {
3552 if (dump_file && (dump_flags & TDF_DETAILS))
3553 {
3554 fprintf (dump_file, "Found partial redundancy for "
3555 "expression ");
3556 print_pre_expr (dump_file, expr);
3557 fprintf (dump_file, " (%04d)\n",
3558 get_expr_value_id (expr));
3559 }
3560 if (insert_into_preds_of_block (block,
3561 get_expression_id (expr),
3562 avail))
3563 new_stuff = true;
3564 }
3565 }
3566 /* If all edges produce the same value and that value is
3567 an invariant, then the PHI has the same value on all
3568 edges. Note this. */
3569 else if (!cant_insert && all_same && eprime
3570 && (edoubleprime->kind == CONSTANT
3571 || edoubleprime->kind == NAME)
3572 && !value_id_constant_p (val))
3573 {
3574 unsigned int j;
3575 bitmap_iterator bi;
3576 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3577 value_expressions, val);
3578
3579 unsigned int new_val = get_expr_value_id (edoubleprime);
3580 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3581 {
3582 pre_expr expr = expression_for_id (j);
3583
3584 if (expr->kind == NAME)
3585 {
3586 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3587 /* Just reset the value id and valnum so it is
3588 the same as the constant we have discovered. */
3589 if (edoubleprime->kind == CONSTANT)
3590 {
3591 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3592 pre_stats.constified++;
3593 }
3594 else
3595 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3596 info->value_id = new_val;
3597 }
3598 }
3599 }
3600 free (avail);
3601 }
3602 }
3603
3604 VEC_free (pre_expr, heap, exprs);
3605 return new_stuff;
3606 }
3607
3608
3609 /* Perform insertion for partially anticipatable expressions. There
3610 is only one case in which we perform insertion for these: when the
3611 expression is partially anticipatable and fully available.
3612 In this case, we know that putting it earlier will enable us to
3613 remove the later computation. */
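
/* Roughly: if {plus, a_1, b_2} is only partially anticipated at
   BLOCK (it occurs on some but not all paths below), yet a leader
   for its value is available in every predecessor, then a PHI
   merging the predecessors' leaders makes the expression fully
   available in BLOCK, letting a later computation be removed.  */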
3614
3615
3616 static bool
3617 do_partial_partial_insertion (basic_block block, basic_block dom)
3618 {
3619 bool new_stuff = false;
3620 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3621 pre_expr expr;
3622 int i;
3623
3624 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3625 {
3626 if (expr->kind != NAME)
3627 {
3628 pre_expr *avail;
3629 unsigned int val;
3630 bool by_all = true;
3631 bool cant_insert = false;
3632 edge pred;
3633 basic_block bprime;
3634 pre_expr eprime = NULL;
3635 edge_iterator ei;
3636
3637 val = get_expr_value_id (expr);
3638 if (bitmap_set_contains_value (PHI_GEN (block), val))
3639 continue;
3640 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3641 continue;
3642
3643 avail = XCNEWVEC (pre_expr, last_basic_block);
3644 FOR_EACH_EDGE (pred, ei, block->preds)
3645 {
3646 unsigned int vprime;
3647 pre_expr edoubleprime;
3648
3649 /* We should never run insertion for the exit block
3650 and so not come across fake pred edges. */
3651 gcc_assert (!(pred->flags & EDGE_FAKE));
3652 bprime = pred->src;
3653 eprime = phi_translate (expr, ANTIC_IN (block),
3654 PA_IN (block),
3655 bprime, block);
3656
3657 /* eprime will generally only be NULL if the
3658 value of the expression, translated
3659 through the PHI for this predecessor, is
3660 undefined. If that is the case, we can't
3661 make the expression fully redundant,
3662 because its value is undefined along a
3663 predecessor path. We can thus break out
3664 early because it doesn't matter what the
3665 rest of the results are. */
3666 if (eprime == NULL)
3667 {
3668 cant_insert = true;
3669 break;
3670 }
3671
3672 eprime = fully_constant_expression (eprime);
3673 vprime = get_expr_value_id (eprime);
3674 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3675 vprime, NULL);
3676 if (edoubleprime == NULL)
3677 {
3678 by_all = false;
3679 break;
3680 }
3681 else
3682 avail[bprime->index] = edoubleprime;
3683 }
3684
3685 /* If we can insert it and its value is already
3686 available along every predecessor, the expression
3687 is fully available there while only partially
3688 anticipated here, so inserting it enables removal
of a later computation. */
3689 if (!cant_insert && by_all)
3690 {
3691 edge succ;
3692 bool do_insertion = false;
3693
3694 /* Insert only if we can remove a later expression on a path
3695 that we want to optimize for speed.
3696 The phi node that we will be inserting in BLOCK is not free,
3697 and inserting it for the sake of a !optimize_for_speed successor
3698 may cause regressions on the speed path. */
3699 FOR_EACH_EDGE (succ, ei, block->succs)
3700 {
3701 if (bitmap_set_contains_value (PA_IN (succ->dest), val))
3702 {
3703 if (optimize_edge_for_speed_p (succ))
3704 do_insertion = true;
3705 }
3706 }
3707
3708 if (!do_insertion)
3709 {
3710 if (dump_file && (dump_flags & TDF_DETAILS))
3711 {
3712 fprintf (dump_file, "Skipping partial partial redundancy "
3713 "for expression ");
3714 print_pre_expr (dump_file, expr);
3715 fprintf (dump_file, " (%04d), not partially anticipated "
3716 "on any edge to be optimized for speed\n", val);
3717 }
3718 }
3719 else if (dbg_cnt (treepre_insert))
3720 {
3721 pre_stats.pa_insert++;
3722 if (dump_file && (dump_flags & TDF_DETAILS))
3723 {
3724 fprintf (dump_file, "Found partial partial redundancy "
3725 "for expression ");
3726 print_pre_expr (dump_file, expr);
3727 fprintf (dump_file, " (%04d)\n",
3728 get_expr_value_id (expr));
3729 }
3730 if (insert_into_preds_of_block (block,
3731 get_expression_id (expr),
3732 avail))
3733 new_stuff = true;
3734 }
3735 }
3736 free (avail);
3737 }
3738 }
3739
3740 VEC_free (pre_expr, heap, exprs);
3741 return new_stuff;
3742 }
3743
3744 static bool
3745 insert_aux (basic_block block)
3746 {
3747 basic_block son;
3748 bool new_stuff = false;
3749
3750 if (block)
3751 {
3752 basic_block dom;
3753 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3754 if (dom)
3755 {
3756 unsigned i;
3757 bitmap_iterator bi;
3758 bitmap_set_t newset = NEW_SETS (dom);
3759 if (newset)
3760 {
3761 /* Note that we need to value_replace both NEW_SETS and
3762 AVAIL_OUT. In both sets, the value may be represented by
3763 some non-simple expression that we want to replace with
3764 the new leader. */
3765 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3766 {
3767 pre_expr expr = expression_for_id (i);
3768 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3769 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3770 }
3771 }
3772 if (!single_pred_p (block))
3773 {
3774 new_stuff |= do_regular_insertion (block, dom);
3775 if (do_partial_partial)
3776 new_stuff |= do_partial_partial_insertion (block, dom);
3777 }
3778 }
3779 }
3780 for (son = first_dom_son (CDI_DOMINATORS, block);
3781 son;
3782 son = next_dom_son (CDI_DOMINATORS, son))
3783 {
3784 new_stuff |= insert_aux (son);
3785 }
3786
3787 return new_stuff;
3788 }
3789
3790 /* Perform insertion of partially redundant values. */
3791
3792 static void
3793 insert (void)
3794 {
3795 bool new_stuff = true;
3796 basic_block bb;
3797 int num_iterations = 0;
3798
3799 FOR_ALL_BB (bb)
3800 NEW_SETS (bb) = bitmap_set_new ();
3801
3802 while (new_stuff)
3803 {
3804 num_iterations++;
3805 if (dump_file && (dump_flags & TDF_DETAILS))
3806 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3807 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3808 }
3809 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3810 }
3811
3812
3813 /* Add OP to EXP_GEN (BLOCK), unless it has an undefined value or we are only doing FRE. */
3814
3815 static void
3816 add_to_exp_gen (basic_block block, tree op)
3817 {
3818 if (!in_fre)
3819 {
3820 pre_expr result;
3821 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3822 return;
3823 result = get_or_alloc_expr_for_name (op);
3824 bitmap_value_insert_into_set (EXP_GEN (block), result);
3825 }
3826 }
3827
3828 /* Create value ids for PHI in BLOCK. */
3829
3830 static void
3831 make_values_for_phi (gimple phi, basic_block block)
3832 {
3833 tree result = gimple_phi_result (phi);
3834
3835 /* We have no need for virtual phis, as they don't represent
3836 actual computations. */
3837 if (is_gimple_reg (result))
3838 {
3839 pre_expr e = get_or_alloc_expr_for_name (result);
3840 add_to_value (get_expr_value_id (e), e);
3841 bitmap_insert_into_set (PHI_GEN (block), e);
3842 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3843 if (!in_fre)
3844 {
3845 unsigned i;
3846 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3847 {
3848 tree arg = gimple_phi_arg_def (phi, i);
3849 if (TREE_CODE (arg) == SSA_NAME)
3850 {
3851 e = get_or_alloc_expr_for_name (arg);
3852 add_to_value (get_expr_value_id (e), e);
3853 }
3854 }
3855 }
3856 }
3857 }
3858
3859 /* Compute the AVAIL set for all basic blocks.
3860
3861 This function performs value numbering of the statements in each basic
3862 block. The AVAIL sets are built from information we glean while doing
3863 this value numbering, since the AVAIL sets contain only one entry per
3864 value.
3865
3866 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3867 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3868
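/* As an illustration (a hedged sketch only): for a diamond

        B1
       /  \
      B2    B3
       \  /
        B4

   the immediate dominator of B4 is B1, so AVAIL_OUT (B4) is seeded
   from AVAIL_OUT (B1); a value computed only in B2 or only in B3 does
   not become available in B4 except via a PHI node recorded in
   PHI_GEN (B4). Because AVAIL_IN depends only on the immediate
   dominator, a single top-down walk of the dominator tree solves the
   dataflow problem without fixpoint iteration. */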
3869 static void
3870 compute_avail (void)
3871 {
3872
3873 basic_block block, son;
3874 basic_block *worklist;
3875 size_t sp = 0;
3876 unsigned i;
3877
3878 /* We pretend that default definitions are defined in the entry block.
3879 This includes function arguments and the static chain decl. */
3880 for (i = 1; i < num_ssa_names; ++i)
3881 {
3882 tree name = ssa_name (i);
3883 pre_expr e;
3884 if (!name
3885 || !SSA_NAME_IS_DEFAULT_DEF (name)
3886 || has_zero_uses (name)
3887 || !is_gimple_reg (name))
3888 continue;
3889
3890 e = get_or_alloc_expr_for_name (name);
3891 add_to_value (get_expr_value_id (e), e);
3892 if (!in_fre)
3893 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3894 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3895 }
3896
3897 /* Allocate the worklist. */
3898 worklist = XNEWVEC (basic_block, n_basic_blocks);
3899
3900 /* Seed the algorithm by putting the dominator children of the entry
3901 block on the worklist. */
3902 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3903 son;
3904 son = next_dom_son (CDI_DOMINATORS, son))
3905 worklist[sp++] = son;
3906
3907 /* Loop until the worklist is empty. */
3908 while (sp)
3909 {
3910 gimple_stmt_iterator gsi;
3911 gimple stmt;
3912 basic_block dom;
3913 unsigned int stmt_uid = 1;
3914
3915 /* Pick a block from the worklist. */
3916 block = worklist[--sp];
3917
3918 /* Initially, the set of available values in BLOCK is that of
3919 its immediate dominator. */
3920 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3921 if (dom)
3922 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3923
3924 /* Generate values for PHI nodes. */
3925 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3926 make_values_for_phi (gsi_stmt (gsi), block);
3927
3928 BB_MAY_NOTRETURN (block) = 0;
3929
3930 /* Now compute value numbers and populate value sets with all
3931 the expressions computed in BLOCK. */
3932 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3933 {
3934 ssa_op_iter iter;
3935 tree op;
3936
3937 stmt = gsi_stmt (gsi);
3938 gimple_set_uid (stmt, stmt_uid++);
3939
3940 /* Cache whether the basic-block has any non-visible side-effect
3941 or control flow.
3942 If this isn't a call or it is the last stmt in the
3943 basic-block, then the CFG represents things correctly. */
3944 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3945 {
3946 /* Non-looping const functions always return normally.
3947 Otherwise the call might not return, or might have
3948 side-effects that forbid hoisting possibly trapping
3949 expressions before it. */
3950 int flags = gimple_call_flags (stmt);
3951 if (!(flags & ECF_CONST)
3952 || (flags & ECF_LOOPING_CONST_OR_PURE))
3953 BB_MAY_NOTRETURN (block) = 1;
3954 }
3955
3956 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3957 {
3958 pre_expr e = get_or_alloc_expr_for_name (op);
3959
3960 add_to_value (get_expr_value_id (e), e);
3961 if (!in_fre)
3962 bitmap_insert_into_set (TMP_GEN (block), e);
3963 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3964 }
3965
3966 if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
3967 continue;
3968
3969 switch (gimple_code (stmt))
3970 {
3971 case GIMPLE_RETURN:
3972 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3973 add_to_exp_gen (block, op);
3974 continue;
3975
3976 case GIMPLE_CALL:
3977 {
3978 vn_reference_t ref;
3979 unsigned int i;
3980 vn_reference_op_t vro;
3981 pre_expr result = NULL;
3982 VEC(vn_reference_op_s, heap) *ops = NULL;
3983
3984 /* We can value number only calls to real functions. */
3985 if (gimple_call_internal_p (stmt))
3986 continue;
3987
3988 copy_reference_ops_from_call (stmt, &ops);
3989 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3990 gimple_expr_type (stmt),
3991 ops, &ref, VN_NOWALK);
3992 VEC_free (vn_reference_op_s, heap, ops);
3993 if (!ref)
3994 continue;
3995
3996 for (i = 0; VEC_iterate (vn_reference_op_s,
3997 ref->operands, i,
3998 vro); i++)
3999 {
4000 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4001 add_to_exp_gen (block, vro->op0);
4002 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4003 add_to_exp_gen (block, vro->op1);
4004 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4005 add_to_exp_gen (block, vro->op2);
4006 }
4007
4008 /* If the value of the call is not invalidated in
4009 this block until it is computed, add the expression
4010 to EXP_GEN. */
4011 if (!gimple_vuse (stmt)
4012 || gimple_code
4013 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
4014 || gimple_bb (SSA_NAME_DEF_STMT
4015 (gimple_vuse (stmt))) != block)
4016 {
4017 result = (pre_expr) pool_alloc (pre_expr_pool);
4018 result->kind = REFERENCE;
4019 result->id = 0;
4020 PRE_EXPR_REFERENCE (result) = ref;
4021
4022 get_or_alloc_expression_id (result);
4023 add_to_value (get_expr_value_id (result), result);
4024 if (!in_fre)
4025 bitmap_value_insert_into_set (EXP_GEN (block), result);
4026 }
4027 continue;
4028 }
4029
4030 case GIMPLE_ASSIGN:
4031 {
4032 pre_expr result = NULL;
4033 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4034 {
4035 case tcc_unary:
4036 case tcc_binary:
4037 case tcc_comparison:
4038 {
4039 vn_nary_op_t nary;
4040 unsigned int i;
4041
4042 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4043 gimple_assign_rhs_code (stmt),
4044 gimple_expr_type (stmt),
4045 gimple_assign_rhs1_ptr (stmt),
4046 &nary);
4047
4048 if (!nary)
4049 continue;
4050
4051 for (i = 0; i < nary->length; i++)
4052 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4053 add_to_exp_gen (block, nary->op[i]);
4054
4055 /* If the NARY traps and there was a preceding
4056 point in the block that might not return, avoid
4057 adding the nary to EXP_GEN. */
4058 if (BB_MAY_NOTRETURN (block)
4059 && vn_nary_may_trap (nary))
4060 continue;
4061
4062 result = (pre_expr) pool_alloc (pre_expr_pool);
4063 result->kind = NARY;
4064 result->id = 0;
4065 PRE_EXPR_NARY (result) = nary;
4066 break;
4067 }
4068
4069 case tcc_declaration:
4070 case tcc_reference:
4071 {
4072 vn_reference_t ref;
4073 unsigned int i;
4074 vn_reference_op_t vro;
4075
4076 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4077 gimple_vuse (stmt),
4078 VN_WALK, &ref);
4079 if (!ref)
4080 continue;
4081
4082 for (i = 0; VEC_iterate (vn_reference_op_s,
4083 ref->operands, i,
4084 vro); i++)
4085 {
4086 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4087 add_to_exp_gen (block, vro->op0);
4088 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4089 add_to_exp_gen (block, vro->op1);
4090 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4091 add_to_exp_gen (block, vro->op2);
4092 }
4093
4094 /* If the value of the reference is not invalidated in
4095 this block until it is computed, add the expression
4096 to EXP_GEN. */
4097 if (gimple_vuse (stmt))
4098 {
4099 gimple def_stmt;
4100 bool ok = true;
4101 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
4102 while (!gimple_nop_p (def_stmt)
4103 && gimple_code (def_stmt) != GIMPLE_PHI
4104 && gimple_bb (def_stmt) == block)
4105 {
4106 if (stmt_may_clobber_ref_p
4107 (def_stmt, gimple_assign_rhs1 (stmt)))
4108 {
4109 ok = false;
4110 break;
4111 }
4112 def_stmt
4113 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
4114 }
4115 if (!ok)
4116 continue;
4117 }
4118
4119 result = (pre_expr) pool_alloc (pre_expr_pool);
4120 result->kind = REFERENCE;
4121 result->id = 0;
4122 PRE_EXPR_REFERENCE (result) = ref;
4123 break;
4124 }
4125
4126 default:
4127 /* For any other statement that we don't
4128 recognize, simply add all referenced
4129 SSA_NAMEs to EXP_GEN. */
4130 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4131 add_to_exp_gen (block, op);
4132 continue;
4133 }
4134
4135 get_or_alloc_expression_id (result);
4136 add_to_value (get_expr_value_id (result), result);
4137 if (!in_fre)
4138 bitmap_value_insert_into_set (EXP_GEN (block), result);
4139
4140 continue;
4141 }
4142 default:
4143 break;
4144 }
4145 }
4146
4147 /* Put the dominator children of BLOCK on the worklist of blocks
4148 to compute available sets for. */
4149 for (son = first_dom_son (CDI_DOMINATORS, block);
4150 son;
4151 son = next_dom_son (CDI_DOMINATORS, son))
4152 worklist[sp++] = son;
4153 }
4154
4155 free (worklist);
4156 }
4157
4158 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4159 than the available expressions for it. The insertion point is
4160 right before the first use in STMT. Returns the SSA_NAME that should
4161 be used for replacement. */
4162
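/* Hedged example of when this triggers (the names are invented for
   illustration): value numbering may record that the value of some
   name t_3 is really the expression (int) s_1, which no statement on
   this path computes, and mark VN_INFO (t_3)->needs_insertion. We
   then materialize

     pretmp_4 = (int) s_1;

   right before STMT and use pretmp_4 as the replacement. */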
4163 static tree
4164 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4165 {
4166 basic_block bb = gimple_bb (stmt);
4167 gimple_stmt_iterator gsi;
4168 gimple_seq stmts = NULL;
4169 tree expr;
4170 pre_expr e;
4171
4172 /* First create a value expression from the expression we want
4173 to insert and associate it with the value handle for SSA_VN. */
4174 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4175 if (e == NULL)
4176 return NULL_TREE;
4177
4178 /* Then use create_expression_by_pieces to generate a valid
4179 expression to insert at this point of the IL stream. */
4180 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4181 if (expr == NULL_TREE)
4182 return NULL_TREE;
4183 gsi = gsi_for_stmt (stmt);
4184 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4185
4186 return expr;
4187 }
4188
4189 /* Eliminate fully redundant computations. */
4190
4191 static unsigned int
4192 eliminate (void)
4193 {
4194 VEC (gimple, heap) *to_remove = NULL;
4195 VEC (gimple, heap) *to_update = NULL;
4196 basic_block b;
4197 unsigned int todo = 0;
4198 gimple_stmt_iterator gsi;
4199 gimple stmt;
4200 unsigned i;
4201
4202 FOR_EACH_BB (b)
4203 {
4204 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4205 {
4206 tree lhs = NULL_TREE;
4207 tree rhs = NULL_TREE;
4208
4209 stmt = gsi_stmt (gsi);
4210
4211 if (gimple_has_lhs (stmt))
4212 lhs = gimple_get_lhs (stmt);
4213
4214 if (gimple_assign_single_p (stmt))
4215 rhs = gimple_assign_rhs1 (stmt);
4216
4217 /* Look up the RHS of the expression and see if we have an
4218 available computation for it. If so, replace the RHS with
4219 the available computation.
4220
4221 See PR43491.
4222 We don't replace a global register variable when it is the RHS of
4223 a single assign. We do replace a local register variable, since gcc
4224 does not guarantee that a local variable will be allocated in a register. */
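/* Hedged example of the PR43491 case (the register name is only
   for illustration):

     register int g asm ("ebx");
     int f (void) { int x = g; return x; }

   Here the read of g must be kept; propagating an earlier value of
   the hard register into the use could be wrong, so a single-assign
   RHS that is a global hard-register VAR_DECL is left alone. */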
4225 if (gimple_has_lhs (stmt)
4226 && TREE_CODE (lhs) == SSA_NAME
4227 && !gimple_assign_ssa_name_copy_p (stmt)
4228 && (!gimple_assign_single_p (stmt)
4229 || (!is_gimple_min_invariant (rhs)
4230 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4231 || !is_global_var (rhs)
4232 || !DECL_HARD_REGISTER (rhs))))
4233 && !gimple_has_volatile_ops (stmt)
4234 && !has_zero_uses (lhs))
4235 {
4236 tree sprime = NULL;
4237 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4238 pre_expr sprimeexpr;
4239 gimple orig_stmt = stmt;
4240
4241 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4242 get_expr_value_id (lhsexpr),
4243 NULL);
4244
4245 if (sprimeexpr)
4246 {
4247 if (sprimeexpr->kind == CONSTANT)
4248 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4249 else if (sprimeexpr->kind == NAME)
4250 sprime = PRE_EXPR_NAME (sprimeexpr);
4251 else
4252 gcc_unreachable ();
4253 }
4254
4255 /* If there is no existing leader but SCCVN knows this
4256 value is constant, use that constant. */
4257 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4258 {
4259 sprime = VN_INFO (lhs)->valnum;
4260 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4261 TREE_TYPE (sprime)))
4262 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4263
4264 if (dump_file && (dump_flags & TDF_DETAILS))
4265 {
4266 fprintf (dump_file, "Replaced ");
4267 print_gimple_expr (dump_file, stmt, 0, 0);
4268 fprintf (dump_file, " with ");
4269 print_generic_expr (dump_file, sprime, 0);
4270 fprintf (dump_file, " in ");
4271 print_gimple_stmt (dump_file, stmt, 0, 0);
4272 }
4273 pre_stats.eliminations++;
4274 propagate_tree_value_into_stmt (&gsi, sprime);
4275 stmt = gsi_stmt (gsi);
4276 update_stmt (stmt);
4277
4278 /* If we removed EH side-effects from the statement, clean
4279 its EH information. */
4280 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4281 {
4282 bitmap_set_bit (need_eh_cleanup,
4283 gimple_bb (stmt)->index);
4284 if (dump_file && (dump_flags & TDF_DETAILS))
4285 fprintf (dump_file, " Removed EH side-effects.\n");
4286 }
4287 continue;
4288 }
4289
4290 /* If there is no existing usable leader but SCCVN thinks
4291 it has an expression it wants to use as replacement,
4292 insert that. */
4293 if (!sprime || sprime == lhs)
4294 {
4295 tree val = VN_INFO (lhs)->valnum;
4296 if (val != VN_TOP
4297 && TREE_CODE (val) == SSA_NAME
4298 && VN_INFO (val)->needs_insertion
4299 && can_PRE_operation (vn_get_expr_for (val)))
4300 sprime = do_SCCVN_insertion (stmt, val);
4301 }
4302 if (sprime
4303 && sprime != lhs
4304 && (rhs == NULL_TREE
4305 || TREE_CODE (rhs) != SSA_NAME
4306 || may_propagate_copy (rhs, sprime)))
4307 {
4308 bool can_make_abnormal_goto
4309 = is_gimple_call (stmt)
4310 && stmt_can_make_abnormal_goto (stmt);
4311
4312 gcc_assert (sprime != rhs);
4313
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4315 {
4316 fprintf (dump_file, "Replaced ");
4317 print_gimple_expr (dump_file, stmt, 0, 0);
4318 fprintf (dump_file, " with ");
4319 print_generic_expr (dump_file, sprime, 0);
4320 fprintf (dump_file, " in ");
4321 print_gimple_stmt (dump_file, stmt, 0, 0);
4322 }
4323
4324 if (TREE_CODE (sprime) == SSA_NAME)
4325 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4326 NECESSARY, true);
4327 /* We need to make sure the new and old types actually match,
4328 which may require adding a simple cast, which fold_convert
4329 will do for us. */
4330 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4331 && !useless_type_conversion_p (gimple_expr_type (stmt),
4332 TREE_TYPE (sprime)))
4333 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4334
4335 pre_stats.eliminations++;
4336 propagate_tree_value_into_stmt (&gsi, sprime);
4337 stmt = gsi_stmt (gsi);
4338 update_stmt (stmt);
4339
4340 /* If we removed EH side-effects from the statement, clean
4341 its EH information. */
4342 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4343 {
4344 bitmap_set_bit (need_eh_cleanup,
4345 gimple_bb (stmt)->index);
4346 if (dump_file && (dump_flags & TDF_DETAILS))
4347 fprintf (dump_file, " Removed EH side-effects.\n");
4348 }
4349
4350 /* Likewise for AB side-effects. */
4351 if (can_make_abnormal_goto
4352 && !stmt_can_make_abnormal_goto (stmt))
4353 {
4354 bitmap_set_bit (need_ab_cleanup,
4355 gimple_bb (stmt)->index);
4356 if (dump_file && (dump_flags & TDF_DETAILS))
4357 fprintf (dump_file, " Removed AB side-effects.\n");
4358 }
4359 }
4360 }
4361 /* If the statement is a scalar store, see if the stored-to
4362 location already has the same value number as the rhs. If so,
4363 the store is dead. */
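/* For instance (an illustrative sketch): in

     x_1 = *p_2;
     *p_2 = x_1;

   the value-number lookup of the LHS reference *p_2 yields the
   value of x_1, so the second store changes nothing and can be
   queued for removal. */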
4364 else if (gimple_assign_single_p (stmt)
4365 && !gimple_has_volatile_ops (stmt)
4366 && !is_gimple_reg (gimple_assign_lhs (stmt))
4367 && (TREE_CODE (rhs) == SSA_NAME
4368 || is_gimple_min_invariant (rhs)))
4369 {
4370 tree val;
4371 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4372 gimple_vuse (stmt), VN_WALK, NULL);
4373 if (TREE_CODE (rhs) == SSA_NAME)
4374 rhs = VN_INFO (rhs)->valnum;
4375 if (val
4376 && operand_equal_p (val, rhs, 0))
4377 {
4378 if (dump_file && (dump_flags & TDF_DETAILS))
4379 {
4380 fprintf (dump_file, "Deleted redundant store ");
4381 print_gimple_stmt (dump_file, stmt, 0, 0);
4382 }
4383
4384 /* Queue stmt for removal. */
4385 VEC_safe_push (gimple, heap, to_remove, stmt);
4386 }
4387 }
4388 /* Visit COND_EXPRs and fold the comparison with the
4389 available value-numbers. */
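/* E.g. (a hedged sketch): if SCCVN proved that i_2 has the
   constant value 0, then for

     if (i_2 != 0) ...

   op0 becomes 0, fold_binary folds 0 != 0 to a false constant,
   the condition is rewritten to be unconditionally false, and a
   CFG cleanup is queued. */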
4390 else if (gimple_code (stmt) == GIMPLE_COND)
4391 {
4392 tree op0 = gimple_cond_lhs (stmt);
4393 tree op1 = gimple_cond_rhs (stmt);
4394 tree result;
4395
4396 if (TREE_CODE (op0) == SSA_NAME)
4397 op0 = VN_INFO (op0)->valnum;
4398 if (TREE_CODE (op1) == SSA_NAME)
4399 op1 = VN_INFO (op1)->valnum;
4400 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4401 op0, op1);
4402 if (result && TREE_CODE (result) == INTEGER_CST)
4403 {
4404 if (integer_zerop (result))
4405 gimple_cond_make_false (stmt);
4406 else
4407 gimple_cond_make_true (stmt);
4408 update_stmt (stmt);
4409 todo = TODO_cleanup_cfg;
4410 }
4411 }
4412 /* Visit indirect calls and turn them into direct calls if
4413 possible. */
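/* Illustrative sketch (names invented): if the callee fnptr_3
   value-numbers to &foo, an ADDR_EXPR of a FUNCTION_DECL of
   compatible type, the indirect call fnptr_3 (x_1) is rewritten
   into the direct call foo (x_1). */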
4414 if (is_gimple_call (stmt))
4415 {
4416 tree orig_fn = gimple_call_fn (stmt);
4417 tree fn;
4418 if (!orig_fn)
4419 continue;
4420 if (TREE_CODE (orig_fn) == SSA_NAME)
4421 fn = VN_INFO (orig_fn)->valnum;
4422 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4423 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4424 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4425 else
4426 continue;
4427 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4428 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4429 TREE_TYPE (fn)))
4430 {
4431 bool can_make_abnormal_goto
4432 = stmt_can_make_abnormal_goto (stmt);
4433 bool was_noreturn = gimple_call_noreturn_p (stmt);
4434
4435 if (dump_file && (dump_flags & TDF_DETAILS))
4436 {
4437 fprintf (dump_file, "Replacing call target with ");
4438 print_generic_expr (dump_file, fn, 0);
4439 fprintf (dump_file, " in ");
4440 print_gimple_stmt (dump_file, stmt, 0, 0);
4441 }
4442
4443 gimple_call_set_fn (stmt, fn);
4444 VEC_safe_push (gimple, heap, to_update, stmt);
4445
4446 /* When changing a call into a noreturn call, cfg cleanup
4447 is needed to fix up the noreturn call. */
4448 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4449 todo |= TODO_cleanup_cfg;
4450
4451 /* If we removed EH side-effects from the statement, clean
4452 its EH information. */
4453 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4454 {
4455 bitmap_set_bit (need_eh_cleanup,
4456 gimple_bb (stmt)->index);
4457 if (dump_file && (dump_flags & TDF_DETAILS))
4458 fprintf (dump_file, " Removed EH side-effects.\n");
4459 }
4460
4461 /* Likewise for AB side-effects. */
4462 if (can_make_abnormal_goto
4463 && !stmt_can_make_abnormal_goto (stmt))
4464 {
4465 bitmap_set_bit (need_ab_cleanup,
4466 gimple_bb (stmt)->index);
4467 if (dump_file && (dump_flags & TDF_DETAILS))
4468 fprintf (dump_file, " Removed AB side-effects.\n");
4469 }
4470
4471 /* Changing an indirect call to a direct call may
4472 have exposed different semantics. This may
4473 require an SSA update. */
4474 todo |= TODO_update_ssa_only_virtuals;
4475 }
4476 }
4477 }
4478
4479 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4480 {
4481 gimple stmt, phi = gsi_stmt (gsi);
4482 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4483 pre_expr sprimeexpr, resexpr;
4484 gimple_stmt_iterator gsi2;
4485
4486 /* We want to perform redundant PHI elimination. Do so by
4487 replacing the PHI with a single copy if possible.
4488 Do not touch single-argument or virtual PHIs. */
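/* Hedged example: for

     x_3 = PHI <a_1(e1), a_1(e2)>;

   value numbering gives x_3 the value of a_1, and a_1 is the
   AVAIL_OUT leader, so the PHI is replaced by the copy
   x_3 = a_1, which is in turn queued for copy propagation and
   removal below. */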
4489 if (gimple_phi_num_args (phi) == 1
4490 || !is_gimple_reg (res))
4491 {
4492 gsi_next (&gsi);
4493 continue;
4494 }
4495
4496 resexpr = get_or_alloc_expr_for_name (res);
4497 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4498 get_expr_value_id (resexpr), NULL);
4499 if (sprimeexpr)
4500 {
4501 if (sprimeexpr->kind == CONSTANT)
4502 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4503 else if (sprimeexpr->kind == NAME)
4504 sprime = PRE_EXPR_NAME (sprimeexpr);
4505 else
4506 gcc_unreachable ();
4507 }
4508 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4509 {
4510 sprime = VN_INFO (res)->valnum;
4511 if (!useless_type_conversion_p (TREE_TYPE (res),
4512 TREE_TYPE (sprime)))
4513 sprime = fold_convert (TREE_TYPE (res), sprime);
4514 }
4515 if (!sprime
4516 || sprime == res)
4517 {
4518 gsi_next (&gsi);
4519 continue;
4520 }
4521
4522 if (dump_file && (dump_flags & TDF_DETAILS))
4523 {
4524 fprintf (dump_file, "Replaced redundant PHI node defining ");
4525 print_generic_expr (dump_file, res, 0);
4526 fprintf (dump_file, " with ");
4527 print_generic_expr (dump_file, sprime, 0);
4528 fprintf (dump_file, "\n");
4529 }
4530
4531 remove_phi_node (&gsi, false);
4532
4533 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4534 && TREE_CODE (sprime) == SSA_NAME)
4535 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4536
4537 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4538 sprime = fold_convert (TREE_TYPE (res), sprime);
4539 stmt = gimple_build_assign (res, sprime);
4540 SSA_NAME_DEF_STMT (res) = stmt;
4541 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4542
4543 gsi2 = gsi_after_labels (b);
4544 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4545 /* Queue the copy for eventual removal. */
4546 VEC_safe_push (gimple, heap, to_remove, stmt);
4547 /* If we inserted this PHI node ourself, it's not an elimination. */
4548 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4549 pre_stats.phis--;
4550 else
4551 pre_stats.eliminations++;
4552 }
4553 }
4554
4555 /* We cannot remove stmts during the BB walk, and especially cannot
4556 release SSA names there, as this confuses the VN machinery. The
4557 stmts ending up in to_remove are either stores or simple copies. */
4558 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4559 {
4560 tree lhs = gimple_assign_lhs (stmt);
4561 tree rhs = gimple_assign_rhs1 (stmt);
4562 use_operand_p use_p;
4563 gimple use_stmt;
4564
4565 /* If there is a single use only, propagate the equivalency
4566 instead of keeping the copy. */
4567 if (TREE_CODE (lhs) == SSA_NAME
4568 && TREE_CODE (rhs) == SSA_NAME
4569 && single_imm_use (lhs, &use_p, &use_stmt)
4570 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4571 {
4572 SET_USE (use_p, rhs);
4573 update_stmt (use_stmt);
4574 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4575 && TREE_CODE (rhs) == SSA_NAME)
4576 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4577 }
4578
4579 /* If this is a store or a now unused copy, remove it. */
4580 if (TREE_CODE (lhs) != SSA_NAME
4581 || has_zero_uses (lhs))
4582 {
4583 basic_block bb = gimple_bb (stmt);
4584 gsi = gsi_for_stmt (stmt);
4585 unlink_stmt_vdef (stmt);
4586 if (gsi_remove (&gsi, true))
4587 bitmap_set_bit (need_eh_cleanup, bb->index);
4588 if (TREE_CODE (lhs) == SSA_NAME)
4589 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4590 release_defs (stmt);
4591 }
4592 }
4593 VEC_free (gimple, heap, to_remove);
4594
4595 /* We cannot update call statements with virtual operands during
4596 the SSA walk. This might remove them, which in turn makes our
4597 VN lattice invalid. */
4598 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4599 update_stmt (stmt);
4600 VEC_free (gimple, heap, to_update);
4601
4602 return todo;
4603 }
4604
4605 /* Borrow a bit of tree-ssa-dce.c for the moment.
4606 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4607 this may be a bit faster, and we may want critical edges kept split. */
4608
4609 /* If OP's defining statement has not already been determined to be necessary,
4610 mark that statement necessary. Return the stmt, if it is newly
4611 necessary. */
4612
4613 static inline gimple
4614 mark_operand_necessary (tree op)
4615 {
4616 gimple stmt;
4617
4618 gcc_assert (op);
4619
4620 if (TREE_CODE (op) != SSA_NAME)
4621 return NULL;
4622
4623 stmt = SSA_NAME_DEF_STMT (op);
4624 gcc_assert (stmt);
4625
4626 if (gimple_plf (stmt, NECESSARY)
4627 || gimple_nop_p (stmt))
4628 return NULL;
4629
4630 gimple_set_plf (stmt, NECESSARY, true);
4631 return stmt;
4632 }
4633
4634 /* Because we don't exactly follow the standard PRE algorithm, sometimes
4635 decide not to insert PHI nodes, and because value numbering of casts isn't
4636 perfect, we sometimes end up inserting dead code. This simple DCE-like
4637 pass removes any insertions we made that weren't actually used. */
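/* A sketch of the idea (hedged): this is a mark phase restricted to
   the inserted names. Seed the worklist with inserted definitions
   already marked NECESSARY by elimination, transitively mark
   everything feeding them, then delete any inserted statement that
   was never marked. For example, a pretmp_5 = a_1 + b_2 inserted
   into a predecessor whose planned PHI user was later removed ends
   up unmarked and is deleted here. */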
4638
4639 static void
4640 remove_dead_inserted_code (void)
4641 {
4642 bitmap worklist;
4643 unsigned i;
4644 bitmap_iterator bi;
4645 gimple t;
4646
4647 worklist = BITMAP_ALLOC (NULL);
4648 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4649 {
4650 t = SSA_NAME_DEF_STMT (ssa_name (i));
4651 if (gimple_plf (t, NECESSARY))
4652 bitmap_set_bit (worklist, i);
4653 }
4654 while (!bitmap_empty_p (worklist))
4655 {
4656 i = bitmap_first_set_bit (worklist);
4657 bitmap_clear_bit (worklist, i);
4658 t = SSA_NAME_DEF_STMT (ssa_name (i));
4659
4660 /* PHI nodes are somewhat special in that each PHI alternative has
4661 data and control dependencies. All the statements feeding the
4662 PHI node's arguments are always necessary. */
4663 if (gimple_code (t) == GIMPLE_PHI)
4664 {
4665 unsigned k;
4666
4667 for (k = 0; k < gimple_phi_num_args (t); k++)
4668 {
4669 tree arg = PHI_ARG_DEF (t, k);
4670 if (TREE_CODE (arg) == SSA_NAME)
4671 {
4672 gimple n = mark_operand_necessary (arg);
4673 if (n)
4674 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4675 }
4676 }
4677 }
4678 else
4679 {
4680 /* Propagate through the operands. Examine all the USE, VUSE and
4681 VDEF operands in this statement. Mark all the statements
4682 which feed this statement's uses as necessary. */
4683 ssa_op_iter iter;
4684 tree use;
4685
4686 /* The operands of VDEF expressions are also needed as they
4687 represent potential definitions that may reach this
4688 statement (VDEF operands allow us to follow def-def
4689 links). */
4690
4691 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4692 {
4693 gimple n = mark_operand_necessary (use);
4694 if (n)
4695 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4696 }
4697 }
4698 }
4699
4700 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4701 {
4702 t = SSA_NAME_DEF_STMT (ssa_name (i));
4703 if (!gimple_plf (t, NECESSARY))
4704 {
4705 gimple_stmt_iterator gsi;
4706
4707 if (dump_file && (dump_flags & TDF_DETAILS))
4708 {
4709 fprintf (dump_file, "Removing unnecessary insertion:");
4710 print_gimple_stmt (dump_file, t, 0, 0);
4711 }
4712
4713 gsi = gsi_for_stmt (t);
4714 if (gimple_code (t) == GIMPLE_PHI)
4715 remove_phi_node (&gsi, true);
4716 else
4717 {
4718 gsi_remove (&gsi, true);
4719 release_defs (t);
4720 }
4721 }
4722 }
4723 BITMAP_FREE (worklist);
4724 }
4725
4726 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4727 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4728 the number of visited blocks. */
4729
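/* Illustrative trace (a hedged sketch): the walk starts at EXIT and
   follows predecessor edges, so for the simple chain

     ENTRY -> B1 -> B2 -> B3 -> EXIT

   the array comes out as B1, B2, B3: a reverse post-order of the
   forward CFG, obtained by taking the post-order of a DFS on the
   reversed CFG. */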
4730 static int
4731 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4732 {
4733 edge_iterator *stack;
4734 int sp;
4735 int post_order_num = 0;
4736 sbitmap visited;
4737
4738 if (include_entry_exit)
4739 post_order[post_order_num++] = EXIT_BLOCK;
4740
4741 /* Allocate stack for back-tracking up CFG. */
4742 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4743 sp = 0;
4744
4745 /* Allocate bitmap to track nodes that have been visited. */
4746 visited = sbitmap_alloc (last_basic_block);
4747
4748 /* None of the nodes in the CFG have been visited yet. */
4749 sbitmap_zero (visited);
4750
4751 /* Push the last edge on to the stack. */
4752 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4753
4754 while (sp)
4755 {
4756 edge_iterator ei;
4757 basic_block src;
4758 basic_block dest;
4759
4760 /* Look at the edge on the top of the stack. */
4761 ei = stack[sp - 1];
4762 src = ei_edge (ei)->src;
4763 dest = ei_edge (ei)->dest;
4764
4765 /* Check if the edge source has been visited yet. */
4766 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4767 {
4768 /* Mark that we have visited the source. */
4769 SET_BIT (visited, src->index);
4770
4771 if (EDGE_COUNT (src->preds) > 0)
4772 /* Since the SRC node has been visited for the first
4773 time, check its predecessors. */
4774 stack[sp++] = ei_start (src->preds);
4775 else
4776 post_order[post_order_num++] = src->index;
4777 }
4778 else
4779 {
4780 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4781 post_order[post_order_num++] = dest->index;
4782
4783 if (!ei_one_before_end_p (ei))
4784 ei_next (&stack[sp - 1]);
4785 else
4786 sp--;
4787 }
4788 }
4789
4790 if (include_entry_exit)
4791 post_order[post_order_num++] = ENTRY_BLOCK;
4792
4793 free (stack);
4794 sbitmap_free (visited);
4795 return post_order_num;
4796 }
4797
4798
4799 /* Initialize data structures used by PRE. */
4800
4801 static void
4802 init_pre (bool do_fre)
4803 {
4804 basic_block bb;
4805
4806 next_expression_id = 1;
4807 expressions = NULL;
4808 VEC_safe_push (pre_expr, heap, expressions, NULL);
4809 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4810 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4811 get_max_value_id() + 1);
4812 name_to_id = NULL;
4813
4814 in_fre = do_fre;
4815
4816 inserted_exprs = BITMAP_ALLOC (NULL);
4817 need_creation = NULL;
4818 pretemp = NULL_TREE;
4819 storetemp = NULL_TREE;
4820 prephitemp = NULL_TREE;
4821
4822 connect_infinite_loops_to_exit ();
4823 memset (&pre_stats, 0, sizeof (pre_stats));
4824
4825
4826 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4827 my_rev_post_order_compute (postorder, false);
4828
4829 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4830
4831 calculate_dominance_info (CDI_POST_DOMINATORS);
4832 calculate_dominance_info (CDI_DOMINATORS);
4833
4834 bitmap_obstack_initialize (&grand_bitmap_obstack);
4835 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4836 expr_pred_trans_eq, free);
4837 expression_to_id = htab_create (num_ssa_names * 3,
4838 pre_expr_hash,
4839 pre_expr_eq, NULL);
4840 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4841 sizeof (struct bitmap_set), 30);
4842 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4843 sizeof (struct pre_expr_d), 30);
4844 FOR_ALL_BB (bb)
4845 {
4846 EXP_GEN (bb) = bitmap_set_new ();
4847 PHI_GEN (bb) = bitmap_set_new ();
4848 TMP_GEN (bb) = bitmap_set_new ();
4849 AVAIL_OUT (bb) = bitmap_set_new ();
4850 }
4851
4852 need_eh_cleanup = BITMAP_ALLOC (NULL);
4853 need_ab_cleanup = BITMAP_ALLOC (NULL);
4854 }
4855
4856
4857 /* Deallocate data structures used by PRE. */
4858
4859 static void
4860 fini_pre (bool do_fre)
4861 {
4862 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4863 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4864
4865 free (postorder);
4866 VEC_free (bitmap_set_t, heap, value_expressions);
4867 BITMAP_FREE (inserted_exprs);
4868 VEC_free (gimple, heap, need_creation);
4869 bitmap_obstack_release (&grand_bitmap_obstack);
4870 free_alloc_pool (bitmap_set_pool);
4871 free_alloc_pool (pre_expr_pool);
4872 htab_delete (phi_translate_table);
4873 htab_delete (expression_to_id);
4874 VEC_free (unsigned, heap, name_to_id);
4875
4876 free_aux_for_blocks ();
4877
4878 free_dominance_info (CDI_POST_DOMINATORS);
4879
4880 if (do_eh_cleanup)
4881 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4882
4883 if (do_ab_cleanup)
4884 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4885
4886 BITMAP_FREE (need_eh_cleanup);
4887 BITMAP_FREE (need_ab_cleanup);
4888
4889 if (do_eh_cleanup || do_ab_cleanup)
4890 cleanup_tree_cfg ();
4891
4892 if (!do_fre)
4893 loop_optimizer_finalize ();
4894 }
4895
4896 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4897 only wants to do full redundancy elimination. */
4898
4899 static unsigned int
4900 execute_pre (bool do_fre)
4901 {
4902 unsigned int todo = 0;
4903
4904 do_partial_partial =
4905 flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
4906
4907 /* This has to happen before SCCVN runs because
4908 loop_optimizer_init may create new phis, etc. */
4909 if (!do_fre)
4910 loop_optimizer_init (LOOPS_NORMAL);
4911
4912 if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
4913 {
4914 if (!do_fre)
4915 loop_optimizer_finalize ();
4916
4917 return 0;
4918 }
4919
4920 init_pre (do_fre);
4921 scev_initialize ();
4922
4923 /* Collect and value number expressions computed in each basic block. */
4924 compute_avail ();
4925
4926 if (dump_file && (dump_flags & TDF_DETAILS))
4927 {
4928 basic_block bb;
4929
4930 FOR_ALL_BB (bb)
4931 {
4932 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4933 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4934 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4935 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4936 }
4937 }
4938
4939 /* Insert can get quite slow on an incredibly large number of basic
4940 blocks due to some quadratic behavior. Until this behavior is
4941 fixed, don't run it when we have an incredibly large number of
4942 bb's. If we aren't going to run insert, there is no point in
4943 computing ANTIC, either, even though it's plenty fast. */
4944 if (!do_fre && n_basic_blocks < 4000)
4945 {
4946 compute_antic ();
4947 insert ();
4948 }
4949
4950 /* Make sure to remove fake edges before committing our inserts.
4951 This makes sure we don't end up with extra critical edges that
4952 we would need to split. */
4953 remove_fake_exit_edges ();
4954 gsi_commit_edge_inserts ();
4955
4956 /* Remove all the redundant expressions. */
4957 todo |= eliminate ();
4958
4959 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4960 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4961 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4962 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4963 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4964
4965 clear_expression_ids ();
4966 if (!do_fre)
4967 {
4968 remove_dead_inserted_code ();
4969 todo |= TODO_verify_flow;
4970 }
4971
4972 scev_finalize ();
4973 fini_pre (do_fre);
4974
4975 if (!do_fre)
4976 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4977 case we can merge the block with the remaining predecessor of the block.
4978 It should either:
4979 - call merge_blocks after each tail merge iteration
4980 - call merge_blocks after all tail merge iterations
4981 - mark TODO_cleanup_cfg when necessary
4982 - share the cfg cleanup with fini_pre. */
4983 todo |= tail_merge_optimize (todo);
4984 free_scc_vn ();
4985
4986 return todo;
4987 }
4988
4989 /* Gate and execute functions for PRE. */
4990
4991 static unsigned int
4992 do_pre (void)
4993 {
4994 return execute_pre (false);
4995 }
4996
4997 static bool
4998 gate_pre (void)
4999 {
5000 return flag_tree_pre != 0;
5001 }
5002
5003 struct gimple_opt_pass pass_pre =
5004 {
5005 {
5006 GIMPLE_PASS,
5007 "pre", /* name */
5008 gate_pre, /* gate */
5009 do_pre, /* execute */
5010 NULL, /* sub */
5011 NULL, /* next */
5012 0, /* static_pass_number */
5013 TV_TREE_PRE, /* tv_id */
5014 PROP_no_crit_edges | PROP_cfg
5015 | PROP_ssa, /* properties_required */
5016 0, /* properties_provided */
5017 0, /* properties_destroyed */
5018 TODO_rebuild_alias, /* todo_flags_start */
5019 TODO_update_ssa_only_virtuals | TODO_ggc_collect
5020 | TODO_verify_ssa /* todo_flags_finish */
5021 }
5022 };
5023
5024
5025 /* Gate and execute functions for FRE. */
5026
5027 static unsigned int
5028 execute_fre (void)
5029 {
5030 return execute_pre (true);
5031 }
5032
5033 static bool
5034 gate_fre (void)
5035 {
5036 return flag_tree_fre != 0;
5037 }
5038
5039 struct gimple_opt_pass pass_fre =
5040 {
5041 {
5042 GIMPLE_PASS,
5043 "fre", /* name */
5044 gate_fre, /* gate */
5045 execute_fre, /* execute */
5046 NULL, /* sub */
5047 NULL, /* next */
5048 0, /* static_pass_number */
5049 TV_TREE_FRE, /* tv_id */
5050 PROP_cfg | PROP_ssa, /* properties_required */
5051 0, /* properties_provided */
5052 0, /* properties_destroyed */
5053 0, /* todo_flags_start */
5054 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
5055 }
5056 };