/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the TMP_GEN sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the up-safety
   (availability) of the expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
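
/* A minimal illustration (hypothetical GIMPLE, names invented for
   exposition only):

       if (cond)                         if (cond)
         x_1 = a_2 + b_3;                  x_1 = a_2 + b_3;
       <join>                    =>      else
       y_4 = a_2 + b_3;                    pretmp_5 = a_2 + b_3;
                                         # x_6 = PHI <x_1, pretmp_5>
                                         <join>
                                         y_4 = x_6;

   a_2 + b_3 is AVAIL on one predecessor of the join and ANTIC at the
   join, so inserting it into the other predecessor makes it fully
   redundant, and the elimination walk then replaces the second
   computation.  */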

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
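
/* For instance, if a_2 and pretmp_7 compute the same value, both map
   to a single value_id (say 11, an invented number); the sets below
   then record the single bit 11 in their value bitmap rather than one
   bit per SSA_NAME version.  */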

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as
   two bitmaps, one that keeps track of values present in the set, and
   one that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare two pre_exprs for equality; hashtable callback.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash a pre_expr; hashtable callback.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
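
/* These two callbacks are what make pre_expr hashable.  A table keyed
   on pre_exprs is created along these lines (sketch only; the size
   hint is arbitrary, and the real setup happens in this pass's
   initialization code, which is not shown in this excerpt):

     htab_t tbl = htab_create (num_ssa_names * 3,
                               pre_expr_hash, pre_expr_eq, NULL);  */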


/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the expression id for EXPR, or 0 if it has not been assigned
   one yet.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* True if we are running in FRE (as opposed to PRE) mode.  */
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap expressions;
  bitmap values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
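
/* Typical iteration over a set with these macros (sketch, mirroring
   the uses later in this file):

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       {
         pre_expr expr = expression_for_id (i);
         ...
       }  */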

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
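
/* These accessors assume the pass has hung a bb_value_sets_t off
   BB->aux; e.g. ANTIC_IN (bb) then denotes that block's antic_in
   set.  */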


/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimizations PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}


/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}


/* Add expression E to the expression set of value id V.  */

void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}
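
/* E.g. after add_to_value (7, e1) and add_to_value (7, e2) (value id 7
   picked arbitrarily), the set at VEC_index (bitmap_set_t,
   value_expressions, 7) contains both e1 and e2; this reverse mapping
   is what sorted_array_from_bitmap_set and bitmap_find_leader walk
   below.  */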

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (set->values, val);
      bitmap_clear_bit (set->expressions, get_expression_id (expr));
    }
}

/* Insert EXPR with value id VAL into SET.  ALLOW_CONSTANTS says
   whether constant values may be inserted as well.  */

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (set->values, val);
      bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
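
/* Note the asymmetry the two bitmaps create: inserting two expressions
   with the same value id sets two bits in set->expressions but only
   one bit in set->values, which is why bitmap_set_equal below compares
   only the value bitmaps.  */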

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (dest->expressions, orig->expressions);
  bitmap_copy (dest->values, orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  BITMAP_FREE (set->expressions);
  BITMAP_FREE (set->values);
}


/* Generate a topologically ordered array of the expressions in
   bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

      bitmap_and_into (dest->values, orig->values);
      bitmap_copy (temp, dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (dest->values, value_id))
            bitmap_clear_bit (dest->expressions, i);
        }
      BITMAP_FREE (temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (result->expressions, dest->expressions,
                    orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_copy (temp, a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  BITMAP_FREE (temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (set->expressions))
    return false;

  return bitmap_bit_p (set->values, value_id);
}

/* Return true if bitmapped set SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_bit_p (set->expressions, i))
        {
          bitmap_clear_bit (set->expressions, i);
          bitmap_set_bit (set->expressions, get_expression_id (expr));
          return;
        }
    }
}

/* Return true if two bitmap sets are equal.  Note that equality is
   defined on values, not on expressions.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (a->values, b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (set->values, val))
    bitmap_set_bit (set->expressions, expr->id);
}
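
/* bitmap_set_bit returns true only when the bit was newly set, so the
   expression bitmap above is touched exactly when VAL was not yet a
   member; an expression for an already-present value is deliberately
   not added.  */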

/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      /* Big enough to print any unsigned int with %04d.  */
      char s[16];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As none of them
         have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_expression:
            if (nary->opcode == TRUTH_NOT_EXPR)
              goto do_unary;
            if (nary->opcode != TRUTH_AND_EXPR
                && nary->opcode != TRUTH_OR_EXPR
                && nary->opcode != TRUTH_XOR_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
do_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}
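
/* Worked example (values invented for illustration): for the NARY
   {plus_expr, x_1, y_2} where get_constant_for_value_id maps x_1's
   value id to the integer constant 3 and y_2's to 4, the fold_binary
   call above computes 3 + 4 and the function returns the CONSTANT
   pre_expr for 7.  */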

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}



static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        struct vn_nary_op_s newnary;
        /* The NARY structure is only guaranteed to have been
           allocated to the nary->length operands.  */
        memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
                                 - sizeof (tree) * (4 - nary->length)));

        for (i = 0; i < newnary.length; i++)
          {
            if (TREE_CODE (newnary.op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary.op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary.op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary.length,
                                                    newnary.opcode,
                                                    newnary.type,
                                                    newnary.op[0],
                                                    newnary.op[1],
                                                    newnary.op[2],
                                                    newnary.op[3],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id () + 1);
                nary = vn_nary_op_insert_pieces (newnary.length,
                                                 newnary.opcode,
                                                 newnary.type,
                                                 newnary.op[0],
                                                 newnary.op[1],
                                                 newnary.op[2],
                                                 newnary.op[3],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree oldop0 = operand->op0;
            tree oldop1 = operand->op1;
            tree oldop2 = operand->op2;
            tree op0 = oldop0;
            tree op1 = oldop1;
            tree op2 = oldop2;
            tree type = operand->type;
            vn_reference_op_s newop = *operand;

            if (op0 && TREE_CODE (op0) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op0)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op0 = name;
                  }
                else if (!opresult)
                  break;
              }
            changed |= op0 != oldop0;

            if (op1 && TREE_CODE (op1) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op1)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op1 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op1 && !is_gimple_min_invariant (op1))
              break;
            changed |= op1 != oldop1;
            if (op2 && TREE_CODE (op2) == SSA_NAME)
              {
                unsigned int op_val_id = VN_INFO (op2)->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (leader, set1, set2, pred, phiblock);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    if (!name)
                      break;
                    op2 = name;
                  }
                else if (!opresult)
                  break;
              }
            /* We can't possibly insert these.  */
            else if (op2 && !is_gimple_min_invariant (op2))
              break;
            changed |= op2 != oldop2;

            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
              newop.opcode = TREE_CODE (op0);
            newop.type = type;
            newop.op0 = op0;
            newop.op1 = op1;
            newop.op2 = op2;
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == INDIRECT_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, true);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
                return get_or_alloc_expr_for_constant (result);
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id () + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
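
/* Example (names invented for illustration): given
   x_3 = PHI <x_1(bb2), x_2(bb3)> in PHIBLOCK, translating the NARY
   {plus_expr, x_3, y_4} towards pred bb2 takes the NAME case of
   phi_translate_1 for the operand x_3 and yields
   {plus_expr, x_1, y_4}, possibly with a fresh value id if that
   expression was never valued before.  */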


/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
                                set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  && gimple_uid (def_stmt) >= gimple_uid (stmt))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
1892
1893 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1894 BLOCK by seeing if it is not killed in the block. Note that we are
1895 only determining whether there is a store that kills it. Because
1896 of the order in which clean iterates over values, we are guaranteed
1897 that altered operands will already have caused EXPR to be eliminated
1898 from the ANTIC_IN set. */
1899
1900 static bool
1901 value_dies_in_block_x (pre_expr expr, basic_block block)
1902 {
1903 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1904 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1905 gimple def;
1906 gimple_stmt_iterator gsi;
1907 unsigned id = get_expression_id (expr);
1908 bool res = false;
1909 ao_ref ref;
1910
1911 if (!vuse)
1912 return false;
1913
1914 /* Lookup a previously calculated result. */
1915 if (EXPR_DIES (block)
1916 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1917 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1918
1919 /* A memory expression {e, VUSE} dies in the block if there is a
1920 statement that may clobber e. If, walking statements from the
1921 top of the basic block, we find a statement that uses VUSE, there
1922 can be no kill in between that use and the original statement that
1923 loaded {e, VUSE}, so we can stop walking. */
1924 ref.base = NULL_TREE;
1925 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1926 {
1927 tree def_vuse, def_vdef;
1928 def = gsi_stmt (gsi);
1929 def_vuse = gimple_vuse (def);
1930 def_vdef = gimple_vdef (def);
1931
1932 /* Not a memory statement. */
1933 if (!def_vuse)
1934 continue;
1935
1936 /* Not a may-def. */
1937 if (!def_vdef)
1938 {
1939 /* A load with the same VUSE, we're done. */
1940 if (def_vuse == vuse)
1941 break;
1942
1943 continue;
1944 }
1945
1946 /* Init ref only if we really need it. */
1947 if (ref.base == NULL_TREE
1948 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1949 refx->operands))
1950 {
1951 res = true;
1952 break;
1953 }
1954 /* If the statement may clobber expr, it dies. */
1955 if (stmt_may_clobber_ref_p_1 (def, &ref))
1956 {
1957 res = true;
1958 break;
1959 }
1960 }
1961
1962 /* Remember the result. */
1963 if (!EXPR_DIES (block))
1964 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1965 bitmap_set_bit (EXPR_DIES (block), id * 2);
1966 if (res)
1967 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1968
1969 return res;
1970 }
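
/* For example (hypothetical GIMPLE, for illustration only): querying
   the expression {*p_1, .MEM_4} against a block containing

     *p_1 = x_2;        <-- may-def that clobbers *p_1

   records "dies" in EXPR_DIES, whereas a block that starts with

     y_3 = *p_1;        <-- a load whose VUSE is .MEM_4

   lets the walk stop early with "does not die", since no kill can sit
   between the top of the block and a use of the same VUSE.  */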
1971
1972
1973 #define union_contains_value(SET1, SET2, VAL) \
1974 (bitmap_set_contains_value ((SET1), (VAL)) \
1975 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
1976
1977 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2. */
1979 static bool
1980 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
1981 vn_reference_op_t vro)
1982 {
1983 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
1984 {
1985 struct pre_expr_d temp;
1986 temp.kind = NAME;
1987 temp.id = 0;
1988 PRE_EXPR_NAME (&temp) = vro->op0;
1989 temp.id = lookup_expression_id (&temp);
1990 if (temp.id == 0)
1991 return false;
1992 if (!union_contains_value (set1, set2,
1993 get_expr_value_id (&temp)))
1994 return false;
1995 }
1996 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1997 {
1998 struct pre_expr_d temp;
1999 temp.kind = NAME;
2000 temp.id = 0;
2001 PRE_EXPR_NAME (&temp) = vro->op1;
2002 temp.id = lookup_expression_id (&temp);
2003 if (temp.id == 0)
2004 return false;
2005 if (!union_contains_value (set1, set2,
2006 get_expr_value_id (&temp)))
2007 return false;
2008 }
2009
2010 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2011 {
2012 struct pre_expr_d temp;
2013 temp.kind = NAME;
2014 temp.id = 0;
2015 PRE_EXPR_NAME (&temp) = vro->op2;
2016 temp.id = lookup_expression_id (&temp);
2017 if (temp.id == 0)
2018 return false;
2019 if (!union_contains_value (set1, set2,
2020 get_expr_value_id (&temp)))
2021 return false;
2022 }
2023
2024 return true;
2025 }
2026
2027 /* Determine if the expression EXPR is valid in SET1 U SET2.
2028 ONLY SET2 CAN BE NULL.
2029 This means that we have a leader for each part of the expression
2030 (if it consists of values), or the expression is an SSA_NAME.
2031 For loads/calls, we also see if the vuse is killed in this block. */
2032
2033 static bool
2034 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2035 basic_block block)
2036 {
2037 switch (expr->kind)
2038 {
2039 case NAME:
2040 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2041 case NARY:
2042 {
2043 unsigned int i;
2044 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2045 for (i = 0; i < nary->length; i++)
2046 {
2047 if (TREE_CODE (nary->op[i]) == SSA_NAME)
2048 {
2049 struct pre_expr_d temp;
2050 temp.kind = NAME;
2051 temp.id = 0;
2052 PRE_EXPR_NAME (&temp) = nary->op[i];
2053 temp.id = lookup_expression_id (&temp);
2054 if (temp.id == 0)
2055 return false;
2056 if (!union_contains_value (set1, set2,
2057 get_expr_value_id (&temp)))
2058 return false;
2059 }
2060 }
2061 /* If the NARY may trap make sure the block does not contain
2062 a possible exit point.
2063 ??? This is overly conservative if we translate AVAIL_OUT
2064 as the available expression might be after the exit point. */
2065 if (BB_MAY_NOTRETURN (block)
2066 && vn_nary_may_trap (nary))
2067 return false;
2068 return true;
2069 }
2070 break;
2071 case REFERENCE:
2072 {
2073 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2074 vn_reference_op_t vro;
2075 unsigned int i;
2076
2077 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
2078 {
2079 if (!vro_valid_in_sets (set1, set2, vro))
2080 return false;
2081 }
2082 if (ref->vuse)
2083 {
2084 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2085 if (!gimple_nop_p (def_stmt)
2086 && gimple_bb (def_stmt) != block
2087 && !dominated_by_p (CDI_DOMINATORS,
2088 block, gimple_bb (def_stmt)))
2089 return false;
2090 }
2091 return !value_dies_in_block_x (expr, block);
2092 }
2093 default:
2094 gcc_unreachable ();
2095 }
2096 }
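
/* For instance (illustrative): the NARY expression {a_2 + b_3} is
   valid in SET1 U SET2 only if the value ids of both a_2 and b_3 have
   leaders in the union.  If b_3 is produced inside BLOCK itself, its
   value has no leader at the block's top (it was subtracted via
   TMP_GEN), so clean () below drops a_2 + b_3 from ANTIC_IN -- it
   cannot be anticipated before b_3 exists.  */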
2097
2098 /* Clean the set of expressions that are no longer valid in SET1 or
2099 SET2, i.e., expressions made up of values we have no leaders for
2100 in SET1 or SET2. This version is used for partial anticipation,
2101 where an expression may rely on leaders from either ANTIC_IN or
2102 PA_IN. */
2103
2104 static void
2105 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2106 {
2107 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2108 pre_expr expr;
2109 int i;
2110
2111 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2112 {
2113 if (!valid_in_sets (set1, set2, expr, block))
2114 bitmap_remove_from_set (set1, expr);
2115 }
2116 VEC_free (pre_expr, heap, exprs);
2117 }
2118
2119 /* Clean the set of expressions that are no longer valid in SET. This
2120 means expressions that are made up of values we have no leaders for
2121 in SET. */
2122
2123 static void
2124 clean (bitmap_set_t set, basic_block block)
2125 {
2126 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2127 pre_expr expr;
2128 int i;
2129
2130 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
2131 {
2132 if (!valid_in_sets (set, NULL, expr, block))
2133 bitmap_remove_from_set (set, expr);
2134 }
2135 VEC_free (pre_expr, heap, exprs);
2136 }
2137
2138 static sbitmap has_abnormal_preds;
2139
2140 /* List of blocks that may have changed during ANTIC computation and
2141 thus need to be iterated over. */
2142
2143 static sbitmap changed_blocks;
2144
2145 /* Decide whether to defer a block for a later iteration, or PHI
2146 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2147 should defer the block, and true if we processed it. */
2148
2149 static bool
2150 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2151 basic_block block, basic_block phiblock)
2152 {
2153 if (!BB_VISITED (phiblock))
2154 {
2155 SET_BIT (changed_blocks, block->index);
2156 BB_VISITED (block) = 0;
2157 BB_DEFERRED (block) = 1;
2158 return false;
2159 }
2160 else
2161 phi_translate_set (dest, source, block, phiblock);
2162 return true;
2163 }
2164
2165 /* Compute the ANTIC set for BLOCK.
2166
2167 If succs(BLOCK) > 1 then
2168 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2169 else if succs(BLOCK) == 1 then
2170 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2171
2172 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2173 */
2174
2175 static bool
2176 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2177 {
2178 bool changed = false;
2179 bitmap_set_t S, old, ANTIC_OUT;
2180 bitmap_iterator bi;
2181 unsigned int bii;
2182 edge e;
2183 edge_iterator ei;
2184
2185 old = ANTIC_OUT = S = NULL;
2186 BB_VISITED (block) = 1;
2187
2188 /* If any edges from predecessors are abnormal, antic_in is empty,
2189 so do nothing. */
2190 if (block_has_abnormal_pred_edge)
2191 goto maybe_dump_sets;
2192
2193 old = ANTIC_IN (block);
2194 ANTIC_OUT = bitmap_set_new ();
2195
2196 /* If the block has no successors, ANTIC_OUT is empty. */
2197 if (EDGE_COUNT (block->succs) == 0)
2198 ;
2199 /* If we have one successor, we could have some phi nodes to
2200 translate through. */
2201 else if (single_succ_p (block))
2202 {
2203 basic_block succ_bb = single_succ (block);
2204
2205 /* We trade iterations of the dataflow equations for having to
2206 phi translate the maximal set, which is incredibly slow
2207 (since the maximal set often has 300+ members, even when you
2208 have a small number of blocks).
2209 Basically, we defer the computation of ANTIC for this block
2210 until we have processed its successor, which will inevitably
2211 have a *much* smaller set of values to phi translate once
2212 clean has been run on it.
2213 The cost of doing this is that we technically perform more
2214 iterations; however, they are lower-cost iterations.
2215
2216 Timings for PRE on tramp3d-v4:
2217 without maximal set fix: 11 seconds
2218 with maximal set fix/without deferring: 26 seconds
2219 with maximal set fix/with deferring: 11 seconds
2220 */
2221
2222 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2223 block, succ_bb))
2224 {
2225 changed = true;
2226 goto maybe_dump_sets;
2227 }
2228 }
2229 /* If we have multiple successors, we take the intersection of all of
2230 them. Note that in the case of loop exit phi nodes, we may have
2231 phis to translate through. */
2232 else
2233 {
2234 VEC(basic_block, heap) * worklist;
2235 size_t i;
2236 basic_block bprime, first = NULL;
2237
2238 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2239 FOR_EACH_EDGE (e, ei, block->succs)
2240 {
2241 if (!first
2242 && BB_VISITED (e->dest))
2243 first = e->dest;
2244 else if (BB_VISITED (e->dest))
2245 VEC_quick_push (basic_block, worklist, e->dest);
2246 }
2247
2248 /* With multiple successors we must already have visited at least one. */
2249 if (!first)
2250 {
2251 SET_BIT (changed_blocks, block->index);
2252 BB_VISITED (block) = 0;
2253 BB_DEFERRED (block) = 1;
2254 changed = true;
2255 VEC_free (basic_block, heap, worklist);
2256 goto maybe_dump_sets;
2257 }
2258
2259 if (!gimple_seq_empty_p (phi_nodes (first)))
2260 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2261 else
2262 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2263
2264 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2265 {
2266 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2267 {
2268 bitmap_set_t tmp = bitmap_set_new ();
2269 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2270 bitmap_set_and (ANTIC_OUT, tmp);
2271 bitmap_set_free (tmp);
2272 }
2273 else
2274 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2275 }
2276 VEC_free (basic_block, heap, worklist);
2277 }
2278
2279 /* Generate ANTIC_OUT - TMP_GEN. */
2280 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2281
2282 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2283 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2284 TMP_GEN (block));
2285
2286 /* Then union in the ANTIC_OUT - TMP_GEN values,
2287 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2288 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2289 bitmap_value_insert_into_set (ANTIC_IN (block),
2290 expression_for_id (bii));
2291
2292 clean (ANTIC_IN (block), block);
2293
2294 /* !old->expressions can happen when we deferred a block. */
2295 if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
2296 {
2297 changed = true;
2298 SET_BIT (changed_blocks, block->index);
2299 FOR_EACH_EDGE (e, ei, block->preds)
2300 SET_BIT (changed_blocks, e->src->index);
2301 }
2302 else
2303 RESET_BIT (changed_blocks, block->index);
2304
2305 maybe_dump_sets:
2306 if (dump_file && (dump_flags & TDF_DETAILS))
2307 {
2308 if (!BB_DEFERRED (block) || BB_VISITED (block))
2309 {
2310 if (ANTIC_OUT)
2311 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2312
2313 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2314 block->index);
2315
2316 if (S)
2317 print_bitmap_set (dump_file, S, "S", block->index);
2318 }
2319 else
2320 {
2321 fprintf (dump_file,
2322 "Block %d was deferred for a future iteration.\n",
2323 block->index);
2324 }
2325 }
2326 if (old)
2327 bitmap_set_free (old);
2328 if (S)
2329 bitmap_set_free (S);
2330 if (ANTIC_OUT)
2331 bitmap_set_free (ANTIC_OUT);
2332 return changed;
2333 }
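
/* Worked example of the equations above (illustrative only):

        B1
       /  \
      B2   B3       B2: x_1 = a_2 + b_3;
       \  /
        B4          B4: y_4 = a_2 + b_3;

   EXP_GEN (B4) puts a_2 + b_3 into ANTIC_IN (B4).  B2 and B3 each
   have the single successor B4 and no phis are involved, so
   ANTIC_OUT (B2) and ANTIC_OUT (B3) both contain a_2 + b_3: the
   expression is anticipatable at the end of B3 even though B3 never
   computes it.  The insertion phase exploits exactly this to place a
   computation in B3 and make the one in B4 fully redundant.  */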
2334
2335 /* Compute PARTIAL_ANTIC for BLOCK.
2336
2337 If succs(BLOCK) > 1 then
2338 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2339 in ANTIC_OUT for all succ(BLOCK)
2340 else if succs(BLOCK) == 1 then
2341 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2342
2343 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2344 - ANTIC_IN[BLOCK])
2345
2346 */
2347 static bool
2348 compute_partial_antic_aux (basic_block block,
2349 bool block_has_abnormal_pred_edge)
2350 {
2351 bool changed = false;
2352 bitmap_set_t old_PA_IN;
2353 bitmap_set_t PA_OUT;
2354 edge e;
2355 edge_iterator ei;
2356 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2357
2358 old_PA_IN = PA_OUT = NULL;
2359
2360 /* If any edges from predecessors are abnormal, PA_IN is empty,
2361 so do nothing. */
2362 if (block_has_abnormal_pred_edge)
2363 goto maybe_dump_sets;
2364
2365 /* If there are too many partially anticipatable values in the
2366 block, phi_translate_set can take an exponential time: stop
2367 before the translation starts. */
2368 if (max_pa
2369 && single_succ_p (block)
2370 && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
2371 goto maybe_dump_sets;
2372
2373 old_PA_IN = PA_IN (block);
2374 PA_OUT = bitmap_set_new ();
2375
2376 /* If the block has no successors, PA_OUT is empty. */
2377 if (EDGE_COUNT (block->succs) == 0)
2378 ;
2379 /* If we have one successor, we could have some phi nodes to
2380 translate through. Note that we can't phi translate across DFS
2381 back edges in partial antic, because it uses a union operation on
2382 the successors. For recurrences like IVs, we would end up
2383 generating a new value in the set on each go around (i + 3 (VH.1),
2384 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2385 else if (single_succ_p (block))
2386 {
2387 basic_block succ = single_succ (block);
2388 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2389 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2390 }
2391 /* If we have multiple successors, we take the union of all of
2392 them. */
2393 else
2394 {
2395 VEC(basic_block, heap) * worklist;
2396 size_t i;
2397 basic_block bprime;
2398
2399 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2400 FOR_EACH_EDGE (e, ei, block->succs)
2401 {
2402 if (e->flags & EDGE_DFS_BACK)
2403 continue;
2404 VEC_quick_push (basic_block, worklist, e->dest);
2405 }
2406 if (VEC_length (basic_block, worklist) > 0)
2407 {
2408 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2409 {
2410 unsigned int i;
2411 bitmap_iterator bi;
2412
2413 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2414 bitmap_value_insert_into_set (PA_OUT,
2415 expression_for_id (i));
2416 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2417 {
2418 bitmap_set_t pa_in = bitmap_set_new ();
2419 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2420 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2421 bitmap_value_insert_into_set (PA_OUT,
2422 expression_for_id (i));
2423 bitmap_set_free (pa_in);
2424 }
2425 else
2426 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2427 bitmap_value_insert_into_set (PA_OUT,
2428 expression_for_id (i));
2429 }
2430 }
2431 VEC_free (basic_block, heap, worklist);
2432 }
2433
2434 /* PA_IN starts with PA_OUT - TMP_GEN.
2435 Then we subtract things from ANTIC_IN. */
2436 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2437
2438 /* For partial antic, we want to put back in the phi results, since
2439 we will properly avoid making them partially antic over backedges. */
2440 bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
2441 bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
2442
2443 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2444 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2445
2446 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2447
2448 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2449 {
2450 changed = true;
2451 SET_BIT (changed_blocks, block->index);
2452 FOR_EACH_EDGE (e, ei, block->preds)
2453 SET_BIT (changed_blocks, e->src->index);
2454 }
2455 else
2456 RESET_BIT (changed_blocks, block->index);
2457
2458 maybe_dump_sets:
2459 if (dump_file && (dump_flags & TDF_DETAILS))
2460 {
2461 if (PA_OUT)
2462 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2463
2464 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2465 }
2466 if (old_PA_IN)
2467 bitmap_set_free (old_PA_IN);
2468 if (PA_OUT)
2469 bitmap_set_free (PA_OUT);
2470 return changed;
2471 }
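
/* Contrast with full ANTIC (illustrative): in the same diamond, if
   only B2 computes a_2 + b_3, the intersection over B1's successors
   leaves ANTIC_OUT (B1) empty, but the value-wise union puts
   a_2 + b_3 into PA_OUT (B1).  Partial anticipation is what allows
   do_partial_partial_insertion to hoist an expression that is useful
   on only some paths, provided it is also fully available.  */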
2472
2473 /* Compute ANTIC and partial ANTIC sets. */
2474
2475 static void
2476 compute_antic (void)
2477 {
2478 bool changed = true;
2479 int num_iterations = 0;
2480 basic_block block;
2481 int i;
2482
2483 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2484 We pre-build the map of blocks with incoming abnormal edges here. */
2485 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2486 sbitmap_zero (has_abnormal_preds);
2487
2488 FOR_EACH_BB (block)
2489 {
2490 edge_iterator ei;
2491 edge e;
2492
2493 FOR_EACH_EDGE (e, ei, block->preds)
2494 {
2495 e->flags &= ~EDGE_DFS_BACK;
2496 if (e->flags & EDGE_ABNORMAL)
2497 {
2498 SET_BIT (has_abnormal_preds, block->index);
2499 break;
2500 }
2501 }
2502
2503 BB_VISITED (block) = 0;
2504 BB_DEFERRED (block) = 0;
2505
2506 /* While we are here, give empty ANTIC_IN sets to each block. */
2507 ANTIC_IN (block) = bitmap_set_new ();
2508 PA_IN (block) = bitmap_set_new ();
2509 }
2510
2511 /* At the exit block we anticipate nothing. */
2512 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2513 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2514 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2515
2516 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2517 sbitmap_ones (changed_blocks);
2518 while (changed)
2519 {
2520 if (dump_file && (dump_flags & TDF_DETAILS))
2521 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2522 num_iterations++;
2523 changed = false;
2524 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2525 {
2526 if (TEST_BIT (changed_blocks, postorder[i]))
2527 {
2528 basic_block block = BASIC_BLOCK (postorder[i]);
2529 changed |= compute_antic_aux (block,
2530 TEST_BIT (has_abnormal_preds,
2531 block->index));
2532 }
2533 }
2534 #ifdef ENABLE_CHECKING
2535 /* Theoretically possible, but *highly* unlikely. */
2536 gcc_assert (num_iterations < 500);
2537 #endif
2538 }
2539
2540 statistics_histogram_event (cfun, "compute_antic iterations",
2541 num_iterations);
2542
2543 if (do_partial_partial)
2544 {
2545 sbitmap_ones (changed_blocks);
2546 mark_dfs_back_edges ();
2547 num_iterations = 0;
2548 changed = true;
2549 while (changed)
2550 {
2551 if (dump_file && (dump_flags & TDF_DETAILS))
2552 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2553 num_iterations++;
2554 changed = false;
2555 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2556 {
2557 if (TEST_BIT (changed_blocks, postorder[i]))
2558 {
2559 basic_block block = BASIC_BLOCK (postorder[i]);
2560 changed
2561 |= compute_partial_antic_aux (block,
2562 TEST_BIT (has_abnormal_preds,
2563 block->index));
2564 }
2565 }
2566 #ifdef ENABLE_CHECKING
2567 /* Theoretically possible, but *highly* unlikely. */
2568 gcc_assert (num_iterations < 500);
2569 #endif
2570 }
2571 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2572 num_iterations);
2573 }
2574 sbitmap_free (has_abnormal_preds);
2575 sbitmap_free (changed_blocks);
2576 }
2577
2578 /* Return true if we can value number the call in STMT. This is true
2579 if we have a pure or constant call. */
2580
2581 static bool
2582 can_value_number_call (gimple stmt)
2583 {
2584 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2585 return true;
2586 return false;
2587 }
2588
2589 /* Return true if OP is a tree which we can perform PRE on.
2590 This may not match the operations we can value number, but in
2591 a perfect world it would. */
2592
2593 static bool
2594 can_PRE_operation (tree op)
2595 {
2596 return UNARY_CLASS_P (op)
2597 || BINARY_CLASS_P (op)
2598 || COMPARISON_CLASS_P (op)
2599 || TREE_CODE (op) == INDIRECT_REF
2600 || TREE_CODE (op) == COMPONENT_REF
2601 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2602 || TREE_CODE (op) == CALL_EXPR
2603 || TREE_CODE (op) == ARRAY_REF;
2604 }
2605
2606
2607 /* Inserted expressions are placed onto this worklist, which is used
2608 for performing quick dead code elimination of insertions we made
2609 that didn't turn out to be necessary. */
2610 static bitmap inserted_exprs;
2611
2612 /* Pool-allocated fake store expressions are placed onto this
2613 worklist, which, after performing dead code elimination, is walked
2614 to see which expressions need to be put into GC'able memory. */
2615 static VEC(gimple, heap) *need_creation;
2616
2617 /* The actual worker for create_component_ref_by_pieces. */
2618
2619 static tree
2620 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2621 unsigned int *operand, gimple_seq *stmts,
2622 gimple domstmt)
2623 {
2624 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2625 *operand);
2626 tree genop;
2627 ++*operand;
2628 switch (currop->opcode)
2629 {
2630 case CALL_EXPR:
2631 {
2632 tree folded, sc = NULL_TREE;
2633 unsigned int nargs = 0;
2634 tree fn, *args;
2635 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2636 fn = currop->op0;
2637 else
2638 {
2639 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2640 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2641 if (!fn)
2642 return NULL_TREE;
2643 }
2644 if (currop->op1)
2645 {
2646 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2647 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2648 if (!sc)
2649 return NULL_TREE;
2650 }
2651 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2652 ref->operands) - 1);
2653 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2654 {
2655 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2656 operand, stmts,
2657 domstmt);
2658 if (!args[nargs])
2659 {
2660 free (args);
2661 return NULL_TREE;
2662 }
2663 nargs++;
2664 }
2665 folded = build_call_array (currop->type,
2666 (TREE_CODE (fn) == FUNCTION_DECL
2667 ? build_fold_addr_expr (fn) : fn),
2668 nargs, args);
2669 free (args);
2670 if (sc)
2671 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2672 return folded;
2673 }
2674 break;
2675 case TARGET_MEM_REF:
2676 {
2677 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2678 *operand);
2679 pre_expr op0expr;
2680 tree genop0 = NULL_TREE;
2681 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2682 stmts, domstmt);
2683 if (!baseop)
2684 return NULL_TREE;
2685 if (currop->op0)
2686 {
2687 op0expr = get_or_alloc_expr_for (currop->op0);
2688 genop0 = find_or_generate_expression (block, op0expr,
2689 stmts, domstmt);
2690 if (!genop0)
2691 return NULL_TREE;
2692 }
2693 if (DECL_P (baseop))
2694 return build6 (TARGET_MEM_REF, currop->type,
2695 baseop, NULL_TREE,
2696 genop0, currop->op1, currop->op2,
2697 unshare_expr (nextop->op1));
2698 else
2699 return build6 (TARGET_MEM_REF, currop->type,
2700 NULL_TREE, baseop,
2701 genop0, currop->op1, currop->op2,
2702 unshare_expr (nextop->op1));
2703 }
2704 break;
2705 case ADDR_EXPR:
2706 if (currop->op0)
2707 {
2708 gcc_assert (is_gimple_min_invariant (currop->op0));
2709 return currop->op0;
2710 }
2711 /* Fallthrough. */
2712 case REALPART_EXPR:
2713 case IMAGPART_EXPR:
2714 case VIEW_CONVERT_EXPR:
2715 {
2716 tree folded;
2717 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2718 operand,
2719 stmts, domstmt);
2720 if (!genop0)
2721 return NULL_TREE;
2722 folded = fold_build1 (currop->opcode, currop->type,
2723 genop0);
2724 return folded;
2725 }
2726 break;
2727 case ALIGN_INDIRECT_REF:
2728 case MISALIGNED_INDIRECT_REF:
2729 case INDIRECT_REF:
2730 {
2731 tree folded;
2732 tree genop1 = create_component_ref_by_pieces_1 (block, ref,
2733 operand,
2734 stmts, domstmt);
2735 if (!genop1)
2736 return NULL_TREE;
2737 genop1 = fold_convert (build_pointer_type (currop->type),
2738 genop1);
2739
2740 if (currop->opcode == MISALIGNED_INDIRECT_REF)
2741 folded = fold_build2 (currop->opcode, currop->type,
2742 genop1, currop->op1);
2743 else
2744 folded = fold_build1 (currop->opcode, currop->type,
2745 genop1);
2746 return folded;
2747 }
2748 break;
2749 case BIT_FIELD_REF:
2750 {
2751 tree folded;
2752 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2753 stmts, domstmt);
2754 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2755 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2756 tree genop1;
2757 tree genop2;
2758
2759 if (!genop0)
2760 return NULL_TREE;
2761 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2762 if (!genop1)
2763 return NULL_TREE;
2764 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2765 if (!genop2)
2766 return NULL_TREE;
2767 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2768 genop2);
2769 return folded;
2770 }
2771
2772 /* For array ref vn_reference_op's, operand 1 of the array ref
2773 is op0 of the reference op and operand 3 of the array ref is
2774 op1. */
2775 case ARRAY_RANGE_REF:
2776 case ARRAY_REF:
2777 {
2778 tree genop0;
2779 tree genop1 = currop->op0;
2780 pre_expr op1expr;
2781 tree genop2 = currop->op1;
2782 pre_expr op2expr;
2783 tree genop3 = currop->op2;
2784 pre_expr op3expr;
2785 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2786 stmts, domstmt);
2787 if (!genop0)
2788 return NULL_TREE;
2789 op1expr = get_or_alloc_expr_for (genop1);
2790 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2791 if (!genop1)
2792 return NULL_TREE;
2793 if (genop2)
2794 {
2795 /* Drop zero minimum index. */
2796 if (tree_int_cst_equal (genop2, integer_zero_node))
2797 genop2 = NULL_TREE;
2798 else
2799 {
2800 op2expr = get_or_alloc_expr_for (genop2);
2801 genop2 = find_or_generate_expression (block, op2expr, stmts,
2802 domstmt);
2803 if (!genop2)
2804 return NULL_TREE;
2805 }
2806 }
2807 if (genop3)
2808 {
2809 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2810 /* We can't always put a size in units of the element alignment
2811 here as the element alignment may not be visible. See
2812 PR43783. Simply drop the element size for constant
2813 sizes. */
2814 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2815 genop3 = NULL_TREE;
2816 else
2817 {
2818 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2819 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2820 op3expr = get_or_alloc_expr_for (genop3);
2821 genop3 = find_or_generate_expression (block, op3expr, stmts,
2822 domstmt);
2823 if (!genop3)
2824 return NULL_TREE;
2825 }
2826 }
2827 return build4 (currop->opcode, currop->type, genop0, genop1,
2828 genop2, genop3);
2829 }
2830 case COMPONENT_REF:
2831 {
2832 tree op0;
2833 tree op1;
2834 tree genop2 = currop->op1;
2835 pre_expr op2expr;
2836 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2837 stmts, domstmt);
2838 if (!op0)
2839 return NULL_TREE;
2840 /* op1 should be a FIELD_DECL; those are represented by
2841 themselves. */
2842 op1 = currop->op0;
2843 if (genop2)
2844 {
2845 op2expr = get_or_alloc_expr_for (genop2);
2846 genop2 = find_or_generate_expression (block, op2expr, stmts,
2847 domstmt);
2848 if (!genop2)
2849 return NULL_TREE;
2850 }
2851
2852 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2853 genop2);
2854 }
2855 break;
2856 case SSA_NAME:
2857 {
2858 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2859 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2860 return genop;
2861 }
2862 case STRING_CST:
2863 case INTEGER_CST:
2864 case COMPLEX_CST:
2865 case VECTOR_CST:
2866 case REAL_CST:
2867 case CONSTRUCTOR:
2868 case VAR_DECL:
2869 case PARM_DECL:
2870 case CONST_DECL:
2871 case RESULT_DECL:
2872 case FUNCTION_DECL:
2873 return currop->op0;
2874
2875 default:
2876 gcc_unreachable ();
2877 }
2878 }
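
/* A sketch of the recursion (illustrative): for a reference standing
   for a.b[i_1], the operand vector is walked outside-in.  The
   ARRAY_REF case recurses via *OPERAND to rebuild the base a.b (the
   COMPONENT_REF case, which itself recurses down to the decl a),
   then calls find_or_generate_expression for the index i_1, so SSA
   operands are materialized as leaders while the aggregate access is
   rebuilt as one tree without intermediate temporaries.  */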
2879
2880 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2881 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up
2882 trying to rename aggregates into SSA form directly, which is a no-no.
2883
2884 Thus, this routine doesn't create temporaries; it just builds a
2885 single access expression for the array, calling
2886 find_or_generate_expression to build the innermost pieces.
2887
2888 This function is a subroutine of create_expression_by_pieces, and
2889 should not be called on its own unless you really know what you
2890 are doing. */
2891
2892 static tree
2893 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2894 gimple_seq *stmts, gimple domstmt)
2895 {
2896 unsigned int op = 0;
2897 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2898 }
2899
2900 /* Find a leader for an expression, or generate one using
2901 create_expression_by_pieces if it's ANTIC but
2902 complex.
2903 BLOCK is the basic_block we are looking for leaders in.
2904 EXPR is the expression to find a leader or generate for.
2905 STMTS is the statement list to put the inserted expressions on.
2906 Returns the SSA_NAME of the LHS of the generated expression or the
2907 leader.
2908 DOMSTMT if non-NULL is a statement that should be dominated by
2909 all uses in the generated expression. If DOMSTMT is non-NULL this
2910 routine can fail and return NULL_TREE. Otherwise it will assert
2911 on failure. */
2912
2913 static tree
2914 find_or_generate_expression (basic_block block, pre_expr expr,
2915 gimple_seq *stmts, gimple domstmt)
2916 {
2917 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2918 get_expr_value_id (expr), domstmt);
2919 tree genop = NULL;
2920 if (leader)
2921 {
2922 if (leader->kind == NAME)
2923 genop = PRE_EXPR_NAME (leader);
2924 else if (leader->kind == CONSTANT)
2925 genop = PRE_EXPR_CONSTANT (leader);
2926 }
2927
2928 /* If it's still NULL, it must be a complex expression, so generate
2929 it recursively. Not so for FRE though. */
2930 if (genop == NULL
2931 && !in_fre)
2932 {
2933 bitmap_set_t exprset;
2934 unsigned int lookfor = get_expr_value_id (expr);
2935 bool handled = false;
2936 bitmap_iterator bi;
2937 unsigned int i;
2938
2939 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2940 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2941 {
2942 pre_expr temp = expression_for_id (i);
2943 if (temp->kind != NAME)
2944 {
2945 handled = true;
2946 genop = create_expression_by_pieces (block, temp, stmts,
2947 domstmt,
2948 get_expr_type (expr));
2949 break;
2950 }
2951 }
2952 if (!handled && domstmt)
2953 return NULL_TREE;
2954
2955 gcc_assert (handled);
2956 }
2957 return genop;
2958 }
2959
2960 #define NECESSARY GF_PLF_1
2961
2962 /* Create an expression in pieces, so that we can handle very complex
2963 expressions that may be ANTIC, but not necessarily GIMPLE.
2964 BLOCK is the basic block the expression will be inserted into,
2965 EXPR is the expression to insert (in value form)
2966 STMTS is a statement list to append the necessary insertions into.
2967
2968 This function will die if we hit some value that shouldn't be
2969 ANTIC but is (IE there is no leader for it, or its components).
2970 This function may also generate expressions that are themselves
2971 partially or fully redundant. Those that are will be either made
2972 fully redundant during the next iteration of insert (for partially
2973 redundant ones), or eliminated by eliminate (for fully redundant
2974 ones).
2975
2976 If DOMSTMT is non-NULL then we make sure that all uses in the
2977 expressions dominate that statement. In this case the function
2978 can return NULL_TREE to signal failure. */
2979
2980 static tree
2981 create_expression_by_pieces (basic_block block, pre_expr expr,
2982 gimple_seq *stmts, gimple domstmt, tree type)
2983 {
2984 tree temp, name;
2985 tree folded;
2986 gimple_seq forced_stmts = NULL;
2987 unsigned int value_id;
2988 gimple_stmt_iterator gsi;
2989 tree exprtype = type ? type : get_expr_type (expr);
2990 pre_expr nameexpr;
2991 gimple newstmt;
2992
2993 switch (expr->kind)
2994 {
2995 /* We may hit the NAME/CONSTANT case if we have to convert types
2996 that value numbering saw through. */
2997 case NAME:
2998 folded = PRE_EXPR_NAME (expr);
2999 break;
3000 case CONSTANT:
3001 folded = PRE_EXPR_CONSTANT (expr);
3002 break;
3003 case REFERENCE:
3004 {
3005 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3006 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3007 }
3008 break;
3009 case NARY:
3010 {
3011 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3012 switch (nary->length)
3013 {
3014 case 2:
3015 {
3016 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3017 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3018 tree genop1 = find_or_generate_expression (block, op1,
3019 stmts, domstmt);
3020 tree genop2 = find_or_generate_expression (block, op2,
3021 stmts, domstmt);
3022 if (!genop1 || !genop2)
3023 return NULL_TREE;
3024 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3025 may be a constant with the wrong type. */
3026 if (nary->opcode == POINTER_PLUS_EXPR)
3027 {
3028 genop1 = fold_convert (nary->type, genop1);
3029 genop2 = fold_convert (sizetype, genop2);
3030 }
3031 else
3032 {
3033 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3034 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3035 }
3036
3037 folded = fold_build2 (nary->opcode, nary->type,
3038 genop1, genop2);
3039 }
3040 break;
3041 case 1:
3042 {
3043 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3044 tree genop1 = find_or_generate_expression (block, op1,
3045 stmts, domstmt);
3046 if (!genop1)
3047 return NULL_TREE;
3048 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3049
3050 folded = fold_build1 (nary->opcode, nary->type,
3051 genop1);
3052 }
3053 break;
3054 default:
3055 return NULL_TREE;
3056 }
3057 }
3058 break;
3059 default:
3060 return NULL_TREE;
3061 }
3062
3063 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3064 folded = fold_convert (exprtype, folded);
3065
3066 /* Force the generated expression to be a sequence of GIMPLE
3067 statements.
3068 We have to call unshare_expr because force_gimple_operand may
3069 modify the tree we pass to it. */
3070 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3071 false, NULL);
3072
3073 /* If forcing produced any intermediate expressions, add them to the
3074 value sets and chain them into the instruction stream. */
3075 if (forced_stmts)
3076 {
3077 gsi = gsi_start (forced_stmts);
3078 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3079 {
3080 gimple stmt = gsi_stmt (gsi);
3081 tree forcedname = gimple_get_lhs (stmt);
3082 pre_expr nameexpr;
3083
3084 if (TREE_CODE (forcedname) == SSA_NAME)
3085 {
3086 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3087 VN_INFO_GET (forcedname)->valnum = forcedname;
3088 VN_INFO (forcedname)->value_id = get_next_value_id ();
3089 nameexpr = get_or_alloc_expr_for_name (forcedname);
3090 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3091 if (!in_fre)
3092 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3093 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3094 }
3095 mark_symbols_for_renaming (stmt);
3096 }
3097 gimple_seq_add_seq (stmts, forced_stmts);
3098 }
3099
3100 /* Build and insert the assignment of the end result to the temporary
3101 that we will return. */
3102 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3103 {
3104 pretemp = create_tmp_reg (exprtype, "pretmp");
3105 get_var_ann (pretemp);
3106 }
3107
3108 temp = pretemp;
3109 add_referenced_var (temp);
3110
3111 newstmt = gimple_build_assign (temp, folded);
3112 name = make_ssa_name (temp, newstmt);
3113 gimple_assign_set_lhs (newstmt, name);
3114 gimple_set_plf (newstmt, NECESSARY, false);
3115
3116 gimple_seq_add_stmt (stmts, newstmt);
3117 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3118
3119 /* All the symbols in NEWSTMT should be put into SSA form. */
3120 mark_symbols_for_renaming (newstmt);
3121
3122 /* Add a value number to the temporary.
3123 The value may already exist in either NEW_SETS or AVAIL_OUT, because
3124 we are creating the expression by pieces, and this particular piece of
3125 the expression may already have been represented. There is no harm
3126 in replacing here. */
3127 VN_INFO_GET (name)->valnum = name;
3128 value_id = get_expr_value_id (expr);
3129 VN_INFO (name)->value_id = value_id;
3130 nameexpr = get_or_alloc_expr_for_name (name);
3131 add_to_value (value_id, nameexpr);
3132 if (!in_fre)
3133 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3134 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3135
3136 pre_stats.insertions++;
3137 if (dump_file && (dump_flags & TDF_DETAILS))
3138 {
3139 fprintf (dump_file, "Inserted ");
3140 print_gimple_stmt (dump_file, newstmt, 0, 0);
3141 fprintf (dump_file, " in predecessor %d\n", block->index);
3142 }
3143
3144 return name;
3145 }
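
/* For example (hypothetical SSA names, for illustration): asking this
   function to generate the NARY value a_1 + b_2 in BLOCK appends
   something like

     pretmp.4_7 = a_1 + b_2;

   to STMTS, where pretmp.4 is the shared "pretmp" temporary; the new
   name pretmp.4_7 is given the value id of the original expression so
   that NEW_SETS/AVAIL_OUT treat it as the same value from here on.  */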
3146
3147
3148 /* Returns true if we want to inhibit the insertion of PHI nodes
3149 for the given EXPR in basic block BB (a member of a loop).
3150 We want to do this when we fear that the induction variable we
3151 would create might inhibit vectorization. */
3152
3153 static bool
3154 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3155 {
3156 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3157 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3158 vn_reference_op_t op;
3159 unsigned i;
3160
3161 /* If we aren't going to vectorize we don't inhibit anything. */
3162 if (!flag_tree_vectorize)
3163 return false;
3164
3165 /* Otherwise we inhibit the insertion when the address of the
3166 memory reference is a simple induction variable. In other
3167 cases the vectorizer won't do anything anyway (either it's
3168 loop invariant or a complicated expression). */
3169 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
3170 {
3171 switch (op->opcode)
3172 {
3173 case ARRAY_REF:
3174 case ARRAY_RANGE_REF:
3175 if (TREE_CODE (op->op0) != SSA_NAME)
3176 break;
3177 /* Fallthru. */
3178 case SSA_NAME:
3179 {
3180 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3181 affine_iv iv;
3182 /* Default defs are loop invariant. */
3183 if (!defbb)
3184 break;
3185 /* Defined outside this loop, also loop invariant. */
3186 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3187 break;
3188 /* If it's a simple induction variable inhibit insertion,
3189 the vectorizer might be interested in this one. */
3190 if (simple_iv (bb->loop_father, bb->loop_father,
3191 op->op0, &iv, true))
3192 return true;
3193 /* No simple IV, vectorizer can't do anything, hence no
3194 reason to inhibit the transformation for this operand. */
3195 break;
3196 }
3197 default:
3198 break;
3199 }
3200 }
3201 return false;
3202 }
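
/* E.g. (illustrative): for a load a[i_1] in a loop whose index
   advances as i_2 = i_1 + 1, i_1 is a simple induction variable, so
   we return true: inserting a PHI to carry the loaded value around
   the backedge would create a scalar recurrence and could keep the
   vectorizer from handling the access.  */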
3203
3204 /* Insert the to-be-made-available values of expression EXPRNUM for each
3205 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3206 merge the result with a phi node, given the same value number as
3207 EXPRNUM. Return true if we have inserted new stuff. */
3208
3209 static bool
3210 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3211 pre_expr *avail)
3212 {
3213 pre_expr expr = expression_for_id (exprnum);
3214 pre_expr newphi;
3215 unsigned int val = get_expr_value_id (expr);
3216 edge pred;
3217 bool insertions = false;
3218 bool nophi = false;
3219 basic_block bprime;
3220 pre_expr eprime;
3221 edge_iterator ei;
3222 tree type = get_expr_type (expr);
3223 tree temp;
3224 gimple phi;
3225
3226 if (dump_file && (dump_flags & TDF_DETAILS))
3227 {
3228 fprintf (dump_file, "Found partial redundancy for expression ");
3229 print_pre_expr (dump_file, expr);
3230 fprintf (dump_file, " (%04d)\n", val);
3231 }
3232
3233 /* Make sure we aren't creating an induction variable. */
3234 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3235 {
3236 bool firstinsideloop = false;
3237 bool secondinsideloop = false;
3238 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3239 EDGE_PRED (block, 0)->src);
3240 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3241 EDGE_PRED (block, 1)->src);
3242 /* Induction variables only have one edge inside the loop. */
3243 if ((firstinsideloop ^ secondinsideloop)
3244 && (expr->kind != REFERENCE
3245 || inhibit_phi_insertion (block, expr)))
3246 {
3247 if (dump_file && (dump_flags & TDF_DETAILS))
3248 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3249 nophi = true;
3250 }
3251 }
3252
3253 /* Make the necessary insertions. */
3254 FOR_EACH_EDGE (pred, ei, block->preds)
3255 {
3256 gimple_seq stmts = NULL;
3257 tree builtexpr;
3258 bprime = pred->src;
3259 eprime = avail[bprime->index];
3260
3261 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3262 {
3263 builtexpr = create_expression_by_pieces (bprime,
3264 eprime,
3265 &stmts, NULL,
3266 type);
3267 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3268 gsi_insert_seq_on_edge (pred, stmts);
3269 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3270 insertions = true;
3271 }
3272 else if (eprime->kind == CONSTANT)
3273 {
3274 /* Constants may not have the right type; fold_convert
3275 should give us back a constant with the right type. */
3277 tree constant = PRE_EXPR_CONSTANT (eprime);
3278 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3279 {
3280 tree builtexpr = fold_convert (type, constant);
3281 if (!is_gimple_min_invariant (builtexpr))
3282 {
3283 tree forcedexpr = force_gimple_operand (builtexpr,
3284 &stmts, true,
3285 NULL);
3286 if (!is_gimple_min_invariant (forcedexpr))
3287 {
3288 if (forcedexpr != builtexpr)
3289 {
3290 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3291 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3292 }
3293 if (stmts)
3294 {
3295 gimple_stmt_iterator gsi;
3296 gsi = gsi_start (stmts);
3297 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3298 {
3299 gimple stmt = gsi_stmt (gsi);
3300 tree lhs = gimple_get_lhs (stmt);
3301 if (TREE_CODE (lhs) == SSA_NAME)
3302 bitmap_set_bit (inserted_exprs,
3303 SSA_NAME_VERSION (lhs));
3304 gimple_set_plf (stmt, NECESSARY, false);
3305 }
3306 gsi_insert_seq_on_edge (pred, stmts);
3307 }
3308 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3309 }
3310 }
3311 }
3312 }
3313 else if (eprime->kind == NAME)
3314 {
3315 /* We may have to do a conversion because our value
3316 numbering can look through types in certain cases, but
3317 our IL requires all operands of a phi node have the same
3318 type. */
3319 tree name = PRE_EXPR_NAME (eprime);
3320 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3321 {
3322 tree builtexpr;
3323 tree forcedexpr;
3324 builtexpr = fold_convert (type, name);
3325 forcedexpr = force_gimple_operand (builtexpr,
3326 &stmts, true,
3327 NULL);
3328
3329 if (forcedexpr != name)
3330 {
3331 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3332 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3333 }
3334
3335 if (stmts)
3336 {
3337 gimple_stmt_iterator gsi;
3338 gsi = gsi_start (stmts);
3339 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3340 {
3341 gimple stmt = gsi_stmt (gsi);
3342 tree lhs = gimple_get_lhs (stmt);
3343 if (TREE_CODE (lhs) == SSA_NAME)
3344 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3345 gimple_set_plf (stmt, NECESSARY, false);
3346 }
3347 gsi_insert_seq_on_edge (pred, stmts);
3348 }
3349 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3350 }
3351 }
3352 }
3353 /* If we didn't want a phi node, and we made insertions, we still have
3354 inserted new stuff, and thus return true. If we didn't want a phi node,
3355 and didn't make insertions, we haven't added anything new, so return
3356 false. */
3357 if (nophi && insertions)
3358 return true;
3359 else if (nophi && !insertions)
3360 return false;
3361
3362 /* Now build a phi for the new variable. */
3363 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3364 {
3365 prephitemp = create_tmp_var (type, "prephitmp");
3366 get_var_ann (prephitemp);
3367 }
3368
3369 temp = prephitemp;
3370 add_referenced_var (temp);
3371
3372 if (TREE_CODE (type) == COMPLEX_TYPE
3373 || TREE_CODE (type) == VECTOR_TYPE)
3374 DECL_GIMPLE_REG_P (temp) = 1;
3375 phi = create_phi_node (temp, block);
3376
3377 gimple_set_plf (phi, NECESSARY, false);
3378 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3379 VN_INFO (gimple_phi_result (phi))->value_id = val;
3380 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3381 FOR_EACH_EDGE (pred, ei, block->preds)
3382 {
3383 pre_expr ae = avail[pred->src->index];
3384 gcc_assert (get_expr_type (ae) == type
3385 || useless_type_conversion_p (type, get_expr_type (ae)));
3386 if (ae->kind == CONSTANT)
3387 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3388 else
3389 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3390 UNKNOWN_LOCATION);
3391 }
3392
3393 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3394 add_to_value (val, newphi);
3395
3396 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3397 this insertion, since we test for the existence of this value in PHI_GEN
3398 before proceeding with the partial redundancy checks in insert_aux.
3399
3400 The value may exist in AVAIL_OUT; in particular, it could be represented
3401 by the expression we are trying to eliminate, in which case we want the
3402 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3403 inserted there.
3404
3405 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3406 this block, because if it did, it would have existed in our dominator's
3407 AVAIL_OUT, and would have been skipped due to the full redundancy
3408 check. */
3409
3410 bitmap_insert_into_set (PHI_GEN (block), newphi);
3411 bitmap_value_replace_in_set (AVAIL_OUT (block),
3412 newphi);
3413 bitmap_insert_into_set (NEW_SETS (block),
3414 newphi);
3415
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3417 {
3418 fprintf (dump_file, "Created phi ");
3419 print_gimple_stmt (dump_file, phi, 0, 0);
3420 fprintf (dump_file, " in block %d\n", block->index);
3421 }
3422 pre_stats.phis++;
3423 return true;
3424 }
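
/* End-to-end example (illustrative): with predecessors B2 and B3 of
   BLOCK B4, where only B2 computes the value,

     B2: x_1 = a_2 + b_3;      B3: <nothing>
              \                /
           B4: y_4 = a_2 + b_3;

   this function inserts pretmp_5 = a_2 + b_3; on the B3->B4 edge and
   creates

     prephitmp_6 = PHI <x_1 (B2), pretmp_5 (B3)>

   in B4 with the same value id as a_2 + b_3, after which the
   computation of y_4 is fully redundant and elimination can replace
   it with prephitmp_6.  */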
3425
3426
3427
3428 /* Perform insertion of partially redundant values.
3429 For BLOCK, do the following:
3430 1. Propagate the NEW_SETS of the dominator into the current block.
3431 If the block has multiple predecessors,
3432 2a. Iterate over the ANTIC expressions for the block to see if
3433 any of them are partially redundant.
3434 2b. If so, insert them into the necessary predecessors to make
3435 the expression fully redundant.
3436 2c. Insert a new PHI merging the values of the predecessors.
3437 2d. Insert the new PHI, and the new expressions, into the
3438 NEW_SETS set.
3439 3. Recursively call ourselves on the dominator children of BLOCK.
3440
3441 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3442 do_regular_insertion and do_partial_partial_insertion. */
3445
3446 static bool
3447 do_regular_insertion (basic_block block, basic_block dom)
3448 {
3449 bool new_stuff = false;
3450 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3451 pre_expr expr;
3452 int i;
3453
3454 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3455 {
3456 if (expr->kind != NAME)
3457 {
3458 pre_expr *avail;
3459 unsigned int val;
3460 bool by_some = false;
3461 bool cant_insert = false;
3462 bool all_same = true;
3463 pre_expr first_s = NULL;
3464 edge pred;
3465 basic_block bprime;
3466 pre_expr eprime = NULL;
3467 edge_iterator ei;
3468 pre_expr edoubleprime = NULL;
3469 bool do_insertion = false;
3470
3471 val = get_expr_value_id (expr);
3472 if (bitmap_set_contains_value (PHI_GEN (block), val))
3473 continue;
3474 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3475 {
3476 if (dump_file && (dump_flags & TDF_DETAILS))
3477 fprintf (dump_file, "Found fully redundant value\n");
3478 continue;
3479 }
3480
3481 avail = XCNEWVEC (pre_expr, last_basic_block);
3482 FOR_EACH_EDGE (pred, ei, block->preds)
3483 {
3484 unsigned int vprime;
3485
3486 /* We should never run insertion for the exit block
3487 and so should never come across fake pred edges. */
3488 gcc_assert (!(pred->flags & EDGE_FAKE));
3489 bprime = pred->src;
3490 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3491 bprime, block);
3492
3493 /* eprime will generally only be NULL if the
3494 value of the expression, translated
3495 through the PHI for this predecessor, is
3496 undefined. If that is the case, we can't
3497 make the expression fully redundant,
3498 because its value is undefined along a
3499 predecessor path. We can thus break out
3500 early because it doesn't matter what the
3501 rest of the results are. */
3502 if (eprime == NULL)
3503 {
3504 cant_insert = true;
3505 break;
3506 }
3507
3508 eprime = fully_constant_expression (eprime);
3509 vprime = get_expr_value_id (eprime);
3510 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3511 vprime, NULL);
3512 if (edoubleprime == NULL)
3513 {
3514 avail[bprime->index] = eprime;
3515 all_same = false;
3516 }
3517 else
3518 {
3519 avail[bprime->index] = edoubleprime;
3520 by_some = true;
3521 /* We want to perform insertions to remove a redundancy on
3522 a path in the CFG we want to optimize for speed. */
3523 if (optimize_edge_for_speed_p (pred))
3524 do_insertion = true;
3525 if (first_s == NULL)
3526 first_s = edoubleprime;
3527 else if (!pre_expr_eq (first_s, edoubleprime))
3528 all_same = false;
3529 }
3530 }
3531 /* If we can insert it, it is not the same value
3532 already existing along every predecessor, and
3533 it is defined by some predecessor, then it is
3534 partially redundant. */
3535 if (!cant_insert && !all_same && by_some && do_insertion
3536 && dbg_cnt (treepre_insert))
3537 {
3538 if (insert_into_preds_of_block (block, get_expression_id (expr),
3539 avail))
3540 new_stuff = true;
3541 }
3542 /* If all edges produce the same value and that value is
3543 an invariant, then the PHI has the same value on all
3544 edges. Note this. */
3545 else if (!cant_insert && all_same && eprime
3546 && (edoubleprime->kind == CONSTANT
3547 || edoubleprime->kind == NAME)
3548 && !value_id_constant_p (val))
3549 {
3550 unsigned int j;
3551 bitmap_iterator bi;
3552 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3553 value_expressions, val);
3554
3555 unsigned int new_val = get_expr_value_id (edoubleprime);
3556 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3557 {
3558 pre_expr expr = expression_for_id (j);
3559
3560 if (expr->kind == NAME)
3561 {
3562 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3563 /* Just reset the value id and valnum so it is
3564 the same as the constant we have discovered. */
3565 if (edoubleprime->kind == CONSTANT)
3566 {
3567 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3568 pre_stats.constified++;
3569 }
3570 else
3571 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3572 info->value_id = new_val;
3573 }
3574 }
3575 }
3576 free (avail);
3577 }
3578 }
3579
3580 VEC_free (pre_expr, heap, exprs);
3581 return new_stuff;
3582 }
3583
3584
3585 /* Perform insertion for partially anticipatable expressions. There
3586 is only one case in which we will perform insertion for these: when
3587 the expression is partially anticipatable and fully available.
3588 In that case, we know that putting it earlier will enable us to
3589 remove the later computation. */
3590
3591
3592 static bool
3593 do_partial_partial_insertion (basic_block block, basic_block dom)
3594 {
3595 bool new_stuff = false;
3596 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3597 pre_expr expr;
3598 int i;
3599
3600 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
3601 {
3602 if (expr->kind != NAME)
3603 {
3604 pre_expr *avail;
3605 unsigned int val;
3606 bool by_all = true;
3607 bool cant_insert = false;
3608 edge pred;
3609 basic_block bprime;
3610 pre_expr eprime = NULL;
3611 edge_iterator ei;
3612
3613 val = get_expr_value_id (expr);
3614 if (bitmap_set_contains_value (PHI_GEN (block), val))
3615 continue;
3616 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3617 continue;
3618
3619 avail = XCNEWVEC (pre_expr, last_basic_block);
3620 FOR_EACH_EDGE (pred, ei, block->preds)
3621 {
3622 unsigned int vprime;
3623 pre_expr edoubleprime;
3624
3625 /* We should never run insertion for the exit block
3626 and so should never come across fake pred edges. */
3627 gcc_assert (!(pred->flags & EDGE_FAKE));
3628 bprime = pred->src;
3629 eprime = phi_translate (expr, ANTIC_IN (block),
3630 PA_IN (block),
3631 bprime, block);
3632
3633 /* eprime will generally only be NULL if the
3634 value of the expression, translated
3635 through the PHI for this predecessor, is
3636 undefined. If that is the case, we can't
3637 make the expression fully redundant,
3638 because its value is undefined along a
3639 predecessor path. We can thus break out
3640 early because it doesn't matter what the
3641 rest of the results are. */
3642 if (eprime == NULL)
3643 {
3644 cant_insert = true;
3645 break;
3646 }
3647
3648 eprime = fully_constant_expression (eprime);
3649 vprime = get_expr_value_id (eprime);
3650 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3651 vprime, NULL);
3652 if (edoubleprime == NULL)
3653 {
3654 by_all = false;
3655 break;
3656 }
3657 else
3658 avail[bprime->index] = edoubleprime;
3659
3660 }
3661
3662 /* If we can insert it and its value was available along every
3663 predecessor, the expression is partially anticipatable but
3664 fully available, so inserting it here lets the later
3665 computation be removed. */
3666 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3667 {
3668 pre_stats.pa_insert++;
3669 if (insert_into_preds_of_block (block, get_expression_id (expr),
3670 avail))
3671 new_stuff = true;
3672 }
3673 free (avail);
3674 }
3675 }
3676
3677 VEC_free (pre_expr, heap, exprs);
3678 return new_stuff;
3679 }
3680
3681 static bool
3682 insert_aux (basic_block block)
3683 {
3684 basic_block son;
3685 bool new_stuff = false;
3686
3687 if (block)
3688 {
3689 basic_block dom;
3690 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3691 if (dom)
3692 {
3693 unsigned i;
3694 bitmap_iterator bi;
3695 bitmap_set_t newset = NEW_SETS (dom);
3696 if (newset)
3697 {
3698 /* Note that we need to value_replace both NEW_SETS and
3699 AVAIL_OUT. In both sets, the value may currently be
3700 represented by some non-simple expression that we want to
3701 replace with this one. */
3702 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3703 {
3704 pre_expr expr = expression_for_id (i);
3705 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3706 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3707 }
3708 }
3709 if (!single_pred_p (block))
3710 {
3711 new_stuff |= do_regular_insertion (block, dom);
3712 if (do_partial_partial)
3713 new_stuff |= do_partial_partial_insertion (block, dom);
3714 }
3715 }
3716 }
3717 for (son = first_dom_son (CDI_DOMINATORS, block);
3718 son;
3719 son = next_dom_son (CDI_DOMINATORS, son))
3720 {
3721 new_stuff |= insert_aux (son);
3722 }
3723
3724 return new_stuff;
3725 }
3726
3727 /* Perform insertion of partially redundant values. */
3728
3729 static void
3730 insert (void)
3731 {
3732 bool new_stuff = true;
3733 basic_block bb;
3734 int num_iterations = 0;
3735
3736 FOR_ALL_BB (bb)
3737 NEW_SETS (bb) = bitmap_set_new ();
3738
3739 while (new_stuff)
3740 {
3741 num_iterations++;
3742 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3743 }
3744 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3745 }
3746
3747
3748 /* Add OP to EXP_GEN (BLOCK), unless OP is an undefined SSA name or we are only doing FRE. */
3749
3750 static void
3751 add_to_exp_gen (basic_block block, tree op)
3752 {
3753 if (!in_fre)
3754 {
3755 pre_expr result;
3756 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3757 return;
3758 result = get_or_alloc_expr_for_name (op);
3759 bitmap_value_insert_into_set (EXP_GEN (block), result);
3760 }
3761 }
3762
3763 /* Create value ids for PHI in BLOCK. */
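/* For instance (hypothetical SSA names): given
   x_3 = PHI <x_1(2), x_2(3)>, x_3 receives a value id and is added
   to PHI_GEN and AVAIL_OUT of BLOCK; for PRE the arguments x_1 and
   x_2 are also given value ids so that later phases such as phi
   translation can look them up. */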
3764
3765 static void
3766 make_values_for_phi (gimple phi, basic_block block)
3767 {
3768 tree result = gimple_phi_result (phi);
3769
3770 /* We have no need for virtual phis, as they don't represent
3771 actual computations. */
3772 if (is_gimple_reg (result))
3773 {
3774 pre_expr e = get_or_alloc_expr_for_name (result);
3775 add_to_value (get_expr_value_id (e), e);
3776 bitmap_insert_into_set (PHI_GEN (block), e);
3777 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3778 if (!in_fre)
3779 {
3780 unsigned i;
3781 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3782 {
3783 tree arg = gimple_phi_arg_def (phi, i);
3784 if (TREE_CODE (arg) == SSA_NAME)
3785 {
3786 e = get_or_alloc_expr_for_name (arg);
3787 add_to_value (get_expr_value_id (e), e);
3788 }
3789 }
3790 }
3791 }
3792 }
3793
3794 /* Compute the AVAIL set for all basic blocks.
3795
3796 This function performs value numbering of the statements in each basic
3797 block. The AVAIL sets are built from information we glean while doing
3798 this value numbering, since the AVAIL sets contain only one entry per
3799 value.
3800
3801 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3802 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
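/* A worked example on a hypothetical diamond CFG:

        B1
       /  \
     B2    B3
       \  /
        B4

   B1 is the immediate dominator of B2, B3 and B4, so AVAIL_IN[B4]
   equals AVAIL_OUT[B1]; a value computed only in B2 or only in B3
   is not available in B4, exactly as the equations above state. */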
3803
3804 static void
3805 compute_avail (void)
3806 {
3808 basic_block block, son;
3809 basic_block *worklist;
3810 size_t sp = 0;
3811 unsigned i;
3812
3813 /* We pretend that default definitions are defined in the entry block.
3814 This includes function arguments and the static chain decl. */
3815 for (i = 1; i < num_ssa_names; ++i)
3816 {
3817 tree name = ssa_name (i);
3818 pre_expr e;
3819 if (!name
3820 || !SSA_NAME_IS_DEFAULT_DEF (name)
3821 || has_zero_uses (name)
3822 || !is_gimple_reg (name))
3823 continue;
3824
3825 e = get_or_alloc_expr_for_name (name);
3826 add_to_value (get_expr_value_id (e), e);
3827 if (!in_fre)
3828 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3829 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3830 }
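/* Consequently a parameter's default definition (e.g. a
   hypothetical arg_1(D)) becomes available in every block, since
   the entry block dominates the whole function. */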
3831
3832 /* Allocate the worklist. */
3833 worklist = XNEWVEC (basic_block, n_basic_blocks);
3834
3835 /* Seed the algorithm by putting the dominator children of the entry
3836 block on the worklist. */
3837 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3838 son;
3839 son = next_dom_son (CDI_DOMINATORS, son))
3840 worklist[sp++] = son;
3841
3842 /* Loop until the worklist is empty. */
3843 while (sp)
3844 {
3845 gimple_stmt_iterator gsi;
3846 gimple stmt;
3847 basic_block dom;
3848 unsigned int stmt_uid = 1;
3849
3850 /* Pick a block from the worklist. */
3851 block = worklist[--sp];
3852
3853 /* Initially, the set of available values in BLOCK is that of
3854 its immediate dominator. */
3855 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3856 if (dom)
3857 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3858
3859 /* Generate values for PHI nodes. */
3860 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3861 make_values_for_phi (gsi_stmt (gsi), block);
3862
3863 BB_MAY_NOTRETURN (block) = 0;
3864
3865 /* Now compute value numbers and populate value sets with all
3866 the expressions computed in BLOCK. */
3867 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3868 {
3869 ssa_op_iter iter;
3870 tree op;
3871
3872 stmt = gsi_stmt (gsi);
3873 gimple_set_uid (stmt, stmt_uid++);
3874
3875 /* Cache whether the basic-block has any non-visible side-effect
3876 or control flow.
3877 If this isn't a call or it is the last stmt in the
3878 basic-block then the CFG represents things correctly. */
3879 if (is_gimple_call (stmt)
3880 && !stmt_ends_bb_p (stmt))
3881 {
3882 /* Non-looping const functions always return normally.
3883 Otherwise the call might not return, or might have
3884 side-effects that forbid hoisting possibly trapping
3885 expressions before it. */
3886 int flags = gimple_call_flags (stmt);
3887 if (!(flags & ECF_CONST)
3888 || (flags & ECF_LOOPING_CONST_OR_PURE))
3889 BB_MAY_NOTRETURN (block) = 1;
3890 }
3891
3892 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3893 {
3894 pre_expr e = get_or_alloc_expr_for_name (op);
3895
3896 add_to_value (get_expr_value_id (e), e);
3897 if (!in_fre)
3898 bitmap_insert_into_set (TMP_GEN (block), e);
3899 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3900 }
3901
3902 if (gimple_has_volatile_ops (stmt)
3903 || stmt_could_throw_p (stmt))
3904 continue;
3905
3906 switch (gimple_code (stmt))
3907 {
3908 case GIMPLE_RETURN:
3909 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3910 add_to_exp_gen (block, op);
3911 continue;
3912
3913 case GIMPLE_CALL:
3914 {
3915 vn_reference_t ref;
3916 unsigned int i;
3917 vn_reference_op_t vro;
3918 pre_expr result = NULL;
3919 VEC(vn_reference_op_s, heap) *ops = NULL;
3920
3921 if (!can_value_number_call (stmt))
3922 continue;
3923
3924 copy_reference_ops_from_call (stmt, &ops);
3925 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3926 gimple_expr_type (stmt),
3927 ops, &ref, false);
3928 VEC_free (vn_reference_op_s, heap, ops);
3929 if (!ref)
3930 continue;
3931
3932 for (i = 0; VEC_iterate (vn_reference_op_s,
3933 ref->operands, i,
3934 vro); i++)
3935 {
3936 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
3937 add_to_exp_gen (block, vro->op0);
3938 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3939 add_to_exp_gen (block, vro->op1);
3940 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3941 add_to_exp_gen (block, vro->op2);
3942 }
3943 result = (pre_expr) pool_alloc (pre_expr_pool);
3944 result->kind = REFERENCE;
3945 result->id = 0;
3946 PRE_EXPR_REFERENCE (result) = ref;
3947
3948 get_or_alloc_expression_id (result);
3949 add_to_value (get_expr_value_id (result), result);
3950 if (!in_fre)
3951 bitmap_value_insert_into_set (EXP_GEN (block), result);
3952 continue;
3953 }
3954
3955 case GIMPLE_ASSIGN:
3956 {
3957 pre_expr result = NULL;
3958 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3959 {
3960 case tcc_unary:
3961 case tcc_binary:
3962 case tcc_comparison:
3963 {
3964 vn_nary_op_t nary;
3965 unsigned int i;
3966
3967 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
3968 gimple_assign_rhs_code (stmt),
3969 gimple_expr_type (stmt),
3970 gimple_assign_rhs1 (stmt),
3971 gimple_assign_rhs2 (stmt),
3972 NULL_TREE, NULL_TREE, &nary);
3973
3974 if (!nary)
3975 continue;
3976
3977 for (i = 0; i < nary->length; i++)
3978 if (TREE_CODE (nary->op[i]) == SSA_NAME)
3979 add_to_exp_gen (block, nary->op[i]);
3980
3981 result = (pre_expr) pool_alloc (pre_expr_pool);
3982 result->kind = NARY;
3983 result->id = 0;
3984 PRE_EXPR_NARY (result) = nary;
3985 break;
3986 }
3987
3988 case tcc_declaration:
3989 case tcc_reference:
3990 {
3991 vn_reference_t ref;
3992 unsigned int i;
3993 vn_reference_op_t vro;
3994
3995 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3996 gimple_vuse (stmt),
3997 true, &ref);
3998 if (!ref)
3999 continue;
4000
4001 for (i = 0; VEC_iterate (vn_reference_op_s,
4002 ref->operands, i,
4003 vro); i++)
4004 {
4005 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4006 add_to_exp_gen (block, vro->op0);
4007 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4008 add_to_exp_gen (block, vro->op1);
4009 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4010 add_to_exp_gen (block, vro->op2);
4011 }
4012 result = (pre_expr) pool_alloc (pre_expr_pool);
4013 result->kind = REFERENCE;
4014 result->id = 0;
4015 PRE_EXPR_REFERENCE (result) = ref;
4016 break;
4017 }
4018
4019 default:
4020 /* For any other statement that we don't
4021 recognize, simply add all referenced
4022 SSA_NAMEs to EXP_GEN. */
4023 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4024 add_to_exp_gen (block, op);
4025 continue;
4026 }
4027
4028 get_or_alloc_expression_id (result);
4029 add_to_value (get_expr_value_id (result), result);
4030 if (!in_fre)
4031 bitmap_value_insert_into_set (EXP_GEN (block), result);
4032
4033 continue;
4034 }
4035 default:
4036 break;
4037 }
4038 }
4039
4040 /* Put the dominator children of BLOCK on the worklist of blocks
4041 to compute available sets for. */
4042 for (son = first_dom_son (CDI_DOMINATORS, block);
4043 son;
4044 son = next_dom_son (CDI_DOMINATORS, son))
4045 worklist[sp++] = son;
4046 }
4047
4048 free (worklist);
4049 }
4050
4051 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4052 than the available expressions for it. The insertion point is
4053 right before the first use in STMT. Returns the SSA_NAME that should
4054 be used for replacement. */
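/* For example (hypothetical): if SCCVN decided that the value of
   t_5 is better expressed as b_1 + 1, we rebuild b_1 + 1 via
   create_expression_by_pieces and emit the resulting statements
   right before STMT, returning the new SSA name to use instead. */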
4055
4056 static tree
4057 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4058 {
4059 basic_block bb = gimple_bb (stmt);
4060 gimple_stmt_iterator gsi;
4061 gimple_seq stmts = NULL;
4062 tree expr;
4063 pre_expr e;
4064
4065 /* First create a value expression from the expression we want
4066 to insert and associate it with the value handle for SSA_VN. */
4067 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4068 if (e == NULL)
4069 return NULL_TREE;
4070
4071 /* Then use create_expression_by_pieces to generate a valid
4072 expression to insert at this point of the IL stream. */
4073 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4074 if (expr == NULL_TREE)
4075 return NULL_TREE;
4076 gsi = gsi_for_stmt (stmt);
4077 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4078
4079 return expr;
4080 }
4081
4082 /* Eliminate fully redundant computations. */
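/* The classic fully redundant case (hypothetical GIMPLE):

     x_2 = a_1 + 1;
     ...
     y_3 = a_1 + 1;

   bitmap_find_leader sees that y_3's value already has the leader
   x_2 in AVAIL_OUT, so the second computation is replaced by a use
   of x_2. */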
4083
4084 static unsigned int
4085 eliminate (void)
4086 {
4087 VEC (gimple, heap) *to_remove = NULL;
4088 basic_block b;
4089 unsigned int todo = 0;
4090 gimple_stmt_iterator gsi;
4091 gimple stmt;
4092 unsigned i;
4093
4094 FOR_EACH_BB (b)
4095 {
4096 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4097 {
4098 stmt = gsi_stmt (gsi);
4099
4100 /* Look up the RHS of the expression; see if we have an
4101 available computation for it. If so, replace the RHS with
4102 the available computation. */
4103 if (gimple_has_lhs (stmt)
4104 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4105 && !gimple_assign_ssa_name_copy_p (stmt)
4106 && (!gimple_assign_single_p (stmt)
4107 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4108 && !gimple_has_volatile_ops (stmt)
4109 && !has_zero_uses (gimple_get_lhs (stmt)))
4110 {
4111 tree lhs = gimple_get_lhs (stmt);
4112 tree rhs = NULL_TREE;
4113 tree sprime = NULL;
4114 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4115 pre_expr sprimeexpr;
4116
4117 if (gimple_assign_single_p (stmt))
4118 rhs = gimple_assign_rhs1 (stmt);
4119
4120 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4121 get_expr_value_id (lhsexpr),
4122 NULL);
4123
4124 if (sprimeexpr)
4125 {
4126 if (sprimeexpr->kind == CONSTANT)
4127 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4128 else if (sprimeexpr->kind == NAME)
4129 sprime = PRE_EXPR_NAME (sprimeexpr);
4130 else
4131 gcc_unreachable ();
4132 }
4133
4134 /* If there is no existing leader but SCCVN knows this
4135 value is constant, use that constant. */
4136 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4137 {
4138 sprime = VN_INFO (lhs)->valnum;
4139 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4140 TREE_TYPE (sprime)))
4141 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4142
4143 if (dump_file && (dump_flags & TDF_DETAILS))
4144 {
4145 fprintf (dump_file, "Replaced ");
4146 print_gimple_expr (dump_file, stmt, 0, 0);
4147 fprintf (dump_file, " with ");
4148 print_generic_expr (dump_file, sprime, 0);
4149 fprintf (dump_file, " in ");
4150 print_gimple_stmt (dump_file, stmt, 0, 0);
4151 }
4152 pre_stats.eliminations++;
4153 propagate_tree_value_into_stmt (&gsi, sprime);
4154 stmt = gsi_stmt (gsi);
4155 update_stmt (stmt);
4156 continue;
4157 }
4158
4159 /* If there is no existing usable leader but SCCVN thinks
4160 it has an expression it wants to use as replacement,
4161 insert that. */
4162 if (!sprime || sprime == lhs)
4163 {
4164 tree val = VN_INFO (lhs)->valnum;
4165 if (val != VN_TOP
4166 && TREE_CODE (val) == SSA_NAME
4167 && VN_INFO (val)->needs_insertion
4168 && can_PRE_operation (vn_get_expr_for (val)))
4169 sprime = do_SCCVN_insertion (stmt, val);
4170 }
4171 if (sprime
4172 && sprime != lhs
4173 && (rhs == NULL_TREE
4174 || TREE_CODE (rhs) != SSA_NAME
4175 || may_propagate_copy (rhs, sprime)))
4176 {
4177 gcc_assert (sprime != rhs);
4178
4179 if (dump_file && (dump_flags & TDF_DETAILS))
4180 {
4181 fprintf (dump_file, "Replaced ");
4182 print_gimple_expr (dump_file, stmt, 0, 0);
4183 fprintf (dump_file, " with ");
4184 print_generic_expr (dump_file, sprime, 0);
4185 fprintf (dump_file, " in ");
4186 print_gimple_stmt (dump_file, stmt, 0, 0);
4187 }
4188
4189 if (TREE_CODE (sprime) == SSA_NAME)
4190 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4191 NECESSARY, true);
4192 /* We need to make sure the new and old types actually match,
4193 which may require adding a simple cast, which fold_convert
4194 will do for us. */
4195 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4196 && !useless_type_conversion_p (gimple_expr_type (stmt),
4197 TREE_TYPE (sprime)))
4198 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4199
4200 pre_stats.eliminations++;
4201 propagate_tree_value_into_stmt (&gsi, sprime);
4202 stmt = gsi_stmt (gsi);
4203 update_stmt (stmt);
4204
4205 /* If we removed EH side effects from the statement, clean
4206 its EH information. */
4207 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4208 {
4209 bitmap_set_bit (need_eh_cleanup,
4210 gimple_bb (stmt)->index);
4211 if (dump_file && (dump_flags & TDF_DETAILS))
4212 fprintf (dump_file, " Removed EH side effects.\n");
4213 }
4214 }
4215 }
4216 /* If the statement is a scalar store, see if the expression
4217 has the same value number as its rhs. If so, the store is
4218 dead. */
4219 else if (gimple_assign_single_p (stmt)
4220 && !is_gimple_reg (gimple_assign_lhs (stmt))
4221 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4222 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4223 {
4224 tree rhs = gimple_assign_rhs1 (stmt);
4225 tree val;
4226 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4227 gimple_vuse (stmt), true, NULL);
4228 if (TREE_CODE (rhs) == SSA_NAME)
4229 rhs = VN_INFO (rhs)->valnum;
4230 if (val
4231 && operand_equal_p (val, rhs, 0))
4232 {
4233 if (dump_file && (dump_flags & TDF_DETAILS))
4234 {
4235 fprintf (dump_file, "Deleted redundant store ");
4236 print_gimple_stmt (dump_file, stmt, 0, 0);
4237 }
4238
4239 /* Queue stmt for removal. */
4240 VEC_safe_push (gimple, heap, to_remove, stmt);
4241 }
4242 }
4243 /* Visit COND_EXPRs and fold the comparison with the
4244 available value-numbers. */
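/* E.g. (hypothetical): if VN proved that op0 and op1 both have the
   constant value 3, folding EQ_EXPR yields integer one and the
   condition is rewritten with gimple_cond_make_true. */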
4245 else if (gimple_code (stmt) == GIMPLE_COND)
4246 {
4247 tree op0 = gimple_cond_lhs (stmt);
4248 tree op1 = gimple_cond_rhs (stmt);
4249 tree result;
4250
4251 if (TREE_CODE (op0) == SSA_NAME)
4252 op0 = VN_INFO (op0)->valnum;
4253 if (TREE_CODE (op1) == SSA_NAME)
4254 op1 = VN_INFO (op1)->valnum;
4255 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4256 op0, op1);
4257 if (result && TREE_CODE (result) == INTEGER_CST)
4258 {
4259 if (integer_zerop (result))
4260 gimple_cond_make_false (stmt);
4261 else
4262 gimple_cond_make_true (stmt);
4263 update_stmt (stmt);
4264 todo = TODO_cleanup_cfg;
4265 }
4266 }
4267 /* Visit indirect calls and turn them into direct calls if
4268 possible. */
4269 if (gimple_code (stmt) == GIMPLE_CALL
4270 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4271 {
4272 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4273 if (TREE_CODE (fn) == ADDR_EXPR
4274 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4275 {
4276 if (dump_file && (dump_flags & TDF_DETAILS))
4277 {
4278 fprintf (dump_file, "Replacing call target with ");
4279 print_generic_expr (dump_file, fn, 0);
4280 fprintf (dump_file, " in ");
4281 print_gimple_stmt (dump_file, stmt, 0, 0);
4282 }
4283
4284 gimple_call_set_fn (stmt, fn);
4285 update_stmt (stmt);
4286 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4287 {
4288 bitmap_set_bit (need_eh_cleanup,
4289 gimple_bb (stmt)->index);
4290 if (dump_file && (dump_flags & TDF_DETAILS))
4291 fprintf (dump_file, " Removed EH side effects.\n");
4292 }
4293
4294 /* Changing an indirect call to a direct call may
4295 have exposed different semantics. This may
4296 require an SSA update. */
4297 todo |= TODO_update_ssa_only_virtuals;
4298 }
4299 }
4300 }
4301
4302 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4303 {
4304 gimple stmt, phi = gsi_stmt (gsi);
4305 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4306 pre_expr sprimeexpr, resexpr;
4307 gimple_stmt_iterator gsi2;
4308
4309 /* We want to perform redundant PHI elimination. Do so by
4310 replacing the PHI with a single copy if possible.
4311 Do not touch single-argument or virtual PHIs. */
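/* A sketch (hypothetical names): x_4 = PHI <x_2(3), x_2(4)> has the
   same value as x_2, so the PHI is removed and replaced by the copy
   x_4 = x_2, which is queued in to_remove for propagation and
   eventual removal below. */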
4312 if (gimple_phi_num_args (phi) == 1
4313 || !is_gimple_reg (res))
4314 {
4315 gsi_next (&gsi);
4316 continue;
4317 }
4318
4319 resexpr = get_or_alloc_expr_for_name (res);
4320 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4321 get_expr_value_id (resexpr), NULL);
4322 if (sprimeexpr)
4323 {
4324 if (sprimeexpr->kind == CONSTANT)
4325 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4326 else if (sprimeexpr->kind == NAME)
4327 sprime = PRE_EXPR_NAME (sprimeexpr);
4328 else
4329 gcc_unreachable ();
4330 }
4331 if (!sprimeexpr
4332 || sprime == res)
4333 {
4334 gsi_next (&gsi);
4335 continue;
4336 }
4337
4338 if (dump_file && (dump_flags & TDF_DETAILS))
4339 {
4340 fprintf (dump_file, "Replaced redundant PHI node defining ");
4341 print_generic_expr (dump_file, res, 0);
4342 fprintf (dump_file, " with ");
4343 print_generic_expr (dump_file, sprime, 0);
4344 fprintf (dump_file, "\n");
4345 }
4346
4347 remove_phi_node (&gsi, false);
4348
4349 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4350 && TREE_CODE (sprime) == SSA_NAME)
4351 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4352
4353 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4354 sprime = fold_convert (TREE_TYPE (res), sprime);
4355 stmt = gimple_build_assign (res, sprime);
4356 SSA_NAME_DEF_STMT (res) = stmt;
4357 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4358
4359 gsi2 = gsi_after_labels (b);
4360 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4361 /* Queue the copy for eventual removal. */
4362 VEC_safe_push (gimple, heap, to_remove, stmt);
4363 /* If we inserted this PHI node ourselves, it's not an elimination. */
4364 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4365 pre_stats.phis--;
4366 else
4367 pre_stats.eliminations++;
4368 }
4369 }
4370
4371 /* We cannot remove stmts during the BB walk, and especially cannot
4372 release SSA names there, as this confuses the VN machinery. The
4373 stmts ending up in to_remove are either stores or simple copies. */
4374 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4375 {
4376 tree lhs = gimple_assign_lhs (stmt);
4377 tree rhs = gimple_assign_rhs1 (stmt);
4378 use_operand_p use_p;
4379 gimple use_stmt;
4380
4381 /* If there is a single use only, propagate the equivalency
4382 instead of keeping the copy. */
4383 if (TREE_CODE (lhs) == SSA_NAME
4384 && TREE_CODE (rhs) == SSA_NAME
4385 && single_imm_use (lhs, &use_p, &use_stmt)
4386 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4387 {
4388 SET_USE (use_p, rhs);
4389 update_stmt (use_stmt);
4390 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4391 && TREE_CODE (rhs) == SSA_NAME)
4392 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4393 }
4394
4395 /* If this is a store or a now unused copy, remove it. */
4396 if (TREE_CODE (lhs) != SSA_NAME
4397 || has_zero_uses (lhs))
4398 {
4399 gsi = gsi_for_stmt (stmt);
4400 unlink_stmt_vdef (stmt);
4401 gsi_remove (&gsi, true);
4402 if (TREE_CODE (lhs) == SSA_NAME)
4403 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4404 release_defs (stmt);
4405 }
4406 }
4407 VEC_free (gimple, heap, to_remove);
4408
4409 return todo;
4410 }
4411
4412 /* Borrow a bit of tree-ssa-dce.c for the moment.
4413 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4414 doing it here may be a bit faster, and we may want critical edges kept split. */
4415
4416 /* If OP's defining statement has not already been determined to be necessary,
4417 mark that statement necessary. Return the stmt, if it is newly
4418 necessary. */
4419
4420 static inline gimple
4421 mark_operand_necessary (tree op)
4422 {
4423 gimple stmt;
4424
4425 gcc_assert (op);
4426
4427 if (TREE_CODE (op) != SSA_NAME)
4428 return NULL;
4429
4430 stmt = SSA_NAME_DEF_STMT (op);
4431 gcc_assert (stmt);
4432
4433 if (gimple_plf (stmt, NECESSARY)
4434 || gimple_nop_p (stmt))
4435 return NULL;
4436
4437 gimple_set_plf (stmt, NECESSARY, true);
4438 return stmt;
4439 }
4440
4441 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4442 to insert PHI nodes sometimes, and because value numbering of casts isn't
4443 perfect, we sometimes end up inserting dead code. This simple DCE-like
4444 pass removes any insertions we made that weren't actually used. */
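/* E.g. (hypothetical): if insertion created pretmp_6 = a_1 + b_2 but
   elimination never ended up using pretmp_6, its definition is never
   marked NECESSARY and the walk below deletes it. */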
4445
4446 static void
4447 remove_dead_inserted_code (void)
4448 {
4449 bitmap worklist;
4450 unsigned i;
4451 bitmap_iterator bi;
4452 gimple t;
4453
4454 worklist = BITMAP_ALLOC (NULL);
4455 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4456 {
4457 t = SSA_NAME_DEF_STMT (ssa_name (i));
4458 if (gimple_plf (t, NECESSARY))
4459 bitmap_set_bit (worklist, i);
4460 }
4461 while (!bitmap_empty_p (worklist))
4462 {
4463 i = bitmap_first_set_bit (worklist);
4464 bitmap_clear_bit (worklist, i);
4465 t = SSA_NAME_DEF_STMT (ssa_name (i));
4466
4467 /* PHI nodes are somewhat special in that each PHI alternative has
4468 data and control dependencies. All the statements feeding the
4469 PHI node's arguments are always necessary. */
4470 if (gimple_code (t) == GIMPLE_PHI)
4471 {
4472 unsigned k;
4473
4474 for (k = 0; k < gimple_phi_num_args (t); k++)
4475 {
4476 tree arg = PHI_ARG_DEF (t, k);
4477 if (TREE_CODE (arg) == SSA_NAME)
4478 {
4479 gimple n = mark_operand_necessary (arg);
4480 if (n)
4481 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4482 }
4483 }
4484 }
4485 else
4486 {
4487 /* Propagate through the operands. Examine all the USE, VUSE and
4488 VDEF operands in this statement. Mark all the statements
4489 which feed this statement's uses as necessary. */
4490 ssa_op_iter iter;
4491 tree use;
4492
4493 /* The operands of VDEF expressions are also needed as they
4494 represent potential definitions that may reach this
4495 statement (VDEF operands allow us to follow def-def
4496 links). */
4497
4498 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4499 {
4500 gimple n = mark_operand_necessary (use);
4501 if (n)
4502 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4503 }
4504 }
4505 }
4506
4507 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4508 {
4509 t = SSA_NAME_DEF_STMT (ssa_name (i));
4510 if (!gimple_plf (t, NECESSARY))
4511 {
4512 gimple_stmt_iterator gsi;
4513
4514 if (dump_file && (dump_flags & TDF_DETAILS))
4515 {
4516 fprintf (dump_file, "Removing unnecessary insertion:");
4517 print_gimple_stmt (dump_file, t, 0, 0);
4518 }
4519
4520 gsi = gsi_for_stmt (t);
4521 if (gimple_code (t) == GIMPLE_PHI)
4522 remove_phi_node (&gsi, true);
4523 else
4524 {
4525 gsi_remove (&gsi, true);
4526 release_defs (t);
4527 }
4528 }
4529 }
4530 BITMAP_FREE (worklist);
4531 }
4532
4533 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4534 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4535 the number of visited blocks. */
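/* On the hypothetical diamond CFG sketched above compute_avail,
   walking predecessor edges from the exit block yields the reverse
   post-order B1, B2, B3, B4 (B2 and B3 may swap depending on edge
   order). */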
4536
4537 static int
4538 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4539 {
4540 edge_iterator *stack;
4541 int sp;
4542 int post_order_num = 0;
4543 sbitmap visited;
4544
4545 if (include_entry_exit)
4546 post_order[post_order_num++] = EXIT_BLOCK;
4547
4548 /* Allocate stack for back-tracking up CFG. */
4549 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4550 sp = 0;
4551
4552 /* Allocate bitmap to track nodes that have been visited. */
4553 visited = sbitmap_alloc (last_basic_block);
4554
4555 /* None of the nodes in the CFG have been visited yet. */
4556 sbitmap_zero (visited);
4557
4558 /* Push the last edge on to the stack. */
4559 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4560
4561 while (sp)
4562 {
4563 edge_iterator ei;
4564 basic_block src;
4565 basic_block dest;
4566
4567 /* Look at the edge on the top of the stack. */
4568 ei = stack[sp - 1];
4569 src = ei_edge (ei)->src;
4570 dest = ei_edge (ei)->dest;
4571
4572 /* Check if the edge source has been visited yet. */
4573 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4574 {
4575 /* Mark that we have visited the source node. */
4576 SET_BIT (visited, src->index);
4577
4578 if (EDGE_COUNT (src->preds) > 0)
4579 /* Since the SRC node has been visited for the first
4580 time, check its predecessors. */
4581 stack[sp++] = ei_start (src->preds);
4582 else
4583 post_order[post_order_num++] = src->index;
4584 }
4585 else
4586 {
4587 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4588 post_order[post_order_num++] = dest->index;
4589
4590 if (!ei_one_before_end_p (ei))
4591 ei_next (&stack[sp - 1]);
4592 else
4593 sp--;
4594 }
4595 }
4596
4597 if (include_entry_exit)
4598 post_order[post_order_num++] = ENTRY_BLOCK;
4599
4600 free (stack);
4601 sbitmap_free (visited);
4602 return post_order_num;
4603 }
4604
4605
4606 /* Initialize data structures used by PRE. */
4607
4608 static void
4609 init_pre (bool do_fre)
4610 {
4611 basic_block bb;
4612
4613 next_expression_id = 1;
4614 expressions = NULL;
4615 VEC_safe_push (pre_expr, heap, expressions, NULL);
4616 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4617 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4618 get_max_value_id () + 1);
4619 name_to_id = NULL;
4620
4621 in_fre = do_fre;
4622
4623 inserted_exprs = BITMAP_ALLOC (NULL);
4624 need_creation = NULL;
4625 pretemp = NULL_TREE;
4626 storetemp = NULL_TREE;
4627 prephitemp = NULL_TREE;
4628
4629 connect_infinite_loops_to_exit ();
4630 memset (&pre_stats, 0, sizeof (pre_stats));
4631
4633 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4634 my_rev_post_order_compute (postorder, false);
4635
4636 FOR_ALL_BB (bb)
4637 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
4638
4639 calculate_dominance_info (CDI_POST_DOMINATORS);
4640 calculate_dominance_info (CDI_DOMINATORS);
4641
4642 bitmap_obstack_initialize (&grand_bitmap_obstack);
4643 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4644 expr_pred_trans_eq, free);
4645 expression_to_id = htab_create (num_ssa_names * 3,
4646 pre_expr_hash,
4647 pre_expr_eq, NULL);
4648 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4649 sizeof (struct bitmap_set), 30);
4650 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4651 sizeof (struct pre_expr_d), 30);
4652 FOR_ALL_BB (bb)
4653 {
4654 EXP_GEN (bb) = bitmap_set_new ();
4655 PHI_GEN (bb) = bitmap_set_new ();
4656 TMP_GEN (bb) = bitmap_set_new ();
4657 AVAIL_OUT (bb) = bitmap_set_new ();
4658 }
4659
4660 need_eh_cleanup = BITMAP_ALLOC (NULL);
4661 }
4662
4663
4664 /* Deallocate data structures used by PRE. */
4665
4666 static void
4667 fini_pre (bool do_fre)
4668 {
4669 basic_block bb;
4670
4671 free (postorder);
4672 VEC_free (bitmap_set_t, heap, value_expressions);
4673 BITMAP_FREE (inserted_exprs);
4674 VEC_free (gimple, heap, need_creation);
4675 bitmap_obstack_release (&grand_bitmap_obstack);
4676 free_alloc_pool (bitmap_set_pool);
4677 free_alloc_pool (pre_expr_pool);
4678 htab_delete (phi_translate_table);
4679 htab_delete (expression_to_id);
4680 VEC_free (unsigned, heap, name_to_id);
4681
4682 FOR_ALL_BB (bb)
4683 {
4684 free (bb->aux);
4685 bb->aux = NULL;
4686 }
4687
4688 free_dominance_info (CDI_POST_DOMINATORS);
4689
4690 if (!bitmap_empty_p (need_eh_cleanup))
4691 {
4692 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4693 cleanup_tree_cfg ();
4694 }
4695
4696 BITMAP_FREE (need_eh_cleanup);
4697
4698 if (!do_fre)
4699 loop_optimizer_finalize ();
4700 }
4701
4702 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4703 only wants to do full redundancy elimination. */
4704
4705 static unsigned int
4706 execute_pre (bool do_fre)
4707 {
4708 unsigned int todo = 0;
4709
4710 do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
4711
4712 /* This has to happen before SCCVN runs because
4713 loop_optimizer_init may create new phis, etc. */
4714 if (!do_fre)
4715 loop_optimizer_init (LOOPS_NORMAL);
4716
4717 if (!run_scc_vn (do_fre))
4718 {
4719 if (!do_fre)
4720 loop_optimizer_finalize ();
4721
4722 return 0;
4723 }
4724
4725 init_pre (do_fre);
4726 scev_initialize ();
4727
4728 /* Collect and value number expressions computed in each basic block. */
4729 compute_avail ();
4730
4731 if (dump_file && (dump_flags & TDF_DETAILS))
4732 {
4733 basic_block bb;
4734
4735 FOR_ALL_BB (bb)
4736 {
4737 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4738 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4739 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4740 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4741 }
4742 }
4743
4744 /* Insert can get quite slow on an incredibly large number of basic
4745 blocks due to some quadratic behavior. Until this behavior is
4746 fixed, don't run it when we have an incredibly large number of
4747 bb's. If we aren't going to run insert, there is no point in
4748 computing ANTIC, either, even though it's plenty fast. */
4749 if (!do_fre && n_basic_blocks < 4000)
4750 {
4751 compute_antic ();
4752 insert ();
4753 }
4754
4755 /* Make sure to remove fake edges before committing our inserts.
4756 This makes sure we don't end up with extra critical edges that
4757 we would need to split. */
4758 remove_fake_exit_edges ();
4759 gsi_commit_edge_inserts ();
4760
4761 /* Remove all the redundant expressions. */
4762 todo |= eliminate ();
4763
4764 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4765 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4766 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4767 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4768 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4769
4770 clear_expression_ids ();
4771 free_scc_vn ();
4772 if (!do_fre)
4773 remove_dead_inserted_code ();
4774
4775 scev_finalize ();
4776 fini_pre (do_fre);
4777
4778 return todo;
4779 }
4780
4781 /* Gate and execute functions for PRE. */
4782
4783 static unsigned int
4784 do_pre (void)
4785 {
4786 return execute_pre (false);
4787 }
4788
4789 static bool
4790 gate_pre (void)
4791 {
4792 return flag_tree_pre != 0;
4793 }
4794
4795 struct gimple_opt_pass pass_pre =
4796 {
4797 {
4798 GIMPLE_PASS,
4799 "pre", /* name */
4800 gate_pre, /* gate */
4801 do_pre, /* execute */
4802 NULL, /* sub */
4803 NULL, /* next */
4804 0, /* static_pass_number */
4805 TV_TREE_PRE, /* tv_id */
4806 PROP_no_crit_edges | PROP_cfg
4807 | PROP_ssa, /* properties_required */
4808 0, /* properties_provided */
4809 0, /* properties_destroyed */
4810 TODO_rebuild_alias, /* todo_flags_start */
4811 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4812 | TODO_verify_ssa /* todo_flags_finish */
4813 }
4814 };
4815
4816
4817 /* Gate and execute functions for FRE. */
4818
4819 static unsigned int
4820 execute_fre (void)
4821 {
4822 return execute_pre (true);
4823 }
4824
4825 static bool
4826 gate_fre (void)
4827 {
4828 return flag_tree_fre != 0;
4829 }
4830
4831 struct gimple_opt_pass pass_fre =
4832 {
4833 {
4834 GIMPLE_PASS,
4835 "fre", /* name */
4836 gate_fre, /* gate */
4837 execute_fre, /* execute */
4838 NULL, /* sub */
4839 NULL, /* next */
4840 0, /* static_pass_number */
4841 TV_TREE_FRE, /* tv_id */
4842 PROP_cfg | PROP_ssa, /* properties_required */
4843 0, /* properties_provided */
4844 0, /* properties_destroyed */
4845 0, /* todo_flags_start */
4846 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
4847 }
4848 };