/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
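
/* As an illustrative example (block numbers and SSA names here are
   made up, not taken from any real dump), consider the diamond

	<bb 2>: if (cond) goto <bb 3>; else goto <bb 4>;
	<bb 3>: x_1 = a_2 + b_3;
	<bb 4>: ;
	<bb 5>: y_4 = a_2 + b_3;

   a_2 + b_3 is available out of bb 3 but not out of bb 4, and it is
   anticipatable in bb 5 and thus, via phi translation, along both
   incoming edges.  It is therefore partially redundant, and insertion
   places a computation in bb 4 plus a PHI merging the two copies

	<bb 4>: pretmp_5 = a_2 + b_3;
	<bb 5>: prephitmp_6 = PHI <x_1(3), pretmp_5(4)>

   after which elimination replaces the now fully redundant
   computation of y_4 with prephitmp_6.  */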

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
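
/* As a small illustration (the concrete ids are hypothetical): if
   SCCVN determined that a_1 and b_2 have the same value, and the
   expression a_1 + b_2 occurs in the program, we might end up with

	value_id 5: { a_1, b_2 }
	value_id 6: { a_1 + b_2 }

   and a fake SSA_NAME is created for value 6 only if we ever need to
   materialize it as an operand without an existing name.  */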

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
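
/* Continuing the hypothetical numbering above, a set holding a_1 and
   a_1 + b_2 would consist of

	values:      { 5, 6 }
	expressions: { expr-id of a_1, expr-id of a_1 + b_2 }

   so both "is this value in the set" and "is this particular
   expression in the set" are single bitmap tests.  */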

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare the expressions P1 and P2 for equality.  */

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash the expression P1.  */

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}


/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using VEC_reserve upfront.  There is no
	 VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

/* Return the existing expression id for EXPR, or 0 if it has none.  */

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
	return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* True if we are running as FRE rather than full PRE.  */
static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
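
/* A typical walk over one of these sets, shown here purely for
   illustration, maps expression ids back to pre_exprs:

	unsigned int i;
	bitmap_iterator bi;
	FOR_EACH_EXPR_ID_IN_SET (EXP_GEN (block), i, bi)
	  {
	    pre_expr expr = expression_for_id (i);
	    ...
	  }
*/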


/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
					 gimple);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three-tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
				   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}


/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
						 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
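
/* For illustration, phi_translate below uses this cache in the usual
   lookup / compute / insert pattern, roughly (a simplified sketch;
   the real code also special-cases NAMEs and constants):

	pre_expr res = phi_trans_lookup (expr, pred);
	if (!res)
	  {
	    res = phi_translate_1 (expr, set1, set2, pred, phiblock);
	    if (res)
	      phi_trans_add (expr, res, pred);
	  }
	return res;
*/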


/* Add expression E to the expression set of value id V.  */

void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			     v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
	unsigned int id;
	id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
	if (id == 0)
	  {
	    id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
	    add_to_value (id, expr);
	  }
	return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

/* Insert EXPR with value id VAL into SET.  ALLOW_CONSTANTS should
   only be true when called via add_to_value.  */

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically-ordered array of bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
	 relatively small.  Thus, rather than making a vector of all
	 the expressions and sorting it by value-id, we walk the values
	 and check in the reverse mapping that tells us what expressions
	 have a given value, to filter those in our set.  As a result,
	 the expressions are inserted in value-id order, which means
	 topological order.

	 If this is somehow a significant loss for some cases, we can
	 choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
	{
	  if (bitmap_bit_p (&set->expressions, j))
	    VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
	}
    }

  return result;
}

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&dest->values, value_id))
	    bitmap_clear_bit (&dest->expressions, i);
	}
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
	bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if SET contains the expression EXPR.  */

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
			  const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
	{
	  bitmap_set_bit (&set->expressions, get_expression_id (expr));
	  return;
	}
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

#ifdef ENABLE_CHECKING
  gcc_assert (expr->id == get_or_alloc_expression_id (expr));
#endif

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to OUTFILE.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i], 0);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_op_t vro;
	unsigned int i;
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	fprintf (outfile, "{");
	for (i = 0;
	     VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
	     i++)
	  {
	    bool closebrace = false;
	    if (vro->opcode != SSA_NAME
		&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	      {
		fprintf (outfile, "%s", tree_code_name [vro->opcode]);
		if (vro->op0)
		  {
		    fprintf (outfile, "<");
		    closebrace = true;
		  }
	      }
	    if (vro->op0)
	      {
		print_generic_expr (outfile, vro->op0, 0);
		if (vro->op1)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op1, 0);
		  }
		if (vro->op2)
		  {
		    fprintf (outfile, ",");
		    print_generic_expr (outfile, vro->op2, 0);
		  }
	      }
	    if (closebrace)
	      fprintf (outfile, ">");
	    if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
	      fprintf (outfile, ",");
	  }
	fprintf (outfile, "}");
	if (ref->vuse)
	  {
	    fprintf (outfile, "@");
	    print_generic_expr (outfile, ref->vuse, 0);
	  }
      }
      break;
    }
}

void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (i);

	  if (!first)
	    fprintf (outfile, ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  fprintf (outfile, " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

/* Like print_bitmap_set but always prints to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}


/* Like print_value_expressions but always prints to stderr.  */
DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
	{
	  pre_expr expr = expression_for_id (i);
	  if (expr->kind == CONSTANT)
	    return PRE_EXPR_CONSTANT (expr);
	}
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
	 simplification that inserts values for them.  As none of them
	 have VOPs, they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
	{
	  pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
	  e->kind = NARY;
	  PRE_EXPR_NARY (e) = result;
	  result_id = lookup_expression_id (e);
	  if (result_id != 0)
	    {
	      pool_free (pre_expr_pool, e);
	      e = expression_for_id (result_id);
	      return e;
	    }
	  alloc_expression_id (e);
	  return e;
	}
    }
  return NULL;
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	switch (TREE_CODE_CLASS (nary->opcode))
	  {
	  case tcc_expression:
	    if (nary->opcode == TRUTH_NOT_EXPR)
	      goto do_unary;
	    if (nary->opcode != TRUTH_AND_EXPR
		&& nary->opcode != TRUTH_OR_EXPR
		&& nary->opcode != TRUTH_XOR_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_binary:
	  case tcc_comparison:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree naryop1 = nary->op[1];
	      tree result;
	      if (!is_gimple_min_invariant (naryop0))
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  tree const0 = get_constant_for_value_id (vrep0);
		  if (const0)
		    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
		}
	      if (!is_gimple_min_invariant (naryop1))
		{
		  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
		  unsigned int vrep1 = get_expr_value_id (rep1);
		  tree const1 = get_constant_for_value_id (vrep1);
		  if (const1)
		    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
		}
	      result = fold_binary (nary->opcode, nary->type,
				    naryop0, naryop1);
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      /* We might have simplified the expression to an
		 SSA_NAME for example from x_1 * 1.  But we cannot
		 insert a PHI for x_1 unconditionally as x_1 might
		 not be available readily.  */
	      return e;
	    }
	  case tcc_reference:
	    if (nary->opcode != REALPART_EXPR
		&& nary->opcode != IMAGPART_EXPR
		&& nary->opcode != VIEW_CONVERT_EXPR)
	      return e;
	    /* Fallthrough.  */
	  case tcc_unary:
	  do_unary:
	    {
	      /* We have to go from trees to pre exprs to value ids to
		 constants.  */
	      tree naryop0 = nary->op[0];
	      tree const0, result;
	      if (is_gimple_min_invariant (naryop0))
		const0 = naryop0;
	      else
		{
		  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
		  unsigned int vrep0 = get_expr_value_id (rep0);
		  const0 = get_constant_for_value_id (vrep0);
		}
	      result = NULL;
	      if (const0)
		{
		  tree type1 = TREE_TYPE (nary->op[0]);
		  const0 = fold_convert (type1, const0);
		  result = fold_unary (nary->opcode, nary->type, const0);
		}
	      if (result && is_gimple_min_invariant (result))
		return get_or_alloc_expr_for_constant (result);
	      return e;
	    }
	  default:
	    return e;
	  }
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (folded);
	return e;
      }
    default:
      return e;
    }
  return e;
}
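
/* For example (value numbers hypothetical): if VN recorded that x_1
   has the constant value 7, the NARY {PLUS_EXPR, x_1, 3} folds via
   get_constant_for_value_id to the constant 10 and is returned as a
   CONSTANT pre_expr.  By contrast, x_1 * 1 merely simplifies to x_1,
   which is not a min_invariant, so the NARY is returned unchanged.  */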

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
			      alias_set_type set, tree type, tree vuse,
			      basic_block phiblock,
			      basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
	vuse = gimple_vuse (phi);
	phi = SSA_NAME_DEF_STMT (vuse);
	if (gimple_bb (phi) != phiblock)
	  return vuse;
	if (gimple_code (phi) == GIMPLE_PHI)
	  {
	    e = find_edge (block, phiblock);
	    break;
	  }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
	{
	  bitmap visited = NULL;
	  /* Try to find a vuse that dominates this phi node by skipping
	     non-clobbering statements.  */
	  vuse = get_continuation_for_phi (phi, &ref, &visited);
	  if (visited)
	    BITMAP_FREE (visited);
	}
      else
	vuse = NULL_TREE;
      if (!vuse)
	{
	  /* If we didn't find any, the value ID can't stay the same,
	     but return the translated vuse.  */
	  *same_valid = false;
	  vuse = PHI_ARG_DEF (phi, e->dest_idx);
	}
      /* ??? We would like to return vuse here as this is the canonical
	 upmost vdef that this reference is associated with.  But during
	 insertion of the references into the hash tables we only ever
	 directly insert with their direct gimple_vuse, hence returning
	 something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
					value_id);
	FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
	  {
	    pre_expr rep = expression_for_id (i);
	    if (rep->kind == NAME)
	      return PRE_EXPR_NAME (rep);
	  }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
	       "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      get_var_ann (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}



static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
	       basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
		 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	struct vn_nary_op_s newnary;
	/* The NARY structure is only guaranteed to have been
	   allocated to the nary->length operands.  */
	memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
				 - sizeof (tree) * (4 - nary->length)));

	for (i = 0; i < newnary.length; i++)
	  {
	    if (TREE_CODE (newnary.op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		result = phi_translate (leader, set1, set2, pred, phiblock);
		if (result && result != leader)
		  {
		    tree name = get_representative_for (result);
		    if (!name)
		      return NULL;
		    newnary.op[i] = name;
		  }
		else if (!result)
		  return NULL;

		changed |= newnary.op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    tree result = vn_nary_op_lookup_pieces (newnary.length,
						    newnary.opcode,
						    newnary.type,
						    newnary.op[0],
						    newnary.op[1],
						    newnary.op[2],
						    newnary.op[3],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (result);

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = NARY;
	    expr->id = 0;
	    if (nary)
	      {
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = nary->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		new_val_id = get_next_value_id ();
		VEC_safe_grow_cleared (bitmap_set_t, heap,
				       value_expressions,
				       get_max_value_id () + 1);
		nary = vn_nary_op_insert_pieces (newnary.length,
						 newnary.opcode,
						 newnary.type,
						 newnary.op[0],
						 newnary.op[1],
						 newnary.op[2],
						 newnary.op[3],
						 result, new_val_id);
		PRE_EXPR_NARY (expr) = nary;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	VEC (vn_reference_op_s, heap) *operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	VEC (vn_reference_op_s, heap) *newoperands = NULL;
	bool changed = false, same_valid = true;
	unsigned int i, j;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0, j = 0;
	     VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree oldop0 = operand->op0;
	    tree oldop1 = operand->op1;
	    tree oldop2 = operand->op2;
	    tree op0 = oldop0;
	    tree op1 = oldop1;
	    tree op2 = oldop2;
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;

	    if (op0 && TREE_CODE (op0) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op0)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op0 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    changed |= op0 != oldop0;

	    if (op1 && TREE_CODE (op1) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op1)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op1 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op1 && !is_gimple_min_invariant (op1))
	      break;
	    changed |= op1 != oldop1;
	    if (op2 && TREE_CODE (op2) == SSA_NAME)
	      {
		unsigned int op_val_id = VN_INFO (op2)->value_id;
		leader = find_leader_in_sets (op_val_id, set1, set2);
		opresult = phi_translate (leader, set1, set2, pred, phiblock);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (opresult);
		    if (!name)
		      break;
		    op2 = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* We can't possibly insert these.  */
	    else if (op2 && !is_gimple_min_invariant (op2))
	      break;
	    changed |= op2 != oldop2;

	    if (!newoperands)
	      newoperands = VEC_copy (vn_reference_op_s, heap, operands);
	    /* We may have changed from an SSA_NAME to a constant.  */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
	      newop.opcode = TREE_CODE (op0);
	    newop.type = type;
	    newop.op0 = op0;
	    newop.op1 = op1;
	    newop.op2 = op2;
	    /* If it transforms a non-constant ARRAY_REF into a constant
	       one, adjust the constant offset.  */
	    if (newop.opcode == ARRAY_REF
		&& newop.off == -1
		&& TREE_CODE (op0) == INTEGER_CST
		&& TREE_CODE (op1) == INTEGER_CST
		&& TREE_CODE (op2) == INTEGER_CST)
	      {
		double_int off = tree_to_double_int (op0);
		off = double_int_add (off,
				      double_int_neg
				        (tree_to_double_int (op1)));
		off = double_int_mul (off, tree_to_double_int (op2));
		if (double_int_fits_in_shwi_p (off))
		  newop.off = off.low;
	      }
	    VEC_replace (vn_reference_op_s, newoperands, j, &newop);
	    /* If it transforms from an SSA_NAME to an address, fold with
	       a preceding indirect reference.  */
	    if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
		&& VEC_index (vn_reference_op_s,
			      newoperands, j - 1)->opcode == MEM_REF)
	      vn_reference_fold_indirect (&newoperands, &j);
	  }
	if (i != VEC_length (vn_reference_op_s, operands))
	  {
	    if (newoperands)
	      VEC_free (vn_reference_op_s, heap, newoperands);
	    return NULL;
	  }

	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (newoperands,
						    ref->set, ref->type,
						    vuse, phiblock, pred,
						    &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		VEC_free (vn_reference_op_s, heap, newoperands);
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;
	    pre_expr constant;
	    bool converted = false;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->type,
						      newoperands,
						      &newref, true);
	    if (result)
	      VEC_free (vn_reference_op_s, heap, newoperands);

	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      {
		result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
		converted = true;
	      }

	    if (result && is_gimple_min_invariant (result))
	      {
		gcc_assert (!newoperands);
		return get_or_alloc_expr_for_constant (result);
	      }

	    expr = (pre_expr) pool_alloc (pre_expr_pool);
	    expr->kind = REFERENCE;
	    expr->id = 0;

	    if (converted)
	      {
		vn_nary_op_t nary;
		tree nresult;

		gcc_assert (CONVERT_EXPR_P (result)
			    || TREE_CODE (result) == VIEW_CONVERT_EXPR);

		nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
						    TREE_TYPE (result),
						    TREE_OPERAND (result, 0),
						    NULL_TREE, NULL_TREE,
						    NULL_TREE,
						    &nary);
		if (nresult && is_gimple_min_invariant (nresult))
		  return get_or_alloc_expr_for_constant (nresult);

		expr->kind = NARY;
		if (nary)
		  {
		    PRE_EXPR_NARY (expr) = nary;
		    constant = fully_constant_expression (expr);
		    if (constant != expr)
		      return constant;

		    new_val_id = nary->value_id;
		    get_or_alloc_expression_id (expr);
		  }
		else
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id () + 1);
		    nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
						     TREE_TYPE (result),
						     TREE_OPERAND (result, 0),
						     NULL_TREE, NULL_TREE,
						     NULL_TREE, NULL_TREE,
						     new_val_id);
		    PRE_EXPR_NARY (expr) = nary;
		    constant = fully_constant_expression (expr);
		    if (constant != expr)
		      return constant;
		    get_or_alloc_expression_id (expr);
		  }
	      }
	    else if (newref)
	      {
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;

		new_val_id = newref->value_id;
		get_or_alloc_expression_id (expr);
	      }
	    else
	      {
		if (changed || !same_valid)
		  {
		    new_val_id = get_next_value_id ();
		    VEC_safe_grow_cleared (bitmap_set_t, heap,
					   value_expressions,
					   get_max_value_id () + 1);
		  }
		else
		  new_val_id = ref->value_id;
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->type,
						     newoperands,
						     result, new_val_id);
		newoperands = NULL;
		PRE_EXPR_REFERENCE (expr) = newref;
		constant = fully_constant_expression (expr);
		if (constant != expr)
		  return constant;
		get_or_alloc_expression_id (expr);
	      }
	    add_to_value (new_val_id, expr);
	  }
	VEC_free (vn_reference_op_s, heap, newoperands);
	return expr;
      }
      break;

    case NAME:
      {
	gimple phi = NULL;
	edge e;
	gimple def_stmt;
	tree name = PRE_EXPR_NAME (expr);

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_code (def_stmt) == GIMPLE_PHI
	    && gimple_bb (def_stmt) == phiblock)
	  phi = def_stmt;
	else
	  return expr;

	e = find_edge (pred, gimple_bb (phi));
	if (e)
	  {
	    tree def = PHI_ARG_DEF (phi, e->dest_idx);
	    pre_expr newexpr;

	    if (TREE_CODE (def) == SSA_NAME)
	      def = VN_INFO (def)->valnum;

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (def);

	    if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
	      return NULL;

	    newexpr = get_or_alloc_expr_for_name (def);
	    return newexpr;
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
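
/* An illustrative example of the NAME case above (names made up):
   given

	<bb 5>: x_1 = PHI <x_7(3), x_8(4)>

   translating the expression x_1 through bb 5 into predecessor bb 4
   yields x_8, or, if VN recorded x_8's value as a constant, that
   constant.  A NAME not defined by a PHI in PHIBLOCK translates to
   itself.  */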
1833
1834 /* Wrapper around phi_translate_1 providing caching functionality. */
1835
1836 static pre_expr
1837 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1838 basic_block pred, basic_block phiblock)
1839 {
1840 pre_expr phitrans;
1841
1842 if (!expr)
1843 return NULL;
1844
1845 /* Constants contain no values that need translation. */
1846 if (expr->kind == CONSTANT)
1847 return expr;
1848
1849 if (value_id_constant_p (get_expr_value_id (expr)))
1850 return expr;
1851
1852 if (expr->kind != NAME)
1853 {
1854 phitrans = phi_trans_lookup (expr, pred);
1855 if (phitrans)
1856 return phitrans;
1857 }
1858
1859 /* Translate. */
1860 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1861
1862 /* Don't add empty translations to the cache. Neither add
1863 translations of NAMEs as those are cheap to translate. */
1864 if (phitrans
1865 && expr->kind != NAME)
1866 phi_trans_add (expr, phitrans, pred);
1867
1868 return phitrans;
1869 }
1870
1871
1872 /* For each expression in SET, translate the values through phi nodes
1873 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1874 expressions in DEST. */
1875
1876 static void
1877 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1878 basic_block phiblock)
1879 {
1880 VEC (pre_expr, heap) *exprs;
1881 pre_expr expr;
1882 int i;
1883
1884 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1885 {
1886 bitmap_set_copy (dest, set);
1887 return;
1888 }
1889
1890 exprs = sorted_array_from_bitmap_set (set);
1891 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
1892 {
1893 pre_expr translated;
1894 translated = phi_translate (expr, set, NULL, pred, phiblock);
1895 if (!translated)
1896 continue;
1897
1898 /* We might end up with multiple expressions from SET being
1899 translated to the same value. In this case we do not want
1900 to retain the NARY or REFERENCE expression but prefer a NAME
1901 which would be the leader. */
1902 if (translated->kind == NAME)
1903 bitmap_value_replace_in_set (dest, translated);
1904 else
1905 bitmap_value_insert_into_set (dest, translated);
1906 }
1907 VEC_free (pre_expr, heap, exprs);
1908 }
1909
1910 /* Find the leader for a value (i.e., the name representing that
1911 value) in a given set, and return it. If STMT is non-NULL it
1912 makes sure the defining statement for the leader dominates it.
1913 Return NULL if no leader is found. */
1914
1915 static pre_expr
1916 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
1917 {
1918 if (value_id_constant_p (val))
1919 {
1920 unsigned int i;
1921 bitmap_iterator bi;
1922 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1923
1924 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1925 {
1926 pre_expr expr = expression_for_id (i);
1927 if (expr->kind == CONSTANT)
1928 return expr;
1929 }
1930 }
1931 if (bitmap_set_contains_value (set, val))
1932 {
1933 /* Rather than walk the entire bitmap of expressions, and see
1934 whether any of them has the value we are looking for, we look
1935 at the reverse mapping, which tells us the set of expressions
1936 that have a given value (IE value->expressions with that
1937 value) and see if any of those expressions are in our set.
1938 The number of expressions per value is usually significantly
1939 less than the number of expressions in the set. In fact, for
1940 large testcases, doing it this way is roughly 5-10x faster
1941 than walking the bitmap.
1942	      If this is somehow a significant loss for some cases, we can
1943 choose which set to walk based on which set is smaller. */
1944 unsigned int i;
1945 bitmap_iterator bi;
1946 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1947
1948 EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
1949 &set->expressions, 0, i, bi)
1950 {
1951 pre_expr val = expression_for_id (i);
1952 /* At the point where stmt is not null, there should always
1953 be an SSA_NAME first in the list of expressions. */
1954 if (stmt)
1955 {
1956 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
1957 if (gimple_code (def_stmt) != GIMPLE_PHI
1958 && gimple_bb (def_stmt) == gimple_bb (stmt)
1959 /* PRE insertions are at the end of the basic-block
1960 and have UID 0. */
1961 && (gimple_uid (def_stmt) == 0
1962 || gimple_uid (def_stmt) >= gimple_uid (stmt)))
1963 continue;
1964 }
1965 return val;
1966 }
1967 }
1968 return NULL;
1969 }
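
/* For instance (a hypothetical sketch): if value 7 is represented by
   the expressions {a_2, b_5 + 1} and SET contains a_2, walking the
   reverse value->expressions mapping returns the leader a_2 directly
   instead of scanning every expression in SET for value 7.  */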
1970
1971 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1972 BLOCK by seeing if it is not killed in the block. Note that we are
1973 only determining whether there is a store that kills it. Because
1974 of the order in which clean iterates over values, we are guaranteed
1975 that altered operands will have caused us to be eliminated from the
1976 ANTIC_IN set already. */
1977
1978 static bool
1979 value_dies_in_block_x (pre_expr expr, basic_block block)
1980 {
1981 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1982 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1983 gimple def;
1984 gimple_stmt_iterator gsi;
1985 unsigned id = get_expression_id (expr);
1986 bool res = false;
1987 ao_ref ref;
1988
1989 if (!vuse)
1990 return false;
1991
1992 /* Lookup a previously calculated result. */
1993 if (EXPR_DIES (block)
1994 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1995 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1996
1997 /* A memory expression {e, VUSE} dies in the block if there is a
1998	   statement that may clobber e.  If, walking the statements from the
1999	   top of the basic block, we find a statement that uses VUSE, there
2000	   can be no kill in between that use and the original statement that
2001	   loaded {e, VUSE}, so we can stop walking.  */
2002 ref.base = NULL_TREE;
2003 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
2004 {
2005 tree def_vuse, def_vdef;
2006 def = gsi_stmt (gsi);
2007 def_vuse = gimple_vuse (def);
2008 def_vdef = gimple_vdef (def);
2009
2010 /* Not a memory statement. */
2011 if (!def_vuse)
2012 continue;
2013
2014 /* Not a may-def. */
2015 if (!def_vdef)
2016 {
2017	          /* If we see a load with the same VUSE, we're done.  */
2018 if (def_vuse == vuse)
2019 break;
2020
2021 continue;
2022 }
2023
2024 /* Init ref only if we really need it. */
2025 if (ref.base == NULL_TREE
2026 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
2027 refx->operands))
2028 {
2029 res = true;
2030 break;
2031 }
2032 /* If the statement may clobber expr, it dies. */
2033 if (stmt_may_clobber_ref_p_1 (def, &ref))
2034 {
2035 res = true;
2036 break;
2037 }
2038 }
2039
2040 /* Remember the result. */
2041 if (!EXPR_DIES (block))
2042 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
2043 bitmap_set_bit (EXPR_DIES (block), id * 2);
2044 if (res)
2045 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
2046
2047 return res;
2048 }
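
/* An illustrative sketch (hypothetical statements): for the
   expression {*p_1, VUSE .MEM_4} and a BLOCK containing

     # .MEM_6 = VDEF <.MEM_4>
     *q_2 = 0;

   the store through q_2 may clobber *p_1, so the value dies in BLOCK;
   both bits id * 2 (computed) and id * 2 + 1 (dies) are cached.  */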
2049
2050
2051 #define union_contains_value(SET1, SET2, VAL) \
2052 (bitmap_set_contains_value ((SET1), (VAL)) \
2053 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
2054
2055 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.
2056 */
2057 static bool
2058 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
2059 vn_reference_op_t vro)
2060 {
2061 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
2062 {
2063 struct pre_expr_d temp;
2064 temp.kind = NAME;
2065 temp.id = 0;
2066 PRE_EXPR_NAME (&temp) = vro->op0;
2067 temp.id = lookup_expression_id (&temp);
2068 if (temp.id == 0)
2069 return false;
2070 if (!union_contains_value (set1, set2,
2071 get_expr_value_id (&temp)))
2072 return false;
2073 }
2074 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
2075 {
2076 struct pre_expr_d temp;
2077 temp.kind = NAME;
2078 temp.id = 0;
2079 PRE_EXPR_NAME (&temp) = vro->op1;
2080 temp.id = lookup_expression_id (&temp);
2081 if (temp.id == 0)
2082 return false;
2083 if (!union_contains_value (set1, set2,
2084 get_expr_value_id (&temp)))
2085 return false;
2086 }
2087
2088 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2089 {
2090 struct pre_expr_d temp;
2091 temp.kind = NAME;
2092 temp.id = 0;
2093 PRE_EXPR_NAME (&temp) = vro->op2;
2094 temp.id = lookup_expression_id (&temp);
2095 if (temp.id == 0)
2096 return false;
2097 if (!union_contains_value (set1, set2,
2098 get_expr_value_id (&temp)))
2099 return false;
2100 }
2101
2102 return true;
2103 }
2104
2105 /* Determine if the expression EXPR is valid in SET1 U SET2.
2106 ONLY SET2 CAN BE NULL.
2107 This means that we have a leader for each part of the expression
2108 (if it consists of values), or the expression is an SSA_NAME.
2109 For loads/calls, we also see if the vuse is killed in this block. */
2110
2111 static bool
2112 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2113 basic_block block)
2114 {
2115 switch (expr->kind)
2116 {
2117 case NAME:
2118 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2119 case NARY:
2120 {
2121 unsigned int i;
2122 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2123 for (i = 0; i < nary->length; i++)
2124 {
2125 if (TREE_CODE (nary->op[i]) == SSA_NAME)
2126 {
2127 struct pre_expr_d temp;
2128 temp.kind = NAME;
2129 temp.id = 0;
2130 PRE_EXPR_NAME (&temp) = nary->op[i];
2131 temp.id = lookup_expression_id (&temp);
2132 if (temp.id == 0)
2133 return false;
2134 if (!union_contains_value (set1, set2,
2135 get_expr_value_id (&temp)))
2136 return false;
2137 }
2138 }
2139 /* If the NARY may trap make sure the block does not contain
2140 a possible exit point.
2141 ??? This is overly conservative if we translate AVAIL_OUT
2142 as the available expression might be after the exit point. */
2143 if (BB_MAY_NOTRETURN (block)
2144 && vn_nary_may_trap (nary))
2145 return false;
2146 return true;
2147 }
2148 break;
2149 case REFERENCE:
2150 {
2151 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2152 vn_reference_op_t vro;
2153 unsigned int i;
2154
2155 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
2156 {
2157 if (!vro_valid_in_sets (set1, set2, vro))
2158 return false;
2159 }
2160 if (ref->vuse)
2161 {
2162 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2163 if (!gimple_nop_p (def_stmt)
2164 && gimple_bb (def_stmt) != block
2165 && !dominated_by_p (CDI_DOMINATORS,
2166 block, gimple_bb (def_stmt)))
2167 return false;
2168 }
2169 return !value_dies_in_block_x (expr, block);
2170 }
2171 default:
2172 gcc_unreachable ();
2173 }
2174 }
2175
2176 /* Clean the set of expressions that are no longer valid in SET1 or
2177 SET2. This means expressions that are made up of values we have no
2178 leaders for in SET1 or SET2. This version is used for partial
2179 anticipation, which means it is not valid in either ANTIC_IN or
2180 PA_IN. */
2181
2182 static void
2183 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2184 {
2185 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2186 pre_expr expr;
2187 int i;
2188
2189 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2190 {
2191 if (!valid_in_sets (set1, set2, expr, block))
2192 bitmap_remove_from_set (set1, expr);
2193 }
2194 VEC_free (pre_expr, heap, exprs);
2195 }
2196
2197 /* Clean the set of expressions that are no longer valid in SET. This
2198 means expressions that are made up of values we have no leaders for
2199 in SET. */
2200
2201 static void
2202 clean (bitmap_set_t set, basic_block block)
2203 {
2204 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2205 pre_expr expr;
2206 int i;
2207
2208 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2209 {
2210 if (!valid_in_sets (set, NULL, expr, block))
2211 bitmap_remove_from_set (set, expr);
2212 }
2213 VEC_free (pre_expr, heap, exprs);
2214 }
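
/* E.g. (a hypothetical sketch): if SET contains the NARY a_1 + b_2
   but the value of b_2 has no leader in SET, the expression cannot
   be generated here, so clean removes a_1 + b_2 from SET as well.  */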
2215
2216 static sbitmap has_abnormal_preds;
2217
2218 /* List of blocks that may have changed during ANTIC computation and
2219 thus need to be iterated over. */
2220
2221 static sbitmap changed_blocks;
2222
2223 /* Decide whether to defer a block for a later iteration, or PHI
2224 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2225 should defer the block, and true if we processed it. */
2226
2227 static bool
2228 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2229 basic_block block, basic_block phiblock)
2230 {
2231 if (!BB_VISITED (phiblock))
2232 {
2233 SET_BIT (changed_blocks, block->index);
2234 BB_VISITED (block) = 0;
2235 BB_DEFERRED (block) = 1;
2236 return false;
2237 }
2238 else
2239 phi_translate_set (dest, source, block, phiblock);
2240 return true;
2241 }
2242
2243 /* Compute the ANTIC set for BLOCK.
2244
2245 If succs(BLOCK) > 1 then
2246 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2247 else if succs(BLOCK) == 1 then
2248 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2249
2250 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2251 */
2252
2253 static bool
2254 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2255 {
2256 bool changed = false;
2257 bitmap_set_t S, old, ANTIC_OUT;
2258 bitmap_iterator bi;
2259 unsigned int bii;
2260 edge e;
2261 edge_iterator ei;
2262
2263 old = ANTIC_OUT = S = NULL;
2264 BB_VISITED (block) = 1;
2265
2266 /* If any edges from predecessors are abnormal, antic_in is empty,
2267 so do nothing. */
2268 if (block_has_abnormal_pred_edge)
2269 goto maybe_dump_sets;
2270
2271 old = ANTIC_IN (block);
2272 ANTIC_OUT = bitmap_set_new ();
2273
2274 /* If the block has no successors, ANTIC_OUT is empty. */
2275 if (EDGE_COUNT (block->succs) == 0)
2276 ;
2277 /* If we have one successor, we could have some phi nodes to
2278 translate through. */
2279 else if (single_succ_p (block))
2280 {
2281 basic_block succ_bb = single_succ (block);
2282
2283 /* We trade iterations of the dataflow equations for having to
2284 phi translate the maximal set, which is incredibly slow
2285 (since the maximal set often has 300+ members, even when you
2286 have a small number of blocks).
2287 Basically, we defer the computation of ANTIC for this block
2288	 until we have processed its successor, which will inevitably
2289 have a *much* smaller set of values to phi translate once
2290 clean has been run on it.
2291 The cost of doing this is that we technically perform more
2292	 iterations; however, they are lower-cost iterations.
2293
2294 Timings for PRE on tramp3d-v4:
2295 without maximal set fix: 11 seconds
2296 with maximal set fix/without deferring: 26 seconds
2297 with maximal set fix/with deferring: 11 seconds
2298 */
2299
2300 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2301 block, succ_bb))
2302 {
2303 changed = true;
2304 goto maybe_dump_sets;
2305 }
2306 }
2307 /* If we have multiple successors, we take the intersection of all of
2308 them. Note that in the case of loop exit phi nodes, we may have
2309 phis to translate through. */
2310 else
2311 {
2312 VEC(basic_block, heap) * worklist;
2313 size_t i;
2314 basic_block bprime, first = NULL;
2315
2316 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2317 FOR_EACH_EDGE (e, ei, block->succs)
2318 {
2319 if (!first
2320 && BB_VISITED (e->dest))
2321 first = e->dest;
2322 else if (BB_VISITED (e->dest))
2323 VEC_quick_push (basic_block, worklist, e->dest);
2324 }
2325
2326	      /* With multiple successors we must already have visited at least one.  */
2327 if (!first)
2328 {
2329 SET_BIT (changed_blocks, block->index);
2330 BB_VISITED (block) = 0;
2331 BB_DEFERRED (block) = 1;
2332 changed = true;
2333 VEC_free (basic_block, heap, worklist);
2334 goto maybe_dump_sets;
2335 }
2336
2337 if (!gimple_seq_empty_p (phi_nodes (first)))
2338 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2339 else
2340 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2341
2342 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2343 {
2344 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2345 {
2346 bitmap_set_t tmp = bitmap_set_new ();
2347 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2348 bitmap_set_and (ANTIC_OUT, tmp);
2349 bitmap_set_free (tmp);
2350 }
2351 else
2352 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2353 }
2354 VEC_free (basic_block, heap, worklist);
2355 }
2356
2357 /* Generate ANTIC_OUT - TMP_GEN. */
2358 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2359
2360 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2361 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2362 TMP_GEN (block));
2363
2364 /* Then union in the ANTIC_OUT - TMP_GEN values,
2365 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2366 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2367 bitmap_value_insert_into_set (ANTIC_IN (block),
2368 expression_for_id (bii));
2369
2370 clean (ANTIC_IN (block), block);
2371
2372 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2373 {
2374 changed = true;
2375 SET_BIT (changed_blocks, block->index);
2376 FOR_EACH_EDGE (e, ei, block->preds)
2377 SET_BIT (changed_blocks, e->src->index);
2378 }
2379 else
2380 RESET_BIT (changed_blocks, block->index);
2381
2382 maybe_dump_sets:
2383 if (dump_file && (dump_flags & TDF_DETAILS))
2384 {
2385 if (!BB_DEFERRED (block) || BB_VISITED (block))
2386 {
2387 if (ANTIC_OUT)
2388 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2389
2390 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2391 block->index);
2392
2393 if (S)
2394 print_bitmap_set (dump_file, S, "S", block->index);
2395 }
2396 else
2397 {
2398 fprintf (dump_file,
2399 "Block %d was deferred for a future iteration.\n",
2400 block->index);
2401 }
2402 }
2403 if (old)
2404 bitmap_set_free (old);
2405 if (S)
2406 bitmap_set_free (S);
2407 if (ANTIC_OUT)
2408 bitmap_set_free (ANTIC_OUT);
2409 return changed;
2410 }
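
/* A small worked example of the equations above (hypothetical CFG):
   if BLOCK has successors B1 and B2 with

     ANTIC_IN[B1] = {a + b}    ANTIC_IN[B2] = {a + b, c + d}

   then ANTIC_OUT[BLOCK] = {a + b}.  If BLOCK computes c + d itself
   (EXP_GEN) and defines neither a nor b (TMP_GEN empty), we get
   ANTIC_IN[BLOCK] = clean ({a + b, c + d}).  */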
2411
2412 /* Compute PARTIAL_ANTIC for BLOCK.
2413
2414 If succs(BLOCK) > 1 then
2415	   PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2416 in ANTIC_OUT for all succ(BLOCK)
2417 else if succs(BLOCK) == 1 then
2418 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2419
2420 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2421 - ANTIC_IN[BLOCK])
2422
2423 */
2424 static bool
2425 compute_partial_antic_aux (basic_block block,
2426 bool block_has_abnormal_pred_edge)
2427 {
2428 bool changed = false;
2429 bitmap_set_t old_PA_IN;
2430 bitmap_set_t PA_OUT;
2431 edge e;
2432 edge_iterator ei;
2433 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2434
2435 old_PA_IN = PA_OUT = NULL;
2436
2437	  /* If any edges from predecessors are abnormal, pa_in is empty,
2438 so do nothing. */
2439 if (block_has_abnormal_pred_edge)
2440 goto maybe_dump_sets;
2441
2442 /* If there are too many partially anticipatable values in the
2443	     block, phi_translate_set can take exponential time: stop
2444 before the translation starts. */
2445 if (max_pa
2446 && single_succ_p (block)
2447 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2448 goto maybe_dump_sets;
2449
2450 old_PA_IN = PA_IN (block);
2451 PA_OUT = bitmap_set_new ();
2452
2453	  /* If the block has no successors, PA_OUT is empty.  */
2454 if (EDGE_COUNT (block->succs) == 0)
2455 ;
2456 /* If we have one successor, we could have some phi nodes to
2457 translate through. Note that we can't phi translate across DFS
2458 back edges in partial antic, because it uses a union operation on
2459 the successors. For recurrences like IV's, we will end up
2460	     generating a new value in the set on each go-around (i + 3 (VH.1),
2461 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */
2462 else if (single_succ_p (block))
2463 {
2464 basic_block succ = single_succ (block);
2465 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2466 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2467 }
2468 /* If we have multiple successors, we take the union of all of
2469 them. */
2470 else
2471 {
2472 VEC(basic_block, heap) * worklist;
2473 size_t i;
2474 basic_block bprime;
2475
2476 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2477 FOR_EACH_EDGE (e, ei, block->succs)
2478 {
2479 if (e->flags & EDGE_DFS_BACK)
2480 continue;
2481 VEC_quick_push (basic_block, worklist, e->dest);
2482 }
2483 if (VEC_length (basic_block, worklist) > 0)
2484 {
2485 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2486 {
2487 unsigned int i;
2488 bitmap_iterator bi;
2489
2490 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2491 bitmap_value_insert_into_set (PA_OUT,
2492 expression_for_id (i));
2493 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2494 {
2495 bitmap_set_t pa_in = bitmap_set_new ();
2496 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2497 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2498 bitmap_value_insert_into_set (PA_OUT,
2499 expression_for_id (i));
2500 bitmap_set_free (pa_in);
2501 }
2502 else
2503 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2504 bitmap_value_insert_into_set (PA_OUT,
2505 expression_for_id (i));
2506 }
2507 }
2508 VEC_free (basic_block, heap, worklist);
2509 }
2510
2511 /* PA_IN starts with PA_OUT - TMP_GEN.
2512	     Then we subtract the values in ANTIC_IN.  */
2513 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2514
2515 /* For partial antic, we want to put back in the phi results, since
2516 we will properly avoid making them partially antic over backedges. */
2517 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2518 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2519
2520 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2521 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2522
2523 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2524
2525 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2526 {
2527 changed = true;
2528 SET_BIT (changed_blocks, block->index);
2529 FOR_EACH_EDGE (e, ei, block->preds)
2530 SET_BIT (changed_blocks, e->src->index);
2531 }
2532 else
2533 RESET_BIT (changed_blocks, block->index);
2534
2535 maybe_dump_sets:
2536 if (dump_file && (dump_flags & TDF_DETAILS))
2537 {
2538 if (PA_OUT)
2539 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2540
2541 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2542 }
2543 if (old_PA_IN)
2544 bitmap_set_free (old_PA_IN);
2545 if (PA_OUT)
2546 bitmap_set_free (PA_OUT);
2547 return changed;
2548 }
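
/* Continuing the sketch above: if only B1 anticipates a + b, the
   union rule still records a + b in PA_OUT[BLOCK] as partially
   anticipatable; subtracting ANTIC_IN[BLOCK] afterwards keeps the
   fully and partially anticipatable sets disjoint.  */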
2549
2550 /* Compute ANTIC and partial ANTIC sets. */
2551
2552 static void
2553 compute_antic (void)
2554 {
2555 bool changed = true;
2556 int num_iterations = 0;
2557 basic_block block;
2558 int i;
2559
2560 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2561 We pre-build the map of blocks with incoming abnormal edges here. */
2562 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2563 sbitmap_zero (has_abnormal_preds);
2564
2565 FOR_EACH_BB (block)
2566 {
2567 edge_iterator ei;
2568 edge e;
2569
2570 FOR_EACH_EDGE (e, ei, block->preds)
2571 {
2572 e->flags &= ~EDGE_DFS_BACK;
2573 if (e->flags & EDGE_ABNORMAL)
2574 {
2575 SET_BIT (has_abnormal_preds, block->index);
2576 break;
2577 }
2578 }
2579
2580 BB_VISITED (block) = 0;
2581 BB_DEFERRED (block) = 0;
2582
2583 /* While we are here, give empty ANTIC_IN sets to each block. */
2584 ANTIC_IN (block) = bitmap_set_new ();
2585 PA_IN (block) = bitmap_set_new ();
2586 }
2587
2588 /* At the exit block we anticipate nothing. */
2589 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2590 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2591 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2592
2593 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2594 sbitmap_ones (changed_blocks);
2595 while (changed)
2596 {
2597 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2599 num_iterations++;
2600 changed = false;
2601 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2602 {
2603 if (TEST_BIT (changed_blocks, postorder[i]))
2604 {
2605 basic_block block = BASIC_BLOCK (postorder[i]);
2606 changed |= compute_antic_aux (block,
2607 TEST_BIT (has_abnormal_preds,
2608 block->index));
2609 }
2610 }
2611 #ifdef ENABLE_CHECKING
2612 /* Theoretically possible, but *highly* unlikely. */
2613 gcc_assert (num_iterations < 500);
2614 #endif
2615 }
2616
2617 statistics_histogram_event (cfun, "compute_antic iterations",
2618 num_iterations);
2619
2620 if (do_partial_partial)
2621 {
2622 sbitmap_ones (changed_blocks);
2623 mark_dfs_back_edges ();
2624 num_iterations = 0;
2625 changed = true;
2626 while (changed)
2627 {
2628 if (dump_file && (dump_flags & TDF_DETAILS))
2629 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2630 num_iterations++;
2631 changed = false;
2632 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2633 {
2634 if (TEST_BIT (changed_blocks, postorder[i]))
2635 {
2636 basic_block block = BASIC_BLOCK (postorder[i]);
2637 changed
2638 |= compute_partial_antic_aux (block,
2639 TEST_BIT (has_abnormal_preds,
2640 block->index));
2641 }
2642 }
2643 #ifdef ENABLE_CHECKING
2644 /* Theoretically possible, but *highly* unlikely. */
2645 gcc_assert (num_iterations < 500);
2646 #endif
2647 }
2648 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2649 num_iterations);
2650 }
2651 sbitmap_free (has_abnormal_preds);
2652 sbitmap_free (changed_blocks);
2653 }
2654
2655 /* Return true if we can value number the call in STMT. This is true
2656 if we have a pure or constant call. */
2657
2658 static bool
2659 can_value_number_call (gimple stmt)
2660 {
2661 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2662 return true;
2663 return false;
2664 }
2665
2666 /* Return true if OP is a tree which we can perform PRE on.
2667 This may not match the operations we can value number, but in
2668 a perfect world would. */
2669
2670 static bool
2671 can_PRE_operation (tree op)
2672 {
2673 return UNARY_CLASS_P (op)
2674 || BINARY_CLASS_P (op)
2675 || COMPARISON_CLASS_P (op)
2676 || TREE_CODE (op) == MEM_REF
2677 || TREE_CODE (op) == COMPONENT_REF
2678 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2679 || TREE_CODE (op) == CALL_EXPR
2680 || TREE_CODE (op) == ARRAY_REF;
2681 }
2682
2683
2684 /* Inserted expressions are placed onto this worklist, which is used
2685 for performing quick dead code elimination of insertions we made
2686 that didn't turn out to be necessary. */
2687 static bitmap inserted_exprs;
2688
2689 /* Pool allocated fake store expressions are placed onto this
2690 worklist, which, after performing dead code elimination, is walked
2691	   to see which expressions need to be put into GC'able memory.  */
2692 static VEC(gimple, heap) *need_creation;
2693
2694 /* The actual worker for create_component_ref_by_pieces. */
2695
2696 static tree
2697 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2698 unsigned int *operand, gimple_seq *stmts,
2699 gimple domstmt)
2700 {
2701 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2702 *operand);
2703 tree genop;
2704 ++*operand;
2705 switch (currop->opcode)
2706 {
2707 case CALL_EXPR:
2708 {
2709 tree folded, sc = NULL_TREE;
2710 unsigned int nargs = 0;
2711 tree fn, *args;
2712 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2713 fn = currop->op0;
2714 else
2715 {
2716 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2717 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2718 if (!fn)
2719 return NULL_TREE;
2720 }
2721 if (currop->op1)
2722 {
2723 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2724 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2725 if (!sc)
2726 return NULL_TREE;
2727 }
2728 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2729 ref->operands) - 1);
2730 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2731 {
2732 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2733 operand, stmts,
2734 domstmt);
2735 if (!args[nargs])
2736 {
2737 free (args);
2738 return NULL_TREE;
2739 }
2740 nargs++;
2741 }
2742 folded = build_call_array (currop->type,
2743 (TREE_CODE (fn) == FUNCTION_DECL
2744 ? build_fold_addr_expr (fn) : fn),
2745 nargs, args);
2746 free (args);
2747 if (sc)
2748 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2749 return folded;
2750 }
2751 break;
2752 case MEM_REF:
2753 {
2754 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2755 stmts, domstmt);
2756 tree offset = currop->op0;
2757 if (!baseop)
2758 return NULL_TREE;
2759 if (TREE_CODE (baseop) == ADDR_EXPR
2760 && handled_component_p (TREE_OPERAND (baseop, 0)))
2761 {
2762 HOST_WIDE_INT off;
2763 tree base;
2764 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2765 &off);
2766 gcc_assert (base);
2767 offset = int_const_binop (PLUS_EXPR, offset,
2768 build_int_cst (TREE_TYPE (offset),
2769 off), 0);
2770 baseop = build_fold_addr_expr (base);
2771 }
2772 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2773 }
2774 break;
2775 case TARGET_MEM_REF:
2776 {
2777 pre_expr op0expr, op1expr;
2778 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2779 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2780 ++*operand);
2781 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2782 stmts, domstmt);
2783 if (!baseop)
2784 return NULL_TREE;
2785 if (currop->op0)
2786 {
2787 op0expr = get_or_alloc_expr_for (currop->op0);
2788 genop0 = find_or_generate_expression (block, op0expr,
2789 stmts, domstmt);
2790 if (!genop0)
2791 return NULL_TREE;
2792 }
2793 if (nextop->op0)
2794 {
2795 op1expr = get_or_alloc_expr_for (nextop->op0);
2796 genop1 = find_or_generate_expression (block, op1expr,
2797 stmts, domstmt);
2798 if (!genop1)
2799 return NULL_TREE;
2800 }
2801 return build5 (TARGET_MEM_REF, currop->type,
2802 baseop, currop->op2, genop0, currop->op1, genop1);
2803 }
2804 break;
2805 case ADDR_EXPR:
2806 if (currop->op0)
2807 {
2808 gcc_assert (is_gimple_min_invariant (currop->op0));
2809 return currop->op0;
2810 }
2811 /* Fallthrough. */
2812 case REALPART_EXPR:
2813 case IMAGPART_EXPR:
2814 case VIEW_CONVERT_EXPR:
2815 {
2816 tree folded;
2817 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2818 operand,
2819 stmts, domstmt);
2820 if (!genop0)
2821 return NULL_TREE;
2822 folded = fold_build1 (currop->opcode, currop->type,
2823 genop0);
2824 return folded;
2825 }
2826 break;
2827 case BIT_FIELD_REF:
2828 {
2829 tree folded;
2830 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2831 stmts, domstmt);
2832 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2833 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2834 tree genop1;
2835 tree genop2;
2836
2837 if (!genop0)
2838 return NULL_TREE;
2839 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2840 if (!genop1)
2841 return NULL_TREE;
2842 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2843 if (!genop2)
2844 return NULL_TREE;
2845 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2846 genop2);
2847 return folded;
2848 }
2849
2850	      /* For array ref vn_reference_op's, operand 1 of the array ref
2851	         is op0 of the reference op, operand 2 is op1 and operand 3
2852	         is op2.  */
2853 case ARRAY_RANGE_REF:
2854 case ARRAY_REF:
2855 {
2856 tree genop0;
2857 tree genop1 = currop->op0;
2858 pre_expr op1expr;
2859 tree genop2 = currop->op1;
2860 pre_expr op2expr;
2861 tree genop3 = currop->op2;
2862 pre_expr op3expr;
2863 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2864 stmts, domstmt);
2865 if (!genop0)
2866 return NULL_TREE;
2867 op1expr = get_or_alloc_expr_for (genop1);
2868 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2869 if (!genop1)
2870 return NULL_TREE;
2871 if (genop2)
2872 {
2873 /* Drop zero minimum index. */
2874 if (tree_int_cst_equal (genop2, integer_zero_node))
2875 genop2 = NULL_TREE;
2876 else
2877 {
2878 op2expr = get_or_alloc_expr_for (genop2);
2879 genop2 = find_or_generate_expression (block, op2expr, stmts,
2880 domstmt);
2881 if (!genop2)
2882 return NULL_TREE;
2883 }
2884 }
2885 if (genop3)
2886 {
2887 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2888 /* We can't always put a size in units of the element alignment
2889	             here as the element alignment may not be visible.  See
2890 PR43783. Simply drop the element size for constant
2891 sizes. */
2892 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2893 genop3 = NULL_TREE;
2894 else
2895 {
2896 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2897 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2898 op3expr = get_or_alloc_expr_for (genop3);
2899 genop3 = find_or_generate_expression (block, op3expr, stmts,
2900 domstmt);
2901 if (!genop3)
2902 return NULL_TREE;
2903 }
2904 }
2905 return build4 (currop->opcode, currop->type, genop0, genop1,
2906 genop2, genop3);
2907 }
2908 case COMPONENT_REF:
2909 {
2910 tree op0;
2911 tree op1;
2912 tree genop2 = currop->op1;
2913 pre_expr op2expr;
2914 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2915 stmts, domstmt);
2916 if (!op0)
2917 return NULL_TREE;
2918 /* op1 should be a FIELD_DECL, which are represented by
2919 themselves. */
2920 op1 = currop->op0;
2921 if (genop2)
2922 {
2923 op2expr = get_or_alloc_expr_for (genop2);
2924 genop2 = find_or_generate_expression (block, op2expr, stmts,
2925 domstmt);
2926 if (!genop2)
2927 return NULL_TREE;
2928 }
2929
2930 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2931 genop2);
2932 }
2933 break;
2934 case SSA_NAME:
2935 {
2936 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2937 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2938 return genop;
2939 }
2940 case STRING_CST:
2941 case INTEGER_CST:
2942 case COMPLEX_CST:
2943 case VECTOR_CST:
2944 case REAL_CST:
2945 case CONSTRUCTOR:
2946 case VAR_DECL:
2947 case PARM_DECL:
2948 case CONST_DECL:
2949 case RESULT_DECL:
2950 case FUNCTION_DECL:
2951 return currop->op0;
2952
2953 default:
2954 gcc_unreachable ();
2955 }
2956 }
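
/* A hypothetical sketch of the recursion above: the reference
   a.f[i_2] is recorded as the operand list

     {ARRAY_REF <i_2>, COMPONENT_REF <f>, VAR_DECL <a>}

   and is rebuilt from the innermost operand outwards: VAR_DECL gives
   a, COMPONENT_REF wraps it into a.f, and ARRAY_REF finds or
   generates a leader for i_2 before building a.f[i_2].  */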
2957
2958 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2959	   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2960	   trying to rename aggregates into SSA form directly, which is a no-no.
2961
2962 Thus, this routine doesn't create temporaries, it just builds a
2963 single access expression for the array, calling
2964 find_or_generate_expression to build the innermost pieces.
2965
2966 This function is a subroutine of create_expression_by_pieces, and
2967	   should not be called on its own unless you really know what you
2968 are doing. */
2969
2970 static tree
2971 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2972 gimple_seq *stmts, gimple domstmt)
2973 {
2974 unsigned int op = 0;
2975 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2976 }
2977
2978 /* Find a leader for an expression, or generate one using
2979 create_expression_by_pieces if it's ANTIC but
2980 complex.
2981 BLOCK is the basic_block we are looking for leaders in.
2982 EXPR is the expression to find a leader or generate for.
2983 STMTS is the statement list to put the inserted expressions on.
2984 Returns the SSA_NAME of the LHS of the generated expression or the
2985 leader.
2986 DOMSTMT if non-NULL is a statement that should be dominated by
2987 all uses in the generated expression. If DOMSTMT is non-NULL this
2988 routine can fail and return NULL_TREE. Otherwise it will assert
2989 on failure. */
2990
2991 static tree
2992 find_or_generate_expression (basic_block block, pre_expr expr,
2993 gimple_seq *stmts, gimple domstmt)
2994 {
2995 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2996 get_expr_value_id (expr), domstmt);
2997 tree genop = NULL;
2998 if (leader)
2999 {
3000 if (leader->kind == NAME)
3001 genop = PRE_EXPR_NAME (leader);
3002 else if (leader->kind == CONSTANT)
3003 genop = PRE_EXPR_CONSTANT (leader);
3004 }
3005
3006 /* If it's still NULL, it must be a complex expression, so generate
3007 it recursively. Not so if inserting expressions for values generated
3008 by SCCVN. */
3009 if (genop == NULL
3010 && !domstmt)
3011 {
3012 bitmap_set_t exprset;
3013 unsigned int lookfor = get_expr_value_id (expr);
3014 bool handled = false;
3015 bitmap_iterator bi;
3016 unsigned int i;
3017
3018 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
3019 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
3020 {
3021 pre_expr temp = expression_for_id (i);
3022 if (temp->kind != NAME)
3023 {
3024 handled = true;
3025 genop = create_expression_by_pieces (block, temp, stmts,
3026 domstmt,
3027 get_expr_type (expr));
3028 break;
3029 }
3030 }
3031 if (!handled && domstmt)
3032 return NULL_TREE;
3033
3034 gcc_assert (handled);
3035 }
3036 return genop;
3037 }
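
/* E.g. (sketch): looking up the value of a_1 + b_2 in AVAIL_OUT may
   return an existing leader such as t_9; if only the NARY form of
   the value is known and DOMSTMT is NULL, we instead fall through to
   create_expression_by_pieces to emit the computation on STMTS.  */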
3038
3039 #define NECESSARY GF_PLF_1
3040
3041 /* Create an expression in pieces, so that we can handle very complex
3042	   expressions that may be ANTIC, but not necessarily GIMPLE.
3043 BLOCK is the basic block the expression will be inserted into,
3044 EXPR is the expression to insert (in value form)
3045 STMTS is a statement list to append the necessary insertions into.
3046
3047 This function will die if we hit some value that shouldn't be
3048 ANTIC but is (IE there is no leader for it, or its components).
3049 This function may also generate expressions that are themselves
3050 partially or fully redundant. Those that are will be either made
3051 fully redundant during the next iteration of insert (for partially
3052 redundant ones), or eliminated by eliminate (for fully redundant
3053 ones).
3054
3055 If DOMSTMT is non-NULL then we make sure that all uses in the
3056 expressions dominate that statement. In this case the function
3057 can return NULL_TREE to signal failure. */
3058
3059 static tree
3060 create_expression_by_pieces (basic_block block, pre_expr expr,
3061 gimple_seq *stmts, gimple domstmt, tree type)
3062 {
3063 tree temp, name;
3064 tree folded;
3065 gimple_seq forced_stmts = NULL;
3066 unsigned int value_id;
3067 gimple_stmt_iterator gsi;
3068 tree exprtype = type ? type : get_expr_type (expr);
3069 pre_expr nameexpr;
3070 gimple newstmt;
3071
3072 switch (expr->kind)
3073 {
3074 /* We may hit the NAME/CONSTANT case if we have to convert types
3075 that value numbering saw through. */
3076 case NAME:
3077 folded = PRE_EXPR_NAME (expr);
3078 break;
3079 case CONSTANT:
3080 folded = PRE_EXPR_CONSTANT (expr);
3081 break;
3082 case REFERENCE:
3083 {
3084 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3085 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3086 }
3087 break;
3088 case NARY:
3089 {
3090 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3091 switch (nary->length)
3092 {
3093 case 2:
3094 {
3095 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3096 pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
3097 tree genop1 = find_or_generate_expression (block, op1,
3098 stmts, domstmt);
3099 tree genop2 = find_or_generate_expression (block, op2,
3100 stmts, domstmt);
3101 if (!genop1 || !genop2)
3102 return NULL_TREE;
3103 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
3104 may be a constant with the wrong type. */
3105 if (nary->opcode == POINTER_PLUS_EXPR)
3106 {
3107 genop1 = fold_convert (nary->type, genop1);
3108 genop2 = fold_convert (sizetype, genop2);
3109 }
3110 else
3111 {
3112 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3113 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3114 }
3115
3116 folded = fold_build2 (nary->opcode, nary->type,
3117 genop1, genop2);
3118 }
3119 break;
3120 case 1:
3121 {
3122 pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
3123 tree genop1 = find_or_generate_expression (block, op1,
3124 stmts, domstmt);
3125 if (!genop1)
3126 return NULL_TREE;
3127 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3128
3129 folded = fold_build1 (nary->opcode, nary->type,
3130 genop1);
3131 }
3132 break;
3133 default:
3134 return NULL_TREE;
3135 }
3136 }
3137 break;
3138 default:
3139 return NULL_TREE;
3140 }
3141
3142 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3143 folded = fold_convert (exprtype, folded);
3144
3145 /* Force the generated expression to be a sequence of GIMPLE
3146 statements.
3147 We have to call unshare_expr because force_gimple_operand may
3148 modify the tree we pass to it. */
3149 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3150 false, NULL);
3151
3152	  /* If we have any intermediate expressions, add them to the value
3153	     sets and chain them into the instruction stream.  */
3154 if (forced_stmts)
3155 {
3156 gsi = gsi_start (forced_stmts);
3157 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3158 {
3159 gimple stmt = gsi_stmt (gsi);
3160 tree forcedname = gimple_get_lhs (stmt);
3161 pre_expr nameexpr;
3162
3163 if (TREE_CODE (forcedname) == SSA_NAME)
3164 {
3165 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3166 VN_INFO_GET (forcedname)->valnum = forcedname;
3167 VN_INFO (forcedname)->value_id = get_next_value_id ();
3168 nameexpr = get_or_alloc_expr_for_name (forcedname);
3169 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3170 if (!in_fre)
3171 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3172 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3173 }
3174 mark_symbols_for_renaming (stmt);
3175 }
3176 gimple_seq_add_seq (stmts, forced_stmts);
3177 }
3178
3179 /* Build and insert the assignment of the end result to the temporary
3180 that we will return. */
3181 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3182 {
3183 pretemp = create_tmp_reg (exprtype, "pretmp");
3184 get_var_ann (pretemp);
3185 }
3186
3187 temp = pretemp;
3188 add_referenced_var (temp);
3189
3190 newstmt = gimple_build_assign (temp, folded);
3191 name = make_ssa_name (temp, newstmt);
3192 gimple_assign_set_lhs (newstmt, name);
3193 gimple_set_plf (newstmt, NECESSARY, false);
3194
3195 gimple_seq_add_stmt (stmts, newstmt);
3196 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3197
3198	  /* All the symbols in the new statement should be put into SSA form.  */
3199 mark_symbols_for_renaming (newstmt);
3200
3201 /* Add a value number to the temporary.
3202 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3203 we are creating the expression by pieces, and this particular piece of
3204	     the expression may already have been represented.  There is no harm in replacing
3205 here. */
3206 VN_INFO_GET (name)->valnum = name;
3207 value_id = get_expr_value_id (expr);
3208 VN_INFO (name)->value_id = value_id;
3209 nameexpr = get_or_alloc_expr_for_name (name);
3210 add_to_value (value_id, nameexpr);
3211 if (NEW_SETS (block))
3212 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3213 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3214
3215 pre_stats.insertions++;
3216 if (dump_file && (dump_flags & TDF_DETAILS))
3217 {
3218 fprintf (dump_file, "Inserted ");
3219 print_gimple_stmt (dump_file, newstmt, 0, 0);
3220 fprintf (dump_file, " in predecessor %d\n", block->index);
3221 }
3222
3223 return name;
3224 }
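
/* For example (a hypothetical sketch): asked for the value of
   a_1 + b_2, this emits

     pretmp.3_10 = a_1 + b_2;

   on STMTS, value numbers pretmp.3_10 to itself under the value id
   of EXPR, and registers it in NEW_SETS and AVAIL_OUT of BLOCK.  */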
3225
3226
3227 /* Returns true if we want to inhibit the insertions of PHI nodes
3228 for the given EXPR for basic block BB (a member of a loop).
3229	   We want to do this when we fear that the induction variable we
3230 create might inhibit vectorization. */
3231
3232 static bool
3233 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3234 {
3235 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3236 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3237 vn_reference_op_t op;
3238 unsigned i;
3239
3240 /* If we aren't going to vectorize we don't inhibit anything. */
3241 if (!flag_tree_vectorize)
3242 return false;
3243
3244 /* Otherwise we inhibit the insertion when the address of the
3245 memory reference is a simple induction variable. In other
3246 cases the vectorizer won't do anything anyway (either it's
3247 loop invariant or a complicated expression). */
3248 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3249 {
3250 switch (op->opcode)
3251 {
3252 case ARRAY_REF:
3253 case ARRAY_RANGE_REF:
3254 if (TREE_CODE (op->op0) != SSA_NAME)
3255 break;
3256 /* Fallthru. */
3257 case SSA_NAME:
3258 {
3259 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3260 affine_iv iv;
3261 /* Default defs are loop invariant. */
3262 if (!defbb)
3263 break;
3264 /* Defined outside this loop, also loop invariant. */
3265 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3266 break;
3267 /* If it's a simple induction variable inhibit insertion,
3268 the vectorizer might be interested in this one. */
3269 if (simple_iv (bb->loop_father, bb->loop_father,
3270 op->op0, &iv, true))
3271 return true;
3272 /* No simple IV, vectorizer can't do anything, hence no
3273 reason to inhibit the transformation for this operand. */
3274 break;
3275 }
3276 default:
3277 break;
3278 }
3279 }
3280 return false;
3281 }
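
/* Sketch (hypothetical loop): for a load a[i_1] where i_1 is a
   simple induction variable of the loop containing BB, inserting a
   phi for the loaded value would create a second recurrence that can
   defeat the vectorizer, so we inhibit it; a loop-invariant or
   complicated address does not trigger this.  */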
3282
3283 /* Insert the to-be-made-available values of expression EXPRNUM for each
3284 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3285 merge the result with a phi node, given the same value number as
3286 NODE. Return true if we have inserted new stuff. */
3287
3288 static bool
3289 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3290 pre_expr *avail)
3291 {
3292 pre_expr expr = expression_for_id (exprnum);
3293 pre_expr newphi;
3294 unsigned int val = get_expr_value_id (expr);
3295 edge pred;
3296 bool insertions = false;
3297 bool nophi = false;
3298 basic_block bprime;
3299 pre_expr eprime;
3300 edge_iterator ei;
3301 tree type = get_expr_type (expr);
3302 tree temp;
3303 gimple phi;
3304
3305 if (dump_file && (dump_flags & TDF_DETAILS))
3306 {
3307 fprintf (dump_file, "Found partial redundancy for expression ");
3308 print_pre_expr (dump_file, expr);
3309 fprintf (dump_file, " (%04d)\n", val);
3310 }
3311
3312 /* Make sure we aren't creating an induction variable. */
3313 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3314 {
3315 bool firstinsideloop = false;
3316 bool secondinsideloop = false;
3317 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3318 EDGE_PRED (block, 0)->src);
3319 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3320 EDGE_PRED (block, 1)->src);
3321 /* Induction variables only have one edge inside the loop. */
3322 if ((firstinsideloop ^ secondinsideloop)
3323 && (expr->kind != REFERENCE
3324 || inhibit_phi_insertion (block, expr)))
3325 {
3326 if (dump_file && (dump_flags & TDF_DETAILS))
3327 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3328 nophi = true;
3329 }
3330 }
3331
3332 /* Make the necessary insertions. */
3333 FOR_EACH_EDGE (pred, ei, block->preds)
3334 {
3335 gimple_seq stmts = NULL;
3336 tree builtexpr;
3337 bprime = pred->src;
3338 eprime = avail[bprime->index];
3339
3340 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3341 {
3342 builtexpr = create_expression_by_pieces (bprime,
3343 eprime,
3344 &stmts, NULL,
3345 type);
3346 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3347 gsi_insert_seq_on_edge (pred, stmts);
3348 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3349 insertions = true;
3350 }
3351 else if (eprime->kind == CONSTANT)
3352 {
3353	          /* Constants may not have the right type; fold_convert
3354 should give us back a constant with the right type.
3355 */
3356 tree constant = PRE_EXPR_CONSTANT (eprime);
3357 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3358 {
3359 tree builtexpr = fold_convert (type, constant);
3360 if (!is_gimple_min_invariant (builtexpr))
3361 {
3362 tree forcedexpr = force_gimple_operand (builtexpr,
3363 &stmts, true,
3364 NULL);
3365 if (!is_gimple_min_invariant (forcedexpr))
3366 {
3367 if (forcedexpr != builtexpr)
3368 {
3369 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3370 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3371 }
3372 if (stmts)
3373 {
3374 gimple_stmt_iterator gsi;
3375 gsi = gsi_start (stmts);
3376 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3377 {
3378 gimple stmt = gsi_stmt (gsi);
3379 tree lhs = gimple_get_lhs (stmt);
3380 if (TREE_CODE (lhs) == SSA_NAME)
3381 bitmap_set_bit (inserted_exprs,
3382 SSA_NAME_VERSION (lhs));
3383 gimple_set_plf (stmt, NECESSARY, false);
3384 }
3385 gsi_insert_seq_on_edge (pred, stmts);
3386 }
3387 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3388 }
3389 }
3390 else
3391 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3392 }
3393 }
3394 else if (eprime->kind == NAME)
3395 {
3396 /* We may have to do a conversion because our value
3397 numbering can look through types in certain cases, but
3398 our IL requires all operands of a phi node have the same
3399 type. */
3400 tree name = PRE_EXPR_NAME (eprime);
3401 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3402 {
3403 tree builtexpr;
3404 tree forcedexpr;
3405 builtexpr = fold_convert (type, name);
3406 forcedexpr = force_gimple_operand (builtexpr,
3407 &stmts, true,
3408 NULL);
3409
3410 if (forcedexpr != name)
3411 {
3412 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3413 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3414 }
3415
3416 if (stmts)
3417 {
3418 gimple_stmt_iterator gsi;
3419 gsi = gsi_start (stmts);
3420 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3421 {
3422 gimple stmt = gsi_stmt (gsi);
3423 tree lhs = gimple_get_lhs (stmt);
3424 if (TREE_CODE (lhs) == SSA_NAME)
3425 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3426 gimple_set_plf (stmt, NECESSARY, false);
3427 }
3428 gsi_insert_seq_on_edge (pred, stmts);
3429 }
3430 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3431 }
3432 }
3433 }
3434 /* If we didn't want a phi node, and we made insertions, we still have
3435 inserted new stuff, and thus return true. If we didn't want a phi node,
3436 and didn't make insertions, we haven't added anything new, so return
3437 false. */
3438 if (nophi && insertions)
3439 return true;
3440 else if (nophi && !insertions)
3441 return false;
3442
3443 /* Now build a phi for the new variable. */
3444 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3445 {
3446 prephitemp = create_tmp_var (type, "prephitmp");
3447 get_var_ann (prephitemp);
3448 }
3449
3450 temp = prephitemp;
3451 add_referenced_var (temp);
3452
3453 if (TREE_CODE (type) == COMPLEX_TYPE
3454 || TREE_CODE (type) == VECTOR_TYPE)
3455 DECL_GIMPLE_REG_P (temp) = 1;
3456 phi = create_phi_node (temp, block);
3457
3458 gimple_set_plf (phi, NECESSARY, false);
3459 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3460 VN_INFO (gimple_phi_result (phi))->value_id = val;
3461 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3462 FOR_EACH_EDGE (pred, ei, block->preds)
3463 {
3464 pre_expr ae = avail[pred->src->index];
3465 gcc_assert (get_expr_type (ae) == type
3466 || useless_type_conversion_p (type, get_expr_type (ae)));
3467 if (ae->kind == CONSTANT)
3468 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3469 else
3470 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3471 UNKNOWN_LOCATION);
3472 }
3473
3474 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3475 add_to_value (val, newphi);
3476
3477 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3478 this insertion, since we test for the existence of this value in PHI_GEN
3479 before proceeding with the partial redundancy checks in insert_aux.
3480
3481 The value may exist in AVAIL_OUT, in particular, it could be represented
3482 by the expression we are trying to eliminate, in which case we want the
3483	     replacement to occur.  If it does not exist in AVAIL_OUT, we want it
3484 inserted there.
3485
3486	     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3487 this block, because if it did, it would have existed in our dominator's
3488 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3489 */
3490
3491 bitmap_insert_into_set (PHI_GEN (block), newphi);
3492 bitmap_value_replace_in_set (AVAIL_OUT (block),
3493 newphi);
3494 bitmap_insert_into_set (NEW_SETS (block),
3495 newphi);
3496
3497 if (dump_file && (dump_flags & TDF_DETAILS))
3498 {
3499 fprintf (dump_file, "Created phi ");
3500 print_gimple_stmt (dump_file, phi, 0, 0);
3501 fprintf (dump_file, " in block %d\n", block->index);
3502 }
3503 pre_stats.phis++;
3504 return true;
3505 }
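
/* Illustration (hypothetical names): with avail[pred1] = t_5 and
   avail[pred2] the constant 0, the merge built here is

     prephitmp_8 = PHI <t_5 (pred1), 0 (pred2)>

   and prephitmp_8 is entered into PHI_GEN, AVAIL_OUT and NEW_SETS
   of BLOCK under the value number VAL.  */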
3506
3507
3508
3509 /* Perform insertion of partially redundant values.
3510 For BLOCK, do the following:
3511 1. Propagate the NEW_SETS of the dominator into the current block.
3512 If the block has multiple predecessors,
3513 2a. Iterate over the ANTIC expressions for the block to see if
3514 any of them are partially redundant.
3515 2b. If so, insert them into the necessary predecessors to make
3516 the expression fully redundant.
3517 2c. Insert a new PHI merging the values of the predecessors.
3518 2d. Insert the new PHI, and the new expressions, into the
3519 NEW_SETS set.
3520 3. Recursively call ourselves on the dominator children of BLOCK.
3521
3522 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3523	   do_regular_insertion and do_partial_partial_insertion.
3524
3525 */
3526
3527 static bool
3528 do_regular_insertion (basic_block block, basic_block dom)
3529 {
3530 bool new_stuff = false;
3531 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3532 pre_expr expr;
3533 int i;
3534
3535 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3536 {
3537 if (expr->kind != NAME)
3538 {
3539 pre_expr *avail;
3540 unsigned int val;
3541 bool by_some = false;
3542 bool cant_insert = false;
3543 bool all_same = true;
3544 pre_expr first_s = NULL;
3545 edge pred;
3546 basic_block bprime;
3547 pre_expr eprime = NULL;
3548 edge_iterator ei;
3549 pre_expr edoubleprime = NULL;
3550 bool do_insertion = false;
3551
3552 val = get_expr_value_id (expr);
3553 if (bitmap_set_contains_value (PHI_GEN (block), val))
3554 continue;
3555 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3556 {
3557 if (dump_file && (dump_flags & TDF_DETAILS))
3558 fprintf (dump_file, "Found fully redundant value\n");
3559 continue;
3560 }
3561
3562 avail = XCNEWVEC (pre_expr, last_basic_block);
3563 FOR_EACH_EDGE (pred, ei, block->preds)
3564 {
3565 unsigned int vprime;
3566
3567 /* We should never run insertion for the exit block
3568 and so not come across fake pred edges. */
3569 gcc_assert (!(pred->flags & EDGE_FAKE));
3570 bprime = pred->src;
3571 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3572 bprime, block);
3573
3574 /* eprime will generally only be NULL if the
3575 value of the expression, translated
3576 through the PHI for this predecessor, is
3577 undefined. If that is the case, we can't
3578 make the expression fully redundant,
3579 because its value is undefined along a
3580 predecessor path. We can thus break out
3581 early because it doesn't matter what the
3582 rest of the results are. */
3583 if (eprime == NULL)
3584 {
3585 cant_insert = true;
3586 break;
3587 }
3588
3589 eprime = fully_constant_expression (eprime);
3590 vprime = get_expr_value_id (eprime);
3591 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3592 vprime, NULL);
3593 if (edoubleprime == NULL)
3594 {
3595 avail[bprime->index] = eprime;
3596 all_same = false;
3597 }
3598 else
3599 {
3600 avail[bprime->index] = edoubleprime;
3601 by_some = true;
3602 /* We want to perform insertions to remove a redundancy on
3603 a path in the CFG we want to optimize for speed. */
3604 if (optimize_edge_for_speed_p (pred))
3605 do_insertion = true;
3606 if (first_s == NULL)
3607 first_s = edoubleprime;
3608 else if (!pre_expr_eq (first_s, edoubleprime))
3609 all_same = false;
3610 }
3611 }
3612 /* If we can insert it, it's not the same value
3613 already existing along every predecessor, and
3614	         it's defined by some predecessor, then it is
3615 partially redundant. */
3616 if (!cant_insert && !all_same && by_some && do_insertion
3617 && dbg_cnt (treepre_insert))
3618 {
3619 if (insert_into_preds_of_block (block, get_expression_id (expr),
3620 avail))
3621 new_stuff = true;
3622 }
3623 /* If all edges produce the same value and that value is
3624 an invariant, then the PHI has the same value on all
3625 edges. Note this. */
3626 else if (!cant_insert && all_same && eprime
3627 && (edoubleprime->kind == CONSTANT
3628 || edoubleprime->kind == NAME)
3629 && !value_id_constant_p (val))
3630 {
3631 unsigned int j;
3632 bitmap_iterator bi;
3633 bitmap_set_t exprset = VEC_index (bitmap_set_t,
3634 value_expressions, val);
3635
3636 unsigned int new_val = get_expr_value_id (edoubleprime);
3637 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
3638 {
3639 pre_expr expr = expression_for_id (j);
3640
3641 if (expr->kind == NAME)
3642 {
3643 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
3644 /* Just reset the value id and valnum so it is
3645 the same as the constant we have discovered. */
3646 if (edoubleprime->kind == CONSTANT)
3647 {
3648 info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
3649 pre_stats.constified++;
3650 }
3651 else
3652 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
3653 info->value_id = new_val;
3654 }
3655 }
3656 }
3657 free (avail);
3658 }
3659 }
3660
3661 VEC_free (pre_expr, heap, exprs);
3662 return new_stuff;
3663 }
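
/* The classic case handled above (hypothetical GIMPLE): a_0 + b_0 is
   computed in predecessor bb2 but not in predecessor bb3 of a block
   that recomputes it.  Translation finds the leader t_1 in bb2,
   insertion emits t_3 = a_0 + b_0 on the bb3 edge, and the phi
   PHI <t_1 (bb2), t_3 (bb3)> makes the downstream computation fully
   redundant for elimination.  */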
3664
3665
3666 /* Perform insertion for partially anticipatable expressions. There
3667	   is only one case in which we perform insertion for these: when the
3668	   expression is partially anticipatable and fully available.
3669 In this case, we know that putting it earlier will enable us to
3670 remove the later computation. */
3671
3672
3673 static bool
3674 do_partial_partial_insertion (basic_block block, basic_block dom)
3675 {
3676 bool new_stuff = false;
3677 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3678 pre_expr expr;
3679 int i;
3680
3681 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3682 {
3683 if (expr->kind != NAME)
3684 {
3685 pre_expr *avail;
3686 unsigned int val;
3687 bool by_all = true;
3688 bool cant_insert = false;
3689 edge pred;
3690 basic_block bprime;
3691 pre_expr eprime = NULL;
3692 edge_iterator ei;
3693
3694 val = get_expr_value_id (expr);
3695 if (bitmap_set_contains_value (PHI_GEN (block), val))
3696 continue;
3697 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3698 continue;
3699
3700 avail = XCNEWVEC (pre_expr, last_basic_block);
3701 FOR_EACH_EDGE (pred, ei, block->preds)
3702 {
3703 unsigned int vprime;
3704 pre_expr edoubleprime;
3705
3706 /* We should never run insertion for the exit block
3707 and so not come across fake pred edges. */
3708 gcc_assert (!(pred->flags & EDGE_FAKE));
3709 bprime = pred->src;
3710 eprime = phi_translate (expr, ANTIC_IN (block),
3711 PA_IN (block),
3712 bprime, block);
3713
3714 /* eprime will generally only be NULL if the
3715 value of the expression, translated
3716 through the PHI for this predecessor, is
3717 undefined. If that is the case, we can't
3718 make the expression fully redundant,
3719 because its value is undefined along a
3720 predecessor path. We can thus break out
3721 early because it doesn't matter what the
3722 rest of the results are. */
3723 if (eprime == NULL)
3724 {
3725 cant_insert = true;
3726 break;
3727 }
3728
3729 eprime = fully_constant_expression (eprime);
3730 vprime = get_expr_value_id (eprime);
3731 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3732 vprime, NULL);
3733 if (edoubleprime == NULL)
3734 {
3735 by_all = false;
3736 break;
3737 }
3738 else
3739 avail[bprime->index] = edoubleprime;
3740
3741 }
3742
3743 /* If we can insert it and it is available from every
3744 predecessor, the expression is fully available though
3745 only partially anticipatable, so inserting it here lets
3746 the later computation be removed. */
3747 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3748 {
3749 pre_stats.pa_insert++;
3750 if (insert_into_preds_of_block (block, get_expression_id (expr),
3751 avail))
3752 new_stuff = true;
3753 }
3754 free (avail);
3755 }
3756 }
3757
3758 VEC_free (pre_expr, heap, exprs);
3759 return new_stuff;
3760 }
3761
3762 static bool
3763 insert_aux (basic_block block)
3764 {
3765 basic_block son;
3766 bool new_stuff = false;
3767
3768 if (block)
3769 {
3770 basic_block dom;
3771 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3772 if (dom)
3773 {
3774 unsigned i;
3775 bitmap_iterator bi;
3776 bitmap_set_t newset = NEW_SETS (dom);
3777 if (newset)
3778 {
3779 /* Note that we need to do value replacement in both
3780 NEW_SETS and AVAIL_OUT; in either set the value may
3781 currently be represented by some non-simple expression
3782 that we want to replace with the new leader. */
3783 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3784 {
3785 pre_expr expr = expression_for_id (i);
3786 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3787 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3788 }
3789 }
3790 if (!single_pred_p (block))
3791 {
3792 new_stuff |= do_regular_insertion (block, dom);
3793 if (do_partial_partial)
3794 new_stuff |= do_partial_partial_insertion (block, dom);
3795 }
3796 }
3797 }
3798 for (son = first_dom_son (CDI_DOMINATORS, block);
3799 son;
3800 son = next_dom_son (CDI_DOMINATORS, son))
3801 {
3802 new_stuff |= insert_aux (son);
3803 }
3804
3805 return new_stuff;
3806 }
3807
3808 /* Perform insertion of partially redundant values. */
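/* Insertion is iterated to a fixed point: an insertion in one block
   adds new leaders to NEW_SETS and AVAIL_OUT, which can expose
   further partial redundancies in dominated blocks on the next walk
   over the dominator tree.  */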
3809
3810 static void
3811 insert (void)
3812 {
3813 bool new_stuff = true;
3814 basic_block bb;
3815 int num_iterations = 0;
3816
3817 FOR_ALL_BB (bb)
3818 NEW_SETS (bb) = bitmap_set_new ();
3819
3820 while (new_stuff)
3821 {
3822 num_iterations++;
3823 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3824 }
3825 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3826 }
3827
3828
3829 /* Add OP to EXP_GEN (BLOCK) unless OP is undefined or we are only doing FRE. */
3830
3831 static void
3832 add_to_exp_gen (basic_block block, tree op)
3833 {
3834 if (!in_fre)
3835 {
3836 pre_expr result;
3837 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3838 return;
3839 result = get_or_alloc_expr_for_name (op);
3840 bitmap_value_insert_into_set (EXP_GEN (block), result);
3841 }
3842 }
3843
3844 /* Create value ids for PHI in BLOCK. */
3845
3846 static void
3847 make_values_for_phi (gimple phi, basic_block block)
3848 {
3849 tree result = gimple_phi_result (phi);
3850
3851 /* We have no need for virtual phis, as they don't represent
3852 actual computations. */
3853 if (is_gimple_reg (result))
3854 {
3855 pre_expr e = get_or_alloc_expr_for_name (result);
3856 add_to_value (get_expr_value_id (e), e);
3857 bitmap_insert_into_set (PHI_GEN (block), e);
3858 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3859 if (!in_fre)
3860 {
3861 unsigned i;
3862 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3863 {
3864 tree arg = gimple_phi_arg_def (phi, i);
3865 if (TREE_CODE (arg) == SSA_NAME)
3866 {
3867 e = get_or_alloc_expr_for_name (arg);
3868 add_to_value (get_expr_value_id (e), e);
3869 }
3870 }
3871 }
3872 }
3873 }
3874
3875 /* Compute the AVAIL set for all basic blocks.
3876
3877 This function performs value numbering of the statements in each basic
3878 block. The AVAIL sets are built from information we glean while doing
3879 this value numbering, since the AVAIL sets contain only one entry per
3880 value.
3881
3882 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3883 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
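/* An illustrative instance of these equations (hypothetical names):
   if bb3's immediate dominator is bb2, and bb2 contains
   x_1 = a_2 + b_3, then x_1 is in TMP_GEN[bb2], hence in
   AVAIL_OUT[bb2], hence in AVAIL_IN[bb3] and AVAIL_OUT[bb3]: the
   value of a_2 + b_3 is available in bb3, with no kill set or
   fixpoint iteration needed.  */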
3884
3885 static void
3886 compute_avail (void)
3887 {
3888
3889 basic_block block, son;
3890 basic_block *worklist;
3891 size_t sp = 0;
3892 unsigned i;
3893
3894 /* We pretend that default definitions are defined in the entry block.
3895 This includes function arguments and the static chain decl. */
3896 for (i = 1; i < num_ssa_names; ++i)
3897 {
3898 tree name = ssa_name (i);
3899 pre_expr e;
3900 if (!name
3901 || !SSA_NAME_IS_DEFAULT_DEF (name)
3902 || has_zero_uses (name)
3903 || !is_gimple_reg (name))
3904 continue;
3905
3906 e = get_or_alloc_expr_for_name (name);
3907 add_to_value (get_expr_value_id (e), e);
3908 if (!in_fre)
3909 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3910 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3911 }
3912
3913 /* Allocate the worklist. */
3914 worklist = XNEWVEC (basic_block, n_basic_blocks);
3915
3916 /* Seed the algorithm by putting the dominator children of the entry
3917 block on the worklist. */
3918 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3919 son;
3920 son = next_dom_son (CDI_DOMINATORS, son))
3921 worklist[sp++] = son;
3922
3923 /* Loop until the worklist is empty. */
3924 while (sp)
3925 {
3926 gimple_stmt_iterator gsi;
3927 gimple stmt;
3928 basic_block dom;
3929 unsigned int stmt_uid = 1;
3930
3931 /* Pick a block from the worklist. */
3932 block = worklist[--sp];
3933
3934 /* Initially, the set of available values in BLOCK is that of
3935 its immediate dominator. */
3936 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3937 if (dom)
3938 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3939
3940 /* Generate values for PHI nodes. */
3941 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3942 make_values_for_phi (gsi_stmt (gsi), block);
3943
3944 BB_MAY_NOTRETURN (block) = 0;
3945
3946 /* Now compute value numbers and populate value sets with all
3947 the expressions computed in BLOCK. */
3948 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3949 {
3950 ssa_op_iter iter;
3951 tree op;
3952
3953 stmt = gsi_stmt (gsi);
3954 gimple_set_uid (stmt, stmt_uid++);
3955
3956 /* Cache whether the basic-block has any non-visible side-effect
3957 or control flow.
3958 If this isn't a call or it is the last stmt in the
3959 basic-block then the CFG represents things correctly. */
3960 if (is_gimple_call (stmt)
3961 && !stmt_ends_bb_p (stmt))
3962 {
3963 /* Non-looping const functions always return normally.
3964 Otherwise the call might not return, or might have
3965 side-effects that forbid hoisting possibly trapping
3966 expressions before it. */
3967 int flags = gimple_call_flags (stmt);
3968 if (!(flags & ECF_CONST)
3969 || (flags & ECF_LOOPING_CONST_OR_PURE))
3970 BB_MAY_NOTRETURN (block) = 1;
3971 }
3972
3973 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3974 {
3975 pre_expr e = get_or_alloc_expr_for_name (op);
3976
3977 add_to_value (get_expr_value_id (e), e);
3978 if (!in_fre)
3979 bitmap_insert_into_set (TMP_GEN (block), e);
3980 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3981 }
3982
3983 if (gimple_has_volatile_ops (stmt)
3984 || stmt_could_throw_p (stmt))
3985 continue;
3986
3987 switch (gimple_code (stmt))
3988 {
3989 case GIMPLE_RETURN:
3990 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3991 add_to_exp_gen (block, op);
3992 continue;
3993
3994 case GIMPLE_CALL:
3995 {
3996 vn_reference_t ref;
3997 unsigned int i;
3998 vn_reference_op_t vro;
3999 pre_expr result = NULL;
4000 VEC(vn_reference_op_s, heap) *ops = NULL;
4001
4002 if (!can_value_number_call (stmt))
4003 continue;
4004
4005 copy_reference_ops_from_call (stmt, &ops);
4006 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
4007 gimple_expr_type (stmt),
4008 ops, &ref, false);
4009 VEC_free (vn_reference_op_s, heap, ops);
4010 if (!ref)
4011 continue;
4012
4013 for (i = 0; VEC_iterate (vn_reference_op_s,
4014 ref->operands, i,
4015 vro); i++)
4016 {
4017 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4018 add_to_exp_gen (block, vro->op0);
4019 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4020 add_to_exp_gen (block, vro->op1);
4021 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4022 add_to_exp_gen (block, vro->op2);
4023 }
4024 result = (pre_expr) pool_alloc (pre_expr_pool);
4025 result->kind = REFERENCE;
4026 result->id = 0;
4027 PRE_EXPR_REFERENCE (result) = ref;
4028
4029 get_or_alloc_expression_id (result);
4030 add_to_value (get_expr_value_id (result), result);
4031 if (!in_fre)
4032 bitmap_value_insert_into_set (EXP_GEN (block), result);
4033 continue;
4034 }
4035
4036 case GIMPLE_ASSIGN:
4037 {
4038 pre_expr result = NULL;
4039 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4040 {
4041 case tcc_unary:
4042 case tcc_binary:
4043 case tcc_comparison:
4044 {
4045 vn_nary_op_t nary;
4046 unsigned int i;
4047
4048 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4049 gimple_assign_rhs_code (stmt),
4050 gimple_expr_type (stmt),
4051 gimple_assign_rhs1 (stmt),
4052 gimple_assign_rhs2 (stmt),
4053 NULL_TREE, NULL_TREE, &nary);
4054
4055 if (!nary)
4056 continue;
4057
4058 for (i = 0; i < nary->length; i++)
4059 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4060 add_to_exp_gen (block, nary->op[i]);
4061
4062 result = (pre_expr) pool_alloc (pre_expr_pool);
4063 result->kind = NARY;
4064 result->id = 0;
4065 PRE_EXPR_NARY (result) = nary;
4066 break;
4067 }
4068
4069 case tcc_declaration:
4070 case tcc_reference:
4071 {
4072 vn_reference_t ref;
4073 unsigned int i;
4074 vn_reference_op_t vro;
4075
4076 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4077 gimple_vuse (stmt),
4078 true, &ref);
4079 if (!ref)
4080 continue;
4081
4082 for (i = 0; VEC_iterate (vn_reference_op_s,
4083 ref->operands, i,
4084 vro); i++)
4085 {
4086 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4087 add_to_exp_gen (block, vro->op0);
4088 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4089 add_to_exp_gen (block, vro->op1);
4090 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4091 add_to_exp_gen (block, vro->op2);
4092 }
4093 result = (pre_expr) pool_alloc (pre_expr_pool);
4094 result->kind = REFERENCE;
4095 result->id = 0;
4096 PRE_EXPR_REFERENCE (result) = ref;
4097 break;
4098 }
4099
4100 default:
4101 /* For any other statement that we don't
4102 recognize, simply add all referenced
4103 SSA_NAMEs to EXP_GEN. */
4104 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4105 add_to_exp_gen (block, op);
4106 continue;
4107 }
4108
4109 get_or_alloc_expression_id (result);
4110 add_to_value (get_expr_value_id (result), result);
4111 if (!in_fre)
4112 bitmap_value_insert_into_set (EXP_GEN (block), result);
4113
4114 continue;
4115 }
4116 default:
4117 break;
4118 }
4119 }
4120
4121 /* Put the dominator children of BLOCK on the worklist of blocks
4122 to compute available sets for. */
4123 for (son = first_dom_son (CDI_DOMINATORS, block);
4124 son;
4125 son = next_dom_son (CDI_DOMINATORS, son))
4126 worklist[sp++] = son;
4127 }
4128
4129 free (worklist);
4130 }
4131
4132 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4133 than the available expressions for it. The insertion point is
4134 right before the first use in STMT. Returns the SSA_NAME that should
4135 be used for replacement. */
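/* A hedged example of when this is needed: SCCVN may have simplified
   a chain of conversions down to a single conversion that exists
   nowhere in the IL yet, so the simpler form must be materialized
   before it can be used as a replacement.  */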
4136
4137 static tree
4138 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4139 {
4140 basic_block bb = gimple_bb (stmt);
4141 gimple_stmt_iterator gsi;
4142 gimple_seq stmts = NULL;
4143 tree expr;
4144 pre_expr e;
4145
4146 /* First create a value expression from the expression we want
4147 to insert and associate it with the value of SSA_VN. */
4148 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4149 if (e == NULL)
4150 return NULL_TREE;
4151
4152 /* Then use create_expression_by_pieces to generate a valid
4153 expression to insert at this point of the IL stream. */
4154 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4155 if (expr == NULL_TREE)
4156 return NULL_TREE;
4157 gsi = gsi_for_stmt (stmt);
4158 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4159
4160 return expr;
4161 }
4162
4163 /* Eliminate fully redundant computations. */
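/* A sketch of the common case (illustrative GIMPLE, not dump output):

     before:  x_1 = a_2 + b_3;        after:  x_1 = a_2 + b_3;
              ...                             ...
              y_4 = a_2 + b_3;                y_4 = x_1;

   The second computation's value has a leader (x_1) in the AVAIL_OUT
   set of its block, so its RHS is replaced with that leader; the
   resulting copy is cleaned up later.  */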
4164
4165 static unsigned int
4166 eliminate (void)
4167 {
4168 VEC (gimple, heap) *to_remove = NULL;
4169 basic_block b;
4170 unsigned int todo = 0;
4171 gimple_stmt_iterator gsi;
4172 gimple stmt;
4173 unsigned i;
4174
4175 FOR_EACH_BB (b)
4176 {
4177 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4178 {
4179 stmt = gsi_stmt (gsi);
4180
4181 /* Lookup the RHS of the expression, see if we have an
4182 available computation for it. If so, replace the RHS with
4183 the available computation. */
4184 if (gimple_has_lhs (stmt)
4185 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
4186 && !gimple_assign_ssa_name_copy_p (stmt)
4187 && (!gimple_assign_single_p (stmt)
4188 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4189 && !gimple_has_volatile_ops (stmt)
4190 && !has_zero_uses (gimple_get_lhs (stmt)))
4191 {
4192 tree lhs = gimple_get_lhs (stmt);
4193 tree rhs = NULL_TREE;
4194 tree sprime = NULL;
4195 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4196 pre_expr sprimeexpr;
4197
4198 if (gimple_assign_single_p (stmt))
4199 rhs = gimple_assign_rhs1 (stmt);
4200
4201 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4202 get_expr_value_id (lhsexpr),
4203 NULL);
4204
4205 if (sprimeexpr)
4206 {
4207 if (sprimeexpr->kind == CONSTANT)
4208 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4209 else if (sprimeexpr->kind == NAME)
4210 sprime = PRE_EXPR_NAME (sprimeexpr);
4211 else
4212 gcc_unreachable ();
4213 }
4214
4215 /* If there is no existing leader but SCCVN knows this
4216 value is constant, use that constant. */
4217 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4218 {
4219 sprime = VN_INFO (lhs)->valnum;
4220 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4221 TREE_TYPE (sprime)))
4222 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4223
4224 if (dump_file && (dump_flags & TDF_DETAILS))
4225 {
4226 fprintf (dump_file, "Replaced ");
4227 print_gimple_expr (dump_file, stmt, 0, 0);
4228 fprintf (dump_file, " with ");
4229 print_generic_expr (dump_file, sprime, 0);
4230 fprintf (dump_file, " in ");
4231 print_gimple_stmt (dump_file, stmt, 0, 0);
4232 }
4233 pre_stats.eliminations++;
4234 propagate_tree_value_into_stmt (&gsi, sprime);
4235 stmt = gsi_stmt (gsi);
4236 update_stmt (stmt);
4237 continue;
4238 }
4239
4240 /* If there is no existing usable leader but SCCVN thinks
4241 it has an expression it wants to use as replacement,
4242 insert that. */
4243 if (!sprime || sprime == lhs)
4244 {
4245 tree val = VN_INFO (lhs)->valnum;
4246 if (val != VN_TOP
4247 && TREE_CODE (val) == SSA_NAME
4248 && VN_INFO (val)->needs_insertion
4249 && can_PRE_operation (vn_get_expr_for (val)))
4250 sprime = do_SCCVN_insertion (stmt, val);
4251 }
4252 if (sprime
4253 && sprime != lhs
4254 && (rhs == NULL_TREE
4255 || TREE_CODE (rhs) != SSA_NAME
4256 || may_propagate_copy (rhs, sprime)))
4257 {
4258 bool can_make_abnormal_goto
4259 = is_gimple_call (stmt)
4260 && stmt_can_make_abnormal_goto (stmt);
4261
4262 gcc_assert (sprime != rhs);
4263
4264 if (dump_file && (dump_flags & TDF_DETAILS))
4265 {
4266 fprintf (dump_file, "Replaced ");
4267 print_gimple_expr (dump_file, stmt, 0, 0);
4268 fprintf (dump_file, " with ");
4269 print_generic_expr (dump_file, sprime, 0);
4270 fprintf (dump_file, " in ");
4271 print_gimple_stmt (dump_file, stmt, 0, 0);
4272 }
4273
4274 if (TREE_CODE (sprime) == SSA_NAME)
4275 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4276 NECESSARY, true);
4277 /* We need to make sure the new and old types actually
4278 match; if a simple cast is required, fold_convert will
4279 add it for us. */
4280 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4281 && !useless_type_conversion_p (gimple_expr_type (stmt),
4282 TREE_TYPE (sprime)))
4283 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4284
4285 pre_stats.eliminations++;
4286 propagate_tree_value_into_stmt (&gsi, sprime);
4287 stmt = gsi_stmt (gsi);
4288 update_stmt (stmt);
4289
4290 /* If we removed EH side-effects from the statement, clean
4291 its EH information. */
4292 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4293 {
4294 bitmap_set_bit (need_eh_cleanup,
4295 gimple_bb (stmt)->index);
4296 if (dump_file && (dump_flags & TDF_DETAILS))
4297 fprintf (dump_file, " Removed EH side-effects.\n");
4298 }
4299
4300 /* Likewise for AB side-effects. */
4301 if (can_make_abnormal_goto
4302 && !stmt_can_make_abnormal_goto (stmt))
4303 {
4304 bitmap_set_bit (need_ab_cleanup,
4305 gimple_bb (stmt)->index);
4306 if (dump_file && (dump_flags & TDF_DETAILS))
4307 fprintf (dump_file, " Removed AB side-effects.\n");
4308 }
4309 }
4310 }
4311 /* If the statement is a scalar store, see if the expression
4312 has the same value number as its rhs. If so, the store is
4313 dead. */
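          /* E.g. (illustrative) a store *p_1 = x_2 is dead if what
             *p_1 currently contains already value-numbers to x_2's
             value.  */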
4314 else if (gimple_assign_single_p (stmt)
4315 && !is_gimple_reg (gimple_assign_lhs (stmt))
4316 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4317 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4318 {
4319 tree rhs = gimple_assign_rhs1 (stmt);
4320 tree val;
4321 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4322 gimple_vuse (stmt), true, NULL);
4323 if (TREE_CODE (rhs) == SSA_NAME)
4324 rhs = VN_INFO (rhs)->valnum;
4325 if (val
4326 && operand_equal_p (val, rhs, 0))
4327 {
4328 if (dump_file && (dump_flags & TDF_DETAILS))
4329 {
4330 fprintf (dump_file, "Deleted redundant store ");
4331 print_gimple_stmt (dump_file, stmt, 0, 0);
4332 }
4333
4334 /* Queue stmt for removal. */
4335 VEC_safe_push (gimple, heap, to_remove, stmt);
4336 }
4337 }
4338 /* Visit COND_EXPRs and fold the comparison with the
4339 available value-numbers. */
4340 else if (gimple_code (stmt) == GIMPLE_COND)
4341 {
4342 tree op0 = gimple_cond_lhs (stmt);
4343 tree op1 = gimple_cond_rhs (stmt);
4344 tree result;
4345
4346 if (TREE_CODE (op0) == SSA_NAME)
4347 op0 = VN_INFO (op0)->valnum;
4348 if (TREE_CODE (op1) == SSA_NAME)
4349 op1 = VN_INFO (op1)->valnum;
4350 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4351 op0, op1);
4352 if (result && TREE_CODE (result) == INTEGER_CST)
4353 {
4354 if (integer_zerop (result))
4355 gimple_cond_make_false (stmt);
4356 else
4357 gimple_cond_make_true (stmt);
4358 update_stmt (stmt);
4359 todo = TODO_cleanup_cfg;
4360 }
4361 }
4362 /* Visit indirect calls and turn them into direct calls if
4363 possible. */
4364 if (is_gimple_call (stmt)
4365 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
4366 {
4367 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
4368 if (TREE_CODE (fn) == ADDR_EXPR
4369 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4370 {
4371 bool can_make_abnormal_goto
4372 = stmt_can_make_abnormal_goto (stmt);
4373
4374 if (dump_file && (dump_flags & TDF_DETAILS))
4375 {
4376 fprintf (dump_file, "Replacing call target with ");
4377 print_generic_expr (dump_file, fn, 0);
4378 fprintf (dump_file, " in ");
4379 print_gimple_stmt (dump_file, stmt, 0, 0);
4380 }
4381
4382 gimple_call_set_fn (stmt, fn);
4383 update_stmt (stmt);
4384
4385 /* If we removed EH side-effects from the statement, clean
4386 its EH information. */
4387 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4388 {
4389 bitmap_set_bit (need_eh_cleanup,
4390 gimple_bb (stmt)->index);
4391 if (dump_file && (dump_flags & TDF_DETAILS))
4392 fprintf (dump_file, " Removed EH side-effects.\n");
4393 }
4394
4395 /* Likewise for AB side-effects. */
4396 if (can_make_abnormal_goto
4397 && !stmt_can_make_abnormal_goto (stmt))
4398 {
4399 bitmap_set_bit (need_ab_cleanup,
4400 gimple_bb (stmt)->index);
4401 if (dump_file && (dump_flags & TDF_DETAILS))
4402 fprintf (dump_file, " Removed AB side-effects.\n");
4403 }
4404
4405 /* Changing an indirect call to a direct call may
4406 have exposed different semantics. This may
4407 require an SSA update. */
4408 todo |= TODO_update_ssa_only_virtuals;
4409 }
4410 }
4411 }
4412
4413 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4414 {
4415 gimple stmt, phi = gsi_stmt (gsi);
4416 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4417 pre_expr sprimeexpr, resexpr;
4418 gimple_stmt_iterator gsi2;
4419
4420 /* We want to perform redundant PHI elimination. Do so by
4421 replacing the PHI with a single copy if possible.
4422 Do not touch single-argument or virtual PHIs. */
4423 if (gimple_phi_num_args (phi) == 1
4424 || !is_gimple_reg (res))
4425 {
4426 gsi_next (&gsi);
4427 continue;
4428 }
4429
4430 resexpr = get_or_alloc_expr_for_name (res);
4431 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4432 get_expr_value_id (resexpr), NULL);
4433 if (sprimeexpr)
4434 {
4435 if (sprimeexpr->kind == CONSTANT)
4436 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4437 else if (sprimeexpr->kind == NAME)
4438 sprime = PRE_EXPR_NAME (sprimeexpr);
4439 else
4440 gcc_unreachable ();
4441 }
4442 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4443 {
4444 sprime = VN_INFO (res)->valnum;
4445 if (!useless_type_conversion_p (TREE_TYPE (res),
4446 TREE_TYPE (sprime)))
4447 sprime = fold_convert (TREE_TYPE (res), sprime);
4448 }
4449 if (!sprime
4450 || sprime == res)
4451 {
4452 gsi_next (&gsi);
4453 continue;
4454 }
4455
4456 if (dump_file && (dump_flags & TDF_DETAILS))
4457 {
4458 fprintf (dump_file, "Replaced redundant PHI node defining ");
4459 print_generic_expr (dump_file, res, 0);
4460 fprintf (dump_file, " with ");
4461 print_generic_expr (dump_file, sprime, 0);
4462 fprintf (dump_file, "\n");
4463 }
4464
4465 remove_phi_node (&gsi, false);
4466
4467 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4468 && TREE_CODE (sprime) == SSA_NAME)
4469 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4470
4471 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4472 sprime = fold_convert (TREE_TYPE (res), sprime);
4473 stmt = gimple_build_assign (res, sprime);
4474 SSA_NAME_DEF_STMT (res) = stmt;
4475 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4476
4477 gsi2 = gsi_after_labels (b);
4478 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4479 /* Queue the copy for eventual removal. */
4480 VEC_safe_push (gimple, heap, to_remove, stmt);
4481 /* If we inserted this PHI node ourself, it's not an elimination. */
4482 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4483 pre_stats.phis--;
4484 else
4485 pre_stats.eliminations++;
4486 }
4487 }
4488
4489 /* We cannot remove stmts during the BB walk, and especially cannot
4490 release SSA names there, as this confuses the VN machinery. The
4491 stmts ending up in to_remove are either stores or simple copies. */
4492 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4493 {
4494 tree lhs = gimple_assign_lhs (stmt);
4495 tree rhs = gimple_assign_rhs1 (stmt);
4496 use_operand_p use_p;
4497 gimple use_stmt;
4498
4499 /* If there is a single use only, propagate the equivalency
4500 instead of keeping the copy. */
4501 if (TREE_CODE (lhs) == SSA_NAME
4502 && TREE_CODE (rhs) == SSA_NAME
4503 && single_imm_use (lhs, &use_p, &use_stmt)
4504 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4505 {
4506 SET_USE (use_p, rhs);
4507 update_stmt (use_stmt);
4508 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4509 && TREE_CODE (rhs) == SSA_NAME)
4510 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4511 }
4512
4513 /* If this is a store or a now unused copy, remove it. */
4514 if (TREE_CODE (lhs) != SSA_NAME
4515 || has_zero_uses (lhs))
4516 {
4517 basic_block bb = gimple_bb (stmt);
4518 gsi = gsi_for_stmt (stmt);
4519 unlink_stmt_vdef (stmt);
4520 gsi_remove (&gsi, true);
4521 if (gimple_purge_dead_eh_edges (bb))
4522 todo |= TODO_cleanup_cfg;
4523 if (TREE_CODE (lhs) == SSA_NAME)
4524 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4525 release_defs (stmt);
4526 }
4527 }
4528 VEC_free (gimple, heap, to_remove);
4529
4530 return todo;
4531 }
4532
4533 /* Borrow a bit of tree-ssa-dce.c for the moment.
4534 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4535 this may be a bit faster, and we may want critical edges kept split. */
4536
4537 /* If OP's defining statement has not already been determined to be necessary,
4538 mark that statement necessary. Return the stmt, if it is newly
4539 necessary. */
4540
4541 static inline gimple
4542 mark_operand_necessary (tree op)
4543 {
4544 gimple stmt;
4545
4546 gcc_assert (op);
4547
4548 if (TREE_CODE (op) != SSA_NAME)
4549 return NULL;
4550
4551 stmt = SSA_NAME_DEF_STMT (op);
4552 gcc_assert (stmt);
4553
4554 if (gimple_plf (stmt, NECESSARY)
4555 || gimple_nop_p (stmt))
4556 return NULL;
4557
4558 gimple_set_plf (stmt, NECESSARY, true);
4559 return stmt;
4560 }
4561
4562 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4563 to insert PHI nodes sometimes, and because value numbering of casts isn't
4564 perfect, we sometimes end up inserting dead code. This simple DCE-like
4565 pass removes any insertions we made that weren't actually used. */
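/* The walk below is a standard mark phase: it is seeded with the
   inserted names already flagged NECESSARY during elimination, marks
   everything feeding them transitively, and afterwards deletes any
   insertion left unmarked.  */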
4566
4567 static void
4568 remove_dead_inserted_code (void)
4569 {
4570 bitmap worklist;
4571 unsigned i;
4572 bitmap_iterator bi;
4573 gimple t;
4574
4575 worklist = BITMAP_ALLOC (NULL);
4576 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4577 {
4578 t = SSA_NAME_DEF_STMT (ssa_name (i));
4579 if (gimple_plf (t, NECESSARY))
4580 bitmap_set_bit (worklist, i);
4581 }
4582 while (!bitmap_empty_p (worklist))
4583 {
4584 i = bitmap_first_set_bit (worklist);
4585 bitmap_clear_bit (worklist, i);
4586 t = SSA_NAME_DEF_STMT (ssa_name (i));
4587
4588 /* PHI nodes are somewhat special in that each PHI alternative has
4589 data and control dependencies. All the statements feeding the
4590 PHI node's arguments are always necessary. */
4591 if (gimple_code (t) == GIMPLE_PHI)
4592 {
4593 unsigned k;
4594
4595 for (k = 0; k < gimple_phi_num_args (t); k++)
4596 {
4597 tree arg = PHI_ARG_DEF (t, k);
4598 if (TREE_CODE (arg) == SSA_NAME)
4599 {
4600 gimple n = mark_operand_necessary (arg);
4601 if (n)
4602 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4603 }
4604 }
4605 }
4606 else
4607 {
4608 /* Propagate through the operands. Examine all the USE, VUSE and
4609 VDEF operands in this statement. Mark all the statements
4610 which feed this statement's uses as necessary. */
4611 ssa_op_iter iter;
4612 tree use;
4613
4614 /* The operands of VDEF expressions are also needed as they
4615 represent potential definitions that may reach this
4616 statement (VDEF operands allow us to follow def-def
4617 links). */
4618
4619 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4620 {
4621 gimple n = mark_operand_necessary (use);
4622 if (n)
4623 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4624 }
4625 }
4626 }
4627
4628 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4629 {
4630 t = SSA_NAME_DEF_STMT (ssa_name (i));
4631 if (!gimple_plf (t, NECESSARY))
4632 {
4633 gimple_stmt_iterator gsi;
4634
4635 if (dump_file && (dump_flags & TDF_DETAILS))
4636 {
4637 fprintf (dump_file, "Removing unnecessary insertion:");
4638 print_gimple_stmt (dump_file, t, 0, 0);
4639 }
4640
4641 gsi = gsi_for_stmt (t);
4642 if (gimple_code (t) == GIMPLE_PHI)
4643 remove_phi_node (&gsi, true);
4644 else
4645 {
4646 gsi_remove (&gsi, true);
4647 release_defs (t);
4648 }
4649 }
4650 }
4651 BITMAP_FREE (worklist);
4652 }
4653
4654 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4655 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4656 the number of visited blocks. */
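/* This is in effect a post-order over the inverted CFG, walking
   predecessor edges backwards from EXIT_BLOCK; presumably it exists
   (rather than reusing the generic order computations) so that the
   backward ANTIC iteration visits blocks after their successors.
   By the time it is called, connect_infinite_loops_to_exit has
   ensured every block reaches EXIT.  */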
4657
4658 static int
4659 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4660 {
4661 edge_iterator *stack;
4662 int sp;
4663 int post_order_num = 0;
4664 sbitmap visited;
4665
4666 if (include_entry_exit)
4667 post_order[post_order_num++] = EXIT_BLOCK;
4668
4669 /* Allocate stack for back-tracking up CFG. */
4670 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4671 sp = 0;
4672
4673 /* Allocate bitmap to track nodes that have been visited. */
4674 visited = sbitmap_alloc (last_basic_block);
4675
4676 /* None of the nodes in the CFG have been visited yet. */
4677 sbitmap_zero (visited);
4678
4679 /* Start with an iterator over the exit block's predecessor edges. */
4680 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4681
4682 while (sp)
4683 {
4684 edge_iterator ei;
4685 basic_block src;
4686 basic_block dest;
4687
4688 /* Look at the edge on the top of the stack. */
4689 ei = stack[sp - 1];
4690 src = ei_edge (ei)->src;
4691 dest = ei_edge (ei)->dest;
4692
4693 /* Check if the edge source has been visited yet. */
4694 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4695 {
4696 /* Mark that we have visited the source. */
4697 SET_BIT (visited, src->index);
4698
4699 if (EDGE_COUNT (src->preds) > 0)
4700 /* Since the SRC node has been visited for the first
4701 time, check its predecessors. */
4702 stack[sp++] = ei_start (src->preds);
4703 else
4704 post_order[post_order_num++] = src->index;
4705 }
4706 else
4707 {
4708 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4709 post_order[post_order_num++] = dest->index;
4710
4711 if (!ei_one_before_end_p (ei))
4712 ei_next (&stack[sp - 1]);
4713 else
4714 sp--;
4715 }
4716 }
4717
4718 if (include_entry_exit)
4719 post_order[post_order_num++] = ENTRY_BLOCK;
4720
4721 free (stack);
4722 sbitmap_free (visited);
4723 return post_order_num;
4724 }
4725
4726
4727 /* Initialize data structures used by PRE. */
4728
4729 static void
4730 init_pre (bool do_fre)
4731 {
4732 basic_block bb;
4733
4734 next_expression_id = 1;
4735 expressions = NULL;
4736 VEC_safe_push (pre_expr, heap, expressions, NULL);
4737 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4738 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4739 get_max_value_id() + 1);
4740 name_to_id = NULL;
4741
4742 in_fre = do_fre;
4743
4744 inserted_exprs = BITMAP_ALLOC (NULL);
4745 need_creation = NULL;
4746 pretemp = NULL_TREE;
4747 storetemp = NULL_TREE;
4748 prephitemp = NULL_TREE;
4749
4750 connect_infinite_loops_to_exit ();
4751 memset (&pre_stats, 0, sizeof (pre_stats));
4752
4753
4754 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4755 my_rev_post_order_compute (postorder, false);
4756
4757 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4758
4759 calculate_dominance_info (CDI_POST_DOMINATORS);
4760 calculate_dominance_info (CDI_DOMINATORS);
4761
4762 bitmap_obstack_initialize (&grand_bitmap_obstack);
4763 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4764 expr_pred_trans_eq, free);
4765 expression_to_id = htab_create (num_ssa_names * 3,
4766 pre_expr_hash,
4767 pre_expr_eq, NULL);
4768 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4769 sizeof (struct bitmap_set), 30);
4770 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4771 sizeof (struct pre_expr_d), 30);
4772 FOR_ALL_BB (bb)
4773 {
4774 EXP_GEN (bb) = bitmap_set_new ();
4775 PHI_GEN (bb) = bitmap_set_new ();
4776 TMP_GEN (bb) = bitmap_set_new ();
4777 AVAIL_OUT (bb) = bitmap_set_new ();
4778 }
4779
4780 need_eh_cleanup = BITMAP_ALLOC (NULL);
4781 need_ab_cleanup = BITMAP_ALLOC (NULL);
4782 }
4783
4784
4785 /* Deallocate data structures used by PRE. */
4786
4787 static void
4788 fini_pre (bool do_fre)
4789 {
4790 free (postorder);
4791 VEC_free (bitmap_set_t, heap, value_expressions);
4792 BITMAP_FREE (inserted_exprs);
4793 VEC_free (gimple, heap, need_creation);
4794 bitmap_obstack_release (&grand_bitmap_obstack);
4795 free_alloc_pool (bitmap_set_pool);
4796 free_alloc_pool (pre_expr_pool);
4797 htab_delete (phi_translate_table);
4798 htab_delete (expression_to_id);
4799 VEC_free (unsigned, heap, name_to_id);
4800
4801 free_aux_for_blocks ();
4802
4803 free_dominance_info (CDI_POST_DOMINATORS);
4804
4805 if (!bitmap_empty_p (need_eh_cleanup))
4806 {
4807 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4808 cleanup_tree_cfg ();
4809 }
4810
4811 BITMAP_FREE (need_eh_cleanup);
4812
4813 if (!bitmap_empty_p (need_ab_cleanup))
4814 {
4815 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4816 cleanup_tree_cfg ();
4817 }
4818
4819 BITMAP_FREE (need_ab_cleanup);
4820
4821 if (!do_fre)
4822 loop_optimizer_finalize ();
4823 }
4824
4825 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller
4826 only wants to do full redundancy elimination. */
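/* In outline: run SCCVN for value numbering, compute AVAIL, then
   (for PRE proper, and only on not-too-large functions) compute
   ANTIC and insert expressions to turn partial redundancies into
   full ones, and finally eliminate whatever is now fully
   redundant.  */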
4827
4828 static unsigned int
4829 execute_pre (bool do_fre)
4830 {
4831 unsigned int todo = 0;
4832
4833 do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
4834
4835 /* This has to happen before SCCVN runs because
4836 loop_optimizer_init may create new phis, etc. */
4837 if (!do_fre)
4838 loop_optimizer_init (LOOPS_NORMAL);
4839
4840 if (!run_scc_vn ())
4841 {
4842 if (!do_fre)
4843 loop_optimizer_finalize ();
4844
4845 return 0;
4846 }
4847
4848 init_pre (do_fre);
4849 scev_initialize ();
4850
4851 /* Collect and value number expressions computed in each basic block. */
4852 compute_avail ();
4853
4854 if (dump_file && (dump_flags & TDF_DETAILS))
4855 {
4856 basic_block bb;
4857
4858 FOR_ALL_BB (bb)
4859 {
4860 print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
4861 print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
4862 print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
4863 print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
4864 }
4865 }
4866
4867 /* Insert can get quite slow on an incredibly large number of basic
4868 blocks due to some quadratic behavior. Until this behavior is
4869 fixed, don't run it when we have an incredibly large number of
4870 bb's. If we aren't going to run insert, there is no point in
4871 computing ANTIC, either, even though it's plenty fast. */
4872 if (!do_fre && n_basic_blocks < 4000)
4873 {
4874 compute_antic ();
4875 insert ();
4876 }
4877
4878 /* Make sure to remove fake edges before committing our inserts.
4879 This makes sure we don't end up with extra critical edges that
4880 we would need to split. */
4881 remove_fake_exit_edges ();
4882 gsi_commit_edge_inserts ();
4883
4884 /* Remove all the redundant expressions. */
4885 todo |= eliminate ();
4886
4887 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4888 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4889 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4890 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4891 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4892
4893 clear_expression_ids ();
4894 free_scc_vn ();
4895 if (!do_fre)
4896 remove_dead_inserted_code ();
4897
4898 scev_finalize ();
4899 fini_pre (do_fre);
4900
4901 return todo;
4902 }
4903
4904 /* Gate and execute functions for PRE. */
4905
4906 static unsigned int
4907 do_pre (void)
4908 {
4909 return execute_pre (false);
4910 }
4911
4912 static bool
4913 gate_pre (void)
4914 {
4915 return flag_tree_pre != 0;
4916 }
4917
4918 struct gimple_opt_pass pass_pre =
4919 {
4920 {
4921 GIMPLE_PASS,
4922 "pre", /* name */
4923 gate_pre, /* gate */
4924 do_pre, /* execute */
4925 NULL, /* sub */
4926 NULL, /* next */
4927 0, /* static_pass_number */
4928 TV_TREE_PRE, /* tv_id */
4929 PROP_no_crit_edges | PROP_cfg
4930 | PROP_ssa, /* properties_required */
4931 0, /* properties_provided */
4932 0, /* properties_destroyed */
4933 TODO_rebuild_alias, /* todo_flags_start */
4934 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
4935 | TODO_verify_ssa /* todo_flags_finish */
4936 }
4937 };
4938
4939
4940 /* Gate and execute functions for FRE. */
4941
4942 static unsigned int
4943 execute_fre (void)
4944 {
4945 return execute_pre (true);
4946 }
4947
4948 static bool
4949 gate_fre (void)
4950 {
4951 return flag_tree_fre != 0;
4952 }
4953
4954 struct gimple_opt_pass pass_fre =
4955 {
4956 {
4957 GIMPLE_PASS,
4958 "fre", /* name */
4959 gate_fre, /* gate */
4960 execute_fre, /* execute */
4961 NULL, /* sub */
4962 NULL, /* next */
4963 0, /* static_pass_number */
4964 TV_TREE_FRE, /* tv_id */
4965 PROP_cfg | PROP_ssa, /* properties_required */
4966 0, /* properties_provided */
4967 0, /* properties_destroyed */
4968 0, /* todo_flags_start */
4969 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
4970 }
4971 };