gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
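
/* As a small, purely illustrative example (hypothetical GIMPLE, not
   taken from any testcase), consider the SSA cycle

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   The optimistic table lets us assume i_2 == i_1 on the first
   iteration over the SCC; the expression i_1 + 0 then simplifies to
   i_1, the PHI collapses to the constant 0, and a second iteration
   confirms the fixed point.  A pessimistic scheme reaches the same
   result, but only by re-hashing the whole region after every RPO
   pass.  */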

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
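
/* For instance (illustrative names only): if x_3 has value number y_4
   and y_4 is defined by y_4 = a_1 + b_2, vn_get_expr_for (x_3)
   rebuilds and caches the expression a_1 + b_2, which the
   simplification code can then fold against other expressions
   involving x_3.  */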

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
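
/* Illustrative classification (statements hypothetical):
   _1 = a_2 + 3 is VN_NARY; _1 = *p_2 and calls are VN_REFERENCE;
   a PHI node is VN_PHI; _1 = 42 is VN_CONSTANT.  */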

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  phi->phiargs.release ();
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  vr->operands.release ();
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, creating a new one if none exists
   yet, and return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
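
/* The hash deliberately folds runs of constant-offset components into
   a single accumulated offset, so that (illustrative example, offsets
   invented) a.b.c at byte offset 8 and the equivalent MEM[&a, 8] hash
   the same way even though their operand vectors differ in shape;
   operands without a known constant offset are hashed individually,
   and an ADDR_EXPR feeding a dereference is hashed via the underlying
   decl.  */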

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
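
/* Mirroring the hash above, the comparison walks both operand vectors
   in lock-step, summing known constant offsets on each side before
   comparing them, and replaces an ADDR_EXPR that feeds a dereference
   by the underlying decl.  Thus (illustrative) MEM[&a, 8] compares
   equal to a.b.c when .b.c sits at byte offset 8, even though the
   vectors differ in length.  */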

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
                          .arshift (BITS_PER_UNIT == 8
                                    ? 3 : exact_log2 (BITS_PER_UNIT),
                                    HOST_BITS_PER_DOUBLE_INT);
                    if (off.fits_shwi ())
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off += -tree_to_double_int (temp.op1);
              off *= tree_to_double_int (temp.op2);
              if (off.fits_shwi ())
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
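
/* As an illustration (field offsets invented for the example), the
   reference a.b[i_1] is flattened outermost-first into roughly

     { ARRAY_REF, op0 = i_1, op1 = low-bound, op2 = elt-size, off = -1 }
     { COMPONENT_REF, op0 = FIELD_DECL b, off = <byte offset of b> }
     { MEM_REF, op0 = 0, off = 0 }
     { ADDR_EXPR, op0 = &a, off = -1 }

   where the trailing MEM_REF/ADDR_EXPR pair is the decl
   canonicalization performed above.  */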

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
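
/* For example (offsets invented), the operand vector for a.b with b
   at byte offset 4 yields an ao_ref with base a, bit offset 32, and
   size/max_size taken from the field's mode; a variable array index
   instead forces max_size to -1, which the alias oracle treats as an
   access of unknown extent.  */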

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If two calls have a different non-SSA LHS, vdef value numbers should be
     different.  By adding the LHS here to the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
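
/* E.g. (numbers invented): given the op pair { MEM_REF off 4,
   ADDR_EXPR &a.b } where field b lives at byte offset 8, the pair is
   rewritten to { MEM_REF off 12, ADDR_EXPR &a }, moving the component
   offset into the MEM_REF and keeping the operand vector canonical.  */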

/* Optimize the MEM_REF at position *I_P in a vn_reference_op_s vector *OPS
   by looking through the address computation of its SSA name operand.
   Updates *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
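
/* Illustrative cases (names hypothetical): if p_1 is defined by
   p_1 = q_2 + 8, the pair { MEM_REF off 4, SSA_NAME p_1 } becomes
   { MEM_REF off 12, SSA_NAME q_2 } and the walk recurses on q_2;
   if p_1 = &MEM[q_2, 8], the base pointer q_2 is substituted the
   same way with the MEM_REF offset folded in.  */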

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
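
/* For example, a load like "foo"[1] (an ARRAY_REF with constant index
   into a STRING_CST) folds here to the character constant 'o', and a
   call such as strlen ("foo") folds through the builtin path to 3.  */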

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off += -tree_to_double_int (vro->op1);
          off *= tree_to_double_int (vro->op2);
          if (off.fits_shwi ())
            vro->off = off.low;
        }
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
                       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing vn_reference entry in the value table for the
   VUSE, SET, TYPE, OPERANDS reference, or insert a new one.  VALUE is
   the value of the reference, either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s>
          rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && lhs_ops[0].off != -1
          && (lhs_ops[0].off == vr->operands[i].off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow (i + 1 + rhs.length ());
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = vNULL;
        }
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references.create (0);
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
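
/* Example for case 5 (types and offsets invented): given the
   aggregate copy a = b and a pending lookup of a.x, the operands for
   a.x are rewritten in place to describe b.x and the VUSE walk
   continues, so an earlier store to b.x can supply the value.  Case 6
   performs the same translation through memcpy (&a, &b, n) when the
   copied region fully covers the access.  */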
1887
1888 /* Lookup a reference operation by it's parts, in the current hash table.
1889 Returns the resulting value number if it exists in the hash table,
1890 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1891 vn_reference_t stored in the hashtable if something is found. */
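/* A typical use (sketch): a caller that has built a vec of
vn_reference_op_s OPS describing a memory access can ask
tree val = vn_reference_lookup_pieces (vuse, set, type, ops,
&res, VN_WALK);
and reuse VAL when it comes back non-NULL. */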
1892
1893 tree
1894 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1895 vec<vn_reference_op_s> operands,
1896 vn_reference_t *vnresult, vn_lookup_kind kind)
1897 {
1898 struct vn_reference_s vr1;
1899 vn_reference_t tmp;
1900 tree cst;
1901
1902 if (!vnresult)
1903 vnresult = &tmp;
1904 *vnresult = NULL;
1905
1906 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1907 shared_lookup_references.truncate (0);
1908 shared_lookup_references.safe_grow (operands.length ());
1909 memcpy (shared_lookup_references.address (),
1910 operands.address (),
1911 sizeof (vn_reference_op_s)
1912 * operands.length ());
1913 vr1.operands = operands = shared_lookup_references
1914 = valueize_refs (shared_lookup_references);
1915 vr1.type = type;
1916 vr1.set = set;
1917 vr1.hashcode = vn_reference_compute_hash (&vr1);
1918 if ((cst = fully_constant_vn_reference_p (&vr1)))
1919 return cst;
1920
1921 vn_reference_lookup_1 (&vr1, vnresult);
1922 if (!*vnresult
1923 && kind != VN_NOWALK
1924 && vr1.vuse)
1925 {
1926 ao_ref r;
1927 vn_walk_kind = kind;
1928 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1929 *vnresult =
1930 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1931 vn_reference_lookup_2,
1932 vn_reference_lookup_3, &vr1);
1933 if (vr1.operands != operands)
1934 vr1.operands.release ();
1935 }
1936
1937 if (*vnresult)
1938 return (*vnresult)->result;
1939
1940 return NULL_TREE;
1941 }
1942
1943 /* Lookup OP in the current hash table, and return the resulting value
1944 number if it exists in the hash table. Return NULL_TREE if it does
1945 not exist in the hash table or if the result field of the structure
1946 was NULL. VNRESULT will be filled in with the vn_reference_t
1947 stored in the hashtable if one exists. */
1948
1949 tree
1950 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1951 vn_reference_t *vnresult)
1952 {
1953 vec<vn_reference_op_s> operands;
1954 struct vn_reference_s vr1;
1955 tree cst;
1956 bool valueized_anything;
1957
1958 if (vnresult)
1959 *vnresult = NULL;
1960
1961 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1962 vr1.operands = operands
1963 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1964 vr1.type = TREE_TYPE (op);
1965 vr1.set = get_alias_set (op);
1966 vr1.hashcode = vn_reference_compute_hash (&vr1);
1967 if ((cst = fully_constant_vn_reference_p (&vr1)))
1968 return cst;
1969
1970 if (kind != VN_NOWALK
1971 && vr1.vuse)
1972 {
1973 vn_reference_t wvnresult;
1974 ao_ref r;
1975 /* Make sure to use a valueized reference if we valueized anything.
1976 Otherwise preserve the full reference for advanced TBAA. */
1977 if (!valueized_anything
1978 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1979 vr1.operands))
1980 ao_ref_init (&r, op);
1981 vn_walk_kind = kind;
1982 wvnresult =
1983 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1984 vn_reference_lookup_2,
1985 vn_reference_lookup_3, &vr1);
1986 if (vr1.operands != operands)
1987 vr1.operands.release ();
1988 if (wvnresult)
1989 {
1990 if (vnresult)
1991 *vnresult = wvnresult;
1992 return wvnresult->result;
1993 }
1994
1995 return NULL_TREE;
1996 }
1997
1998 return vn_reference_lookup_1 (&vr1, vnresult);
1999 }
2000
2001
2002 /* Insert OP into the current hash table with a value number of
2003 RESULT, and return the resulting reference structure we created. */
2004
2005 vn_reference_t
2006 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2007 {
2008 void **slot;
2009 vn_reference_t vr1;
2010
2011 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2012 if (TREE_CODE (result) == SSA_NAME)
2013 vr1->value_id = VN_INFO (result)->value_id;
2014 else
2015 vr1->value_id = get_or_alloc_constant_value_id (result);
2016 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2017 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2018 vr1->type = TREE_TYPE (op);
2019 vr1->set = get_alias_set (op);
2020 vr1->hashcode = vn_reference_compute_hash (vr1);
2021 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2022 vr1->result_vdef = vdef;
2023
2024 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2025 INSERT);
2026
2027 /* Because we look up stores using vuses, and value number failures
2028 using the vdefs (see visit_reference_op_store for how and why),
2029 it's possible that on failure we may try to insert an already
2030 inserted store. This is not wrong, there is no ssa name for a
2031 store that we could use as a differentiator anyway. Thus, unlike
2032 the other lookup functions, you cannot gcc_assert (!*slot)
2033 here. */
2034
2035 /* But free the old slot in case of a collision. */
2036 if (*slot)
2037 free_reference (*slot);
2038
2039 *slot = vr1;
2040 return vr1;
2041 }
2042
2043 /* Insert a reference by its pieces into the current hash table with
2044 a value number of RESULT. Return the resulting reference
2045 structure we created. */
2046
2047 vn_reference_t
2048 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2049 vec<vn_reference_op_s> operands,
2050 tree result, unsigned int value_id)
2051
2052 {
2053 void **slot;
2054 vn_reference_t vr1;
2055
2056 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2057 vr1->value_id = value_id;
2058 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2059 vr1->operands = valueize_refs (operands);
2060 vr1->type = type;
2061 vr1->set = set;
2062 vr1->hashcode = vn_reference_compute_hash (vr1);
2063 if (result && TREE_CODE (result) == SSA_NAME)
2064 result = SSA_VAL (result);
2065 vr1->result = result;
2066
2067 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2068 INSERT);
2069
2070 /* At this point we should have all the things inserted that we have
2071 seen before, and we should never try inserting something that
2072 already exists. */
2073 gcc_assert (!*slot);
2074 if (*slot)
2075 free_reference (*slot);
2076
2077 *slot = vr1;
2078 return vr1;
2079 }
2080
2081 /* Compute and return the hash value for nary operation VNO1. */
2082
2083 hashval_t
2084 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2085 {
2086 hashval_t hash;
2087 unsigned i;
2088
2089 for (i = 0; i < vno1->length; ++i)
2090 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2091 vno1->op[i] = SSA_VAL (vno1->op[i]);
2092
2093 if (vno1->length == 2
2094 && commutative_tree_code (vno1->opcode)
2095 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2096 {
2097 tree temp = vno1->op[0];
2098 vno1->op[0] = vno1->op[1];
2099 vno1->op[1] = temp;
2100 }
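/* Operands of commutative codes are now in a canonical order, so
e.g. a_1 + b_2 and b_2 + a_1 hash (and later compare) the same. */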
2101
2102 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2103 for (i = 0; i < vno1->length; ++i)
2104 hash = iterative_hash_expr (vno1->op[i], hash);
2105
2106 return hash;
2107 }
2108
2109 /* Return the computed hashcode for nary operation P1. */
2110
2111 static hashval_t
2112 vn_nary_op_hash (const void *p1)
2113 {
2114 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2115 return vno1->hashcode;
2116 }
2117
2118 /* Compare nary operations P1 and P2 and return true if they are
2119 equivalent. */
2120
2121 int
2122 vn_nary_op_eq (const void *p1, const void *p2)
2123 {
2124 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2125 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2126 unsigned i;
2127
2128 if (vno1->hashcode != vno2->hashcode)
2129 return false;
2130
2131 if (vno1->length != vno2->length)
2132 return false;
2133
2134 if (vno1->opcode != vno2->opcode
2135 || !types_compatible_p (vno1->type, vno2->type))
2136 return false;
2137
2138 for (i = 0; i < vno1->length; ++i)
2139 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2140 return false;
2141
2142 return true;
2143 }
2144
2145 /* Initialize VNO from the pieces provided. */
2146
2147 static void
2148 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2149 enum tree_code code, tree type, tree *ops)
2150 {
2151 vno->opcode = code;
2152 vno->length = length;
2153 vno->type = type;
2154 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2155 }
2156
2157 /* Initialize VNO from OP. */
2158
2159 static void
2160 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2161 {
2162 unsigned i;
2163
2164 vno->opcode = TREE_CODE (op);
2165 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2166 vno->type = TREE_TYPE (op);
2167 for (i = 0; i < vno->length; ++i)
2168 vno->op[i] = TREE_OPERAND (op, i);
2169 }
2170
2171 /* Return the number of operands for a vn_nary ops structure from STMT. */
2172
2173 static unsigned int
2174 vn_nary_length_from_stmt (gimple stmt)
2175 {
2176 switch (gimple_assign_rhs_code (stmt))
2177 {
2178 case REALPART_EXPR:
2179 case IMAGPART_EXPR:
2180 case VIEW_CONVERT_EXPR:
2181 return 1;
2182
2183 case BIT_FIELD_REF:
2184 return 3;
2185
2186 case CONSTRUCTOR:
2187 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2188
2189 default:
2190 return gimple_num_ops (stmt) - 1;
2191 }
2192 }
2193
2194 /* Initialize VNO from STMT. */
2195
2196 static void
2197 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2198 {
2199 unsigned i;
2200
2201 vno->opcode = gimple_assign_rhs_code (stmt);
2202 vno->type = gimple_expr_type (stmt);
2203 switch (vno->opcode)
2204 {
2205 case REALPART_EXPR:
2206 case IMAGPART_EXPR:
2207 case VIEW_CONVERT_EXPR:
2208 vno->length = 1;
2209 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2210 break;
2211
2212 case BIT_FIELD_REF:
2213 vno->length = 3;
2214 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2215 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2216 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2217 break;
2218
2219 case CONSTRUCTOR:
2220 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2221 for (i = 0; i < vno->length; ++i)
2222 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2223 break;
2224
2225 default:
2226 gcc_checking_assert (!gimple_assign_single_p (stmt));
2227 vno->length = gimple_num_ops (stmt) - 1;
2228 for (i = 0; i < vno->length; ++i)
2229 vno->op[i] = gimple_op (stmt, i + 1);
2230 }
2231 }
2232
2233 /* Compute the hashcode for VNO and look for it in the hash table;
2234 return the resulting value number if it exists in the hash table.
2235 Return NULL_TREE if it does not exist in the hash table or if the
2236 result field of the operation is NULL. VNRESULT will contain the
2237 vn_nary_op_t from the hashtable if it exists. */
2238
2239 static tree
2240 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2241 {
2242 void **slot;
2243
2244 if (vnresult)
2245 *vnresult = NULL;
2246
2247 vno->hashcode = vn_nary_op_compute_hash (vno);
2248 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2249 NO_INSERT);
2250 if (!slot && current_info == optimistic_info)
2251 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2252 NO_INSERT);
2253 if (!slot)
2254 return NULL_TREE;
2255 if (vnresult)
2256 *vnresult = (vn_nary_op_t)*slot;
2257 return ((vn_nary_op_t)*slot)->result;
2258 }
2259
2260 /* Lookup an n-ary operation by its pieces and return the resulting value
2261 number if it exists in the hash table. Return NULL_TREE if it does
2262 not exist in the hash table or if the result field of the operation
2263 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2264 if it exists. */
2265
2266 tree
2267 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2268 tree type, tree *ops, vn_nary_op_t *vnresult)
2269 {
2270 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2271 sizeof_vn_nary_op (length));
2272 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2273 return vn_nary_op_lookup_1 (vno1, vnresult);
2274 }
2275
2276 /* Lookup OP in the current hash table, and return the resulting value
2277 number if it exists in the hash table. Return NULL_TREE if it does
2278 not exist in the hash table or if the result field of the operation
2279 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2280 if it exists. */
2281
2282 tree
2283 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2284 {
2285 vn_nary_op_t vno1
2286 = XALLOCAVAR (struct vn_nary_op_s,
2287 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2288 init_vn_nary_op_from_op (vno1, op);
2289 return vn_nary_op_lookup_1 (vno1, vnresult);
2290 }
2291
2292 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2293 value number if it exists in the hash table. Return NULL_TREE if
2294 it does not exist in the hash table. VNRESULT will contain the
2295 vn_nary_op_t from the hashtable if it exists. */
2296
2297 tree
2298 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2299 {
2300 vn_nary_op_t vno1
2301 = XALLOCAVAR (struct vn_nary_op_s,
2302 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2303 init_vn_nary_op_from_stmt (vno1, stmt);
2304 return vn_nary_op_lookup_1 (vno1, vnresult);
2305 }
2306
2307 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2308
2309 static vn_nary_op_t
2310 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2311 {
2312 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2313 }
2314
2315 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2316 obstack. */
2317
2318 static vn_nary_op_t
2319 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2320 {
2321 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2322 &current_info->nary_obstack);
2323
2324 vno1->value_id = value_id;
2325 vno1->length = length;
2326 vno1->result = result;
2327
2328 return vno1;
2329 }
2330
2331 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2332 VNO->HASHCODE first. */
2333
2334 static vn_nary_op_t
2335 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2336 {
2337 void **slot;
2338
2339 if (compute_hash)
2340 vno->hashcode = vn_nary_op_compute_hash (vno);
2341
2342 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2343 gcc_assert (!*slot);
2344
2345 *slot = vno;
2346 return vno;
2347 }
2348
2349 /* Insert an n-ary operation into the current hash table using its
2350 pieces. Return the vn_nary_op_t structure we created and put in
2351 the hashtable. */
2352
2353 vn_nary_op_t
2354 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2355 tree type, tree *ops,
2356 tree result, unsigned int value_id)
2357 {
2358 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2359 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2360 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2361 }
2362
2363 /* Insert OP into the current hash table with a value number of
2364 RESULT. Return the vn_nary_op_t structure we created and put in
2365 the hashtable. */
2366
2367 vn_nary_op_t
2368 vn_nary_op_insert (tree op, tree result)
2369 {
2370 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2371 vn_nary_op_t vno1;
2372
2373 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2374 init_vn_nary_op_from_op (vno1, op);
2375 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2376 }
2377
2378 /* Insert the rhs of STMT into the current hash table with a value number of
2379 RESULT. */
2380
2381 vn_nary_op_t
2382 vn_nary_op_insert_stmt (gimple stmt, tree result)
2383 {
2384 vn_nary_op_t vno1
2385 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2386 result, VN_INFO (result)->value_id);
2387 init_vn_nary_op_from_stmt (vno1, stmt);
2388 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2389 }
2390
2391 /* Compute a hashcode for PHI operation VP1 and return it. */
2392
2393 static inline hashval_t
2394 vn_phi_compute_hash (vn_phi_t vp1)
2395 {
2396 hashval_t result;
2397 int i;
2398 tree phi1op;
2399 tree type;
2400
2401 result = vp1->block->index;
2402
2403 /* If all PHI arguments are constants we need to distinguish
2404 the PHI node via its type. */
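/* E.g. two all-constant PHIs <1(2), 2(3)> of type int and of type
unsigned int must not end up with the same hash, so precision and
signedness feed into it. */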
2405 type = TREE_TYPE (vp1->phiargs[0]);
2406 result += (INTEGRAL_TYPE_P (type)
2407 + (INTEGRAL_TYPE_P (type)
2408 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2409
2410 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2411 {
2412 if (phi1op == VN_TOP)
2413 continue;
2414 result = iterative_hash_expr (phi1op, result);
2415 }
2416
2417 return result;
2418 }
2419
2420 /* Return the computed hashcode for phi operation P1. */
2421
2422 static hashval_t
2423 vn_phi_hash (const void *p1)
2424 {
2425 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2426 return vp1->hashcode;
2427 }
2428
2429 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2430
2431 static int
2432 vn_phi_eq (const void *p1, const void *p2)
2433 {
2434 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2435 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2436
2437 if (vp1->hashcode != vp2->hashcode)
2438 return false;
2439
2440 if (vp1->block == vp2->block)
2441 {
2442 int i;
2443 tree phi1op;
2444
2445 /* If the PHI nodes do not have compatible types
2446 they are not the same. */
2447 if (!types_compatible_p (TREE_TYPE (vp1->phiargs[0]),
2448 TREE_TYPE (vp2->phiargs[0])))
2449 return false;
2450
2451 /* Any phi in the same block will have its arguments in the
2452 same edge order, because of how we store phi nodes. */
2453 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2454 {
2455 tree phi2op = vp2->phiargs[i];
2456 if (phi1op == VN_TOP || phi2op == VN_TOP)
2457 continue;
2458 if (!expressions_equal_p (phi1op, phi2op))
2459 return false;
2460 }
2461 return true;
2462 }
2463 return false;
2464 }
2465
2466 static vec<tree> shared_lookup_phiargs;
2467
2468 /* Lookup PHI in the current hash table, and return the resulting
2469 value number if it exists in the hash table. Return NULL_TREE if
2470 it does not exist in the hash table. */
2471
2472 static tree
2473 vn_phi_lookup (gimple phi)
2474 {
2475 void **slot;
2476 struct vn_phi_s vp1;
2477 unsigned i;
2478
2479 shared_lookup_phiargs.truncate (0);
2480
2481 /* Canonicalize the SSA_NAME's to their value number. */
2482 for (i = 0; i < gimple_phi_num_args (phi); i++)
2483 {
2484 tree def = PHI_ARG_DEF (phi, i);
2485 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2486 shared_lookup_phiargs.safe_push (def);
2487 }
2488 vp1.phiargs = shared_lookup_phiargs;
2489 vp1.block = gimple_bb (phi);
2490 vp1.hashcode = vn_phi_compute_hash (&vp1);
2491 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2492 NO_INSERT);
2493 if (!slot && current_info == optimistic_info)
2494 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2495 NO_INSERT);
2496 if (!slot)
2497 return NULL_TREE;
2498 return ((vn_phi_t)*slot)->result;
2499 }
2500
2501 /* Insert PHI into the current hash table with a value number of
2502 RESULT. */
2503
2504 static vn_phi_t
2505 vn_phi_insert (gimple phi, tree result)
2506 {
2507 void **slot;
2508 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2509 unsigned i;
2510 vec<tree> args = vNULL;
2511
2512 /* Canonicalize the SSA_NAME's to their value number. */
2513 for (i = 0; i < gimple_phi_num_args (phi); i++)
2514 {
2515 tree def = PHI_ARG_DEF (phi, i);
2516 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2517 args.safe_push (def);
2518 }
2519 vp1->value_id = VN_INFO (result)->value_id;
2520 vp1->phiargs = args;
2521 vp1->block = gimple_bb (phi);
2522 vp1->result = result;
2523 vp1->hashcode = vn_phi_compute_hash (vp1);
2524
2525 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2526 INSERT);
2527
2528 /* Because we iterate over phi operations more than once, it's
2529 possible the slot might already exist here, hence no assert. */
2530 *slot = vp1;
2531 return vp1;
2532 }
2533
2534
2535 /* Print set of components in strongly connected component SCC to OUT. */
2536
2537 static void
2538 print_scc (FILE *out, vec<tree> scc)
2539 {
2540 tree var;
2541 unsigned int i;
2542
2543 fprintf (out, "SCC consists of:");
2544 FOR_EACH_VEC_ELT (scc, i, var)
2545 {
2546 fprintf (out, " ");
2547 print_generic_expr (out, var, 0);
2548 }
2549 fprintf (out, "\n");
2550 }
2551
2552 /* Set the value number of FROM to TO, return true if it has changed
2553 as a result. */
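/* E.g. during optimistic iteration a name may first be valued to a
constant and later to itself (VARYING); the return value tells the
SCC walk whether another iteration is needed. */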
2554
2555 static inline bool
2556 set_ssa_val_to (tree from, tree to)
2557 {
2558 tree currval = SSA_VAL (from);
2559
2560 if (from != to)
2561 {
2562 if (currval == from)
2563 {
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2565 {
2566 fprintf (dump_file, "Not changing value number of ");
2567 print_generic_expr (dump_file, from, 0);
2568 fprintf (dump_file, " from VARYING to ");
2569 print_generic_expr (dump_file, to, 0);
2570 fprintf (dump_file, "\n");
2571 }
2572 return false;
2573 }
2574 else if (TREE_CODE (to) == SSA_NAME
2575 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2576 to = from;
2577 }
2578
2579 /* The only things we allow as value numbers are VN_TOP, SSA names
2580 and invariants. So assert that here. */
2581 gcc_assert (to != NULL_TREE
2582 && (to == VN_TOP
2583 || TREE_CODE (to) == SSA_NAME
2584 || is_gimple_min_invariant (to)));
2585
2586 if (dump_file && (dump_flags & TDF_DETAILS))
2587 {
2588 fprintf (dump_file, "Setting value number of ");
2589 print_generic_expr (dump_file, from, 0);
2590 fprintf (dump_file, " to ");
2591 print_generic_expr (dump_file, to, 0);
2592 }
2593
2594 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2595 {
2596 VN_INFO (from)->valnum = to;
2597 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, " (changed)\n");
2599 return true;
2600 }
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2602 fprintf (dump_file, "\n");
2603 return false;
2604 }
2605
2606 /* Mark as processed all the definitions in the defining stmt of USE, or
2607 the USE itself. */
2608
2609 static void
2610 mark_use_processed (tree use)
2611 {
2612 ssa_op_iter iter;
2613 def_operand_p defp;
2614 gimple stmt = SSA_NAME_DEF_STMT (use);
2615
2616 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2617 {
2618 VN_INFO (use)->use_processed = true;
2619 return;
2620 }
2621
2622 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2623 {
2624 tree def = DEF_FROM_PTR (defp);
2625
2626 VN_INFO (def)->use_processed = true;
2627 }
2628 }
2629
2630 /* Value number all definitions in STMT to themselves.
2631 Return true if a value number changed. */
2632
2633 static bool
2634 defs_to_varying (gimple stmt)
2635 {
2636 bool changed = false;
2637 ssa_op_iter iter;
2638 def_operand_p defp;
2639
2640 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2641 {
2642 tree def = DEF_FROM_PTR (defp);
2643 changed |= set_ssa_val_to (def, def);
2644 }
2645 return changed;
2646 }
2647
2648 static bool expr_has_constants (tree expr);
2649 static tree valueize_expr (tree expr);
2650
2651 /* Visit a copy between LHS and RHS, return true if the value number
2652 changed. */
2653
2654 static bool
2655 visit_copy (tree lhs, tree rhs)
2656 {
2657 /* Follow chains of copies to their destination. */
2658 while (TREE_CODE (rhs) == SSA_NAME
2659 && SSA_VAL (rhs) != rhs)
2660 rhs = SSA_VAL (rhs);
2661
2662 /* The copied-from name may have a more interesting constant-filled
2663 expression than this plain SSA name copy, so inherit its info. */
2664 if (TREE_CODE (rhs) == SSA_NAME)
2665 {
2666 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2667 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2668 }
2669
2670 return set_ssa_val_to (lhs, rhs);
2671 }
2672
2673 /* Visit a nary operator RHS, value number it, and return true if the
2674 value number of LHS has changed as a result. */
2675
2676 static bool
2677 visit_nary_op (tree lhs, gimple stmt)
2678 {
2679 bool changed = false;
2680 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2681
2682 if (result)
2683 changed = set_ssa_val_to (lhs, result);
2684 else
2685 {
2686 changed = set_ssa_val_to (lhs, lhs);
2687 vn_nary_op_insert_stmt (stmt, lhs);
2688 }
2689
2690 return changed;
2691 }
2692
2693 /* Visit a call STMT storing into LHS. Return true if the value number
2694 of the LHS has changed as a result. */
2695
2696 static bool
2697 visit_reference_op_call (tree lhs, gimple stmt)
2698 {
2699 bool changed = false;
2700 struct vn_reference_s vr1;
2701 vn_reference_t vnresult = NULL;
2702 tree vuse = gimple_vuse (stmt);
2703 tree vdef = gimple_vdef (stmt);
2704
2705 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2706 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2707 lhs = NULL_TREE;
2708
2709 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2710 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2711 vr1.type = gimple_expr_type (stmt);
2712 vr1.set = 0;
2713 vr1.hashcode = vn_reference_compute_hash (&vr1);
2714 vn_reference_lookup_1 (&vr1, &vnresult);
2715
2716 if (vnresult)
2717 {
2718 if (vnresult->result_vdef)
2719 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2720
2721 if (!vnresult->result && lhs)
2722 vnresult->result = lhs;
2723
2724 if (vnresult->result && lhs)
2725 {
2726 changed |= set_ssa_val_to (lhs, vnresult->result);
2727
2728 if (VN_INFO (vnresult->result)->has_constants)
2729 VN_INFO (lhs)->has_constants = true;
2730 }
2731 }
2732 else
2733 {
2734 void **slot;
2735 vn_reference_t vr2;
2736 if (vdef)
2737 changed |= set_ssa_val_to (vdef, vdef);
2738 if (lhs)
2739 changed |= set_ssa_val_to (lhs, lhs);
2740 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2741 vr2->vuse = vr1.vuse;
2742 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2743 vr2->type = vr1.type;
2744 vr2->set = vr1.set;
2745 vr2->hashcode = vr1.hashcode;
2746 vr2->result = lhs;
2747 vr2->result_vdef = vdef;
2748 slot = htab_find_slot_with_hash (current_info->references,
2749 vr2, vr2->hashcode, INSERT);
2750 if (*slot)
2751 free_reference (*slot);
2752 *slot = vr2;
2753 }
2754
2755 return changed;
2756 }
2757
2758 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2759 and return true if the value number of the LHS has changed as a result. */
2760
2761 static bool
2762 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2763 {
2764 bool changed = false;
2765 tree last_vuse;
2766 tree result;
2767
2768 last_vuse = gimple_vuse (stmt);
2769 last_vuse_ptr = &last_vuse;
2770 result = vn_reference_lookup (op, gimple_vuse (stmt),
2771 default_vn_walk_kind, NULL);
2772 last_vuse_ptr = NULL;
2773
2774 /* If we have a VCE, try looking up its operand as it might be stored in
2775 a different type. */
2776 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2777 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2778 default_vn_walk_kind, NULL);
2779
2780 /* We handle type-punning through unions by value-numbering based
2781 on offset and size of the access. Be prepared to handle a
2782 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
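/* E.g. a load of a float union member whose storage was last written
as a same-sized int finds the int value; the load then gets value
numbered as VIEW_CONVERT_EXPR <float> of that value. */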
2783 if (result
2784 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2785 {
2786 /* We will be setting the value number of lhs to the value number
2787 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2788 So first simplify and lookup this expression to see if it
2789 is already available. */
2790 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2791 if ((CONVERT_EXPR_P (val)
2792 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2793 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2794 {
2795 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2796 if ((CONVERT_EXPR_P (tem)
2797 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2798 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2799 TREE_TYPE (val), tem)))
2800 val = tem;
2801 }
2802 result = val;
2803 if (!is_gimple_min_invariant (val)
2804 && TREE_CODE (val) != SSA_NAME)
2805 result = vn_nary_op_lookup (val, NULL);
2806 /* If the expression is not yet available, value-number lhs to
2807 a new SSA_NAME we create. */
2808 if (!result)
2809 {
2810 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2811 "vntemp");
2812 /* Initialize value-number information properly. */
2813 VN_INFO_GET (result)->valnum = result;
2814 VN_INFO (result)->value_id = get_next_value_id ();
2815 VN_INFO (result)->expr = val;
2816 VN_INFO (result)->has_constants = expr_has_constants (val);
2817 VN_INFO (result)->needs_insertion = true;
2818 /* As all "inserted" statements are singleton SCCs, insert
2819 to the valid table. This is strictly needed to
2820 avoid re-generating new value SSA_NAMEs for the same
2821 expression during SCC iteration over and over (the
2822 optimistic table gets cleared after each iteration).
2823 We do not need to insert into the optimistic table, as
2824 lookups there will fall back to the valid table. */
2825 if (current_info == optimistic_info)
2826 {
2827 current_info = valid_info;
2828 vn_nary_op_insert (val, result);
2829 current_info = optimistic_info;
2830 }
2831 else
2832 vn_nary_op_insert (val, result);
2833 if (dump_file && (dump_flags & TDF_DETAILS))
2834 {
2835 fprintf (dump_file, "Inserting name ");
2836 print_generic_expr (dump_file, result, 0);
2837 fprintf (dump_file, " for expression ");
2838 print_generic_expr (dump_file, val, 0);
2839 fprintf (dump_file, "\n");
2840 }
2841 }
2842 }
2843
2844 if (result)
2845 {
2846 changed = set_ssa_val_to (lhs, result);
2847 if (TREE_CODE (result) == SSA_NAME
2848 && VN_INFO (result)->has_constants)
2849 {
2850 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2851 VN_INFO (lhs)->has_constants = true;
2852 }
2853 }
2854 else
2855 {
2856 changed = set_ssa_val_to (lhs, lhs);
2857 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2858 }
2859
2860 return changed;
2861 }
2862
2863
2864 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2865 and return true if the value number of the LHS has changed as a result. */
2866
2867 static bool
2868 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2869 {
2870 bool changed = false;
2871 vn_reference_t vnresult = NULL;
2872 tree result, assign;
2873 bool resultsame = false;
2874 tree vuse = gimple_vuse (stmt);
2875 tree vdef = gimple_vdef (stmt);
2876
2877 /* First we want to lookup using the *vuses* from the store and see
2878 if the last store to this location with the same address had
2879 the same value.
2880
2881 The vuses represent the memory state before the store. If the
2882 memory state, address, and value of the store are the same as those
2883 of the last store to this location, then this store will produce the
2884 same memory state as that store.
2885
2886 In this case the vdef versions for this store are value numbered to those
2887 vuse versions, since they represent the same memory state after
2888 this store.
2889
2890 Otherwise, the vdefs for the store are used when inserting into
2891 the table, since the store generates a new memory state. */
2892
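/* A concrete sketch: for
# .MEM_3 = VDEF <.MEM_2>
a = 1;
# .MEM_4 = VDEF <.MEM_3>
a = 1;
the second store finds the first one via .MEM_3, so we can value
number .MEM_4 to SSA_VAL (.MEM_3), making both memory states
equivalent. */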
2893 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2894
2895 if (result)
2896 {
2897 if (TREE_CODE (result) == SSA_NAME)
2898 result = SSA_VAL (result);
2899 if (TREE_CODE (op) == SSA_NAME)
2900 op = SSA_VAL (op);
2901 resultsame = expressions_equal_p (result, op);
2902 }
2903
2904 if (!result || !resultsame)
2905 {
2906 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2907 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2908 if (vnresult)
2909 {
2910 VN_INFO (vdef)->use_processed = true;
2911 return set_ssa_val_to (vdef, vnresult->result_vdef);
2912 }
2913 }
2914
2915 if (!result || !resultsame)
2916 {
2917 if (dump_file && (dump_flags & TDF_DETAILS))
2918 {
2919 fprintf (dump_file, "No store match\n");
2920 fprintf (dump_file, "Value numbering store ");
2921 print_generic_expr (dump_file, lhs, 0);
2922 fprintf (dump_file, " to ");
2923 print_generic_expr (dump_file, op, 0);
2924 fprintf (dump_file, "\n");
2925 }
2926 /* Have to set value numbers before insert, since insert is
2927 going to valueize the references in-place. */
2928 if (vdef)
2929 {
2930 changed |= set_ssa_val_to (vdef, vdef);
2931 }
2932
2933 /* Do not insert structure copies into the tables. */
2934 if (is_gimple_min_invariant (op)
2935 || is_gimple_reg (op))
2936 vn_reference_insert (lhs, op, vdef, NULL);
2937
2938 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2939 vn_reference_insert (assign, lhs, vuse, vdef);
2940 }
2941 else
2942 {
2943 /* We had a match, so value number the vdef to have the value
2944 number of the vuse it came from. */
2945
2946 if (dump_file && (dump_flags & TDF_DETAILS))
2947 fprintf (dump_file, "Store matched earlier value,"
2948 "value numbering store vdefs to matching vuses.\n");
2949
2950 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2951 }
2952
2953 return changed;
2954 }
2955
2956 /* Visit and value number PHI, return true if the value number
2957 changed. */
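/* E.g. x_3 = PHI <7(2), 7(3)> value numbers to the constant 7, and
x_4 = PHI <x_4(2), 5(3)> optimistically value numbers to 5 because
the self-referential argument is still VN_TOP when first visited. */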
2958
2959 static bool
2960 visit_phi (gimple phi)
2961 {
2962 bool changed = false;
2963 tree result;
2964 tree sameval = VN_TOP;
2965 bool allsame = true;
2966 unsigned i;
2967
2968 /* TODO: We could check for this in init_sccvn, and replace this
2969 with a gcc_assert. */
2970 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2971 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2972
2973 /* See if all non-TOP arguments have the same value. TOP is
2974 equivalent to everything, so we can ignore it. */
2975 for (i = 0; i < gimple_phi_num_args (phi); i++)
2976 {
2977 tree def = PHI_ARG_DEF (phi, i);
2978
2979 if (TREE_CODE (def) == SSA_NAME)
2980 def = SSA_VAL (def);
2981 if (def == VN_TOP)
2982 continue;
2983 if (sameval == VN_TOP)
2984 {
2985 sameval = def;
2986 }
2987 else
2988 {
2989 if (!expressions_equal_p (def, sameval))
2990 {
2991 allsame = false;
2992 break;
2993 }
2994 }
2995 }
2996
2997 /* If all arguments value numbered to the same value, the phi node
2998 value. */
2999 if (allsame)
3000 {
3001 if (is_gimple_min_invariant (sameval))
3002 {
3003 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3004 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3005 }
3006 else
3007 {
3008 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3009 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3010 }
3011
3012 if (TREE_CODE (sameval) == SSA_NAME)
3013 return visit_copy (PHI_RESULT (phi), sameval);
3014
3015 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3016 }
3017
3018 /* Otherwise, see if it is equivalent to a phi node in this block. */
3019 result = vn_phi_lookup (phi);
3020 if (result)
3021 {
3022 if (TREE_CODE (result) == SSA_NAME)
3023 changed = visit_copy (PHI_RESULT (phi), result);
3024 else
3025 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3026 }
3027 else
3028 {
3029 vn_phi_insert (phi, PHI_RESULT (phi));
3030 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3031 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3032 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3033 }
3034
3035 return changed;
3036 }
3037
3038 /* Return true if EXPR contains constants. */
3039
3040 static bool
3041 expr_has_constants (tree expr)
3042 {
3043 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3044 {
3045 case tcc_unary:
3046 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3047
3048 case tcc_binary:
3049 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3050 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3051 /* Constants inside reference ops are rarely interesting, but
3052 it can take a lot of looking to find them. */
3053 case tcc_reference:
3054 case tcc_declaration:
3055 return false;
3056 default:
3057 return is_gimple_min_invariant (expr);
3058 }
3059 return false;
3060 }
3061
3062 /* Return true if STMT contains constants. */
3063
3064 static bool
3065 stmt_has_constants (gimple stmt)
3066 {
3067 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3068 return false;
3069
3070 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3071 {
3072 case GIMPLE_UNARY_RHS:
3073 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3074
3075 case GIMPLE_BINARY_RHS:
3076 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3077 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3078 case GIMPLE_TERNARY_RHS:
3079 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3080 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3081 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3082 case GIMPLE_SINGLE_RHS:
3083 /* Constants inside reference ops are rarely interesting, but
3084 it can take a lot of looking to find them. */
3085 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3086 default:
3087 gcc_unreachable ();
3088 }
3089 return false;
3090 }
3091
3092 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
3093 result.
3094 This is performed in place. */
3095
3096 static tree
3097 valueize_expr (tree expr)
3098 {
3099 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3100 {
3101 case tcc_binary:
3102 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3103 /* Fallthru. */
3104 case tcc_unary:
3105 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3106 break;
3107 default:;
3108 }
3109 return expr;
3110 }
3111
3112 /* Simplify the binary expression on the rhs of STMT, and return the
3113 result if simplified. */
3114
3115 static tree
3116 simplify_binary_expression (gimple stmt)
3117 {
3118 tree result = NULL_TREE;
3119 tree op0 = gimple_assign_rhs1 (stmt);
3120 tree op1 = gimple_assign_rhs2 (stmt);
3121 enum tree_code code = gimple_assign_rhs_code (stmt);
3122
3123 /* This will not catch every single case we could combine, but will
3124 catch those with constants. The goal here is to simultaneously
3125 combine constants between expressions, but avoid infinite
3126 expansion of expressions during simplification. */
3127 if (TREE_CODE (op0) == SSA_NAME)
3128 {
3129 if (VN_INFO (op0)->has_constants
3130 || TREE_CODE_CLASS (code) == tcc_comparison
3131 || code == COMPLEX_EXPR)
3132 op0 = valueize_expr (vn_get_expr_for (op0));
3133 else
3134 op0 = vn_valueize (op0);
3135 }
3136
3137 if (TREE_CODE (op1) == SSA_NAME)
3138 {
3139 if (VN_INFO (op1)->has_constants
3140 || code == COMPLEX_EXPR)
3141 op1 = valueize_expr (vn_get_expr_for (op1));
3142 else
3143 op1 = vn_valueize (op1);
3144 }
3145
3146 /* Pointer plus constant can be represented as invariant address.
3147 Do so to allow further propagation, see also tree forwprop. */
3148 if (code == POINTER_PLUS_EXPR
3149 && host_integerp (op1, 1)
3150 && TREE_CODE (op0) == ADDR_EXPR
3151 && is_gimple_min_invariant (op0))
3152 return build_invariant_address (TREE_TYPE (op0),
3153 TREE_OPERAND (op0, 0),
3154 TREE_INT_CST_LOW (op1));
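/* E.g. with op0 == &a_decl and op1 == 4 this yields the invariant
&MEM[&a_decl + 4], which can propagate into dereferences. */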
3155
3156 /* Avoid folding if nothing changed. */
3157 if (op0 == gimple_assign_rhs1 (stmt)
3158 && op1 == gimple_assign_rhs2 (stmt))
3159 return NULL_TREE;
3160
3161 fold_defer_overflow_warnings ();
3162
3163 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3164 if (result)
3165 STRIP_USELESS_TYPE_CONVERSION (result);
3166
3167 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3168 stmt, 0);
3169
3170 /* Make sure result is not a complex expression consisting
3171 of operators of operators (i.e. (a + b) + (a + c)).
3172 Otherwise, we will end up with unbounded expressions if
3173 fold does anything at all. */
3174 if (result && valid_gimple_rhs_p (result))
3175 return result;
3176
3177 return NULL_TREE;
3178 }
3179
3180 /* Simplify the unary expression on the rhs of STMT, and return the
3181 result if simplified. */
3182
3183 static tree
3184 simplify_unary_expression (gimple stmt)
3185 {
3186 tree result = NULL_TREE;
3187 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3188 enum tree_code code = gimple_assign_rhs_code (stmt);
3189
3190 /* We handle some tcc_reference codes here that are all
3191 GIMPLE_ASSIGN_SINGLE codes. */
3192 if (code == REALPART_EXPR
3193 || code == IMAGPART_EXPR
3194 || code == VIEW_CONVERT_EXPR
3195 || code == BIT_FIELD_REF)
3196 op0 = TREE_OPERAND (op0, 0);
3197
3198 if (TREE_CODE (op0) != SSA_NAME)
3199 return NULL_TREE;
3200
3201 orig_op0 = op0;
3202 if (VN_INFO (op0)->has_constants)
3203 op0 = valueize_expr (vn_get_expr_for (op0));
3204 else if (CONVERT_EXPR_CODE_P (code)
3205 || code == REALPART_EXPR
3206 || code == IMAGPART_EXPR
3207 || code == VIEW_CONVERT_EXPR
3208 || code == BIT_FIELD_REF)
3209 {
3210 /* We want to do tree-combining on conversion-like expressions.
3211 Make sure we feed only SSA_NAMEs or constants to fold though. */
3212 tree tem = valueize_expr (vn_get_expr_for (op0));
3213 if (UNARY_CLASS_P (tem)
3214 || BINARY_CLASS_P (tem)
3215 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3216 || TREE_CODE (tem) == SSA_NAME
3217 || TREE_CODE (tem) == CONSTRUCTOR
3218 || is_gimple_min_invariant (tem))
3219 op0 = tem;
3220 }
3221
3222 /* Avoid folding if nothing changed. */
3223 if (op0 == orig_op0)
3224 return NULL_TREE;
3225
3226 if (code == BIT_FIELD_REF)
3227 {
3228 tree rhs = gimple_assign_rhs1 (stmt);
3229 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3230 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3231 }
3232 else
3233 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3234 if (result)
3235 {
3236 STRIP_USELESS_TYPE_CONVERSION (result);
3237 if (valid_gimple_rhs_p (result))
3238 return result;
3239 }
3240
3241 return NULL_TREE;
3242 }
3243
3244 /* Try to simplify the rhs of STMT using equivalences and constant folding. */
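/* E.g. when the lattice already has SSA_VAL (a_1) == 3, the statement
x_2 = a_1 + 1 constant folds to 4 here without touching the IL. */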
3245
3246 static tree
3247 try_to_simplify (gimple stmt)
3248 {
3249 enum tree_code code = gimple_assign_rhs_code (stmt);
3250 tree tem;
3251
3252 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3253 in this case, there is no point in doing extra work. */
3254 if (code == SSA_NAME)
3255 return NULL_TREE;
3256
3257 /* First try constant folding based on our current lattice. */
3258 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3259 if (tem
3260 && (TREE_CODE (tem) == SSA_NAME
3261 || is_gimple_min_invariant (tem)))
3262 return tem;
3263
3264 /* If that didn't work try combining multiple statements. */
3265 switch (TREE_CODE_CLASS (code))
3266 {
3267 case tcc_reference:
3268 /* Fallthrough for some unary codes that can operate on registers. */
3269 if (!(code == REALPART_EXPR
3270 || code == IMAGPART_EXPR
3271 || code == VIEW_CONVERT_EXPR
3272 || code == BIT_FIELD_REF))
3273 break;
3274 /* We could do a little more with unary ops, if they expand
3275 into binary ops, but it's debatable whether it is worth it. */
3276 case tcc_unary:
3277 return simplify_unary_expression (stmt);
3278
3279 case tcc_comparison:
3280 case tcc_binary:
3281 return simplify_binary_expression (stmt);
3282
3283 default:
3284 break;
3285 }
3286
3287 return NULL_TREE;
3288 }
3289
3290 /* Visit and value number USE, return true if the value number
3291 changed. */
3292
3293 static bool
3294 visit_use (tree use)
3295 {
3296 bool changed = false;
3297 gimple stmt = SSA_NAME_DEF_STMT (use);
3298
3299 mark_use_processed (use);
3300
3301 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3302 if (dump_file && (dump_flags & TDF_DETAILS)
3303 && !SSA_NAME_IS_DEFAULT_DEF (use))
3304 {
3305 fprintf (dump_file, "Value numbering ");
3306 print_generic_expr (dump_file, use, 0);
3307 fprintf (dump_file, " stmt = ");
3308 print_gimple_stmt (dump_file, stmt, 0, 0);
3309 }
3310
3311 /* Handle uninitialized uses. */
3312 if (SSA_NAME_IS_DEFAULT_DEF (use))
3313 changed = set_ssa_val_to (use, use);
3314 else
3315 {
3316 if (gimple_code (stmt) == GIMPLE_PHI)
3317 changed = visit_phi (stmt);
3318 else if (gimple_has_volatile_ops (stmt))
3319 changed = defs_to_varying (stmt);
3320 else if (is_gimple_assign (stmt))
3321 {
3322 enum tree_code code = gimple_assign_rhs_code (stmt);
3323 tree lhs = gimple_assign_lhs (stmt);
3324 tree rhs1 = gimple_assign_rhs1 (stmt);
3325 tree simplified;
3326
3327 /* Shortcut for copies. Simplifying copies is pointless,
3328 since we copy the expression and value they represent. */
3329 if (code == SSA_NAME
3330 && TREE_CODE (lhs) == SSA_NAME)
3331 {
3332 changed = visit_copy (lhs, rhs1);
3333 goto done;
3334 }
3335 simplified = try_to_simplify (stmt);
3336 if (simplified)
3337 {
3338 if (dump_file && (dump_flags & TDF_DETAILS))
3339 {
3340 fprintf (dump_file, "RHS ");
3341 print_gimple_expr (dump_file, stmt, 0, 0);
3342 fprintf (dump_file, " simplified to ");
3343 print_generic_expr (dump_file, simplified, 0);
3344 if (TREE_CODE (lhs) == SSA_NAME)
3345 fprintf (dump_file, " has constants %d\n",
3346 expr_has_constants (simplified));
3347 else
3348 fprintf (dump_file, "\n");
3349 }
3350 }
3351 /* Setting value numbers to constants will occasionally
3352 screw up phi congruence because constants are not
3353 uniquely associated with a single ssa name that can be
3354 looked up. */
3355 if (simplified
3356 && is_gimple_min_invariant (simplified)
3357 && TREE_CODE (lhs) == SSA_NAME)
3358 {
3359 VN_INFO (lhs)->expr = simplified;
3360 VN_INFO (lhs)->has_constants = true;
3361 changed = set_ssa_val_to (lhs, simplified);
3362 goto done;
3363 }
3364 else if (simplified
3365 && TREE_CODE (simplified) == SSA_NAME
3366 && TREE_CODE (lhs) == SSA_NAME)
3367 {
3368 changed = visit_copy (lhs, simplified);
3369 goto done;
3370 }
3371 else if (simplified)
3372 {
3373 if (TREE_CODE (lhs) == SSA_NAME)
3374 {
3375 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3376 /* We have to unshare the expression or else
3377 valueizing may change the IL stream. */
3378 VN_INFO (lhs)->expr = unshare_expr (simplified);
3379 }
3380 }
3381 else if (stmt_has_constants (stmt)
3382 && TREE_CODE (lhs) == SSA_NAME)
3383 VN_INFO (lhs)->has_constants = true;
3384 else if (TREE_CODE (lhs) == SSA_NAME)
3385 {
3386 /* We reset expr and constantness here because we may
3387 have been value numbering optimistically, and
3388 iterating. They may become non-constant in this case,
3389 even if they were optimistically constant. */
3390
3391 VN_INFO (lhs)->has_constants = false;
3392 VN_INFO (lhs)->expr = NULL_TREE;
3393 }
3394
3395 if ((TREE_CODE (lhs) == SSA_NAME
3396 /* We can substitute SSA_NAMEs that are live over
3397 abnormal edges with their constant value. */
3398 && !(gimple_assign_copy_p (stmt)
3399 && is_gimple_min_invariant (rhs1))
3400 && !(simplified
3401 && is_gimple_min_invariant (simplified))
3402 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3403 /* Stores or copies from SSA_NAMEs that are live over
3404 abnormal edges are a problem. */
3405 || (code == SSA_NAME
3406 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3407 changed = defs_to_varying (stmt);
3408 else if (REFERENCE_CLASS_P (lhs)
3409 || DECL_P (lhs))
3410 changed = visit_reference_op_store (lhs, rhs1, stmt);
3411 else if (TREE_CODE (lhs) == SSA_NAME)
3412 {
3413 if ((gimple_assign_copy_p (stmt)
3414 && is_gimple_min_invariant (rhs1))
3415 || (simplified
3416 && is_gimple_min_invariant (simplified)))
3417 {
3418 VN_INFO (lhs)->has_constants = true;
3419 if (simplified)
3420 changed = set_ssa_val_to (lhs, simplified);
3421 else
3422 changed = set_ssa_val_to (lhs, rhs1);
3423 }
3424 else
3425 {
3426 switch (vn_get_stmt_kind (stmt))
3427 {
3428 case VN_NARY:
3429 changed = visit_nary_op (lhs, stmt);
3430 break;
3431 case VN_REFERENCE:
3432 changed = visit_reference_op_load (lhs, rhs1, stmt);
3433 break;
3434 default:
3435 changed = defs_to_varying (stmt);
3436 break;
3437 }
3438 }
3439 }
3440 else
3441 changed = defs_to_varying (stmt);
3442 }
3443 else if (is_gimple_call (stmt))
3444 {
3445 tree lhs = gimple_call_lhs (stmt);
3446
3447 /* ??? We could try to simplify calls. */
3448
3449 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3450 {
3451 if (stmt_has_constants (stmt))
3452 VN_INFO (lhs)->has_constants = true;
3453 else
3454 {
3455 /* We reset expr and constantness here because we may
3456 have been value numbering optimistically, and
3457 iterating. They may become non-constant in this case,
3458 even if they were optimistically constant. */
3459 VN_INFO (lhs)->has_constants = false;
3460 VN_INFO (lhs)->expr = NULL_TREE;
3461 }
3462
3463 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3464 {
3465 changed = defs_to_varying (stmt);
3466 goto done;
3467 }
3468 }
3469
3470 if (!gimple_call_internal_p (stmt)
3471 && (/* Calls to the same function with the same vuse
3472 and the same operands do not necessarily return the same
3473 value, unless they're pure or const. */
3474 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3475 /* If calls have a vdef, subsequent calls won't have
3476 the same incoming vuse. So, if 2 calls with vdef have the
3477 same vuse, we know they're not subsequent.
3478 We can value number 2 calls to the same function with the
3479 same vuse and the same operands which are not subsequent
3480 the same, because there is no code in the program that can
3481 compare the 2 values. */
3482 || gimple_vdef (stmt)))
3483 changed = visit_reference_op_call (lhs, stmt);
3484 else
3485 changed = defs_to_varying (stmt);
3486 }
3487 else
3488 changed = defs_to_varying (stmt);
3489 }
3490 done:
3491 return changed;
3492 }
3493
3494 /* Compare two operands by reverse postorder index. */
3495
3496 static int
3497 compare_ops (const void *pa, const void *pb)
3498 {
3499 const tree opa = *((const tree *)pa);
3500 const tree opb = *((const tree *)pb);
3501 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3502 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3503 basic_block bba;
3504 basic_block bbb;
3505
3506 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3507 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3508 else if (gimple_nop_p (opstmta))
3509 return -1;
3510 else if (gimple_nop_p (opstmtb))
3511 return 1;
3512
3513 bba = gimple_bb (opstmta);
3514 bbb = gimple_bb (opstmtb);
3515
3516 if (!bba && !bbb)
3517 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3518 else if (!bba)
3519 return -1;
3520 else if (!bbb)
3521 return 1;
3522
3523 if (bba == bbb)
3524 {
3525 if (gimple_code (opstmta) == GIMPLE_PHI
3526 && gimple_code (opstmtb) == GIMPLE_PHI)
3527 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3528 else if (gimple_code (opstmta) == GIMPLE_PHI)
3529 return -1;
3530 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3531 return 1;
3532 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3533 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3534 else
3535 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3536 }
3537 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3538 }
3539
3540 /* Sort an array containing members of a strongly connected component
3541 SCC so that the members are ordered by RPO number.
3542 This means that when the sort is complete, iterating through the
3543 array will give you the members in RPO order. */
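/* E.g. for an SCC {i_3, i_1} where i_1 is defined by a PHI in the
loop header and i_3 by the increment in the latch, sorting visits
i_1 first, so defs tend to be visited before their uses. */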
3544
3545 static void
3546 sort_scc (vec<tree> scc)
3547 {
3548 scc.qsort (compare_ops);
3549 }
3550
3551 /* Insert the no longer used nary ONARY into the nary hash table of INFO. */
3552
3553 static void
3554 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3555 {
3556 size_t size = sizeof_vn_nary_op (onary->length);
3557 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3558 &info->nary_obstack);
3559 memcpy (nary, onary, size);
3560 vn_nary_op_insert_into (nary, info->nary, false);
3561 }
3562
3563 /* Insert the no longer used phi OPHI into the phi hash table of INFO. */
3564
3565 static void
3566 copy_phi (vn_phi_t ophi, vn_tables_t info)
3567 {
3568 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3569 void **slot;
3570 memcpy (phi, ophi, sizeof (*phi));
3571 ophi->phiargs.create (0);
3572 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3573 gcc_assert (!*slot);
3574 *slot = phi;
3575 }
3576
3577 /* Insert the no longer used reference OREF into the reference hash table of INFO. */
3578
3579 static void
3580 copy_reference (vn_reference_t oref, vn_tables_t info)
3581 {
3582 vn_reference_t ref;
3583 void **slot;
3584 ref = (vn_reference_t) pool_alloc (info->references_pool);
3585 memcpy (ref, oref, sizeof (*ref));
3586 oref->operands.create (0);
3587 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3588 INSERT);
3589 if (*slot)
3590 free_reference (*slot);
3591 *slot = ref;
3592 }
3593
3594 /* Process a strongly connected component in the SSA graph. */
3595
3596 static void
3597 process_scc (vec<tree> scc)
3598 {
3599 tree var;
3600 unsigned int i;
3601 unsigned int iterations = 0;
3602 bool changed = true;
3603 htab_iterator hi;
3604 vn_nary_op_t nary;
3605 vn_phi_t phi;
3606 vn_reference_t ref;
3607
3608 /* If the SCC has a single member, just visit it. */
3609 if (scc.length () == 1)
3610 {
3611 tree use = scc[0];
3612 if (VN_INFO (use)->use_processed)
3613 return;
3614 /* We need to make sure it doesn't form a cycle itself, which can
3615 happen for self-referential PHI nodes. In that case we would
3616 end up inserting an expression with VN_TOP operands into the
3617 valid table which makes us derive bogus equivalences later.
3618 The cheapest way to check this is to assume it for all PHI nodes. */
3619 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3620 /* Fallthru to iteration. */ ;
3621 else
3622 {
3623 visit_use (use);
3624 return;
3625 }
3626 }
3627
3628 /* Iterate over the SCC with the optimistic table until it stops
3629 changing. */
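/* Sketch: for i_1 = PHI <0(pre), i_2(latch)>; i_2 = i_1 + 0; the
first iteration optimistically derives i_1 == 0 and i_2 == 0, and
the second iteration changes nothing, so we converge with both
names valued to 0. */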
3630 current_info = optimistic_info;
3631 while (changed)
3632 {
3633 changed = false;
3634 iterations++;
3635 if (dump_file && (dump_flags & TDF_DETAILS))
3636 fprintf (dump_file, "Starting iteration %d\n", iterations);
3637 /* As we are value-numbering optimistically we have to
3638 clear the expression tables and the simplified expressions
3639 in each iteration until we converge. */
3640 htab_empty (optimistic_info->nary);
3641 htab_empty (optimistic_info->phis);
3642 htab_empty (optimistic_info->references);
3643 obstack_free (&optimistic_info->nary_obstack, NULL);
3644 gcc_obstack_init (&optimistic_info->nary_obstack);
3645 empty_alloc_pool (optimistic_info->phis_pool);
3646 empty_alloc_pool (optimistic_info->references_pool);
3647 FOR_EACH_VEC_ELT (scc, i, var)
3648 VN_INFO (var)->expr = NULL_TREE;
3649 FOR_EACH_VEC_ELT (scc, i, var)
3650 changed |= visit_use (var);
3651 }
3652
3653 statistics_histogram_event (cfun, "SCC iterations", iterations);
3654
3655 /* Finally, copy the contents of the no longer used optimistic
3656 table to the valid table. */
3657 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3658 copy_nary (nary, valid_info);
3659 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3660 copy_phi (phi, valid_info);
3661 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3662 copy_reference (ref, valid_info);
3663
3664 current_info = valid_info;
3665 }
3666
3667
3668 /* Pop the components of the found SCC for NAME off the SCC stack
3669 and process them. Returns true if all went well, false if
3670 we run into resource limits. */
3671
3672 static bool
3673 extract_and_process_scc_for_name (tree name)
3674 {
3675 vec<tree> scc = vNULL;
3676 tree x;
3677
3678 /* Found an SCC, pop the components off the SCC stack and
3679 process them. */
3680 do
3681 {
3682 x = sccstack.pop ();
3683
3684 VN_INFO (x)->on_sccstack = false;
3685 scc.safe_push (x);
3686 } while (x != name);
3687
3688 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3689 if (scc.length ()
3690 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3691 {
3692 if (dump_file)
3693 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3694 "SCC size %u exceeding %u\n", scc.length (),
3695 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3696
3697 scc.release ();
3698 return false;
3699 }
3700
3701 if (scc.length () > 1)
3702 sort_scc (scc);
3703
3704 if (dump_file && (dump_flags & TDF_DETAILS))
3705 print_scc (dump_file, scc);
3706
3707 process_scc (scc);
3708
3709 scc.release ();
3710
3711 return true;
3712 }
3713
3714 /* Depth first search on NAME to discover and process SCC's in the SSA
3715 graph.
3716 Execution of this algorithm relies on the fact that the SCC's are
3717 popped off the stack in topological order.
3718 Returns true if successful, false if we stopped processing SCC's due
3719 to resource constraints. */
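/* This is essentially an iterative version of Tarjan's SCC algorithm:
e.g. from i_2 in
i_1 = PHI <0(2), i_2(3)>; i_2 = i_1 + 1;
the walk reaches i_1, returns to i_2, and pops {i_1, i_2} as one
SCC once VN_INFO (name)->low equals its dfsnum. */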
3720
3721 static bool
3722 DFS (tree name)
3723 {
3724 vec<ssa_op_iter> itervec = vNULL;
3725 vec<tree> namevec = vNULL;
3726 use_operand_p usep = NULL;
3727 gimple defstmt;
3728 tree use;
3729 ssa_op_iter iter;
3730
3731 start_over:
3732 /* SCC info */
3733 VN_INFO (name)->dfsnum = next_dfs_num++;
3734 VN_INFO (name)->visited = true;
3735 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3736
3737 sccstack.safe_push (name);
3738 VN_INFO (name)->on_sccstack = true;
3739 defstmt = SSA_NAME_DEF_STMT (name);
3740
3741 /* Recursively DFS on our operands, looking for SCC's. */
3742 if (!gimple_nop_p (defstmt))
3743 {
3744 /* Push a new iterator. */
3745 if (gimple_code (defstmt) == GIMPLE_PHI)
3746 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3747 else
3748 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3749 }
3750 else
3751 clear_and_done_ssa_iter (&iter);
3752
3753 while (1)
3754 {
3755 /* If we are done processing uses of a name, go up the stack
3756 of iterators and process SCCs as we found them. */
3757 if (op_iter_done (&iter))
3758 {
3759 /* See if we found an SCC. */
3760 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3761 if (!extract_and_process_scc_for_name (name))
3762 {
3763 namevec.release ();
3764 itervec.release ();
3765 return false;
3766 }
3767
3768 /* Check if we are done. */
3769 if (namevec.is_empty ())
3770 {
3771 namevec.release ();
3772 itervec.release ();
3773 return true;
3774 }
3775
3776 /* Restore the last use walker and continue walking there. */
3777 use = name;
3778 name = namevec.pop ();
3779 memcpy (&iter, &itervec.last (),
3780 sizeof (ssa_op_iter));
3781 itervec.pop ();
3782 goto continue_walking;
3783 }
3784
3785 use = USE_FROM_PTR (usep);
3786
3787 /* Since we handle phi nodes, we will sometimes get
3788 invariants in the use expression. */
3789 if (TREE_CODE (use) == SSA_NAME)
3790 {
3791 if (! (VN_INFO (use)->visited))
3792 {
3793 /* Recurse by pushing the current use walking state on
3794 the stack and starting over. */
3795 itervec.safe_push (iter);
3796 namevec.safe_push (name);
3797 name = use;
3798 goto start_over;
3799
3800 continue_walking:
3801 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3802 VN_INFO (use)->low);
3803 }
3804 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3805 && VN_INFO (use)->on_sccstack)
3806 {
3807 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3808 VN_INFO (name)->low);
3809 }
3810 }
3811
3812 usep = op_iter_next_use (&iter);
3813 }
3814 }
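
/* The explicit namevec/itervec stacks above implement Tarjan's SCC
   algorithm without host recursion, so arbitrarily deep SSA use-def
   chains cannot overflow the call stack.  As a hypothetical trace,
   starting from a_1 in

     # a_1 = PHI <5(2), b_2(3)>
     b_2 = a_1 + 1;

   a_1 gets dfsnum 1 and its use b_2 gets dfsnum 2; b_2's use a_1 is
   already on the SCC stack, so b_2's low becomes 1.  Back at a_1 we
   have low == dfsnum and {a_1, b_2} is popped as a single SCC.  */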

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

/* Initialize the SCCVN data structures.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* vec::create doesn't actually grow the vector to the right size,
     it just preallocates the space to do so, so grow it explicitly.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block);
  rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp holds the RPO ordering itself:
     rpo_numbers_temp[i] == bb->index means that bb is the i'th block
     in RPO order.  We want the inverse mapping from block index to
     RPO number, so rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
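
  /* For example, with hypothetical block indices, if the computed
     order is rpo_numbers_temp = { 2, 4, 3 }, the loop produces
     rpo_numbers[2] = 0, rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */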

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }
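
  /* VN_TOP is the top element of the value lattice; a name still
     valued VN_TOP has not been processed yet, which is what allows
     the optimistic iteration to, e.g., ignore a PHI argument that is
     still VN_TOP when checking whether all arguments are equal.  */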

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

/* Free the SCCVN data structures.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID to the value id of RESULT if it holds something useful:
   an SSA name gets its assigned value id, an invariant gets (or
   allocates) the value id shared by all uses of that constant.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
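
/* E.g. (hypothetical) a hash table entry whose result is x_3 receives
   x_3's value id, while an entry whose result simplified to the
   constant 42 receives the value id shared by everything valued 42.  */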

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how the alias oracle is used when walking memory references
   during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Default definitions of parameters value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  Names that are their own value number
     (or still VN_TOP) get a fresh id; names valued to an invariant
     share the id of that constant.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
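
  /* E.g. (hypothetical) q_2 valued as q_2 itself gets a fresh id,
     whereas p_1 proven equal to the constant 0 shares the constant
     0's value id with every other name valued 0.  */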

  /* Propagate value ids from the value-number leaders.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }
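
  /* After this loop each name carries its leader's value id: e.g.
     (hypothetical) if b_2's value number is a_1, b_2 now has a_1's
     value id, so value equality reduces to comparing ids.  */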

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  As ids are handed
   out starting from 1, this is one more than the largest id in use
   and hence a suitable size bound for arrays indexed by value id.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
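
/* Note the OEP_PURE_SAME flag above: it additionally lets two calls
   to the same pure function with equal arguments compare equal, which
   matches value numbering's view of side-effect-free calls.  */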


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
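/* For example, a signed integer operation may trap when
   TYPE_OVERFLOW_TRAPS is set (-ftrapv), a floating-point comparison
   may trap on signaling NaNs (-fsignaling-nans), and a division may
   trap when the divisor in op[1] is not known to be nonzero.  */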

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}