/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
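
/* As a small illustration of the optimistic treatment of cycles,
   consider the SSA cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   Optimistically assuming that i_1 and i_2 share a value number lets
   i_2 = i_1 + 0 simplify to that same value, the assumption is
   confirmed when the cycle is re-iterated, and both names end up
   value numbered to the constant 0, which a single non-iterating
   pass over the cycle could not conclude.  */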

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */
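/* For example, if the defining statement of NAME's value number is
   tmp_2 = a_1 + 1, the representative expression built and cached
   below is the tree a_1 + 1.  */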

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}


/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */
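/* Note that operands with a known constant offset (off != -1) are
   folded into a single accumulated offset below, so that for instance
   the operand sequences produced for a.f (with f at byte offset 4)
   and for MEM[&a + 4] receive the same hash value.  */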

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */
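/* The comparison below likewise accumulates runs of constant offsets
   on both sides, so operand vectors of different length can still
   compare equal, e.g. the one for a.f and the one for MEM[&a + 4]
   when f lives at byte offset 4.  */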

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */
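/* For example, a load from a.b[i_1] is recorded, outermost operation
   first, as { ARRAY_REF <i_1>, COMPONENT_REF <b>, MEM_REF <0>,
   ADDR_EXPR <&a> }, using the MEM[&decl] canonicalization of decl
   bases implemented below.  */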

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
        /* These are only interesting for their operands, their
           existence, and their type.  They will never be the last
           ref in the chain of references (IE they require an
           operand), so we don't have to put anything
           for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */
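/* For a call tem_2 = foo (a.b), the resulting vector starts with a
   CALL_EXPR operand recording the callee and static chain, followed
   by the decomposed operands of each argument, here those of a.b.  */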

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
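/* For example, the operand pair { MEM_REF <4>, ADDR_EXPR <&s.f> }
   with f at byte offset 8 becomes { MEM_REF <12>, ADDR_EXPR <&s> }.  */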
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS, looking
   through the SSA-name definition of the address.  Updates *I_P to
   point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */
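/* For instance a read of "abc"[1] is optimized to the character
   constant 'b' by the second case below, and a builtin call such as
   __builtin_sqrt (4.0) can fold to 2.0 via the first one.  */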

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */
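/* The vuse only enters the reference hash additively (see
   vn_reference_compute_hash), which is what permits the cheap rehash
   below: subtract the old SSA name version and add the new one.  */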

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          VEC (vn_reference_op_s,
                                               heap) *operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     VEC_copy (vn_reference_op_s, heap,
                                               operands), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */
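/* For example, when walking from a load of x.a past the call
   memset (&x, 0, sizeof (x)), case 1 below derives the constant 0
   for the load instead of continuing the walk.  */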

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
1397 if (is_gimple_assign (def_stmt))
1398 {
1399 VEC (vn_reference_op_s, heap) *tem;
1400 tree lhs = gimple_assign_lhs (def_stmt);
1401 bool valueized_anything = false;
1402 /* Avoid re-allocation overhead. */
1403 VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1404 copy_reference_ops_from_ref (lhs, &lhs_ops);
1405 tem = lhs_ops;
1406 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1407 gcc_assert (lhs_ops == tem);
1408 if (valueized_anything)
1409 {
1410 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1411 get_alias_set (lhs),
1412 TREE_TYPE (lhs), lhs_ops);
1413 if (lhs_ref_ok
1414 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1415 return NULL;
1416 }
1417 else
1418 {
1419 ao_ref_init (&lhs_ref, lhs);
1420 lhs_ref_ok = true;
1421 }
1422 }
1423
1424 base = ao_ref_base (ref);
1425 offset = ref->offset;
1426 maxsize = ref->max_size;
1427
1428 /* If we cannot constrain the size of the reference we cannot
1429 test if anything kills it. */
1430 if (maxsize == -1)
1431 return (void *)-1;
1432
1433 /* We can't deduce anything useful from clobbers. */
1434 if (gimple_clobber_p (def_stmt))
1435 return (void *)-1;
1436
1437 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1438 from that definition.
1439 1) Memset. */
1440 if (is_gimple_reg_type (vr->type)
1441 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1442 && integer_zerop (gimple_call_arg (def_stmt, 1))
1443 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1444 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1445 {
1446 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1447 tree base2;
1448 HOST_WIDE_INT offset2, size2, maxsize2;
1449 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1450 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1451 if ((unsigned HOST_WIDE_INT)size2 / 8
1452 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1453 && maxsize2 != -1
1454 && operand_equal_p (base, base2, 0)
1455 && offset2 <= offset
1456 && offset2 + size2 >= offset + maxsize)
1457 {
1458 tree val = build_zero_cst (vr->type);
1459 return vn_reference_lookup_or_insert_for_pieces
1460 (vuse, vr->set, vr->type, vr->operands, val);
1461 }
1462 }
1463
1464 /* 2) Assignment from an empty CONSTRUCTOR. */
1465 else if (is_gimple_reg_type (vr->type)
1466 && gimple_assign_single_p (def_stmt)
1467 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1468 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1469 {
1470 tree base2;
1471 HOST_WIDE_INT offset2, size2, maxsize2;
1472 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1473 &offset2, &size2, &maxsize2);
1474 if (maxsize2 != -1
1475 && operand_equal_p (base, base2, 0)
1476 && offset2 <= offset
1477 && offset2 + size2 >= offset + maxsize)
1478 {
1479 tree val = build_zero_cst (vr->type);
1480 return vn_reference_lookup_or_insert_for_pieces
1481 (vuse, vr->set, vr->type, vr->operands, val);
1482 }
1483 }
1484
1485 /* 3) Assignment from a constant. We can use folds native encode/interpret
1486 routines to extract the assigned bits. */
1487 else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
1488 && ref->size == maxsize
1489 && maxsize % BITS_PER_UNIT == 0
1490 && offset % BITS_PER_UNIT == 0
1491 && is_gimple_reg_type (vr->type)
1492 && gimple_assign_single_p (def_stmt)
1493 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1494 {
1495 tree base2;
1496 HOST_WIDE_INT offset2, size2, maxsize2;
1497 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1498 &offset2, &size2, &maxsize2);
1499 if (maxsize2 != -1
1500 && maxsize2 == size2
1501 && size2 % BITS_PER_UNIT == 0
1502 && offset2 % BITS_PER_UNIT == 0
1503 && operand_equal_p (base, base2, 0)
1504 && offset2 <= offset
1505 && offset2 + size2 >= offset + maxsize)
1506 {
1507 /* We support up to 512-bit values (for V8DFmode). */
1508 unsigned char buffer[64];
1509 int len;
1510
1511 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1512 buffer, sizeof (buffer));
1513 if (len > 0)
1514 {
1515 tree val = native_interpret_expr (vr->type,
1516 buffer
1517 + ((offset - offset2)
1518 / BITS_PER_UNIT),
1519 ref->size / BITS_PER_UNIT);
1520 if (val)
1521 return vn_reference_lookup_or_insert_for_pieces
1522 (vuse, vr->set, vr->type, vr->operands, val);
1523 }
1524 }
1525 }
1526
1527 /* 4) Assignment from an SSA name which definition we may be able
1528 to access pieces from. */
1529 else if (ref->size == maxsize
1530 && is_gimple_reg_type (vr->type)
1531 && gimple_assign_single_p (def_stmt)
1532 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1533 {
1534 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1535 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1536 if (is_gimple_assign (def_stmt2)
1537 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1538 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1539 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1540 {
1541 tree base2;
1542 HOST_WIDE_INT offset2, size2, maxsize2, off;
1543 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1544 &offset2, &size2, &maxsize2);
1545 off = offset - offset2;
1546 if (maxsize2 != -1
1547 && maxsize2 == size2
1548 && operand_equal_p (base, base2, 0)
1549 && offset2 <= offset
1550 && offset2 + size2 >= offset + maxsize)
1551 {
1552 tree val = NULL_TREE;
1553 HOST_WIDE_INT elsz
1554 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1555 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1556 {
1557 if (off == 0)
1558 val = gimple_assign_rhs1 (def_stmt2);
1559 else if (off == elsz)
1560 val = gimple_assign_rhs2 (def_stmt2);
1561 }
1562 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1563 && off % elsz == 0)
1564 {
1565 tree ctor = gimple_assign_rhs1 (def_stmt2);
1566 unsigned i = off / elsz;
1567 if (i < CONSTRUCTOR_NELTS (ctor))
1568 {
1569 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1570 if (compare_tree_int (elt->index, i) == 0)
1571 val = elt->value;
1572 }
1573 }
1574 if (val)
1575 return vn_reference_lookup_or_insert_for_pieces
1576 (vuse, vr->set, vr->type, vr->operands, val);
1577 }
1578 }
1579 }
1580
1581 /* 5) For aggregate copies translate the reference through them if
1582 the copy kills ref. */
1583 else if (vn_walk_kind == VN_WALKREWRITE
1584 && gimple_assign_single_p (def_stmt)
1585 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1586 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1587 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1588 {
1589 tree base2;
1590 HOST_WIDE_INT offset2, size2, maxsize2;
1591 int i, j;
1592 VEC (vn_reference_op_s, heap) *rhs = NULL;
1593 vn_reference_op_t vro;
1594 ao_ref r;
1595
1596 if (!lhs_ref_ok)
1597 return (void *)-1;
1598
1599 /* See if the assignment kills REF. */
1600 base2 = ao_ref_base (&lhs_ref);
1601 offset2 = lhs_ref.offset;
1602 size2 = lhs_ref.size;
1603 maxsize2 = lhs_ref.max_size;
1604 if (maxsize2 == -1
1605 || (base != base2 && !operand_equal_p (base, base2, 0))
1606 || offset2 > offset
1607 || offset2 + size2 < offset + maxsize)
1608 return (void *)-1;
1609
1610 /* Find the common base of ref and the lhs. lhs_ops already
1611 contains valueized operands for the lhs. */
1612 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1613 j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
1614 while (j >= 0 && i >= 0
1615 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1616 vr->operands, i),
1617 VEC_index (vn_reference_op_s, lhs_ops, j)))
1618 {
1619 i--;
1620 j--;
1621 }
1622
1623 /* ??? The innermost op should always be a MEM_REF and we already
1624 checked that the assignment to the lhs kills vr. Thus for
1625 aggregate copies using char[] types the vn_reference_op_eq
1626 may fail when comparing types for compatibility. But we really
1627 don't care here - further lookups with the rewritten operands
1628 will simply fail if we messed up types too badly. */
1629 if (j == 0 && i >= 0
1630 && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1631 && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1632 && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1633 == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1634 i--, j--;
1635
1636 /* i now points to the first additional op.
1637 ??? LHS may not be completely contained in VR, one or more
1638 VIEW_CONVERT_EXPRs could be in its way. We could at least
1639 try handling outermost VIEW_CONVERT_EXPRs. */
1640 if (j != -1)
1641 return (void *)-1;
1642
1643 /* Now re-write REF to be based on the rhs of the assignment. */
1644 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1645 /* We need to pre-pend vr->operands[0..i] to rhs. */
1646 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1647 > VEC_length (vn_reference_op_s, vr->operands))
1648 {
1649 VEC (vn_reference_op_s, heap) *old = vr->operands;
1650 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1651 i + 1 + VEC_length (vn_reference_op_s, rhs));
1652 if (old == shared_lookup_references
1653 && vr->operands != old)
1654 shared_lookup_references = NULL;
1655 }
1656 else
1657 VEC_truncate (vn_reference_op_s, vr->operands,
1658 i + 1 + VEC_length (vn_reference_op_s, rhs));
1659 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1660 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1661 VEC_free (vn_reference_op_s, heap, rhs);
1662 vr->operands = valueize_refs (vr->operands);
1663 vr->hashcode = vn_reference_compute_hash (vr);
1664
1665 /* Adjust *ref from the new operands. */
1666 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1667 return (void *)-1;
1668 /* This can happen with bitfields. */
1669 if (ref->size != r.size)
1670 return (void *)-1;
1671 *ref = r;
1672
1673 /* Do not update last seen VUSE after translating. */
1674 last_vuse_ptr = NULL;
1675
1676 /* Keep looking for the adjusted *REF / VR pair. */
1677 return NULL;
1678 }
1679
1680 /* 6) For memcpy copies translate the reference through them if
1681 the copy kills ref. */
1682 else if (vn_walk_kind == VN_WALKREWRITE
1683 && is_gimple_reg_type (vr->type)
1684 /* ??? Handle BCOPY as well. */
1685 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1686 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1687 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1688 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1689 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1690 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1691 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1692 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1693 {
1694 tree lhs, rhs;
1695 ao_ref r;
1696 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1697 vn_reference_op_s op;
1698 HOST_WIDE_INT at;
1699
1700
1701 /* Only handle non-variable, addressable refs. */
1702 if (ref->size != maxsize
1703 || offset % BITS_PER_UNIT != 0
1704 || ref->size % BITS_PER_UNIT != 0)
1705 return (void *)-1;
1706
1707 /* Extract a pointer base and an offset for the destination. */
1708 lhs = gimple_call_arg (def_stmt, 0);
1709 lhs_offset = 0;
1710 if (TREE_CODE (lhs) == SSA_NAME)
1711 lhs = SSA_VAL (lhs);
1712 if (TREE_CODE (lhs) == ADDR_EXPR)
1713 {
1714 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1715 &lhs_offset);
1716 if (!tem)
1717 return (void *)-1;
1718 if (TREE_CODE (tem) == MEM_REF
1719 && host_integerp (TREE_OPERAND (tem, 1), 1))
1720 {
1721 lhs = TREE_OPERAND (tem, 0);
1722 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1723 }
1724 else if (DECL_P (tem))
1725 lhs = build_fold_addr_expr (tem);
1726 else
1727 return (void *)-1;
1728 }
1729 if (TREE_CODE (lhs) != SSA_NAME
1730 && TREE_CODE (lhs) != ADDR_EXPR)
1731 return (void *)-1;
1732
1733 /* Extract a pointer base and an offset for the source. */
1734 rhs = gimple_call_arg (def_stmt, 1);
1735 rhs_offset = 0;
1736 if (TREE_CODE (rhs) == SSA_NAME)
1737 rhs = SSA_VAL (rhs);
1738 if (TREE_CODE (rhs) == ADDR_EXPR)
1739 {
1740 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1741 &rhs_offset);
1742 if (!tem)
1743 return (void *)-1;
1744 if (TREE_CODE (tem) == MEM_REF
1745 && host_integerp (TREE_OPERAND (tem, 1), 1))
1746 {
1747 rhs = TREE_OPERAND (tem, 0);
1748 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1749 }
1750 else if (DECL_P (tem))
1751 rhs = build_fold_addr_expr (tem);
1752 else
1753 return (void *)-1;
1754 }
1755 if (TREE_CODE (rhs) != SSA_NAME
1756 && TREE_CODE (rhs) != ADDR_EXPR)
1757 return (void *)-1;
1758
1759 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1760
1761 /* The bases of the destination and the references have to agree. */
1762 if ((TREE_CODE (base) != MEM_REF
1763 && !DECL_P (base))
1764 || (TREE_CODE (base) == MEM_REF
1765 && (TREE_OPERAND (base, 0) != lhs
1766 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1767 || (DECL_P (base)
1768 && (TREE_CODE (lhs) != ADDR_EXPR
1769 || TREE_OPERAND (lhs, 0) != base)))
1770 return (void *)-1;
1771
1772 /* And the access has to be contained within the memcpy destination. */
1773 at = offset / BITS_PER_UNIT;
1774 if (TREE_CODE (base) == MEM_REF)
1775 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1776 if (lhs_offset > at
1777 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1778 return (void *)-1;
1779
1780 /* Make room for 2 operands in the new reference. */
1781 if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1782 {
1783 VEC (vn_reference_op_s, heap) *old = vr->operands;
1784 VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
1785 if (old == shared_lookup_references
1786 && vr->operands != old)
1787 shared_lookup_references = NULL;
1788 }
1789 else
1790 VEC_truncate (vn_reference_op_s, vr->operands, 2);
1791
1792 /* The looked-through reference is a simple MEM_REF. */
1793 memset (&op, 0, sizeof (op));
1794 op.type = vr->type;
1795 op.opcode = MEM_REF;
1796 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1797 op.off = at - lhs_offset + rhs_offset;
1798 VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1799 op.type = TREE_TYPE (rhs);
1800 op.opcode = TREE_CODE (rhs);
1801 op.op0 = rhs;
1802 op.off = -1;
1803 VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1804 vr->hashcode = vn_reference_compute_hash (vr);
1805
1806 /* Adjust *ref from the new operands. */
1807 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1808 return (void *)-1;
1809 /* This can happen with bitfields. */
1810 if (ref->size != r.size)
1811 return (void *)-1;
1812 *ref = r;
1813
1814 /* Do not update last seen VUSE after translating. */
1815 last_vuse_ptr = NULL;
1816
1817 /* Keep looking for the adjusted *REF / VR pair. */
1818 return NULL;
1819 }
1820
1821 /* Bail out and stop walking. */
1822 return (void *)-1;
1823 }
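
/* As a source-level sketch of what the memcpy handling above achieves
   (the variables are made up for illustration):

     struct S a, b;
     b.f = 1;
     memcpy (&a, &b, sizeof (a));
     x = a.f;

   The load from a.f is translated into a load from b.f at the memory
   state before the aggregate copy, so x can be value numbered to 1,
   provided the access is fully contained in the copied region.  */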
1824
1825 /* Look up a reference operation by its parts, in the current hash table.
1826 Returns the resulting value number if it exists in the hash table,
1827 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1828 vn_reference_t stored in the hashtable if something is found. */
1829
1830 tree
1831 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1832 VEC (vn_reference_op_s, heap) *operands,
1833 vn_reference_t *vnresult, vn_lookup_kind kind)
1834 {
1835 struct vn_reference_s vr1;
1836 vn_reference_t tmp;
1837 tree cst;
1838
1839 if (!vnresult)
1840 vnresult = &tmp;
1841 *vnresult = NULL;
1842
1843 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1844 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1845 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1846 VEC_length (vn_reference_op_s, operands));
1847 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1848 VEC_address (vn_reference_op_s, operands),
1849 sizeof (vn_reference_op_s)
1850 * VEC_length (vn_reference_op_s, operands));
1851 vr1.operands = operands = shared_lookup_references
1852 = valueize_refs (shared_lookup_references);
1853 vr1.type = type;
1854 vr1.set = set;
1855 vr1.hashcode = vn_reference_compute_hash (&vr1);
1856 if ((cst = fully_constant_vn_reference_p (&vr1)))
1857 return cst;
1858
1859 vn_reference_lookup_1 (&vr1, vnresult);
1860 if (!*vnresult
1861 && kind != VN_NOWALK
1862 && vr1.vuse)
1863 {
1864 ao_ref r;
1865 vn_walk_kind = kind;
1866 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1867 *vnresult =
1868 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1869 vn_reference_lookup_2,
1870 vn_reference_lookup_3, &vr1);
1871 if (vr1.operands != operands)
1872 VEC_free (vn_reference_op_s, heap, vr1.operands);
1873 }
1874
1875 if (*vnresult)
1876 return (*vnresult)->result;
1877
1878 return NULL_TREE;
1879 }
1880
1881 /* Look up OP in the current hash table, and return the resulting value
1882 number if it exists in the hash table. Return NULL_TREE if it does
1883 not exist in the hash table or if the result field of the structure
1884 was NULL. VNRESULT will be filled in with the vn_reference_t
1885 stored in the hashtable if one exists. */
1886
1887 tree
1888 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1889 vn_reference_t *vnresult)
1890 {
1891 VEC (vn_reference_op_s, heap) *operands;
1892 struct vn_reference_s vr1;
1893 tree cst;
1894 bool valueized_anything;
1895
1896 if (vnresult)
1897 *vnresult = NULL;
1898
1899 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1900 vr1.operands = operands
1901 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1902 vr1.type = TREE_TYPE (op);
1903 vr1.set = get_alias_set (op);
1904 vr1.hashcode = vn_reference_compute_hash (&vr1);
1905 if ((cst = fully_constant_vn_reference_p (&vr1)))
1906 return cst;
1907
1908 if (kind != VN_NOWALK
1909 && vr1.vuse)
1910 {
1911 vn_reference_t wvnresult;
1912 ao_ref r;
1913 /* Make sure to use a valueized reference if we valueized anything.
1914 Otherwise preserve the full reference for advanced TBAA. */
1915 if (!valueized_anything
1916 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1917 vr1.operands))
1918 ao_ref_init (&r, op);
1919 vn_walk_kind = kind;
1920 wvnresult =
1921 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1922 vn_reference_lookup_2,
1923 vn_reference_lookup_3, &vr1);
1924 if (vr1.operands != operands)
1925 VEC_free (vn_reference_op_s, heap, vr1.operands);
1926 if (wvnresult)
1927 {
1928 if (vnresult)
1929 *vnresult = wvnresult;
1930 return wvnresult->result;
1931 }
1932
1933 return NULL_TREE;
1934 }
1935
1936 return vn_reference_lookup_1 (&vr1, vnresult);
1937 }
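
/* A minimal usage sketch (REF and VUSE stand for hypothetical caller
   values): to ask whether the value of *p_1 at memory state .MEM_5 is
   already known, without walking the VUSE chain, a caller would do

     vn_reference_t res;
     tree val = vn_reference_lookup (ref, vuse, VN_NOWALK, &res);

   and get NULL_TREE or the value number in VAL, with RES pointing to
   the hash table entry if one was found.  */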
1938
1939
1940 /* Insert OP into the current hash table with a value number of
1941 RESULT, and return the resulting reference structure we created. */
1942
1943 vn_reference_t
1944 vn_reference_insert (tree op, tree result, tree vuse)
1945 {
1946 void **slot;
1947 vn_reference_t vr1;
1948
1949 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1950 if (TREE_CODE (result) == SSA_NAME)
1951 vr1->value_id = VN_INFO (result)->value_id;
1952 else
1953 vr1->value_id = get_or_alloc_constant_value_id (result);
1954 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1955 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1956 vr1->type = TREE_TYPE (op);
1957 vr1->set = get_alias_set (op);
1958 vr1->hashcode = vn_reference_compute_hash (vr1);
1959 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1960
1961 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1962 INSERT);
1963
1964 /* Because we look up stores using vuses, and value number failures
1965 using the vdefs (see visit_reference_op_store for how and why),
1966 it's possible that on failure we may try to insert an already
1967 inserted store. This is not wrong, as there is no SSA name for a
1968 store that we could use as a differentiator anyway. Thus, unlike
1969 the other lookup functions, you cannot gcc_assert (!*slot)
1970 here. */
1971
1972 /* But free the old slot in case of a collision. */
1973 if (*slot)
1974 free_reference (*slot);
1975
1976 *slot = vr1;
1977 return vr1;
1978 }
1979
1980 /* Insert a reference by its pieces into the current hash table with
1981 a value number of RESULT. Return the resulting reference
1982 structure we created. */
1983
1984 vn_reference_t
1985 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1986 VEC (vn_reference_op_s, heap) *operands,
1987 tree result, unsigned int value_id)
1988
1989 {
1990 void **slot;
1991 vn_reference_t vr1;
1992
1993 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1994 vr1->value_id = value_id;
1995 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1996 vr1->operands = valueize_refs (operands);
1997 vr1->type = type;
1998 vr1->set = set;
1999 vr1->hashcode = vn_reference_compute_hash (vr1);
2000 if (result && TREE_CODE (result) == SSA_NAME)
2001 result = SSA_VAL (result);
2002 vr1->result = result;
2003
2004 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2005 INSERT);
2006
2007 /* At this point we should have all the things inserted that we have
2008 seen before, and we should never try inserting something that
2009 already exists. */
2010 gcc_assert (!*slot);
2011 if (*slot)
2012 free_reference (*slot);
2013
2014 *slot = vr1;
2015 return vr1;
2016 }
2017
2018 /* Compute and return the hash value for nary operation VNO1. */
2019
2020 hashval_t
2021 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2022 {
2023 hashval_t hash;
2024 unsigned i;
2025
2026 for (i = 0; i < vno1->length; ++i)
2027 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2028 vno1->op[i] = SSA_VAL (vno1->op[i]);
2029
2030 if (vno1->length == 2
2031 && commutative_tree_code (vno1->opcode)
2032 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2033 {
2034 tree temp = vno1->op[0];
2035 vno1->op[0] = vno1->op[1];
2036 vno1->op[1] = temp;
2037 }
2038
2039 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2040 for (i = 0; i < vno1->length; ++i)
2041 hash = iterative_hash_expr (vno1->op[i], hash);
2042
2043 return hash;
2044 }
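
/* For example (made-up SSA names): because operands of commutative
   codes are canonically ordered before hashing, the statements

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   hash identically and later compare equal in vn_nary_op_eq, so y_4
   can be value numbered to x_1.  */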
2045
2046 /* Return the computed hashcode for nary operation P1. */
2047
2048 static hashval_t
2049 vn_nary_op_hash (const void *p1)
2050 {
2051 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2052 return vno1->hashcode;
2053 }
2054
2055 /* Compare nary operations P1 and P2 and return true if they are
2056 equivalent. */
2057
2058 int
2059 vn_nary_op_eq (const void *p1, const void *p2)
2060 {
2061 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2062 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2063 unsigned i;
2064
2065 if (vno1->hashcode != vno2->hashcode)
2066 return false;
2067
2068 if (vno1->length != vno2->length)
2069 return false;
2070
2071 if (vno1->opcode != vno2->opcode
2072 || !types_compatible_p (vno1->type, vno2->type))
2073 return false;
2074
2075 for (i = 0; i < vno1->length; ++i)
2076 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2077 return false;
2078
2079 return true;
2080 }
2081
2082 /* Initialize VNO from the pieces provided. */
2083
2084 static void
2085 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2086 enum tree_code code, tree type, tree *ops)
2087 {
2088 vno->opcode = code;
2089 vno->length = length;
2090 vno->type = type;
2091 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2092 }
2093
2094 /* Initialize VNO from OP. */
2095
2096 static void
2097 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2098 {
2099 unsigned i;
2100
2101 vno->opcode = TREE_CODE (op);
2102 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2103 vno->type = TREE_TYPE (op);
2104 for (i = 0; i < vno->length; ++i)
2105 vno->op[i] = TREE_OPERAND (op, i);
2106 }
2107
2108 /* Return the number of operands for a vn_nary ops structure from STMT. */
2109
2110 static unsigned int
2111 vn_nary_length_from_stmt (gimple stmt)
2112 {
2113 switch (gimple_assign_rhs_code (stmt))
2114 {
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case VIEW_CONVERT_EXPR:
2118 return 1;
2119
2120 case CONSTRUCTOR:
2121 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2122
2123 default:
2124 return gimple_num_ops (stmt) - 1;
2125 }
2126 }
2127
2128 /* Initialize VNO from STMT. */
2129
2130 static void
2131 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2132 {
2133 unsigned i;
2134
2135 vno->opcode = gimple_assign_rhs_code (stmt);
2136 vno->type = gimple_expr_type (stmt);
2137 switch (vno->opcode)
2138 {
2139 case REALPART_EXPR:
2140 case IMAGPART_EXPR:
2141 case VIEW_CONVERT_EXPR:
2142 vno->length = 1;
2143 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2144 break;
2145
2146 case CONSTRUCTOR:
2147 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2148 for (i = 0; i < vno->length; ++i)
2149 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2150 break;
2151
2152 default:
2153 vno->length = gimple_num_ops (stmt) - 1;
2154 for (i = 0; i < vno->length; ++i)
2155 vno->op[i] = gimple_op (stmt, i + 1);
2156 }
2157 }
2158
2159 /* Compute the hashcode for VNO and look for it in the hash table;
2160 return the resulting value number if it exists in the hash table.
2161 Return NULL_TREE if it does not exist in the hash table or if the
2162 result field of the operation is NULL. VNRESULT will contain the
2163 vn_nary_op_t from the hashtable if it exists. */
2164
2165 static tree
2166 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2167 {
2168 void **slot;
2169
2170 if (vnresult)
2171 *vnresult = NULL;
2172
2173 vno->hashcode = vn_nary_op_compute_hash (vno);
2174 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2175 NO_INSERT);
2176 if (!slot && current_info == optimistic_info)
2177 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2178 NO_INSERT);
2179 if (!slot)
2180 return NULL_TREE;
2181 if (vnresult)
2182 *vnresult = (vn_nary_op_t)*slot;
2183 return ((vn_nary_op_t)*slot)->result;
2184 }
2185
2186 /* Look up an n-ary operation by its pieces and return the resulting value
2187 number if it exists in the hash table. Return NULL_TREE if it does
2188 not exist in the hash table or if the result field of the operation
2189 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2190 if it exists. */
2191
2192 tree
2193 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2194 tree type, tree *ops, vn_nary_op_t *vnresult)
2195 {
2196 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2197 sizeof_vn_nary_op (length));
2198 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2199 return vn_nary_op_lookup_1 (vno1, vnresult);
2200 }
2201
2202 /* Look up OP in the current hash table, and return the resulting value
2203 number if it exists in the hash table. Return NULL_TREE if it does
2204 not exist in the hash table or if the result field of the operation
2205 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2206 if it exists. */
2207
2208 tree
2209 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2210 {
2211 vn_nary_op_t vno1
2212 = XALLOCAVAR (struct vn_nary_op_s,
2213 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2214 init_vn_nary_op_from_op (vno1, op);
2215 return vn_nary_op_lookup_1 (vno1, vnresult);
2216 }
2217
2218 /* Look up the rhs of STMT in the current hash table, and return the resulting
2219 value number if it exists in the hash table. Return NULL_TREE if
2220 it does not exist in the hash table. VNRESULT will contain the
2221 vn_nary_op_t from the hashtable if it exists. */
2222
2223 tree
2224 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2225 {
2226 vn_nary_op_t vno1
2227 = XALLOCAVAR (struct vn_nary_op_s,
2228 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2229 init_vn_nary_op_from_stmt (vno1, stmt);
2230 return vn_nary_op_lookup_1 (vno1, vnresult);
2231 }
2232
2233 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2234
2235 static vn_nary_op_t
2236 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2237 {
2238 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2239 }
2240
2241 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2242 obstack. */
2243
2244 static vn_nary_op_t
2245 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2246 {
2247 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2248 &current_info->nary_obstack);
2249
2250 vno1->value_id = value_id;
2251 vno1->length = length;
2252 vno1->result = result;
2253
2254 return vno1;
2255 }
2256
2257 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2258 VNO->HASHCODE first. */
2259
2260 static vn_nary_op_t
2261 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2262 {
2263 void **slot;
2264
2265 if (compute_hash)
2266 vno->hashcode = vn_nary_op_compute_hash (vno);
2267
2268 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2269 gcc_assert (!*slot);
2270
2271 *slot = vno;
2272 return vno;
2273 }
2274
2275 /* Insert an n-ary operation into the current hash table using its
2276 pieces. Return the vn_nary_op_t structure we created and put in
2277 the hashtable. */
2278
2279 vn_nary_op_t
2280 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2281 tree type, tree *ops,
2282 tree result, unsigned int value_id)
2283 {
2284 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2285 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2286 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2287 }
2288
2289 /* Insert OP into the current hash table with a value number of
2290 RESULT. Return the vn_nary_op_t structure we created and put in
2291 the hashtable. */
2292
2293 vn_nary_op_t
2294 vn_nary_op_insert (tree op, tree result)
2295 {
2296 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2297 vn_nary_op_t vno1;
2298
2299 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2300 init_vn_nary_op_from_op (vno1, op);
2301 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2302 }
2303
2304 /* Insert the rhs of STMT into the current hash table with a value number of
2305 RESULT. */
2306
2307 vn_nary_op_t
2308 vn_nary_op_insert_stmt (gimple stmt, tree result)
2309 {
2310 vn_nary_op_t vno1
2311 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2312 result, VN_INFO (result)->value_id);
2313 init_vn_nary_op_from_stmt (vno1, stmt);
2314 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2315 }
2316
2317 /* Compute a hashcode for PHI operation VP1 and return it. */
2318
2319 static inline hashval_t
2320 vn_phi_compute_hash (vn_phi_t vp1)
2321 {
2322 hashval_t result;
2323 int i;
2324 tree phi1op;
2325 tree type;
2326
2327 result = vp1->block->index;
2328
2329 /* If all PHI arguments are constants we need to distinguish
2330 the PHI node via its type. */
2331 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2332 result += (INTEGRAL_TYPE_P (type)
2333 + (INTEGRAL_TYPE_P (type)
2334 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2335
2336 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2337 {
2338 if (phi1op == VN_TOP)
2339 continue;
2340 result = iterative_hash_expr (phi1op, result);
2341 }
2342
2343 return result;
2344 }
2345
2346 /* Return the computed hashcode for phi operation P1. */
2347
2348 static hashval_t
2349 vn_phi_hash (const void *p1)
2350 {
2351 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2352 return vp1->hashcode;
2353 }
2354
2355 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2356
2357 static int
2358 vn_phi_eq (const void *p1, const void *p2)
2359 {
2360 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2361 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2362
2363 if (vp1->hashcode != vp2->hashcode)
2364 return false;
2365
2366 if (vp1->block == vp2->block)
2367 {
2368 int i;
2369 tree phi1op;
2370
2371 /* If the PHI nodes do not have compatible types
2372 they are not the same. */
2373 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2374 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2375 return false;
2376
2377 /* Any phi in the same block will have its arguments in the
2378 same edge order, because of how we store phi nodes. */
2379 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2380 {
2381 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2382 if (phi1op == VN_TOP || phi2op == VN_TOP)
2383 continue;
2384 if (!expressions_equal_p (phi1op, phi2op))
2385 return false;
2386 }
2387 return true;
2388 }
2389 return false;
2390 }
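
/* For example (made-up SSA names): two PHI nodes in the same block

     x_1 = PHI <a_2(3), b_4(5)>
     y_6 = PHI <a_2(3), b_4(5)>

   hash and compare equal because arguments of PHIs in one block are
   stored in the same edge order, so y_6 can be value numbered to x_1
   via vn_phi_lookup.  */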
2391
2392 static VEC(tree, heap) *shared_lookup_phiargs;
2393
2394 /* Look up PHI in the current hash table, and return the resulting
2395 value number if it exists in the hash table. Return NULL_TREE if
2396 it does not exist in the hash table. */
2397
2398 static tree
2399 vn_phi_lookup (gimple phi)
2400 {
2401 void **slot;
2402 struct vn_phi_s vp1;
2403 unsigned i;
2404
2405 VEC_truncate (tree, shared_lookup_phiargs, 0);
2406
2407 /* Canonicalize the SSA_NAMEs to their value numbers. */
2408 for (i = 0; i < gimple_phi_num_args (phi); i++)
2409 {
2410 tree def = PHI_ARG_DEF (phi, i);
2411 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2412 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2413 }
2414 vp1.phiargs = shared_lookup_phiargs;
2415 vp1.block = gimple_bb (phi);
2416 vp1.hashcode = vn_phi_compute_hash (&vp1);
2417 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2418 NO_INSERT);
2419 if (!slot && current_info == optimistic_info)
2420 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2421 NO_INSERT);
2422 if (!slot)
2423 return NULL_TREE;
2424 return ((vn_phi_t)*slot)->result;
2425 }
2426
2427 /* Insert PHI into the current hash table with a value number of
2428 RESULT. */
2429
2430 static vn_phi_t
2431 vn_phi_insert (gimple phi, tree result)
2432 {
2433 void **slot;
2434 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2435 unsigned i;
2436 VEC (tree, heap) *args = NULL;
2437
2438 /* Canonicalize the SSA_NAMEs to their value numbers. */
2439 for (i = 0; i < gimple_phi_num_args (phi); i++)
2440 {
2441 tree def = PHI_ARG_DEF (phi, i);
2442 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2443 VEC_safe_push (tree, heap, args, def);
2444 }
2445 vp1->value_id = VN_INFO (result)->value_id;
2446 vp1->phiargs = args;
2447 vp1->block = gimple_bb (phi);
2448 vp1->result = result;
2449 vp1->hashcode = vn_phi_compute_hash (vp1);
2450
2451 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2452 INSERT);
2453
2454 /* Because we iterate over phi operations more than once, it's
2455 possible the slot might already exist here, hence no assert. */
2456 *slot = vp1;
2457 return vp1;
2458 }
2459
2460
2461 /* Print set of components in strongly connected component SCC to OUT. */
2462
2463 static void
2464 print_scc (FILE *out, VEC (tree, heap) *scc)
2465 {
2466 tree var;
2467 unsigned int i;
2468
2469 fprintf (out, "SCC consists of:");
2470 FOR_EACH_VEC_ELT (tree, scc, i, var)
2471 {
2472 fprintf (out, " ");
2473 print_generic_expr (out, var, 0);
2474 }
2475 fprintf (out, "\n");
2476 }
2477
2478 /* Set the value number of FROM to TO, return true if it has changed
2479 as a result. */
2480
2481 static inline bool
2482 set_ssa_val_to (tree from, tree to)
2483 {
2484 tree currval = SSA_VAL (from);
2485
2486 if (from != to)
2487 {
2488 if (currval == from)
2489 {
2490 if (dump_file && (dump_flags & TDF_DETAILS))
2491 {
2492 fprintf (dump_file, "Not changing value number of ");
2493 print_generic_expr (dump_file, from, 0);
2494 fprintf (dump_file, " from VARYING to ");
2495 print_generic_expr (dump_file, to, 0);
2496 fprintf (dump_file, "\n");
2497 }
2498 return false;
2499 }
2500 else if (TREE_CODE (to) == SSA_NAME
2501 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2502 to = from;
2503 }
2504
2505 /* The only things we allow as value numbers are VN_TOP, SSA names
2506 and invariants. So assert that here. */
2507 gcc_assert (to != NULL_TREE
2508 && (to == VN_TOP
2509 || TREE_CODE (to) == SSA_NAME
2510 || is_gimple_min_invariant (to)));
2511
2512 if (dump_file && (dump_flags & TDF_DETAILS))
2513 {
2514 fprintf (dump_file, "Setting value number of ");
2515 print_generic_expr (dump_file, from, 0);
2516 fprintf (dump_file, " to ");
2517 print_generic_expr (dump_file, to, 0);
2518 }
2519
2520 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2521 {
2522 VN_INFO (from)->valnum = to;
2523 if (dump_file && (dump_flags & TDF_DETAILS))
2524 fprintf (dump_file, " (changed)\n");
2525 return true;
2526 }
2527 if (dump_file && (dump_flags & TDF_DETAILS))
2528 fprintf (dump_file, "\n");
2529 return false;
2530 }
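
/* A sketch of the transitions this enforces (the values are made up):
   a value number may only move down the lattice, e.g.

     SSA_VAL (x_1): VN_TOP  ->  5  ->  x_1 (VARYING)

   and once SSA_VAL (from) == from, i.e. FROM went VARYING, the early
   return above keeps it there.  */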
2531
2532 /* Mark as processed all the definitions in the defining stmt of USE, or
2533 the USE itself. */
2534
2535 static void
2536 mark_use_processed (tree use)
2537 {
2538 ssa_op_iter iter;
2539 def_operand_p defp;
2540 gimple stmt = SSA_NAME_DEF_STMT (use);
2541
2542 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2543 {
2544 VN_INFO (use)->use_processed = true;
2545 return;
2546 }
2547
2548 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2549 {
2550 tree def = DEF_FROM_PTR (defp);
2551
2552 VN_INFO (def)->use_processed = true;
2553 }
2554 }
2555
2556 /* Value number all definitions in STMT to themselves.
2557 Return true if a value number changed. */
2558
2559 static bool
2560 defs_to_varying (gimple stmt)
2561 {
2562 bool changed = false;
2563 ssa_op_iter iter;
2564 def_operand_p defp;
2565
2566 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2567 {
2568 tree def = DEF_FROM_PTR (defp);
2569 changed |= set_ssa_val_to (def, def);
2570 }
2571 return changed;
2572 }
2573
2574 static bool expr_has_constants (tree expr);
2575 static tree valueize_expr (tree expr);
2576
2577 /* Visit a copy between LHS and RHS, return true if the value number
2578 changed. */
2579
2580 static bool
2581 visit_copy (tree lhs, tree rhs)
2582 {
2583 /* Follow chains of copies to their destination. */
2584 while (TREE_CODE (rhs) == SSA_NAME
2585 && SSA_VAL (rhs) != rhs)
2586 rhs = SSA_VAL (rhs);
2587
2588 /* The copy may have a more interesting constant-filled expression
2589 (we don't, since we know our RHS is just an SSA name). */
2590 if (TREE_CODE (rhs) == SSA_NAME)
2591 {
2592 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2593 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2594 }
2595
2596 return set_ssa_val_to (lhs, rhs);
2597 }
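
/* For example (made-up SSA names): for the copy chain

     b_2 = a_1;
     c_3 = b_2;

   visiting c_3 follows SSA_VAL (b_2) to a_1, so both b_2 and c_3 end
   up value numbered directly to a_1 instead of to each other.  */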
2598
2599 /* Visit a nary operator RHS, value number it, and return true if the
2600 value number of LHS has changed as a result. */
2601
2602 static bool
2603 visit_nary_op (tree lhs, gimple stmt)
2604 {
2605 bool changed = false;
2606 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2607
2608 if (result)
2609 changed = set_ssa_val_to (lhs, result);
2610 else
2611 {
2612 changed = set_ssa_val_to (lhs, lhs);
2613 vn_nary_op_insert_stmt (stmt, lhs);
2614 }
2615
2616 return changed;
2617 }
2618
2619 /* Visit a call STMT storing into LHS. Return true if the value number
2620 of the LHS has changed as a result. */
2621
2622 static bool
2623 visit_reference_op_call (tree lhs, gimple stmt)
2624 {
2625 bool changed = false;
2626 struct vn_reference_s vr1;
2627 vn_reference_t vnresult = NULL;
2628 tree vuse = gimple_vuse (stmt);
2629 tree vdef = gimple_vdef (stmt);
2630
2631 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2632 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2633 vr1.type = gimple_expr_type (stmt);
2634 vr1.set = 0;
2635 vr1.hashcode = vn_reference_compute_hash (&vr1);
2636 vn_reference_lookup_1 (&vr1, &vnresult);
2637
2638 if (vnresult)
2639 {
2640 if (vnresult->result_vdef)
2641 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2642
2643 if (!vnresult->result && lhs)
2644 vnresult->result = lhs;
2645
2646 if (vnresult->result && lhs)
2647 {
2648 changed |= set_ssa_val_to (lhs, vnresult->result);
2649
2650 if (VN_INFO (vnresult->result)->has_constants)
2651 VN_INFO (lhs)->has_constants = true;
2652 }
2653 }
2654 else
2655 {
2656 void **slot;
2657 vn_reference_t vr2;
2658 if (vdef)
2659 changed |= set_ssa_val_to (vdef, vdef);
2660 if (lhs)
2661 changed |= set_ssa_val_to (lhs, lhs);
2662 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2663 vr2->vuse = vr1.vuse;
2664 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2665 vr2->type = vr1.type;
2666 vr2->set = vr1.set;
2667 vr2->hashcode = vr1.hashcode;
2668 vr2->result = lhs;
2669 vr2->result_vdef = vdef;
2670 slot = htab_find_slot_with_hash (current_info->references,
2671 vr2, vr2->hashcode, INSERT);
2672 if (*slot)
2673 free_reference (*slot);
2674 *slot = vr2;
2675 }
2676
2677 return changed;
2678 }
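
/* For example (a sketch; foo stands for some pure function): in

     x_1 = foo (a_2);
     ...
     y_3 = foo (a_2);

   both calls see the same incoming VUSE if nothing they may read from
   is clobbered in between, so the lookup for the second call finds the
   entry inserted for the first one and y_3 is value numbered to x_1.  */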
2679
2680 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2681 and return true if the value number of the LHS has changed as a result. */
2682
2683 static bool
2684 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2685 {
2686 bool changed = false;
2687 tree last_vuse;
2688 tree result;
2689
2690 last_vuse = gimple_vuse (stmt);
2691 last_vuse_ptr = &last_vuse;
2692 result = vn_reference_lookup (op, gimple_vuse (stmt),
2693 default_vn_walk_kind, NULL);
2694 last_vuse_ptr = NULL;
2695
2696 /* If we have a VCE, try looking up its operand as it might be stored in
2697 a different type. */
2698 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2699 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2700 default_vn_walk_kind, NULL);
2701
2702 /* We handle type-punning through unions by value-numbering based
2703 on offset and size of the access. Be prepared to handle a
2704 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2705 if (result
2706 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2707 {
2708 /* We will be setting the value number of lhs to the value number
2709 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2710 So first simplify and lookup this expression to see if it
2711 is already available. */
2712 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2713 if ((CONVERT_EXPR_P (val)
2714 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2715 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2716 {
2717 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2718 if ((CONVERT_EXPR_P (tem)
2719 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2720 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2721 TREE_TYPE (val), tem)))
2722 val = tem;
2723 }
2724 result = val;
2725 if (!is_gimple_min_invariant (val)
2726 && TREE_CODE (val) != SSA_NAME)
2727 result = vn_nary_op_lookup (val, NULL);
2728 /* If the expression is not yet available, value-number lhs to
2729 a new SSA_NAME we create. */
2730 if (!result)
2731 {
2732 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2733 /* Initialize value-number information properly. */
2734 VN_INFO_GET (result)->valnum = result;
2735 VN_INFO (result)->value_id = get_next_value_id ();
2736 VN_INFO (result)->expr = val;
2737 VN_INFO (result)->has_constants = expr_has_constants (val);
2738 VN_INFO (result)->needs_insertion = true;
2739 /* As all "inserted" statements are singleton SCCs, insert
2740 to the valid table. This is strictly needed to
2741 avoid re-generating new value SSA_NAMEs for the same
2742 expression during SCC iteration over and over (the
2743 optimistic table gets cleared after each iteration).
2744 We do not need to insert into the optimistic table, as
2745 lookups there will fall back to the valid table. */
2746 if (current_info == optimistic_info)
2747 {
2748 current_info = valid_info;
2749 vn_nary_op_insert (val, result);
2750 current_info = optimistic_info;
2751 }
2752 else
2753 vn_nary_op_insert (val, result);
2754 if (dump_file && (dump_flags & TDF_DETAILS))
2755 {
2756 fprintf (dump_file, "Inserting name ");
2757 print_generic_expr (dump_file, result, 0);
2758 fprintf (dump_file, " for expression ");
2759 print_generic_expr (dump_file, val, 0);
2760 fprintf (dump_file, "\n");
2761 }
2762 }
2763 }
2764
2765 if (result)
2766 {
2767 changed = set_ssa_val_to (lhs, result);
2768 if (TREE_CODE (result) == SSA_NAME
2769 && VN_INFO (result)->has_constants)
2770 {
2771 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2772 VN_INFO (lhs)->has_constants = true;
2773 }
2774 }
2775 else
2776 {
2777 changed = set_ssa_val_to (lhs, lhs);
2778 vn_reference_insert (op, lhs, last_vuse);
2779 }
2780
2781 return changed;
2782 }
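
/* A source-level sketch of the type-punning handling above (U is a
   made-up union):

     union U { int i; float f; } u;
     u.i = 1;
     x = u.f;

   The load of u.f matches the store to u.i by offset and size, and
   the type mismatch is patched up by simplifying and looking up
   VIEW_CONVERT_EXPR <float> (1), inserting a new name for it if it is
   not available yet.  */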
2783
2784
2785 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2786 and return true if the value number of the LHS has changed as a result. */
2787
2788 static bool
2789 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2790 {
2791 bool changed = false;
2792 tree result;
2793 bool resultsame = false;
2794
2795 /* First we want to look up using the *vuses* from the store and see
2796 whether the last store to this location with the same address had
2797 the same value.
2798
2799 The vuses represent the memory state before the store. If the
2800 memory state, address, and value of the store are the same as those
2801 of the last store to this location, then this store will produce
2802 the same memory state as that store.
2803
2804 In this case the vdef versions for this store are value numbered to those
2805 vuse versions, since they represent the same memory state after
2806 this store.
2807
2808 Otherwise, the vdefs for the store are used when inserting into
2809 the table, since the store generates a new memory state. */
2810
2811 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2812
2813 if (result)
2814 {
2815 if (TREE_CODE (result) == SSA_NAME)
2816 result = SSA_VAL (result);
2817 if (TREE_CODE (op) == SSA_NAME)
2818 op = SSA_VAL (op);
2819 resultsame = expressions_equal_p (result, op);
2820 }
2821
2822 if (!result || !resultsame)
2823 {
2824 tree vdef;
2825
2826 if (dump_file && (dump_flags & TDF_DETAILS))
2827 {
2828 fprintf (dump_file, "No store match\n");
2829 fprintf (dump_file, "Value numbering store ");
2830 print_generic_expr (dump_file, lhs, 0);
2831 fprintf (dump_file, " to ");
2832 print_generic_expr (dump_file, op, 0);
2833 fprintf (dump_file, "\n");
2834 }
2835 /* Have to set value numbers before insert, since insert is
2836 going to valueize the references in-place. */
2837 if ((vdef = gimple_vdef (stmt)))
2838 {
2839 changed |= set_ssa_val_to (vdef, vdef);
2840 }
2841
2842 /* Do not insert structure copies into the tables. */
2843 if (is_gimple_min_invariant (op)
2844 || is_gimple_reg (op))
2845 vn_reference_insert (lhs, op, vdef);
2846 }
2847 else
2848 {
2849 /* We had a match, so value number the vdef to have the value
2850 number of the vuse it came from. */
2851 tree def, use;
2852
2853 if (dump_file && (dump_flags & TDF_DETAILS))
2854 fprintf (dump_file, "Store matched earlier value, "
2855 "value numbering store vdefs to matching vuses.\n");
2856
2857 def = gimple_vdef (stmt);
2858 use = gimple_vuse (stmt);
2859
2860 changed |= set_ssa_val_to (def, SSA_VAL (use));
2861 }
2862
2863 return changed;
2864 }
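
/* For example (made-up SSA names): for the redundant store sequence

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_4;

   the second store looks up *p_1 at .MEM_3, finds the value x_4 it is
   about to store, and value numbers .MEM_5 to the value of .MEM_3
   instead of inserting: the memory states before and after the second
   store are the same.  */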
2865
2866 /* Visit and value number PHI, return true if the value number
2867 changed. */
2868
2869 static bool
2870 visit_phi (gimple phi)
2871 {
2872 bool changed = false;
2873 tree result;
2874 tree sameval = VN_TOP;
2875 bool allsame = true;
2876 unsigned i;
2877
2878 /* TODO: We could check for this in init_scc_vn, and replace this
2879 with a gcc_assert. */
2880 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2881 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2882
2883 /* See if all non-TOP arguments have the same value. TOP is
2884 equivalent to everything, so we can ignore it. */
2885 for (i = 0; i < gimple_phi_num_args (phi); i++)
2886 {
2887 tree def = PHI_ARG_DEF (phi, i);
2888
2889 if (TREE_CODE (def) == SSA_NAME)
2890 def = SSA_VAL (def);
2891 if (def == VN_TOP)
2892 continue;
2893 if (sameval == VN_TOP)
2894 {
2895 sameval = def;
2896 }
2897 else
2898 {
2899 if (!expressions_equal_p (def, sameval))
2900 {
2901 allsame = false;
2902 break;
2903 }
2904 }
2905 }
2906
2907 /* If all arguments value numbered to the same value, the phi node has that
2908 value. */
2909 if (allsame)
2910 {
2911 if (is_gimple_min_invariant (sameval))
2912 {
2913 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2914 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2915 }
2916 else
2917 {
2918 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2919 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2920 }
2921
2922 if (TREE_CODE (sameval) == SSA_NAME)
2923 return visit_copy (PHI_RESULT (phi), sameval);
2924
2925 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2926 }
2927
2928 /* Otherwise, see if it is equivalent to a phi node in this block. */
2929 result = vn_phi_lookup (phi);
2930 if (result)
2931 {
2932 if (TREE_CODE (result) == SSA_NAME)
2933 changed = visit_copy (PHI_RESULT (phi), result);
2934 else
2935 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2936 }
2937 else
2938 {
2939 vn_phi_insert (phi, PHI_RESULT (phi));
2940 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2941 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2942 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2943 }
2944
2945 return changed;
2946 }
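
/* For example (made-up SSA names): for

     x_1 = PHI <a_2(3), b_4(5)>

   where SSA_VAL (b_4) is still VN_TOP because b_4 has not been
   visited yet, the only meaningful argument is a_2 and x_1 is value
   numbered to a_2; only when two arguments really disagree do we fall
   back to looking for a congruent PHI node in the same block.  */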
2947
2948 /* Return true if EXPR contains constants. */
2949
2950 static bool
2951 expr_has_constants (tree expr)
2952 {
2953 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2954 {
2955 case tcc_unary:
2956 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2957
2958 case tcc_binary:
2959 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2960 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2961 /* Constants inside reference ops are rarely interesting, but
2962 it can take a lot of looking to find them. */
2963 case tcc_reference:
2964 case tcc_declaration:
2965 return false;
2966 default:
2967 return is_gimple_min_invariant (expr);
2968 }
2969 return false;
2970 }
2971
2972 /* Return true if STMT contains constants. */
2973
2974 static bool
2975 stmt_has_constants (gimple stmt)
2976 {
2977 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2978 return false;
2979
2980 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2981 {
2982 case GIMPLE_UNARY_RHS:
2983 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2984
2985 case GIMPLE_BINARY_RHS:
2986 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2987 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2988 case GIMPLE_TERNARY_RHS:
2989 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2990 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2991 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2992 case GIMPLE_SINGLE_RHS:
2993 /* Constants inside reference ops are rarely interesting, but
2994 it can take a lot of looking to find them. */
2995 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2996 default:
2997 gcc_unreachable ();
2998 }
2999 return false;
3000 }
3001
3002 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
3003 result.
3004 This is performed in place. */
3005
3006 static tree
3007 valueize_expr (tree expr)
3008 {
3009 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3010 {
3011 case tcc_binary:
3012 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3013 /* Fallthru. */
3014 case tcc_unary:
3015 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3016 break;
3017 default:;
3018 }
3019 return expr;
3020 }
3021
3022 /* Simplify the binary expression RHS, and return the result if
3023 simplified. */
3024
3025 static tree
3026 simplify_binary_expression (gimple stmt)
3027 {
3028 tree result = NULL_TREE;
3029 tree op0 = gimple_assign_rhs1 (stmt);
3030 tree op1 = gimple_assign_rhs2 (stmt);
3031 enum tree_code code = gimple_assign_rhs_code (stmt);
3032
3033 /* This will not catch every single case we could combine, but will
3034 catch those with constants. The goal here is to simultaneously
3035 combine constants between expressions, but avoid infinite
3036 expansion of expressions during simplification. */
3037 if (TREE_CODE (op0) == SSA_NAME)
3038 {
3039 if (VN_INFO (op0)->has_constants
3040 || TREE_CODE_CLASS (code) == tcc_comparison
3041 || code == COMPLEX_EXPR)
3042 op0 = valueize_expr (vn_get_expr_for (op0));
3043 else
3044 op0 = vn_valueize (op0);
3045 }
3046
3047 if (TREE_CODE (op1) == SSA_NAME)
3048 {
3049 if (VN_INFO (op1)->has_constants
3050 || code == COMPLEX_EXPR)
3051 op1 = valueize_expr (vn_get_expr_for (op1));
3052 else
3053 op1 = vn_valueize (op1);
3054 }
3055
3056 /* Pointer plus constant can be represented as an invariant address.
3057 Do so to allow further propagation; see also tree forwprop. */
3058 if (code == POINTER_PLUS_EXPR
3059 && host_integerp (op1, 1)
3060 && TREE_CODE (op0) == ADDR_EXPR
3061 && is_gimple_min_invariant (op0))
3062 return build_invariant_address (TREE_TYPE (op0),
3063 TREE_OPERAND (op0, 0),
3064 TREE_INT_CST_LOW (op1));
3065
3066 /* Avoid folding if nothing changed. */
3067 if (op0 == gimple_assign_rhs1 (stmt)
3068 && op1 == gimple_assign_rhs2 (stmt))
3069 return NULL_TREE;
3070
3071 fold_defer_overflow_warnings ();
3072
3073 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3074 if (result)
3075 STRIP_USELESS_TYPE_CONVERSION (result);
3076
3077 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3078 stmt, 0);
3079
3080 /* Make sure result is not a complex expression consisting
3081 of operators of operators (i.e. (a + b) + (a + c)).
3082 Otherwise, we will end up with unbounded expressions if
3083 fold does anything at all. */
3084 if (result && valid_gimple_rhs_p (result))
3085 return result;
3086
3087 return NULL_TREE;
3088 }
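
/* For example (made-up SSA names): with SSA_VAL (i_2) == 4, the
   statement

     p_1 = &a + i_2;

   has its POINTER_PLUS_EXPR folded into the invariant address
   &MEM[(void *)&a + 4B], so later references through p_1 can be
   resolved without any pointer arithmetic in the way.  */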
3089
3090 /* Simplify the unary expression RHS, and return the result if
3091 simplified. */
3092
3093 static tree
3094 simplify_unary_expression (gimple stmt)
3095 {
3096 tree result = NULL_TREE;
3097 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3098 enum tree_code code = gimple_assign_rhs_code (stmt);
3099
3100 /* We handle some tcc_reference codes here that are all
3101 GIMPLE_ASSIGN_SINGLE codes. */
3102 if (code == REALPART_EXPR
3103 || code == IMAGPART_EXPR
3104 || code == VIEW_CONVERT_EXPR
3105 || code == BIT_FIELD_REF)
3106 op0 = TREE_OPERAND (op0, 0);
3107
3108 if (TREE_CODE (op0) != SSA_NAME)
3109 return NULL_TREE;
3110
3111 orig_op0 = op0;
3112 if (VN_INFO (op0)->has_constants)
3113 op0 = valueize_expr (vn_get_expr_for (op0));
3114 else if (CONVERT_EXPR_CODE_P (code)
3115 || code == REALPART_EXPR
3116 || code == IMAGPART_EXPR
3117 || code == VIEW_CONVERT_EXPR
3118 || code == BIT_FIELD_REF)
3119 {
3120 /* We want to do tree-combining on conversion-like expressions.
3121 Make sure we feed only SSA_NAMEs or constants to fold though. */
3122 tree tem = valueize_expr (vn_get_expr_for (op0));
3123 if (UNARY_CLASS_P (tem)
3124 || BINARY_CLASS_P (tem)
3125 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3126 || TREE_CODE (tem) == SSA_NAME
3127 || TREE_CODE (tem) == CONSTRUCTOR
3128 || is_gimple_min_invariant (tem))
3129 op0 = tem;
3130 }
3131
3132 /* Avoid folding if nothing changed, but remember the expression. */
3133 if (op0 == orig_op0)
3134 return NULL_TREE;
3135
3136 if (code == BIT_FIELD_REF)
3137 {
3138 tree rhs = gimple_assign_rhs1 (stmt);
3139 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3140 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3141 }
3142 else
3143 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3144 if (result)
3145 {
3146 STRIP_USELESS_TYPE_CONVERSION (result);
3147 if (valid_gimple_rhs_p (result))
3148 return result;
3149 }
3150
3151 return NULL_TREE;
3152 }
3153
3154 /* Try to simplify RHS using equivalences and constant folding. */
3155
3156 static tree
3157 try_to_simplify (gimple stmt)
3158 {
3159 enum tree_code code = gimple_assign_rhs_code (stmt);
3160 tree tem;
3161
3162 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3163 in this case; there is no point in doing extra work. */
3164 if (code == SSA_NAME)
3165 return NULL_TREE;
3166
3167 /* First try constant folding based on our current lattice. */
3168 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3169 if (tem
3170 && (TREE_CODE (tem) == SSA_NAME
3171 || is_gimple_min_invariant (tem)))
3172 return tem;
3173
3174 /* If that didn't work try combining multiple statements. */
3175 switch (TREE_CODE_CLASS (code))
3176 {
3177 case tcc_reference:
3178 /* Fallthrough for some unary codes that can operate on registers. */
3179 if (!(code == REALPART_EXPR
3180 || code == IMAGPART_EXPR
3181 || code == VIEW_CONVERT_EXPR
3182 || code == BIT_FIELD_REF))
3183 break;
3184 /* We could do a little more with unary ops, if they expand
3185 into binary ops, but it's debatable whether it is worth it. */
3186 case tcc_unary:
3187 return simplify_unary_expression (stmt);
3188
3189 case tcc_comparison:
3190 case tcc_binary:
3191 return simplify_binary_expression (stmt);
3192
3193 default:
3194 break;
3195 }
3196
3197 return NULL_TREE;
3198 }
3199
3200 /* Visit and value number USE, return true if the value number
3201 changed. */
3202
3203 static bool
3204 visit_use (tree use)
3205 {
3206 bool changed = false;
3207 gimple stmt = SSA_NAME_DEF_STMT (use);
3208
3209 mark_use_processed (use);
3210
3211 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3212 if (dump_file && (dump_flags & TDF_DETAILS)
3213 && !SSA_NAME_IS_DEFAULT_DEF (use))
3214 {
3215 fprintf (dump_file, "Value numbering ");
3216 print_generic_expr (dump_file, use, 0);
3217 fprintf (dump_file, " stmt = ");
3218 print_gimple_stmt (dump_file, stmt, 0, 0);
3219 }
3220
3221 /* Handle uninitialized uses. */
3222 if (SSA_NAME_IS_DEFAULT_DEF (use))
3223 changed = set_ssa_val_to (use, use);
3224 else
3225 {
3226 if (gimple_code (stmt) == GIMPLE_PHI)
3227 changed = visit_phi (stmt);
3228 else if (gimple_has_volatile_ops (stmt))
3229 changed = defs_to_varying (stmt);
3230 else if (is_gimple_assign (stmt))
3231 {
3232 enum tree_code code = gimple_assign_rhs_code (stmt);
3233 tree lhs = gimple_assign_lhs (stmt);
3234 tree rhs1 = gimple_assign_rhs1 (stmt);
3235 tree simplified;
3236
3237 /* Shortcut for copies. Simplifying copies is pointless,
3238 since we copy the expression and value they represent. */
3239 if (code == SSA_NAME
3240 && TREE_CODE (lhs) == SSA_NAME)
3241 {
3242 changed = visit_copy (lhs, rhs1);
3243 goto done;
3244 }
3245 simplified = try_to_simplify (stmt);
3246 if (simplified)
3247 {
3248 if (dump_file && (dump_flags & TDF_DETAILS))
3249 {
3250 fprintf (dump_file, "RHS ");
3251 print_gimple_expr (dump_file, stmt, 0, 0);
3252 fprintf (dump_file, " simplified to ");
3253 print_generic_expr (dump_file, simplified, 0);
3254 if (TREE_CODE (lhs) == SSA_NAME)
3255 fprintf (dump_file, " has constants %d\n",
3256 expr_has_constants (simplified));
3257 else
3258 fprintf (dump_file, "\n");
3259 }
3260 }
3261 /* Setting value numbers to constants will occasionally
3262 screw up phi congruence because constants are not
3263 uniquely associated with a single ssa name that can be
3264 looked up. */
3265 if (simplified
3266 && is_gimple_min_invariant (simplified)
3267 && TREE_CODE (lhs) == SSA_NAME)
3268 {
3269 VN_INFO (lhs)->expr = simplified;
3270 VN_INFO (lhs)->has_constants = true;
3271 changed = set_ssa_val_to (lhs, simplified);
3272 goto done;
3273 }
3274 else if (simplified
3275 && TREE_CODE (simplified) == SSA_NAME
3276 && TREE_CODE (lhs) == SSA_NAME)
3277 {
3278 changed = visit_copy (lhs, simplified);
3279 goto done;
3280 }
3281 else if (simplified)
3282 {
3283 if (TREE_CODE (lhs) == SSA_NAME)
3284 {
3285 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3286 /* We have to unshare the expression or else
3287 valueizing may change the IL stream. */
3288 VN_INFO (lhs)->expr = unshare_expr (simplified);
3289 }
3290 }
3291 else if (stmt_has_constants (stmt)
3292 && TREE_CODE (lhs) == SSA_NAME)
3293 VN_INFO (lhs)->has_constants = true;
3294 else if (TREE_CODE (lhs) == SSA_NAME)
3295 {
3296 /* We reset expr and constantness here because we may
3297 have been value numbering optimistically, and
3298 iterating. They may become non-constant in this case,
3299 even if they were optimistically constant. */
3300
3301 VN_INFO (lhs)->has_constants = false;
3302 VN_INFO (lhs)->expr = NULL_TREE;
3303 }
3304
3305 if ((TREE_CODE (lhs) == SSA_NAME
3306 /* We can substitute SSA_NAMEs that are live over
3307 abnormal edges with their constant value. */
3308 && !(gimple_assign_copy_p (stmt)
3309 && is_gimple_min_invariant (rhs1))
3310 && !(simplified
3311 && is_gimple_min_invariant (simplified))
3312 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3313 /* Stores or copies from SSA_NAMEs that are live over
3314 abnormal edges are a problem. */
3315 || (code == SSA_NAME
3316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3317 changed = defs_to_varying (stmt);
3318 else if (REFERENCE_CLASS_P (lhs)
3319 || DECL_P (lhs))
3320 changed = visit_reference_op_store (lhs, rhs1, stmt);
3321 else if (TREE_CODE (lhs) == SSA_NAME)
3322 {
3323 if ((gimple_assign_copy_p (stmt)
3324 && is_gimple_min_invariant (rhs1))
3325 || (simplified
3326 && is_gimple_min_invariant (simplified)))
3327 {
3328 VN_INFO (lhs)->has_constants = true;
3329 if (simplified)
3330 changed = set_ssa_val_to (lhs, simplified);
3331 else
3332 changed = set_ssa_val_to (lhs, rhs1);
3333 }
3334 else
3335 {
3336 switch (get_gimple_rhs_class (code))
3337 {
3338 case GIMPLE_UNARY_RHS:
3339 case GIMPLE_BINARY_RHS:
3340 case GIMPLE_TERNARY_RHS:
3341 changed = visit_nary_op (lhs, stmt);
3342 break;
3343 case GIMPLE_SINGLE_RHS:
3344 switch (TREE_CODE_CLASS (code))
3345 {
3346 case tcc_reference:
3347 /* VOP-less references can go through the unary case. */
3348 if ((code == REALPART_EXPR
3349 || code == IMAGPART_EXPR
3350 || code == VIEW_CONVERT_EXPR
3351 || code == BIT_FIELD_REF)
3352 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3353 {
3354 changed = visit_nary_op (lhs, stmt);
3355 break;
3356 }
3357 /* Fallthrough. */
3358 case tcc_declaration:
3359 changed = visit_reference_op_load (lhs, rhs1, stmt);
3360 break;
3361 default:
3362 if (code == ADDR_EXPR)
3363 {
3364 changed = visit_nary_op (lhs, stmt);
3365 break;
3366 }
3367 else if (code == CONSTRUCTOR)
3368 {
3369 changed = visit_nary_op (lhs, stmt);
3370 break;
3371 }
3372 changed = defs_to_varying (stmt);
3373 }
3374 break;
3375 default:
3376 changed = defs_to_varying (stmt);
3377 break;
3378 }
3379 }
3380 }
3381 else
3382 changed = defs_to_varying (stmt);
3383 }
3384 else if (is_gimple_call (stmt))
3385 {
3386 tree lhs = gimple_call_lhs (stmt);
3387
3388 /* ??? We could try to simplify calls. */
3389
3390 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3391 {
3392 if (stmt_has_constants (stmt))
3393 VN_INFO (lhs)->has_constants = true;
3394 else
3395 {
3396 /* We reset expr and constantness here because we may
3397 have been value numbering optimistically, and
3398 iterating. They may become non-constant in this case,
3399 even if they were optimistically constant. */
3400 VN_INFO (lhs)->has_constants = false;
3401 VN_INFO (lhs)->expr = NULL_TREE;
3402 }
3403
3404 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3405 {
3406 changed = defs_to_varying (stmt);
3407 goto done;
3408 }
3409 }
3410
3411 /* ??? We should handle stores from calls. */
3412 if (!gimple_call_internal_p (stmt)
3413 && (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3414 /* If the call has side effects, subsequent calls won't have
3415 the same incoming vuse, so it's safe to assume
3416 equality. */
3417 || gimple_has_side_effects (stmt))
3418 && ((lhs && TREE_CODE (lhs) == SSA_NAME)
3419 || (!lhs && gimple_vdef (stmt))))
3420 {
3421 changed = visit_reference_op_call (lhs, stmt);
3422 }
3423 else
3424 changed = defs_to_varying (stmt);
3425 }
3426 else
3427 changed = defs_to_varying (stmt);
3428 }
3429 done:
3430 return changed;
3431 }
3432
3433 /* Compare two operands by reverse postorder index. */
3434
3435 static int
3436 compare_ops (const void *pa, const void *pb)
3437 {
3438 const tree opa = *((const tree *)pa);
3439 const tree opb = *((const tree *)pb);
3440 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3441 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3442 basic_block bba;
3443 basic_block bbb;
3444
3445 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3446 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3447 else if (gimple_nop_p (opstmta))
3448 return -1;
3449 else if (gimple_nop_p (opstmtb))
3450 return 1;
3451
3452 bba = gimple_bb (opstmta);
3453 bbb = gimple_bb (opstmtb);
3454
3455 if (!bba && !bbb)
3456 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3457 else if (!bba)
3458 return -1;
3459 else if (!bbb)
3460 return 1;
3461
3462 if (bba == bbb)
3463 {
3464 if (gimple_code (opstmta) == GIMPLE_PHI
3465 && gimple_code (opstmtb) == GIMPLE_PHI)
3466 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3467 else if (gimple_code (opstmta) == GIMPLE_PHI)
3468 return -1;
3469 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3470 return 1;
3471 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3472 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3473 else
3474 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3475 }
3476 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3477 }
3478
3479 /* Sort an array containing members of a strongly connected component
3480 SCC so that the members are ordered by RPO number.
3481 This means that when the sort is complete, iterating through the
3482 array will give you the members in RPO order. */
3483
3484 static void
3485 sort_scc (VEC (tree, heap) *scc)
3486 {
3487 VEC_qsort (tree, scc, compare_ops);
3488 }
3489
3490 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3491
3492 static void
3493 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3494 {
3495 size_t size = sizeof_vn_nary_op (onary->length);
3496 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3497 &info->nary_obstack);
3498 memcpy (nary, onary, size);
3499 vn_nary_op_insert_into (nary, info->nary, false);
3500 }
3501
3502 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3503
3504 static void
3505 copy_phi (vn_phi_t ophi, vn_tables_t info)
3506 {
3507 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3508 void **slot;
3509 memcpy (phi, ophi, sizeof (*phi));
3510 ophi->phiargs = NULL;
3511 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3512 gcc_assert (!*slot);
3513 *slot = phi;
3514 }
3515
3516 /* Insert the no longer used reference OREF into the hash table of INFO. */
3517
3518 static void
3519 copy_reference (vn_reference_t oref, vn_tables_t info)
3520 {
3521 vn_reference_t ref;
3522 void **slot;
3523 ref = (vn_reference_t) pool_alloc (info->references_pool);
3524 memcpy (ref, oref, sizeof (*ref));
3525 oref->operands = NULL;
3526 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3527 INSERT);
3528 if (*slot)
3529 free_reference (*slot);
3530 *slot = ref;
3531 }
3532
3533 /* Process a strongly connected component in the SSA graph. */
3534
3535 static void
3536 process_scc (VEC (tree, heap) *scc)
3537 {
3538 tree var;
3539 unsigned int i;
3540 unsigned int iterations = 0;
3541 bool changed = true;
3542 htab_iterator hi;
3543 vn_nary_op_t nary;
3544 vn_phi_t phi;
3545 vn_reference_t ref;
3546
3547 /* If the SCC has a single member, just visit it. */
3548 if (VEC_length (tree, scc) == 1)
3549 {
3550 tree use = VEC_index (tree, scc, 0);
3551 if (VN_INFO (use)->use_processed)
3552 return;
3553 /* We need to make sure it doesn't form a cycle itself, which can
3554 happen for self-referential PHI nodes. In that case we would
3555 end up inserting an expression with VN_TOP operands into the
3556 valid table which makes us derive bogus equivalences later.
3557 The cheapest way to check this is to assume it for all PHI nodes. */
3558 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3559 /* Fallthru to iteration. */ ;
3560 else
3561 {
3562 visit_use (use);
3563 return;
3564 }
3565 }
3566
3567 /* Iterate over the SCC with the optimistic table until it stops
3568 changing. */
3569 current_info = optimistic_info;
3570 while (changed)
3571 {
3572 changed = false;
3573 iterations++;
3574 if (dump_file && (dump_flags & TDF_DETAILS))
3575 fprintf (dump_file, "Starting iteration %d\n", iterations);
3576 /* As we are value-numbering optimistically we have to
3577 clear the expression tables and the simplified expressions
3578 in each iteration until we converge. */
3579 htab_empty (optimistic_info->nary);
3580 htab_empty (optimistic_info->phis);
3581 htab_empty (optimistic_info->references);
3582 obstack_free (&optimistic_info->nary_obstack, NULL);
3583 gcc_obstack_init (&optimistic_info->nary_obstack);
3584 empty_alloc_pool (optimistic_info->phis_pool);
3585 empty_alloc_pool (optimistic_info->references_pool);
3586 FOR_EACH_VEC_ELT (tree, scc, i, var)
3587 VN_INFO (var)->expr = NULL_TREE;
3588 FOR_EACH_VEC_ELT (tree, scc, i, var)
3589 changed |= visit_use (var);
3590 }
3591
3592 statistics_histogram_event (cfun, "SCC iterations", iterations);
3593
3594 /* Finally, copy the contents of the no longer used optimistic
3595 table to the valid table. */
3596 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3597 copy_nary (nary, valid_info);
3598 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3599 copy_phi (phi, valid_info);
3600 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3601 copy_reference (ref, valid_info);
3602
3603 current_info = valid_info;
3604 }
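
/* For example (made-up SSA names): for the self-referential cycle

     i_1 = PHI <0(2), i_3(4)>
     i_3 = i_1 + 0;

   the optimistic iteration first assumes i_1 == 0 (the i_3 argument
   is still VN_TOP), derives i_3 == 0 from that, and re-visits the SCC
   until nothing changes; here both names converge to the value 0, and
   only the converged entries survive the copy into the valid table
   above.  */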
3605
3606 DEF_VEC_O(ssa_op_iter);
3607 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3608
3609 /* Pop the components of the found SCC for NAME off the SCC stack
3610 and process them. Returns true if all went well, false if
3611 we run into resource limits. */
3612
3613 static bool
3614 extract_and_process_scc_for_name (tree name)
3615 {
3616 VEC (tree, heap) *scc = NULL;
3617 tree x;
3618
3619 /* Found an SCC, pop the components off the SCC stack and
3620 process them. */
3621 do
3622 {
3623 x = VEC_pop (tree, sccstack);
3624
3625 VN_INFO (x)->on_sccstack = false;
3626 VEC_safe_push (tree, heap, scc, x);
3627 } while (x != name);
3628
3629 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3630 if (VEC_length (tree, scc)
3631 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3632 {
3633 if (dump_file)
3634 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3635 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3636 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3637 return false;
3638 }
3639
3640 if (VEC_length (tree, scc) > 1)
3641 sort_scc (scc);
3642
3643 if (dump_file && (dump_flags & TDF_DETAILS))
3644 print_scc (dump_file, scc);
3645
3646 process_scc (scc);
3647
3648 VEC_free (tree, heap, scc);
3649
3650 return true;
3651 }
3652
3653 /* Depth first search on NAME to discover and process SCCs in the SSA
3654 graph.
3655 Execution of this algorithm relies on the fact that the SCCs are
3656 popped off the stack in topological order.
3657 Returns true if successful, false if we stopped processing SCCs due
3658 to resource constraints. */
3659
3660 static bool
3661 DFS (tree name)
3662 {
3663 VEC(ssa_op_iter, heap) *itervec = NULL;
3664 VEC(tree, heap) *namevec = NULL;
3665 use_operand_p usep = NULL;
3666 gimple defstmt;
3667 tree use;
3668 ssa_op_iter iter;
3669
3670 start_over:
3671 /* SCC info */
3672 VN_INFO (name)->dfsnum = next_dfs_num++;
3673 VN_INFO (name)->visited = true;
3674 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3675
3676 VEC_safe_push (tree, heap, sccstack, name);
3677 VN_INFO (name)->on_sccstack = true;
3678 defstmt = SSA_NAME_DEF_STMT (name);
3679
3680 /* Recursively DFS on our operands, looking for SCCs. */
3681 if (!gimple_nop_p (defstmt))
3682 {
3683 /* Push a new iterator. */
3684 if (gimple_code (defstmt) == GIMPLE_PHI)
3685 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3686 else
3687 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3688 }
3689 else
3690 clear_and_done_ssa_iter (&iter);
3691
3692 while (1)
3693 {
3694 /* If we are done processing uses of a name, go up the stack
3695 of iterators and process SCCs as we find them. */
3696 if (op_iter_done (&iter))
3697 {
3698 /* See if we found an SCC. */
3699 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3700 if (!extract_and_process_scc_for_name (name))
3701 {
3702 VEC_free (tree, heap, namevec);
3703 VEC_free (ssa_op_iter, heap, itervec);
3704 return false;
3705 }
3706
3707 /* Check if we are done. */
3708 if (VEC_empty (tree, namevec))
3709 {
3710 VEC_free (tree, heap, namevec);
3711 VEC_free (ssa_op_iter, heap, itervec);
3712 return true;
3713 }
3714
3715 /* Restore the last use walker and continue walking there. */
3716 use = name;
3717 name = VEC_pop (tree, namevec);
3718 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3719 sizeof (ssa_op_iter));
3720 VEC_pop (ssa_op_iter, itervec);
3721 goto continue_walking;
3722 }
3723
3724 use = USE_FROM_PTR (usep);
3725
3726 /* Since we handle phi nodes, we will sometimes get
3727 invariants in the use expression. */
3728 if (TREE_CODE (use) == SSA_NAME)
3729 {
3730 if (! (VN_INFO (use)->visited))
3731 {
3732 /* Recurse by pushing the current use walking state on
3733 the stack and starting over. */
3734 VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
3735 VEC_safe_push (tree, heap, namevec, name);
3736 name = use;
3737 goto start_over;
3738
3739 continue_walking:
3740 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3741 VN_INFO (use)->low);
3742 }
3743 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3744 && VN_INFO (use)->on_sccstack)
3745 {
3746 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3747 VN_INFO (name)->low);
3748 }
3749 }
3750
3751 usep = op_iter_next_use (&iter);
3752 }
3753 }
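
/* For reference, the recursion that the explicit namevec/itervec
   stacks above replace looks roughly like this.  This is an editor's
   sketch with hypothetical helpers INFO, PUSH and POP_SCC, not this
   file's API:

     static void
     dfs_sketch (tree name)
     {
       INFO (name)->dfsnum = INFO (name)->low = next_dfs_num++;
       PUSH (name);
       for each SSA name USE in the defining statement of NAME:
	 if (!INFO (use)->visited)
	   {
	     dfs_sketch (use);
	     INFO (name)->low = MIN (INFO (name)->low, INFO (use)->low);
	   }
	 else if (INFO (use)->on_sccstack)
	   INFO (name)->low = MIN (INFO (name)->low, INFO (use)->dfsnum);
       if (INFO (name)->low == INFO (name)->dfsnum)
	 POP_SCC (name);
     }

   Keeping the iteration state on heap-allocated vectors instead of
   the call stack avoids overflowing it on very deep SSA graphs.  */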
3754
3755 /* Allocate a value number table. */
3756
3757 static void
3758 allocate_vn_table (vn_tables_t table)
3759 {
3760 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3761 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3762 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3763 free_reference);
3764
3765 gcc_obstack_init (&table->nary_obstack);
3766 table->phis_pool = create_alloc_pool ("VN phis",
3767 sizeof (struct vn_phi_s),
3768 30);
3769 table->references_pool = create_alloc_pool ("VN references",
3770 sizeof (struct vn_reference_s),
3771 30);
3772 }
3773
3774 /* Free a value number table. */
3775
3776 static void
3777 free_vn_table (vn_tables_t table)
3778 {
3779 htab_delete (table->phis);
3780 htab_delete (table->nary);
3781 htab_delete (table->references);
3782 obstack_free (&table->nary_obstack, NULL);
3783 free_alloc_pool (table->phis_pool);
3784 free_alloc_pool (table->references_pool);
3785 }
3786
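/* Initialize the hash tables, per-SSA-name info and RPO numbering
   used by SCC value numbering.  Pairs with free_scc_vn below.  */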
3787 static void
3788 init_scc_vn (void)
3789 {
3790 size_t i;
3791 int j;
3792 int *rpo_numbers_temp;
3793
3794 calculate_dominance_info (CDI_DOMINATORS);
3795 sccstack = NULL;
3796 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3797 free);
3798
3799 constant_value_ids = BITMAP_ALLOC (NULL);
3800
3801 next_dfs_num = 1;
3802 next_value_id = 1;
3803
3804 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3805 /* VEC_alloc doesn't actually grow the vector to the requested size;
3806 it only preallocates the space, so grow (and zero) it explicitly. */
3807 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3808 gcc_obstack_init (&vn_ssa_aux_obstack);
3809
3810 shared_lookup_phiargs = NULL;
3811 shared_lookup_references = NULL;
3812 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3813 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3814 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3815
3816 /* RPO_NUMBERS_TEMP holds the RPO ordering itself: its i'th element
3817 is the index of the i'th basic block in reverse postorder. We want
3818 the inverse map, from block index to RPO number, so invert it here. */
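/* For instance, if the RPO walk visits the blocks with indices 2, 4
   and 3 in that order, rpo_numbers_temp is {2, 4, 3} and the loop
   below yields rpo_numbers[2] = 0, rpo_numbers[4] = 1 and
   rpo_numbers[3] = 2.  */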
3819 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3820 rpo_numbers[rpo_numbers_temp[j]] = j;
3821
3822 XDELETE (rpo_numbers_temp);
3823
3824 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3825
3826 /* Create the VN_INFO structures, and initialize value numbers to
3827 TOP. */
3828 for (i = 0; i < num_ssa_names; i++)
3829 {
3830 tree name = ssa_name (i);
3831 if (name)
3832 {
3833 VN_INFO_GET (name)->valnum = VN_TOP;
3834 VN_INFO (name)->expr = NULL_TREE;
3835 VN_INFO (name)->value_id = 0;
3836 }
3837 }
3838
3839 renumber_gimple_stmt_uids ();
3840
3841 /* Create the valid and optimistic value numbering tables. */
3842 valid_info = XCNEW (struct vn_tables_s);
3843 allocate_vn_table (valid_info);
3844 optimistic_info = XCNEW (struct vn_tables_s);
3845 allocate_vn_table (optimistic_info);
3846 }
3847
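/* Free everything allocated by init_scc_vn and by the value numbering
   run itself, including SSA names the pass created (those flagged
   needs_insertion).  */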
3848 void
3849 free_scc_vn (void)
3850 {
3851 size_t i;
3852
3853 htab_delete (constant_to_value_id);
3854 BITMAP_FREE (constant_value_ids);
3855 VEC_free (tree, heap, shared_lookup_phiargs);
3856 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3857 XDELETEVEC (rpo_numbers);
3858
3859 for (i = 0; i < num_ssa_names; i++)
3860 {
3861 tree name = ssa_name (i);
3862 if (name
3863 && VN_INFO (name)->needs_insertion)
3864 release_ssa_name (name);
3865 }
3866 obstack_free (&vn_ssa_aux_obstack, NULL);
3867 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3868
3869 VEC_free (tree, heap, sccstack);
3870 free_vn_table (valid_info);
3871 XDELETE (valid_info);
3872 free_vn_table (optimistic_info);
3873 XDELETE (optimistic_info);
3874 }
3875
3876 /* Set *ID to the value id of RESULT if RESULT is an SSA name or an invariant; leave *ID untouched otherwise. */
3877
3878 static void
3879 set_value_id_for_result (tree result, unsigned int *id)
3880 {
3881 if (result)
3882 {
3883 if (TREE_CODE (result) == SSA_NAME)
3884 *id = VN_INFO (result)->value_id;
3885 else if (is_gimple_min_invariant (result))
3886 *id = get_or_alloc_constant_value_id (result);
3887 }
3888 }
3889
3890 /* Set the value ids in the valid hash tables. */
3891
3892 static void
3893 set_hashtable_value_ids (void)
3894 {
3895 htab_iterator hi;
3896 vn_nary_op_t vno;
3897 vn_reference_t vr;
3898 vn_phi_t vp;
3899
3900 /* Now set the value ids of the entries we put into the valid
3901 hash tables. */
3902
3903 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3904 vno, vn_nary_op_t, hi)
3905 set_value_id_for_result (vno->result, &vno->value_id);
3906
3907 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3908 vp, vn_phi_t, hi)
3909 set_value_id_for_result (vp->result, &vp->value_id);
3910
3911 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3912 vr, vn_reference_t, hi)
3913 set_value_id_for_result (vr->result, &vr->value_id);
3914 }
3915
3916 /* Do SCCVN. Returns true if it finished, false if we bailed out
3917 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies how
3918 the alias oracle is used when walking memory references during VN. */
3919
3920 bool
3921 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3922 {
3923 size_t i;
3924 tree param;
3925 bool changed = true;
3926
3927 default_vn_walk_kind = default_vn_walk_kind_;
3928
3929 init_scc_vn ();
3930 current_info = valid_info;
3931
3932 for (param = DECL_ARGUMENTS (current_function_decl);
3933 param;
3934 param = DECL_CHAIN (param))
3935 {
3936 if (gimple_default_def (cfun, param) != NULL)
3937 {
3938 tree def = gimple_default_def (cfun, param);
3939 VN_INFO (def)->valnum = def;
3940 }
3941 }
3942
3943 for (i = 1; i < num_ssa_names; ++i)
3944 {
3945 tree name = ssa_name (i);
3946 if (name
3947 && VN_INFO (name)->visited == false
3948 && !has_zero_uses (name))
3949 if (!DFS (name))
3950 {
3951 free_scc_vn ();
3952 return false;
3953 }
3954 }
3955
3956 /* Initialize the value ids. */
3957
3958 for (i = 1; i < num_ssa_names; ++i)
3959 {
3960 tree name = ssa_name (i);
3961 vn_ssa_aux_t info;
3962 if (!name)
3963 continue;
3964 info = VN_INFO (name);
3965 if (info->valnum == name
3966 || info->valnum == VN_TOP)
3967 info->value_id = get_next_value_id ();
3968 else if (is_gimple_min_invariant (info->valnum))
3969 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3970 }
3971
3972 /* Propagate value ids until they stop changing. */
3973 while (changed)
3974 {
3975 changed = false;
3976 for (i = 1; i < num_ssa_names; ++i)
3977 {
3978 tree name = ssa_name (i);
3979 vn_ssa_aux_t info;
3980 if (!name)
3981 continue;
3982 info = VN_INFO (name);
3983 if (TREE_CODE (info->valnum) == SSA_NAME
3984 && info->valnum != name
3985 && info->value_id != VN_INFO (info->valnum)->value_id)
3986 {
3987 changed = true;
3988 info->value_id = VN_INFO (info->valnum)->value_id;
3989 }
3990 }
3991 }
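
/* Editor's note: the fixed-point loop above is needed because a
   name's valnum may itself be an SSA name whose value id is only
   copied on a later pass; e.g. if a_1's valnum is b_2 and b_2's
   valnum is c_3, then b_2 picks up c_3's value id on one pass and
   a_1 picks it up from b_2 on the next.  */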
3992
3993 set_hashtable_value_ids ();
3994
3995 if (dump_file && (dump_flags & TDF_DETAILS))
3996 {
3997 fprintf (dump_file, "Value numbers:\n");
3998 for (i = 0; i < num_ssa_names; i++)
3999 {
4000 tree name = ssa_name (i);
4001 if (name
4002 && VN_INFO (name)->visited
4003 && SSA_VAL (name) != name)
4004 {
4005 print_generic_expr (dump_file, name, 0);
4006 fprintf (dump_file, " = ");
4007 print_generic_expr (dump_file, SSA_VAL (name), 0);
4008 fprintf (dump_file, "\n");
4009 }
4010 }
4011 }
4012
4013 return true;
4014 }
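
/* A minimal usage sketch for the entry points above (hypothetical
   caller, not part of this file):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;  (bailed out; no value numbers are available)

     tree val = SSA_VAL (name);
     unsigned int id = VN_INFO (name)->value_id;
     (... use the equivalences ...)

     free_scc_vn ();
*/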
4015
4016 /* Return an exclusive upper bound on the value ids handed out so far, i.e. the id the next call to get_next_value_id will return. */
4017
4018 unsigned int
4019 get_max_value_id (void)
4020 {
4021 return next_value_id;
4022 }
4023
4024 /* Return the next unique value id. */
4025
4026 unsigned int
4027 get_next_value_id (void)
4028 {
4029 return next_value_id++;
4030 }
4031
4032
4033 /* Compare two expressions E1 and E2 and return true if they are equal. */
4034
4035 bool
4036 expressions_equal_p (tree e1, tree e2)
4037 {
4038 /* The obvious case. */
4039 if (e1 == e2)
4040 return true;
4041
4042 /* If only one of them is null, they cannot be equal. */
4043 if (!e1 || !e2)
4044 return false;
4045
4046 /* Now perform the actual comparison. */
4047 if (TREE_CODE (e1) == TREE_CODE (e2)
4048 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4049 return true;
4050
4051 return false;
4052 }
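
/* Editor's example: two distinct ADDR_EXPR trees taking the address
   of the same decl compare equal through operand_equal_p above even
   though the initial pointer comparison fails.  */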
4053
4054
4055 /* Return true if the nary operation NARY may trap. This is a copy
4056 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4057
4058 bool
4059 vn_nary_may_trap (vn_nary_op_t nary)
4060 {
4061 tree type;
4062 tree rhs2 = NULL_TREE;
4063 bool honor_nans = false;
4064 bool honor_snans = false;
4065 bool fp_operation = false;
4066 bool honor_trapv = false;
4067 bool handled, ret;
4068 unsigned i;
4069
4070 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4071 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4072 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4073 {
4074 type = nary->type;
4075 fp_operation = FLOAT_TYPE_P (type);
4076 if (fp_operation)
4077 {
4078 honor_nans = flag_trapping_math && !flag_finite_math_only;
4079 honor_snans = flag_signaling_nans != 0;
4080 }
4081 else if (INTEGRAL_TYPE_P (type)
4082 && TYPE_OVERFLOW_TRAPS (type))
4083 honor_trapv = true;
4084 }
4085 if (nary->length >= 2)
4086 rhs2 = nary->op[1];
4087 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4088 honor_trapv,
4089 honor_nans, honor_snans, rhs2,
4090 &handled);
4091 if (handled
4092 && ret)
4093 return true;
4094
4095 for (i = 0; i < nary->length; ++i)
4096 if (tree_could_trap_p (nary->op[i]))
4097 return true;
4098
4099 return false;
4100 }
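
/* Editor's example: under the logic above an integer TRUNC_DIV_EXPR
   counts as possibly trapping unless its divisor is a nonzero
   constant, and an ordered floating-point comparison counts as
   trapping when NaNs are honored.  */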