gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight-line code, it is equivalent to a regular hash-based value
67 numbering that is performed in reverse postorder.
68
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
72
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
77
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
82
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
86
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
91
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
95
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
98
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
104
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
110
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
118 */
119
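/* As a small illustration (not part of the implementation itself),
   consider the SSA cycle

       # i_1 = PHI <0(entry), i_3(latch)>
       i_2 = i_1 + 0;
       i_3 = i_2;

   While walking the SCC, the optimistic table ignores the yet-unvisited
   (VN_TOP) PHI argument, so i_1 optimistically gets the value 0, and
   i_2 and i_3 follow.  Re-iterating the cycle with PHI <0, 0> changes
   nothing, so the assumption is proven and the whole cycle value-numbers
   to the constant 0; had an iteration refuted the assumption, the
   optimistic entries would be cleared and the SCC iterated again.  */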
120
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
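/* SSA name versions of default-definition parameters that were determined
   to point to readonly memory (see vn_reference_lookup_3).  */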
124 bitmap const_parms;
125
126 /* vn_nary_op hashtable helpers. */
127
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
129 {
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
133 };
134
135 /* Return the computed hashcode for nary operation VNO1. */
136
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
139 {
140 return vno1->hashcode;
141 }
142
143 /* Compare nary operations VNO1 and VNO2 and return true if they are
144 equivalent. */
145
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
148 {
149 return vn_nary_op_eq (vno1, vno2);
150 }
151
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
154
155
156 /* vn_phi hashtable helpers. */
157
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
160
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
162 {
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
166 };
167
168 /* Return the computed hashcode for phi operation VP1. */
169
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
172 {
173 return vp1->hashcode;
174 }
175
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
177
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
180 {
181 return vn_phi_eq (vp1, vp2);
182 }
183
184 /* Free the phi operation structure PHI. */
185
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
188 {
189 phi->phiargs.release ();
190 }
191
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
194
195
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
198
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
201 {
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
204
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
214 }
215
216 /* Free the reference operation structure VR. */
217
218 static inline void
219 free_reference (vn_reference_s *vr)
220 {
221 vr->operands.release ();
222 }
223
224
225 /* vn_reference hashtable helpers. */
226
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
228 {
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
232 };
233
234 /* Return the hashcode for a given reference operation VR1. */
235
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
238 {
239 return vr1->hashcode;
240 }
241
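/* Return true if the reference operations V and C are equivalent. */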
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
244 {
245 return vn_reference_eq (v, c);
246 }
247
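/* Free the reference operation structure V. */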
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
250 {
251 free_reference (v);
252 }
253
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
256
257
258 /* The set of hashtables and alloc_pool's for their items. */
259
260 typedef struct vn_tables_s
261 {
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
269
270
271 /* vn_constant hashtable helpers. */
272
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
274 {
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
277 };
278
279 /* Hash table hash function for vn_constant_t. */
280
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
283 {
284 return vc1->hashcode;
285 }
286
287 /* Hash table equality function for vn_constant_t. */
288
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
291 {
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
294
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
296 }
297
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
300
301
302 /* Valid hashtables storing information we have proven to be
303 correct. */
304
305 static vn_tables_t valid_info;
306
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
309
310 static vn_tables_t optimistic_info;
311
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
315
316 static vn_tables_t current_info;
317
318
319 /* Reverse post order index for each basic block. */
320
321 static int *rpo_numbers;
322
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
324
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
328
329 static inline tree
330 vuse_ssa_val (tree x)
331 {
332 if (!x)
333 return NULL_TREE;
334
335 do
336 {
337 x = SSA_VAL (x);
338 }
339 while (SSA_NAME_IN_FREE_LIST (x));
340
341 return x;
342 }
343
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
346
347 tree VN_TOP;
348
349 /* Unique counter for our value ids. */
350
351 static unsigned int next_value_id;
352
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
355
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
358
359
360
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
364
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
367
368 /* Return whether there is value numbering information for a given SSA name. */
369
370 bool
371 has_VN_INFO (tree name)
372 {
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
376 }
377
378 /* Return the value numbering information for a given SSA name. */
379
380 vn_ssa_aux_t
381 VN_INFO (tree name)
382 {
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
386 }
387
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
390
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
393 {
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
395 }
396
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
399
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
402 {
403 vn_ssa_aux_t newinfo;
404
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
413 }
414
415
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
418
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
421 {
422 switch (gimple_code (stmt))
423 {
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
429 {
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
433 {
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
440 {
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
449
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
453
454 case tcc_constant:
455 return VN_CONSTANT;
456
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
464 }
465 default:
466 return VN_NONE;
467 }
468 }
469 default:
470 return VN_NONE;
471 }
472 }
473
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist, return 0. */
476
477 unsigned int
478 get_constant_value_id (tree constant)
479 {
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
482
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
489 }
490
491 /* Lookup a value id for CONSTANT; if it does not exist, create a
492 new one. In either case, return the value id. */
493
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
496 {
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
500
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
506
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
514 }
515
516 /* Return true if V is a value id for a constant. */
517
518 bool
519 value_id_constant_p (unsigned int v)
520 {
521 return bitmap_bit_p (constant_value_ids, v);
522 }
523
524 /* Compute the hash for a reference operand VRO1. */
525
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
528 {
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
536 }
537
538 /* Compute a hash for the reference operation VR1 and return it. */
539
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
542 {
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
549
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
551 {
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
557 {
558 if (off == -1)
559 off = 0;
560 off += vro->off;
561 }
562 else
563 {
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
570 {
571 if (vro->op0)
572 {
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
576 }
577 }
578 else
579 vn_reference_op_compute_hash (vro, hstate);
580 }
581 }
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
586
587 return result;
588 }
589
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
592
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
595 {
596 unsigned i, j;
597
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
601
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
605
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
609
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
612
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
615 {
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
618 }
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
627
628 i = 0;
629 j = 0;
630 do
631 {
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
637 {
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
646 }
647 for (; vr2->operands.iterate (j, &vro2); j++)
648 {
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
657 }
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
661 {
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
668 }
669 if (deref2 && vro2->opcode == ADDR_EXPR)
670 {
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
677 }
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
684 }
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
687
688 return true;
689 }
690
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
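/* As a rough illustration (not taken from any particular testcase), a load
   from a.b[i_1] is decomposed outermost-to-innermost into approximately

       { ARRAY_REF <op0 i_1, op1 low-bound, op2 element-size>,
         COMPONENT_REF <op0 FIELD_DECL b>,
         MEM_REF <op0 0>, ADDR_EXPR <op0 &a> }

   where the base decl has been canonicalized to the trailing
   MEM_REF/ADDR_EXPR pair as done for the VAR_DECL case below.  */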
693
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
696 {
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
698 {
699 vn_reference_op_s temp;
700
701 result->reserve (3);
702
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
713
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
720
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
728 }
729
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
733 {
734 vn_reference_op_s temp;
735
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
740
741 switch (temp.opcode)
742 {
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
753 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
754 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
755 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
756 temp.base = MR_DEPENDENCE_BASE (ref);
757 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
758 break;
759 case BIT_FIELD_REF:
760 /* Record bits, position and storage order. */
761 temp.op0 = TREE_OPERAND (ref, 1);
762 temp.op1 = TREE_OPERAND (ref, 2);
763 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
764 {
765 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
766 if (off % BITS_PER_UNIT == 0)
767 temp.off = off / BITS_PER_UNIT;
768 }
769 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
770 break;
771 case COMPONENT_REF:
772 /* The field decl is enough to unambiguously specify the field,
773 a matching type is not necessary and a mismatching type
774 is always a spurious difference. */
775 temp.type = NULL_TREE;
776 temp.op0 = TREE_OPERAND (ref, 1);
777 temp.op1 = TREE_OPERAND (ref, 2);
778 {
779 tree this_offset = component_ref_field_offset (ref);
780 if (this_offset
781 && TREE_CODE (this_offset) == INTEGER_CST)
782 {
783 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
784 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
785 {
786 offset_int off
787 = (wi::to_offset (this_offset)
788 + wi::lrshift (wi::to_offset (bit_offset),
789 LOG2_BITS_PER_UNIT));
790 if (wi::fits_shwi_p (off)
791 /* Prohibit value-numbering zero offset components
792 of addresses the same before the pass folding
793 __builtin_object_size had a chance to run
794 (checking cfun->after_inlining does the
795 trick here). */
796 && (TREE_CODE (orig) != ADDR_EXPR
797 || off != 0
798 || cfun->after_inlining))
799 temp.off = off.to_shwi ();
800 }
801 }
802 }
803 break;
804 case ARRAY_RANGE_REF:
805 case ARRAY_REF:
806 /* Record index as operand. */
807 temp.op0 = TREE_OPERAND (ref, 1);
808 /* Always record lower bounds and element size. */
809 temp.op1 = array_ref_low_bound (ref);
810 temp.op2 = array_ref_element_size (ref);
811 if (TREE_CODE (temp.op0) == INTEGER_CST
812 && TREE_CODE (temp.op1) == INTEGER_CST
813 && TREE_CODE (temp.op2) == INTEGER_CST)
814 {
815 offset_int off = ((wi::to_offset (temp.op0)
816 - wi::to_offset (temp.op1))
817 * wi::to_offset (temp.op2));
818 if (wi::fits_shwi_p (off))
819 temp.off = off.to_shwi();
820 }
821 break;
822 case VAR_DECL:
823 if (DECL_HARD_REGISTER (ref))
824 {
825 temp.op0 = ref;
826 break;
827 }
828 /* Fallthru. */
829 case PARM_DECL:
830 case CONST_DECL:
831 case RESULT_DECL:
832 /* Canonicalize decls to MEM[&decl] which is what we end up with
833 when valueizing MEM[ptr] with ptr = &decl. */
834 temp.opcode = MEM_REF;
835 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
836 temp.off = 0;
837 result->safe_push (temp);
838 temp.opcode = ADDR_EXPR;
839 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
840 temp.type = TREE_TYPE (temp.op0);
841 temp.off = -1;
842 break;
843 case STRING_CST:
844 case INTEGER_CST:
845 case COMPLEX_CST:
846 case VECTOR_CST:
847 case REAL_CST:
848 case FIXED_CST:
849 case CONSTRUCTOR:
850 case SSA_NAME:
851 temp.op0 = ref;
852 break;
853 case ADDR_EXPR:
854 if (is_gimple_min_invariant (ref))
855 {
856 temp.op0 = ref;
857 break;
858 }
859 break;
860 /* These are only interesting for their operands, their
861 existence, and their type. They will never be the last
862 ref in the chain of references (i.e. they require an
863 operand), so we don't have to put anything
864 for op* as it will be handled by the iteration. */
865 case REALPART_EXPR:
866 temp.off = 0;
867 break;
868 case VIEW_CONVERT_EXPR:
869 temp.off = 0;
870 temp.reverse = storage_order_barrier_p (ref);
871 break;
872 case IMAGPART_EXPR:
873 /* This is only interesting for its constant offset. */
874 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
875 break;
876 default:
877 gcc_unreachable ();
878 }
879 result->safe_push (temp);
880
881 if (REFERENCE_CLASS_P (ref)
882 || TREE_CODE (ref) == MODIFY_EXPR
883 || TREE_CODE (ref) == WITH_SIZE_EXPR
884 || (TREE_CODE (ref) == ADDR_EXPR
885 && !is_gimple_min_invariant (ref)))
886 ref = TREE_OPERAND (ref, 0);
887 else
888 ref = NULL_TREE;
889 }
890 }
891
892 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
893 operands in *OPS, the reference alias set SET and the reference type TYPE.
894 Return true if something useful was produced. */
895
896 bool
897 ao_ref_init_from_vn_reference (ao_ref *ref,
898 alias_set_type set, tree type,
899 vec<vn_reference_op_s> ops)
900 {
901 vn_reference_op_t op;
902 unsigned i;
903 tree base = NULL_TREE;
904 tree *op0_p = &base;
905 offset_int offset = 0;
906 offset_int max_size;
907 offset_int size = -1;
908 tree size_tree = NULL_TREE;
909 alias_set_type base_alias_set = -1;
910
911 /* First get the final access size from just the outermost expression. */
912 op = &ops[0];
913 if (op->opcode == COMPONENT_REF)
914 size_tree = DECL_SIZE (op->op0);
915 else if (op->opcode == BIT_FIELD_REF)
916 size_tree = op->op0;
917 else
918 {
919 machine_mode mode = TYPE_MODE (type);
920 if (mode == BLKmode)
921 size_tree = TYPE_SIZE (type);
922 else
923 size = int (GET_MODE_BITSIZE (mode));
924 }
925 if (size_tree != NULL_TREE
926 && TREE_CODE (size_tree) == INTEGER_CST)
927 size = wi::to_offset (size_tree);
928
929 /* Initially, maxsize is the same as the accessed element size.
930 In the following it will only grow (or become -1). */
931 max_size = size;
932
933 /* Compute cumulative bit-offset for nested component-refs and array-refs,
934 and find the ultimate containing object. */
935 FOR_EACH_VEC_ELT (ops, i, op)
936 {
937 switch (op->opcode)
938 {
939 /* These may be in the reference ops, but we cannot do anything
940 sensible with them here. */
941 case ADDR_EXPR:
942 /* Apart from ADDR_EXPR arguments to MEM_REF. */
943 if (base != NULL_TREE
944 && TREE_CODE (base) == MEM_REF
945 && op->op0
946 && DECL_P (TREE_OPERAND (op->op0, 0)))
947 {
948 vn_reference_op_t pop = &ops[i-1];
949 base = TREE_OPERAND (op->op0, 0);
950 if (pop->off == -1)
951 {
952 max_size = -1;
953 offset = 0;
954 }
955 else
956 offset += pop->off * BITS_PER_UNIT;
957 op0_p = NULL;
958 break;
959 }
960 /* Fallthru. */
961 case CALL_EXPR:
962 return false;
963
964 /* Record the base objects. */
965 case MEM_REF:
966 base_alias_set = get_deref_alias_set (op->op0);
967 *op0_p = build2 (MEM_REF, op->type,
968 NULL_TREE, op->op0);
969 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
970 MR_DEPENDENCE_BASE (*op0_p) = op->base;
971 op0_p = &TREE_OPERAND (*op0_p, 0);
972 break;
973
974 case VAR_DECL:
975 case PARM_DECL:
976 case RESULT_DECL:
977 case SSA_NAME:
978 *op0_p = op->op0;
979 op0_p = NULL;
980 break;
981
982 /* And now the usual component-reference style ops. */
983 case BIT_FIELD_REF:
984 offset += wi::to_offset (op->op1);
985 break;
986
987 case COMPONENT_REF:
988 {
989 tree field = op->op0;
990 /* We do not have a complete COMPONENT_REF tree here so we
991 cannot use component_ref_field_offset. Do the interesting
992 parts manually. */
993 tree this_offset = DECL_FIELD_OFFSET (field);
994
995 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
996 max_size = -1;
997 else
998 {
999 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
1000 LOG2_BITS_PER_UNIT);
1001 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1002 offset += woffset;
1003 }
1004 break;
1005 }
1006
1007 case ARRAY_RANGE_REF:
1008 case ARRAY_REF:
1009 /* We recorded the lower bound and the element size. */
1010 if (TREE_CODE (op->op0) != INTEGER_CST
1011 || TREE_CODE (op->op1) != INTEGER_CST
1012 || TREE_CODE (op->op2) != INTEGER_CST)
1013 max_size = -1;
1014 else
1015 {
1016 offset_int woffset
1017 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1018 TYPE_PRECISION (TREE_TYPE (op->op0)));
1019 woffset *= wi::to_offset (op->op2);
1020 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1021 offset += woffset;
1022 }
1023 break;
1024
1025 case REALPART_EXPR:
1026 break;
1027
1028 case IMAGPART_EXPR:
1029 offset += size;
1030 break;
1031
1032 case VIEW_CONVERT_EXPR:
1033 break;
1034
1035 case STRING_CST:
1036 case INTEGER_CST:
1037 case COMPLEX_CST:
1038 case VECTOR_CST:
1039 case REAL_CST:
1040 case CONSTRUCTOR:
1041 case CONST_DECL:
1042 return false;
1043
1044 default:
1045 return false;
1046 }
1047 }
1048
1049 if (base == NULL_TREE)
1050 return false;
1051
1052 ref->ref = NULL_TREE;
1053 ref->base = base;
1054 ref->ref_alias_set = set;
1055 if (base_alias_set != -1)
1056 ref->base_alias_set = base_alias_set;
1057 else
1058 ref->base_alias_set = get_alias_set (base);
1059 /* We discount volatiles from value-numbering elsewhere. */
1060 ref->volatile_p = false;
1061
1062 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1063 {
1064 ref->offset = 0;
1065 ref->size = -1;
1066 ref->max_size = -1;
1067 return true;
1068 }
1069
1070 ref->size = size.to_shwi ();
1071
1072 if (!wi::fits_shwi_p (offset))
1073 {
1074 ref->offset = 0;
1075 ref->max_size = -1;
1076 return true;
1077 }
1078
1079 ref->offset = offset.to_shwi ();
1080
1081 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1082 ref->max_size = -1;
1083 else
1084 ref->max_size = max_size.to_shwi ();
1085
1086 return true;
1087 }
1088
1089 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1090 vn_reference_op_s's. */
1091
1092 static void
1093 copy_reference_ops_from_call (gcall *call,
1094 vec<vn_reference_op_s> *result)
1095 {
1096 vn_reference_op_s temp;
1097 unsigned i;
1098 tree lhs = gimple_call_lhs (call);
1099 int lr;
1100
1101 /* If two calls have a different non-SSA LHS, their vdef value numbers should
1102 be different. By adding the LHS to the vector here, we ensure that the
1103 hashcode is different, guaranteeing a different value number. */
1104 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1105 {
1106 memset (&temp, 0, sizeof (temp));
1107 temp.opcode = MODIFY_EXPR;
1108 temp.type = TREE_TYPE (lhs);
1109 temp.op0 = lhs;
1110 temp.off = -1;
1111 result->safe_push (temp);
1112 }
1113
1114 /* Copy the type, opcode, function, static chain and EH region, if any. */
1115 memset (&temp, 0, sizeof (temp));
1116 temp.type = gimple_call_return_type (call);
1117 temp.opcode = CALL_EXPR;
1118 temp.op0 = gimple_call_fn (call);
1119 temp.op1 = gimple_call_chain (call);
1120 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1121 temp.op2 = size_int (lr);
1122 temp.off = -1;
1123 if (gimple_call_with_bounds_p (call))
1124 temp.with_bounds = 1;
1125 result->safe_push (temp);
1126
1127 /* Copy the call arguments. As they can be references as well,
1128 just chain them together. */
1129 for (i = 0; i < gimple_call_num_args (call); ++i)
1130 {
1131 tree callarg = gimple_call_arg (call, i);
1132 copy_reference_ops_from_ref (callarg, result);
1133 }
1134 }
1135
1136 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1137 *I_P to point to the last element of the replacement. */
1138 static bool
1139 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1140 unsigned int *i_p)
1141 {
1142 unsigned int i = *i_p;
1143 vn_reference_op_t op = &(*ops)[i];
1144 vn_reference_op_t mem_op = &(*ops)[i - 1];
1145 tree addr_base;
1146 HOST_WIDE_INT addr_offset = 0;
1147
1148 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1149 from .foo.bar to the preceding MEM_REF offset and replace the
1150 address with &OBJ. */
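/* Purely as an illustration: for a MEM_REF operand with offset 8 whose
   address operand is &s.a.b, the address is rewritten to &s and the
   MEM_REF offset becomes 8 plus the byte offset of .a.b within s.  */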
1151 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1152 &addr_offset);
1153 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1154 if (addr_base != TREE_OPERAND (op->op0, 0))
1155 {
1156 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1157 off += addr_offset;
1158 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1159 op->op0 = build_fold_addr_expr (addr_base);
1160 if (tree_fits_shwi_p (mem_op->op0))
1161 mem_op->off = tree_to_shwi (mem_op->op0);
1162 else
1163 mem_op->off = -1;
1164 return true;
1165 }
1166 return false;
1167 }
1168
1169 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1170 *I_P to point to the last element of the replacement. */
1171 static bool
1172 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1173 unsigned int *i_p)
1174 {
1175 unsigned int i = *i_p;
1176 vn_reference_op_t op = &(*ops)[i];
1177 vn_reference_op_t mem_op = &(*ops)[i - 1];
1178 gimple *def_stmt;
1179 enum tree_code code;
1180 offset_int off;
1181
1182 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1183 if (!is_gimple_assign (def_stmt))
1184 return false;
1185
1186 code = gimple_assign_rhs_code (def_stmt);
1187 if (code != ADDR_EXPR
1188 && code != POINTER_PLUS_EXPR)
1189 return false;
1190
1191 off = offset_int::from (mem_op->op0, SIGNED);
1192
1193 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1194 from .foo.bar to the preceding MEM_REF offset and replace the
1195 address with &OBJ. */
1196 if (code == ADDR_EXPR)
1197 {
1198 tree addr, addr_base;
1199 HOST_WIDE_INT addr_offset;
1200
1201 addr = gimple_assign_rhs1 (def_stmt);
1202 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1203 &addr_offset);
1204 /* If that didn't work because the address isn't invariant, propagate
1205 the reference tree from the address operation in case the current
1206 dereference isn't offsetted. */
1207 if (!addr_base
1208 && *i_p == ops->length () - 1
1209 && off == 0
1210 /* This makes us disable this transform for PRE where the
1211 reference ops might be also used for code insertion which
1212 is invalid. */
1213 && default_vn_walk_kind == VN_WALKREWRITE)
1214 {
1215 auto_vec<vn_reference_op_s, 32> tem;
1216 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1217 ops->pop ();
1218 ops->pop ();
1219 ops->safe_splice (tem);
1220 --*i_p;
1221 return true;
1222 }
1223 if (!addr_base
1224 || TREE_CODE (addr_base) != MEM_REF)
1225 return false;
1226
1227 off += addr_offset;
1228 off += mem_ref_offset (addr_base);
1229 op->op0 = TREE_OPERAND (addr_base, 0);
1230 }
1231 else
1232 {
1233 tree ptr, ptroff;
1234 ptr = gimple_assign_rhs1 (def_stmt);
1235 ptroff = gimple_assign_rhs2 (def_stmt);
1236 if (TREE_CODE (ptr) != SSA_NAME
1237 || TREE_CODE (ptroff) != INTEGER_CST)
1238 return false;
1239
1240 off += wi::to_offset (ptroff);
1241 op->op0 = ptr;
1242 }
1243
1244 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1245 if (tree_fits_shwi_p (mem_op->op0))
1246 mem_op->off = tree_to_shwi (mem_op->op0);
1247 else
1248 mem_op->off = -1;
1249 if (TREE_CODE (op->op0) == SSA_NAME)
1250 op->op0 = SSA_VAL (op->op0);
1251 if (TREE_CODE (op->op0) != SSA_NAME)
1252 op->opcode = TREE_CODE (op->op0);
1253
1254 /* And recurse. */
1255 if (TREE_CODE (op->op0) == SSA_NAME)
1256 vn_reference_maybe_forwprop_address (ops, i_p);
1257 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1258 vn_reference_fold_indirect (ops, i_p);
1259 return true;
1260 }
1261
1262 /* Optimize the reference REF to a constant if possible or return
1263 NULL_TREE if not. */
1264
1265 tree
1266 fully_constant_vn_reference_p (vn_reference_t ref)
1267 {
1268 vec<vn_reference_op_s> operands = ref->operands;
1269 vn_reference_op_t op;
1270
1271 /* Try to simplify the translated expression if it is
1272 a call to a builtin function with at most two arguments. */
1273 op = &operands[0];
1274 if (op->opcode == CALL_EXPR
1275 && TREE_CODE (op->op0) == ADDR_EXPR
1276 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1277 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1278 && operands.length () >= 2
1279 && operands.length () <= 3)
1280 {
1281 vn_reference_op_t arg0, arg1 = NULL;
1282 bool anyconst = false;
1283 arg0 = &operands[1];
1284 if (operands.length () > 2)
1285 arg1 = &operands[2];
1286 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1287 || (arg0->opcode == ADDR_EXPR
1288 && is_gimple_min_invariant (arg0->op0)))
1289 anyconst = true;
1290 if (arg1
1291 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1292 || (arg1->opcode == ADDR_EXPR
1293 && is_gimple_min_invariant (arg1->op0))))
1294 anyconst = true;
1295 if (anyconst)
1296 {
1297 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1298 arg1 ? 2 : 1,
1299 arg0->op0,
1300 arg1 ? arg1->op0 : NULL);
1301 if (folded
1302 && TREE_CODE (folded) == NOP_EXPR)
1303 folded = TREE_OPERAND (folded, 0);
1304 if (folded
1305 && is_gimple_min_invariant (folded))
1306 return folded;
1307 }
1308 }
1309
1310 /* Simplify reads from constants or constant initializers. */
1311 else if (BITS_PER_UNIT == 8
1312 && is_gimple_reg_type (ref->type)
1313 && (!INTEGRAL_TYPE_P (ref->type)
1314 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1315 {
1316 HOST_WIDE_INT off = 0;
1317 HOST_WIDE_INT size;
1318 if (INTEGRAL_TYPE_P (ref->type))
1319 size = TYPE_PRECISION (ref->type);
1320 else
1321 size = tree_to_shwi (TYPE_SIZE (ref->type));
1322 if (size % BITS_PER_UNIT != 0
1323 || size > MAX_BITSIZE_MODE_ANY_MODE)
1324 return NULL_TREE;
1325 size /= BITS_PER_UNIT;
1326 unsigned i;
1327 for (i = 0; i < operands.length (); ++i)
1328 {
1329 if (operands[i].off == -1)
1330 return NULL_TREE;
1331 off += operands[i].off;
1332 if (operands[i].opcode == MEM_REF)
1333 {
1334 ++i;
1335 break;
1336 }
1337 }
1338 vn_reference_op_t base = &operands[--i];
1339 tree ctor = error_mark_node;
1340 tree decl = NULL_TREE;
1341 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1342 ctor = base->op0;
1343 else if (base->opcode == MEM_REF
1344 && base[1].opcode == ADDR_EXPR
1345 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1346 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1347 {
1348 decl = TREE_OPERAND (base[1].op0, 0);
1349 ctor = ctor_for_folding (decl);
1350 }
1351 if (ctor == NULL_TREE)
1352 return build_zero_cst (ref->type);
1353 else if (ctor != error_mark_node)
1354 {
1355 if (decl)
1356 {
1357 tree res = fold_ctor_reference (ref->type, ctor,
1358 off * BITS_PER_UNIT,
1359 size * BITS_PER_UNIT, decl);
1360 if (res)
1361 {
1362 STRIP_USELESS_TYPE_CONVERSION (res);
1363 if (is_gimple_min_invariant (res))
1364 return res;
1365 }
1366 }
1367 else
1368 {
1369 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1370 if (native_encode_expr (ctor, buf, size, off) > 0)
1371 return native_interpret_expr (ref->type, buf, size);
1372 }
1373 }
1374 }
1375
1376 return NULL_TREE;
1377 }
1378
1379 /* Return true if OPS contain a storage order barrier. */
1380
1381 static bool
1382 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1383 {
1384 vn_reference_op_t op;
1385 unsigned i;
1386
1387 FOR_EACH_VEC_ELT (ops, i, op)
1388 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1389 return true;
1390
1391 return false;
1392 }
1393
1394 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1395 structures into their value numbers. This is done in-place, and
1396 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1397 whether any operands were valueized. */
1398
1399 static vec<vn_reference_op_s>
1400 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1401 {
1402 vn_reference_op_t vro;
1403 unsigned int i;
1404
1405 *valueized_anything = false;
1406
1407 FOR_EACH_VEC_ELT (orig, i, vro)
1408 {
1409 if (vro->opcode == SSA_NAME
1410 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1411 {
1412 tree tem = SSA_VAL (vro->op0);
1413 if (tem != vro->op0)
1414 {
1415 *valueized_anything = true;
1416 vro->op0 = tem;
1417 }
1418 /* If it transforms from an SSA_NAME to a constant, update
1419 the opcode. */
1420 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1421 vro->opcode = TREE_CODE (vro->op0);
1422 }
1423 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1424 {
1425 tree tem = SSA_VAL (vro->op1);
1426 if (tem != vro->op1)
1427 {
1428 *valueized_anything = true;
1429 vro->op1 = tem;
1430 }
1431 }
1432 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1433 {
1434 tree tem = SSA_VAL (vro->op2);
1435 if (tem != vro->op2)
1436 {
1437 *valueized_anything = true;
1438 vro->op2 = tem;
1439 }
1440 }
1441 /* If it transforms from an SSA_NAME to an address, fold with
1442 a preceding indirect reference. */
1443 if (i > 0
1444 && vro->op0
1445 && TREE_CODE (vro->op0) == ADDR_EXPR
1446 && orig[i - 1].opcode == MEM_REF)
1447 {
1448 if (vn_reference_fold_indirect (&orig, &i))
1449 *valueized_anything = true;
1450 }
1451 else if (i > 0
1452 && vro->opcode == SSA_NAME
1453 && orig[i - 1].opcode == MEM_REF)
1454 {
1455 if (vn_reference_maybe_forwprop_address (&orig, &i))
1456 *valueized_anything = true;
1457 }
1458 /* If it transforms a non-constant ARRAY_REF into a constant
1459 one, adjust the constant offset. */
1460 else if (vro->opcode == ARRAY_REF
1461 && vro->off == -1
1462 && TREE_CODE (vro->op0) == INTEGER_CST
1463 && TREE_CODE (vro->op1) == INTEGER_CST
1464 && TREE_CODE (vro->op2) == INTEGER_CST)
1465 {
1466 offset_int off = ((wi::to_offset (vro->op0)
1467 - wi::to_offset (vro->op1))
1468 * wi::to_offset (vro->op2));
1469 if (wi::fits_shwi_p (off))
1470 vro->off = off.to_shwi ();
1471 }
1472 }
1473
1474 return orig;
1475 }
1476
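/* Valueize the operands in ORIG, not caring whether anything changed. */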
1477 static vec<vn_reference_op_s>
1478 valueize_refs (vec<vn_reference_op_s> orig)
1479 {
1480 bool tem;
1481 return valueize_refs_1 (orig, &tem);
1482 }
1483
1484 static vec<vn_reference_op_s> shared_lookup_references;
1485
1486 /* Create a vector of vn_reference_op_s structures from REF, a
1487 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1488 this function. *VALUEIZED_ANYTHING will specify whether any
1489 operands were valueized. */
1490
1491 static vec<vn_reference_op_s>
1492 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1493 {
1494 if (!ref)
1495 return vNULL;
1496 shared_lookup_references.truncate (0);
1497 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1498 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1499 valueized_anything);
1500 return shared_lookup_references;
1501 }
1502
1503 /* Create a vector of vn_reference_op_s structures from CALL, a
1504 call statement. The vector is shared among all callers of
1505 this function. */
1506
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_call (gcall *call)
1509 {
1510 if (!call)
1511 return vNULL;
1512 shared_lookup_references.truncate (0);
1513 copy_reference_ops_from_call (call, &shared_lookup_references);
1514 shared_lookup_references = valueize_refs (shared_lookup_references);
1515 return shared_lookup_references;
1516 }
1517
1518 /* Lookup a SCCVN reference operation VR in the current hash table.
1519 Returns the resulting value number if it exists in the hash table,
1520 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1521 vn_reference_t stored in the hashtable if something is found. */
1522
1523 static tree
1524 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1525 {
1526 vn_reference_s **slot;
1527 hashval_t hash;
1528
1529 hash = vr->hashcode;
1530 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1531 if (!slot && current_info == optimistic_info)
1532 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1533 if (slot)
1534 {
1535 if (vnresult)
1536 *vnresult = (vn_reference_t)*slot;
1537 return ((vn_reference_t)*slot)->result;
1538 }
1539
1540 return NULL_TREE;
1541 }
1542
1543 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1544 with the current VUSE and performs the expression lookup. */
1545
1546 static void *
1547 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1548 unsigned int cnt, void *vr_)
1549 {
1550 vn_reference_t vr = (vn_reference_t)vr_;
1551 vn_reference_s **slot;
1552 hashval_t hash;
1553
1554 /* This bounds the stmt walks we perform on reference lookups
1555 to O(1) instead of O(N) where N is the number of dominating
1556 stores. */
1557 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1558 return (void *)-1;
1559
1560 if (last_vuse_ptr)
1561 *last_vuse_ptr = vuse;
1562
1563 /* Fixup vuse and hash. */
1564 if (vr->vuse)
1565 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1566 vr->vuse = vuse_ssa_val (vuse);
1567 if (vr->vuse)
1568 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1569
1570 hash = vr->hashcode;
1571 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1572 if (!slot && current_info == optimistic_info)
1573 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1574 if (slot)
1575 return *slot;
1576
1577 return NULL;
1578 }
1579
1580 /* Lookup an existing or insert a new vn_reference entry into the
1581 value table for the VUSE, SET, TYPE, OPERANDS reference that
1582 has the value VALUE, which is either a constant or an SSA name. */
1583
1584 static vn_reference_t
1585 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1586 alias_set_type set,
1587 tree type,
1588 vec<vn_reference_op_s,
1589 va_heap> operands,
1590 tree value)
1591 {
1592 vn_reference_s vr1;
1593 vn_reference_t result;
1594 unsigned value_id;
1595 vr1.vuse = vuse;
1596 vr1.operands = operands;
1597 vr1.type = type;
1598 vr1.set = set;
1599 vr1.hashcode = vn_reference_compute_hash (&vr1);
1600 if (vn_reference_lookup_1 (&vr1, &result))
1601 return result;
1602 if (TREE_CODE (value) == SSA_NAME)
1603 value_id = VN_INFO (value)->value_id;
1604 else
1605 value_id = get_or_alloc_constant_value_id (value);
1606 return vn_reference_insert_pieces (vuse, set, type,
1607 operands.copy (), value, value_id);
1608 }
1609
1610 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1611 from the statement defining VUSE and, if not successful, tries to
1612 translate *REF and VR_ through an aggregate copy at the definition
1613 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1614 of *REF and *VR. If only disambiguation was performed then
1615 *DISAMBIGUATE_ONLY is set to true. */
1616
1617 static void *
1618 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1619 bool *disambiguate_only)
1620 {
1621 vn_reference_t vr = (vn_reference_t)vr_;
1622 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1623 tree base = ao_ref_base (ref);
1624 HOST_WIDE_INT offset, maxsize;
1625 static vec<vn_reference_op_s>
1626 lhs_ops = vNULL;
1627 ao_ref lhs_ref;
1628 bool lhs_ref_ok = false;
1629
1630 /* If the reference is based on a parameter that was determined to
1631 point to readonly memory, it doesn't change. */
1632 if (TREE_CODE (base) == MEM_REF
1633 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1634 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1635 && bitmap_bit_p (const_parms,
1636 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1637 {
1638 *disambiguate_only = true;
1639 return NULL;
1640 }
1641
1642 /* First try to disambiguate after value-replacing in the definition's LHS. */
1643 if (is_gimple_assign (def_stmt))
1644 {
1645 tree lhs = gimple_assign_lhs (def_stmt);
1646 bool valueized_anything = false;
1647 /* Avoid re-allocation overhead. */
1648 lhs_ops.truncate (0);
1649 copy_reference_ops_from_ref (lhs, &lhs_ops);
1650 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1651 if (valueized_anything)
1652 {
1653 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1654 get_alias_set (lhs),
1655 TREE_TYPE (lhs), lhs_ops);
1656 if (lhs_ref_ok
1657 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1658 {
1659 *disambiguate_only = true;
1660 return NULL;
1661 }
1662 }
1663 else
1664 {
1665 ao_ref_init (&lhs_ref, lhs);
1666 lhs_ref_ok = true;
1667 }
1668 }
1669 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1670 && gimple_call_num_args (def_stmt) <= 4)
1671 {
1672 /* For builtin calls, valueize their arguments and call the
1673 alias oracle again. Valueization may improve points-to
1674 info of pointers and constify size and position arguments.
1675 Originally this was motivated by PR61034 which has
1676 conditional calls to free falsely clobbering ref because
1677 of imprecise points-to info of the argument. */
1678 tree oldargs[4];
1679 bool valueized_anything = false;
1680 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1681 {
1682 oldargs[i] = gimple_call_arg (def_stmt, i);
1683 if (TREE_CODE (oldargs[i]) == SSA_NAME
1684 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1685 {
1686 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1687 valueized_anything = true;
1688 }
1689 }
1690 if (valueized_anything)
1691 {
1692 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1693 ref);
1694 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1695 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1696 if (!res)
1697 {
1698 *disambiguate_only = true;
1699 return NULL;
1700 }
1701 }
1702 }
1703
1704 if (*disambiguate_only)
1705 return (void *)-1;
1706
1707 offset = ref->offset;
1708 maxsize = ref->max_size;
1709
1710 /* If we cannot constrain the size of the reference we cannot
1711 test if anything kills it. */
1712 if (maxsize == -1)
1713 return (void *)-1;
1714
1715 /* We can't deduce anything useful from clobbers. */
1716 if (gimple_clobber_p (def_stmt))
1717 return (void *)-1;
1718
1719 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1720 from that definition.
1721 1) Memset. */
1722 if (is_gimple_reg_type (vr->type)
1723 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1724 && integer_zerop (gimple_call_arg (def_stmt, 1))
1725 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1726 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1727 {
1728 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1729 tree base2;
1730 HOST_WIDE_INT offset2, size2, maxsize2;
1731 bool reverse;
1732 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1733 &reverse);
1734 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1735 if ((unsigned HOST_WIDE_INT)size2 / 8
1736 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1737 && maxsize2 != -1
1738 && operand_equal_p (base, base2, 0)
1739 && offset2 <= offset
1740 && offset2 + size2 >= offset + maxsize)
1741 {
1742 tree val = build_zero_cst (vr->type);
1743 return vn_reference_lookup_or_insert_for_pieces
1744 (vuse, vr->set, vr->type, vr->operands, val);
1745 }
1746 }
1747
1748 /* 2) Assignment from an empty CONSTRUCTOR. */
1749 else if (is_gimple_reg_type (vr->type)
1750 && gimple_assign_single_p (def_stmt)
1751 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1752 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1753 {
1754 tree base2;
1755 HOST_WIDE_INT offset2, size2, maxsize2;
1756 bool reverse;
1757 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1758 &offset2, &size2, &maxsize2, &reverse);
1759 if (maxsize2 != -1
1760 && operand_equal_p (base, base2, 0)
1761 && offset2 <= offset
1762 && offset2 + size2 >= offset + maxsize)
1763 {
1764 tree val = build_zero_cst (vr->type);
1765 return vn_reference_lookup_or_insert_for_pieces
1766 (vuse, vr->set, vr->type, vr->operands, val);
1767 }
1768 }
1769
1770 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1771 routines to extract the assigned bits. */
1772 else if (vn_walk_kind == VN_WALKREWRITE
1773 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1774 && ref->size == maxsize
1775 && maxsize % BITS_PER_UNIT == 0
1776 && offset % BITS_PER_UNIT == 0
1777 && is_gimple_reg_type (vr->type)
1778 && !contains_storage_order_barrier_p (vr->operands)
1779 && gimple_assign_single_p (def_stmt)
1780 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1781 {
1782 tree base2;
1783 HOST_WIDE_INT offset2, size2, maxsize2;
1784 bool reverse;
1785 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1786 &offset2, &size2, &maxsize2, &reverse);
1787 if (!reverse
1788 && maxsize2 != -1
1789 && maxsize2 == size2
1790 && size2 % BITS_PER_UNIT == 0
1791 && offset2 % BITS_PER_UNIT == 0
1792 && operand_equal_p (base, base2, 0)
1793 && offset2 <= offset
1794 && offset2 + size2 >= offset + maxsize)
1795 {
1796 /* We support up to 512-bit values (for V8DFmode). */
1797 unsigned char buffer[64];
1798 int len;
1799
1800 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1801 buffer, sizeof (buffer));
1802 if (len > 0)
1803 {
1804 tree val = native_interpret_expr (vr->type,
1805 buffer
1806 + ((offset - offset2)
1807 / BITS_PER_UNIT),
1808 ref->size / BITS_PER_UNIT);
1809 if (val)
1810 return vn_reference_lookup_or_insert_for_pieces
1811 (vuse, vr->set, vr->type, vr->operands, val);
1812 }
1813 }
1814 }
1815
1816 /* 4) Assignment from an SSA name which definition we may be able
1817 to access pieces from. */
1818 else if (ref->size == maxsize
1819 && is_gimple_reg_type (vr->type)
1820 && !contains_storage_order_barrier_p (vr->operands)
1821 && gimple_assign_single_p (def_stmt)
1822 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1823 {
1824 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1825 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1826 if (is_gimple_assign (def_stmt2)
1827 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1828 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1829 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1830 {
1831 tree base2;
1832 HOST_WIDE_INT offset2, size2, maxsize2, off;
1833 bool reverse;
1834 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1835 &offset2, &size2, &maxsize2,
1836 &reverse);
1837 off = offset - offset2;
1838 if (!reverse
1839 && maxsize2 != -1
1840 && maxsize2 == size2
1841 && operand_equal_p (base, base2, 0)
1842 && offset2 <= offset
1843 && offset2 + size2 >= offset + maxsize)
1844 {
1845 tree val = NULL_TREE;
1846 HOST_WIDE_INT elsz
1847 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1848 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1849 {
1850 if (off == 0)
1851 val = gimple_assign_rhs1 (def_stmt2);
1852 else if (off == elsz)
1853 val = gimple_assign_rhs2 (def_stmt2);
1854 }
1855 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1856 && off % elsz == 0)
1857 {
1858 tree ctor = gimple_assign_rhs1 (def_stmt2);
1859 unsigned i = off / elsz;
1860 if (i < CONSTRUCTOR_NELTS (ctor))
1861 {
1862 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1863 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1864 {
1865 if (TREE_CODE (TREE_TYPE (elt->value))
1866 != VECTOR_TYPE)
1867 val = elt->value;
1868 }
1869 }
1870 }
1871 if (val)
1872 return vn_reference_lookup_or_insert_for_pieces
1873 (vuse, vr->set, vr->type, vr->operands, val);
1874 }
1875 }
1876 }
1877
1878 /* 5) For aggregate copies translate the reference through them if
1879 the copy kills ref. */
1880 else if (vn_walk_kind == VN_WALKREWRITE
1881 && gimple_assign_single_p (def_stmt)
1882 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1883 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1884 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1885 {
1886 tree base2;
1887 HOST_WIDE_INT maxsize2;
1888 int i, j, k;
1889 auto_vec<vn_reference_op_s> rhs;
1890 vn_reference_op_t vro;
1891 ao_ref r;
1892
1893 if (!lhs_ref_ok)
1894 return (void *)-1;
1895
1896 /* See if the assignment kills REF. */
1897 base2 = ao_ref_base (&lhs_ref);
1898 maxsize2 = lhs_ref.max_size;
1899 if (maxsize2 == -1
1900 || (base != base2
1901 && (TREE_CODE (base) != MEM_REF
1902 || TREE_CODE (base2) != MEM_REF
1903 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1904 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1905 TREE_OPERAND (base2, 1))))
1906 || !stmt_kills_ref_p (def_stmt, ref))
1907 return (void *)-1;
1908
1909 /* Find the common base of ref and the lhs. lhs_ops already
1910 contains valueized operands for the lhs. */
1911 i = vr->operands.length () - 1;
1912 j = lhs_ops.length () - 1;
1913 while (j >= 0 && i >= 0
1914 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1915 {
1916 i--;
1917 j--;
1918 }
1919
1920 /* ??? The innermost op should always be a MEM_REF and we already
1921 checked that the assignment to the lhs kills vr. Thus for
1922 aggregate copies using char[] types the vn_reference_op_eq
1923 may fail when comparing types for compatibility. But we really
1924 don't care here - further lookups with the rewritten operands
1925 will simply fail if we messed up types too badly. */
1926 HOST_WIDE_INT extra_off = 0;
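/* If the innermost MEM_REFs differ only by a constant offset, record
   that difference so it can be applied to the rhs MEM_REF below.  */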
1927 if (j == 0 && i >= 0
1928 && lhs_ops[0].opcode == MEM_REF
1929 && lhs_ops[0].off != -1)
1930 {
1931 if (lhs_ops[0].off == vr->operands[i].off)
1932 i--, j--;
1933 else if (vr->operands[i].opcode == MEM_REF
1934 && vr->operands[i].off != -1)
1935 {
1936 extra_off = vr->operands[i].off - lhs_ops[0].off;
1937 i--, j--;
1938 }
1939 }
1940
1941 /* i now points to the first additional op.
1942 ??? LHS may not be completely contained in VR, one or more
1943 VIEW_CONVERT_EXPRs could be in its way. We could at least
1944 try handling outermost VIEW_CONVERT_EXPRs. */
1945 if (j != -1)
1946 return (void *)-1;
1947
1948 /* Punt if the additional ops contain a storage order barrier. */
1949 for (k = i; k >= 0; k--)
1950 {
1951 vro = &vr->operands[k];
1952 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1953 return (void *)-1;
1954 }
1955
1956 /* Now re-write REF to be based on the rhs of the assignment. */
1957 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1958
1959 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1960 if (extra_off != 0)
1961 {
1962 if (rhs.length () < 2
1963 || rhs[0].opcode != MEM_REF
1964 || rhs[0].off == -1)
1965 return (void *)-1;
1966 rhs[0].off += extra_off;
1967 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1968 build_int_cst (TREE_TYPE (rhs[0].op0),
1969 extra_off));
1970 }
1971
1972 /* We need to prepend vr->operands[0..i] to rhs. */
1973 vec<vn_reference_op_s> old = vr->operands;
1974 if (i + 1 + rhs.length () > vr->operands.length ())
1975 {
1976 vr->operands.safe_grow (i + 1 + rhs.length ());
1977 if (old == shared_lookup_references)
1978 shared_lookup_references = vr->operands;
1979 }
1980 else
1981 vr->operands.truncate (i + 1 + rhs.length ());
1982 FOR_EACH_VEC_ELT (rhs, j, vro)
1983 vr->operands[i + 1 + j] = *vro;
1984 vr->operands = valueize_refs (vr->operands);
1985 if (old == shared_lookup_references)
1986 shared_lookup_references = vr->operands;
1987 vr->hashcode = vn_reference_compute_hash (vr);
1988
1989 /* Try folding the new reference to a constant. */
1990 tree val = fully_constant_vn_reference_p (vr);
1991 if (val)
1992 return vn_reference_lookup_or_insert_for_pieces
1993 (vuse, vr->set, vr->type, vr->operands, val);
1994
1995 /* Adjust *ref from the new operands. */
1996 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1997 return (void *)-1;
1998 /* This can happen with bitfields. */
1999 if (ref->size != r.size)
2000 return (void *)-1;
2001 *ref = r;
2002
2003 /* Do not update last seen VUSE after translating. */
2004 last_vuse_ptr = NULL;
2005
2006 /* Keep looking for the adjusted *REF / VR pair. */
2007 return NULL;
2008 }
2009
2010 /* 6) For memcpy copies translate the reference through them if
2011 the copy kills ref. */
2012 else if (vn_walk_kind == VN_WALKREWRITE
2013 && is_gimple_reg_type (vr->type)
2014 /* ??? Handle BCOPY as well. */
2015 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2016 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2017 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2018 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2019 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2020 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2021 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2022 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2023 {
2024 tree lhs, rhs;
2025 ao_ref r;
2026 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2027 vn_reference_op_s op;
2028 HOST_WIDE_INT at;
2029
2030 /* Only handle non-variable, addressable refs. */
2031 if (ref->size != maxsize
2032 || offset % BITS_PER_UNIT != 0
2033 || ref->size % BITS_PER_UNIT != 0)
2034 return (void *)-1;
2035
2036 /* Extract a pointer base and an offset for the destination. */
2037 lhs = gimple_call_arg (def_stmt, 0);
2038 lhs_offset = 0;
2039 if (TREE_CODE (lhs) == SSA_NAME)
2040 {
2041 lhs = SSA_VAL (lhs);
2042 if (TREE_CODE (lhs) == SSA_NAME)
2043 {
2044 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2045 if (gimple_assign_single_p (def_stmt)
2046 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2047 lhs = gimple_assign_rhs1 (def_stmt);
2048 }
2049 }
2050 if (TREE_CODE (lhs) == ADDR_EXPR)
2051 {
2052 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2053 &lhs_offset);
2054 if (!tem)
2055 return (void *)-1;
2056 if (TREE_CODE (tem) == MEM_REF
2057 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2058 {
2059 lhs = TREE_OPERAND (tem, 0);
2060 if (TREE_CODE (lhs) == SSA_NAME)
2061 lhs = SSA_VAL (lhs);
2062 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2063 }
2064 else if (DECL_P (tem))
2065 lhs = build_fold_addr_expr (tem);
2066 else
2067 return (void *)-1;
2068 }
2069 if (TREE_CODE (lhs) != SSA_NAME
2070 && TREE_CODE (lhs) != ADDR_EXPR)
2071 return (void *)-1;
2072
2073 /* Extract a pointer base and an offset for the source. */
2074 rhs = gimple_call_arg (def_stmt, 1);
2075 rhs_offset = 0;
2076 if (TREE_CODE (rhs) == SSA_NAME)
2077 rhs = SSA_VAL (rhs);
2078 if (TREE_CODE (rhs) == ADDR_EXPR)
2079 {
2080 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2081 &rhs_offset);
2082 if (!tem)
2083 return (void *)-1;
2084 if (TREE_CODE (tem) == MEM_REF
2085 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2086 {
2087 rhs = TREE_OPERAND (tem, 0);
2088 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2089 }
2090 else if (DECL_P (tem))
2091 rhs = build_fold_addr_expr (tem);
2092 else
2093 return (void *)-1;
2094 }
2095 if (TREE_CODE (rhs) != SSA_NAME
2096 && TREE_CODE (rhs) != ADDR_EXPR)
2097 return (void *)-1;
2098
2099 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2100
2101 /* The bases of the destination and the reference have to agree. */
2102 if ((TREE_CODE (base) != MEM_REF
2103 && !DECL_P (base))
2104 || (TREE_CODE (base) == MEM_REF
2105 && (TREE_OPERAND (base, 0) != lhs
2106 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2107 || (DECL_P (base)
2108 && (TREE_CODE (lhs) != ADDR_EXPR
2109 || TREE_OPERAND (lhs, 0) != base)))
2110 return (void *)-1;
2111
2112 at = offset / BITS_PER_UNIT;
2113 if (TREE_CODE (base) == MEM_REF)
2114 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2115 /* If the access is completely outside of the memcpy destination
2116 area there is no aliasing. */
2117 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2118 || lhs_offset + copy_size <= at)
2119 return NULL;
2120 /* And the access has to be contained within the memcpy destination. */
2121 if (lhs_offset > at
2122 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2123 return (void *)-1;
2124
2125 /* Make room for 2 operands in the new reference. */
2126 if (vr->operands.length () < 2)
2127 {
2128 vec<vn_reference_op_s> old = vr->operands;
2129 vr->operands.safe_grow_cleared (2);
2130 if (old == shared_lookup_references
2131 && vr->operands != old)
2132 shared_lookup_references = vr->operands;
2133 }
2134 else
2135 vr->operands.truncate (2);
2136
2137 /* The looked-through reference is a simple MEM_REF. */
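/* Byte AT of the destination corresponds to byte
   AT - LHS_OFFSET + RHS_OFFSET of the source.  */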
2138 memset (&op, 0, sizeof (op));
2139 op.type = vr->type;
2140 op.opcode = MEM_REF;
2141 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2142 op.off = at - lhs_offset + rhs_offset;
2143 vr->operands[0] = op;
2144 op.type = TREE_TYPE (rhs);
2145 op.opcode = TREE_CODE (rhs);
2146 op.op0 = rhs;
2147 op.off = -1;
2148 vr->operands[1] = op;
2149 vr->hashcode = vn_reference_compute_hash (vr);
2150
2151 /* Adjust *ref from the new operands. */
2152 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2153 return (void *)-1;
2154 /* This can happen with bitfields. */
2155 if (ref->size != r.size)
2156 return (void *)-1;
2157 *ref = r;
2158
2159 /* Do not update last seen VUSE after translating. */
2160 last_vuse_ptr = NULL;
2161
2162 /* Keep looking for the adjusted *REF / VR pair. */
2163 return NULL;
2164 }
2165
2166 /* Bail out and stop walking. */
2167 return (void *)-1;
2168 }
2169
2170 /* Lookup a reference operation by its parts, in the current hash table.
2171 Returns the resulting value number if it exists in the hash table,
2172 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2173 vn_reference_t stored in the hashtable if something is found. */
2174
2175 tree
2176 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2177 vec<vn_reference_op_s> operands,
2178 vn_reference_t *vnresult, vn_lookup_kind kind)
2179 {
2180 struct vn_reference_s vr1;
2181 vn_reference_t tmp;
2182 tree cst;
2183
2184 if (!vnresult)
2185 vnresult = &tmp;
2186 *vnresult = NULL;
2187
2188 vr1.vuse = vuse_ssa_val (vuse);
2189 shared_lookup_references.truncate (0);
2190 shared_lookup_references.safe_grow (operands.length ());
2191 memcpy (shared_lookup_references.address (),
2192 operands.address (),
2193 sizeof (vn_reference_op_s)
2194 * operands.length ());
2195 vr1.operands = operands = shared_lookup_references
2196 = valueize_refs (shared_lookup_references);
2197 vr1.type = type;
2198 vr1.set = set;
2199 vr1.hashcode = vn_reference_compute_hash (&vr1);
2200 if ((cst = fully_constant_vn_reference_p (&vr1)))
2201 return cst;
2202
2203 vn_reference_lookup_1 (&vr1, vnresult);
2204 if (!*vnresult
2205 && kind != VN_NOWALK
2206 && vr1.vuse)
2207 {
2208 ao_ref r;
2209 vn_walk_kind = kind;
2210 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2211 *vnresult =
2212 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2213 vn_reference_lookup_2,
2214 vn_reference_lookup_3,
2215 vuse_ssa_val, &vr1);
2216 gcc_checking_assert (vr1.operands == shared_lookup_references);
2217 }
2218
2219 if (*vnresult)
2220 return (*vnresult)->result;
2221
2222 return NULL_TREE;
2223 }
2224
2225 /* Lookup OP in the current hash table, and return the resulting value
2226 number if it exists in the hash table. Return NULL_TREE if it does
2227 not exist in the hash table or if the result field of the structure
2228 was NULL. VNRESULT will be filled in with the vn_reference_t
2229 stored in the hashtable if one exists. */
2230
2231 tree
2232 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2233 vn_reference_t *vnresult)
2234 {
2235 vec<vn_reference_op_s> operands;
2236 struct vn_reference_s vr1;
2237 tree cst;
2238 bool valuezied_anything;
2239
2240 if (vnresult)
2241 *vnresult = NULL;
2242
2243 vr1.vuse = vuse_ssa_val (vuse);
2244 vr1.operands = operands
2245 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2246 vr1.type = TREE_TYPE (op);
2247 vr1.set = get_alias_set (op);
2248 vr1.hashcode = vn_reference_compute_hash (&vr1);
2249 if ((cst = fully_constant_vn_reference_p (&vr1)))
2250 return cst;
2251
2252 if (kind != VN_NOWALK
2253 && vr1.vuse)
2254 {
2255 vn_reference_t wvnresult;
2256 ao_ref r;
2257 /* Make sure to use a valueized reference if we valueized anything.
2258 Otherwise preserve the full reference for advanced TBAA. */
2259 if (!valuezied_anything
2260 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2261 vr1.operands))
2262 ao_ref_init (&r, op);
2263 vn_walk_kind = kind;
2264 wvnresult =
2265 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2266 vn_reference_lookup_2,
2267 vn_reference_lookup_3,
2268 vuse_ssa_val, &vr1);
2269 gcc_checking_assert (vr1.operands == shared_lookup_references);
2270 if (wvnresult)
2271 {
2272 if (vnresult)
2273 *vnresult = wvnresult;
2274 return wvnresult->result;
2275 }
2276
2277 return NULL_TREE;
2278 }
2279
2280 return vn_reference_lookup_1 (&vr1, vnresult);
2281 }
2282
2283 /* Lookup CALL in the current hash table and return the entry in
2284 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2285
2286 void
2287 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2288 vn_reference_t vr)
2289 {
2290 if (vnresult)
2291 *vnresult = NULL;
2292
2293 tree vuse = gimple_vuse (call);
2294
2295 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2296 vr->operands = valueize_shared_reference_ops_from_call (call);
2297 vr->type = gimple_expr_type (call);
2298 vr->set = 0;
2299 vr->hashcode = vn_reference_compute_hash (vr);
2300 vn_reference_lookup_1 (vr, vnresult);
2301 }
2302
2303 /* Insert OP into the current hash table with a value number of
2304 RESULT, and return the resulting reference structure we created. */
2305
2306 static vn_reference_t
2307 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2308 {
2309 vn_reference_s **slot;
2310 vn_reference_t vr1;
2311 bool tem;
2312
2313 vr1 = current_info->references_pool->allocate ();
2314 if (TREE_CODE (result) == SSA_NAME)
2315 vr1->value_id = VN_INFO (result)->value_id;
2316 else
2317 vr1->value_id = get_or_alloc_constant_value_id (result);
2318 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2319 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2320 vr1->type = TREE_TYPE (op);
2321 vr1->set = get_alias_set (op);
2322 vr1->hashcode = vn_reference_compute_hash (vr1);
2323 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2324 vr1->result_vdef = vdef;
2325
2326 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2327 INSERT);
2328
2329 /* Because we look up stores using vuses, and value number failures
2330 using the vdefs (see visit_reference_op_store for how and why),
2331 it's possible that on failure we may try to insert an already
2332 inserted store. This is not wrong; there is no ssa name for a
2333 store that we could use as a differentiator anyway. Thus, unlike
2334 the other lookup functions, you cannot gcc_assert (!*slot)
2335 here. */
2336
2337 /* But free the old slot in case of a collision. */
2338 if (*slot)
2339 free_reference (*slot);
2340
2341 *slot = vr1;
2342 return vr1;
2343 }
2344
2345 /* Insert a reference by its pieces into the current hash table with
2346 a value number of RESULT. Return the resulting reference
2347 structure we created. */
2348
2349 vn_reference_t
2350 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2351 vec<vn_reference_op_s> operands,
2352 tree result, unsigned int value_id)
2353
2354 {
2355 vn_reference_s **slot;
2356 vn_reference_t vr1;
2357
2358 vr1 = current_info->references_pool->allocate ();
2359 vr1->value_id = value_id;
2360 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2361 vr1->operands = valueize_refs (operands);
2362 vr1->type = type;
2363 vr1->set = set;
2364 vr1->hashcode = vn_reference_compute_hash (vr1);
2365 if (result && TREE_CODE (result) == SSA_NAME)
2366 result = SSA_VAL (result);
2367 vr1->result = result;
2368
2369 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2370 INSERT);
2371
2372 /* At this point we should have all the things inserted that we have
2373 seen before, and we should never try inserting something that
2374 already exists. */
2375 gcc_assert (!*slot);
2376 if (*slot)
2377 free_reference (*slot);
2378
2379 *slot = vr1;
2380 return vr1;
2381 }
2382
2383 /* Compute and return the hash value for nary operation VNO1. */
2384
2385 static hashval_t
2386 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2387 {
2388 inchash::hash hstate;
2389 unsigned i;
2390
2391 for (i = 0; i < vno1->length; ++i)
2392 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2393 vno1->op[i] = SSA_VAL (vno1->op[i]);
2394
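/* Canonicalize the operand order of commutative operations and of
   comparisons (swapping the comparison code accordingly) so that
   equivalent expressions hash and compare equal.  */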
2395 if (((vno1->length == 2
2396 && commutative_tree_code (vno1->opcode))
2397 || (vno1->length == 3
2398 && commutative_ternary_tree_code (vno1->opcode)))
2399 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2400 std::swap (vno1->op[0], vno1->op[1]);
2401 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2402 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2403 {
2404 std::swap (vno1->op[0], vno1->op[1]);
2405 vno1->opcode = swap_tree_comparison (vno1->opcode);
2406 }
2407
2408 hstate.add_int (vno1->opcode);
2409 for (i = 0; i < vno1->length; ++i)
2410 inchash::add_expr (vno1->op[i], hstate);
2411
2412 return hstate.end ();
2413 }
2414
2415 /* Compare nary operations VNO1 and VNO2 and return true if they are
2416 equivalent. */
2417
2418 bool
2419 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2420 {
2421 unsigned i;
2422
2423 if (vno1->hashcode != vno2->hashcode)
2424 return false;
2425
2426 if (vno1->length != vno2->length)
2427 return false;
2428
2429 if (vno1->opcode != vno2->opcode
2430 || !types_compatible_p (vno1->type, vno2->type))
2431 return false;
2432
2433 for (i = 0; i < vno1->length; ++i)
2434 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2435 return false;
2436
2437 return true;
2438 }
2439
2440 /* Initialize VNO from the pieces provided. */
2441
2442 static void
2443 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2444 enum tree_code code, tree type, tree *ops)
2445 {
2446 vno->opcode = code;
2447 vno->length = length;
2448 vno->type = type;
2449 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2450 }
2451
2452 /* Initialize VNO from OP. */
2453
2454 static void
2455 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2456 {
2457 unsigned i;
2458
2459 vno->opcode = TREE_CODE (op);
2460 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2461 vno->type = TREE_TYPE (op);
2462 for (i = 0; i < vno->length; ++i)
2463 vno->op[i] = TREE_OPERAND (op, i);
2464 }
2465
2466 /* Return the number of operands for a vn_nary ops structure from STMT. */
2467
2468 static unsigned int
2469 vn_nary_length_from_stmt (gimple *stmt)
2470 {
2471 switch (gimple_assign_rhs_code (stmt))
2472 {
2473 case REALPART_EXPR:
2474 case IMAGPART_EXPR:
2475 case VIEW_CONVERT_EXPR:
2476 return 1;
2477
2478 case BIT_FIELD_REF:
2479 return 3;
2480
2481 case CONSTRUCTOR:
2482 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2483
2484 default:
2485 return gimple_num_ops (stmt) - 1;
2486 }
2487 }
2488
2489 /* Initialize VNO from STMT. */
2490
2491 static void
2492 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2493 {
2494 unsigned i;
2495
2496 vno->opcode = gimple_assign_rhs_code (stmt);
2497 vno->type = gimple_expr_type (stmt);
2498 switch (vno->opcode)
2499 {
2500 case REALPART_EXPR:
2501 case IMAGPART_EXPR:
2502 case VIEW_CONVERT_EXPR:
2503 vno->length = 1;
2504 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2505 break;
2506
2507 case BIT_FIELD_REF:
2508 vno->length = 3;
2509 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2510 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2511 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2512 break;
2513
2514 case CONSTRUCTOR:
2515 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2516 for (i = 0; i < vno->length; ++i)
2517 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2518 break;
2519
2520 default:
2521 gcc_checking_assert (!gimple_assign_single_p (stmt));
2522 vno->length = gimple_num_ops (stmt) - 1;
2523 for (i = 0; i < vno->length; ++i)
2524 vno->op[i] = gimple_op (stmt, i + 1);
2525 }
2526 }
2527
2528 /* Compute the hashcode for VNO and look for it in the hash table;
2529 return the resulting value number if it exists in the hash table.
2530 Return NULL_TREE if it does not exist in the hash table or if the
2531 result field of the operation is NULL. VNRESULT will contain the
2532 vn_nary_op_t from the hashtable if it exists. */
2533
2534 static tree
2535 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2536 {
2537 vn_nary_op_s **slot;
2538
2539 if (vnresult)
2540 *vnresult = NULL;
2541
2542 vno->hashcode = vn_nary_op_compute_hash (vno);
2543 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2544 NO_INSERT);
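/* When value numbering optimistically, fall back to the valid table,
   which holds the results of already completed SCCs.  */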
2545 if (!slot && current_info == optimistic_info)
2546 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2547 NO_INSERT);
2548 if (!slot)
2549 return NULL_TREE;
2550 if (vnresult)
2551 *vnresult = *slot;
2552 return (*slot)->result;
2553 }
2554
2555 /* Lookup an n-ary operation by its pieces and return the resulting value
2556 number if it exists in the hash table. Return NULL_TREE if it does
2557 not exist in the hash table or if the result field of the operation
2558 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2559 if it exists. */
2560
2561 tree
2562 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2563 tree type, tree *ops, vn_nary_op_t *vnresult)
2564 {
2565 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2566 sizeof_vn_nary_op (length));
2567 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2568 return vn_nary_op_lookup_1 (vno1, vnresult);
2569 }
2570
2571 /* Lookup OP in the current hash table, and return the resulting value
2572 number if it exists in the hash table. Return NULL_TREE if it does
2573 not exist in the hash table or if the result field of the operation
2574 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2575 if it exists. */
2576
2577 tree
2578 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2579 {
2580 vn_nary_op_t vno1
2581 = XALLOCAVAR (struct vn_nary_op_s,
2582 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2583 init_vn_nary_op_from_op (vno1, op);
2584 return vn_nary_op_lookup_1 (vno1, vnresult);
2585 }
2586
2587 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2588 value number if it exists in the hash table. Return NULL_TREE if
2589 it does not exist in the hash table. VNRESULT will contain the
2590 vn_nary_op_t from the hashtable if it exists. */
2591
2592 tree
2593 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2594 {
2595 vn_nary_op_t vno1
2596 = XALLOCAVAR (struct vn_nary_op_s,
2597 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2598 init_vn_nary_op_from_stmt (vno1, stmt);
2599 return vn_nary_op_lookup_1 (vno1, vnresult);
2600 }
2601
2602 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2603
2604 static tree
2605 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2606 {
2607 if (!rcode.is_tree_code ())
2608 return NULL_TREE;
2609 vn_nary_op_t vnresult = NULL;
2610 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2611 (tree_code) rcode, type, ops, &vnresult);
2612 }
2613
2614 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2615
2616 static vn_nary_op_t
2617 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2618 {
2619 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2620 }
2621
2622 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2623 obstack. */
2624
2625 static vn_nary_op_t
2626 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2627 {
2628 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2629 &current_info->nary_obstack);
2630
2631 vno1->value_id = value_id;
2632 vno1->length = length;
2633 vno1->result = result;
2634
2635 return vno1;
2636 }
2637
2638 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2639 VNO->HASHCODE first. */
2640
2641 static vn_nary_op_t
2642 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2643 bool compute_hash)
2644 {
2645 vn_nary_op_s **slot;
2646
2647 if (compute_hash)
2648 vno->hashcode = vn_nary_op_compute_hash (vno);
2649
2650 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2651 gcc_assert (!*slot);
2652
2653 *slot = vno;
2654 return vno;
2655 }
2656
2657 /* Insert an n-ary operation into the current hash table using its
2658 pieces. Return the vn_nary_op_t structure we created and put in
2659 the hashtable. */
2660
2661 vn_nary_op_t
2662 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2663 tree type, tree *ops,
2664 tree result, unsigned int value_id)
2665 {
2666 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2667 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2668 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2669 }
2670
2671 /* Insert OP into the current hash table with a value number of
2672 RESULT. Return the vn_nary_op_t structure we created and put in
2673 the hashtable. */
2674
2675 vn_nary_op_t
2676 vn_nary_op_insert (tree op, tree result)
2677 {
2678 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2679 vn_nary_op_t vno1;
2680
2681 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2682 init_vn_nary_op_from_op (vno1, op);
2683 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2684 }
2685
2686 /* Insert the rhs of STMT into the current hash table with a value number of
2687 RESULT. */
2688
2689 static vn_nary_op_t
2690 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2691 {
2692 vn_nary_op_t vno1
2693 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2694 result, VN_INFO (result)->value_id);
2695 init_vn_nary_op_from_stmt (vno1, stmt);
2696 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2697 }
2698
2699 /* Compute a hashcode for PHI operation VP1 and return it. */
2700
2701 static inline hashval_t
2702 vn_phi_compute_hash (vn_phi_t vp1)
2703 {
2704 inchash::hash hstate (vp1->phiargs.length () > 2
2705 ? vp1->block->index : vp1->phiargs.length ());
2706 tree phi1op;
2707 tree type;
2708 edge e;
2709 edge_iterator ei;
2710
2711 /* If all PHI arguments are constants we need to distinguish
2712 the PHI node via its type. */
2713 type = vp1->type;
2714 hstate.merge_hash (vn_hash_type (type));
2715
2716 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2717 {
2718 /* Don't hash backedge values; they need to be handled as VN_TOP
2719 for optimistic value-numbering. */
2720 if (e->flags & EDGE_DFS_BACK)
2721 continue;
2722
2723 phi1op = vp1->phiargs[e->dest_idx];
2724 if (phi1op == VN_TOP)
2725 continue;
2726 inchash::add_expr (phi1op, hstate);
2727 }
2728
2729 return hstate.end ();
2730 }
2731
2732
2733 /* Return true if COND1 and COND2 represent the same condition, set
2734 *INVERTED_P if one needs to be inverted to make it the same as
2735 the other. */
2736
2737 static bool
2738 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2739 {
2740 enum tree_code code1 = gimple_cond_code (cond1);
2741 enum tree_code code2 = gimple_cond_code (cond2);
2742 tree lhs1 = gimple_cond_lhs (cond1);
2743 tree lhs2 = gimple_cond_lhs (cond2);
2744 tree rhs1 = gimple_cond_rhs (cond1);
2745 tree rhs2 = gimple_cond_rhs (cond2);
2746
2747 *inverted_p = false;
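/* Accept conditions whose codes match directly, with the operands
   swapped, inverted, or swapped and inverted; record in *INVERTED_P
   whether an inversion was needed.  */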
2748 if (code1 == code2)
2749 ;
2750 else if (code1 == swap_tree_comparison (code2))
2751 std::swap (lhs2, rhs2);
2752 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2753 *inverted_p = true;
2754 else if (code1 == invert_tree_comparison
2755 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2756 {
2757 std::swap (lhs2, rhs2);
2758 *inverted_p = true;
2759 }
2760 else
2761 return false;
2762
2763 lhs1 = vn_valueize (lhs1);
2764 rhs1 = vn_valueize (rhs1);
2765 lhs2 = vn_valueize (lhs2);
2766 rhs2 = vn_valueize (rhs2);
2767 return ((expressions_equal_p (lhs1, lhs2)
2768 && expressions_equal_p (rhs1, rhs2))
2769 || (commutative_tree_code (code1)
2770 && expressions_equal_p (lhs1, rhs2)
2771 && expressions_equal_p (rhs1, lhs2)));
2772 }
2773
2774 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2775
2776 static int
2777 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2778 {
2779 if (vp1->hashcode != vp2->hashcode)
2780 return false;
2781
2782 if (vp1->block != vp2->block)
2783 {
2784 if (vp1->phiargs.length () != vp2->phiargs.length ())
2785 return false;
2786
2787 switch (vp1->phiargs.length ())
2788 {
2789 case 1:
2790 /* Single-arg PHIs are just copies. */
2791 break;
2792
2793 case 2:
2794 {
2795 /* Rule out backedges into the PHI. */
2796 if (vp1->block->loop_father->header == vp1->block
2797 || vp2->block->loop_father->header == vp2->block)
2798 return false;
2799
2800 /* If the PHI nodes do not have compatible types
2801 they are not the same. */
2802 if (!types_compatible_p (vp1->type, vp2->type))
2803 return false;
2804
2805 basic_block idom1
2806 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2807 basic_block idom2
2808 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2809 /* If the immediate dominators end in switch stmts, multiple
2810 values may end up in the same PHI arg via intermediate
2811 CFG merges. */
2812 if (EDGE_COUNT (idom1->succs) != 2
2813 || EDGE_COUNT (idom2->succs) != 2)
2814 return false;
2815
2816 /* Verify the controlling stmt is the same. */
2817 gimple *last1 = last_stmt (idom1);
2818 gimple *last2 = last_stmt (idom2);
2819 if (gimple_code (last1) != GIMPLE_COND
2820 || gimple_code (last2) != GIMPLE_COND)
2821 return false;
2822 bool inverted_p;
2823 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2824 as_a <gcond *> (last2), &inverted_p))
2825 return false;
2826
2827 /* Get at true/false controlled edges into the PHI. */
2828 edge te1, te2, fe1, fe2;
2829 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2830 &te1, &fe1)
2831 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2832 &te2, &fe2))
2833 return false;
2834
2835 /* Swap edges if the second condition is the inverse of the
2836 first. */
2837 if (inverted_p)
2838 std::swap (te2, fe2);
2839
2840 /* ??? Handle VN_TOP specially. */
2841 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2842 vp2->phiargs[te2->dest_idx])
2843 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2844 vp2->phiargs[fe2->dest_idx]))
2845 return false;
2846
2847 return true;
2848 }
2849
2850 default:
2851 return false;
2852 }
2853 }
2854
2855 /* If the PHI nodes do not have compatible types
2856 they are not the same. */
2857 if (!types_compatible_p (vp1->type, vp2->type))
2858 return false;
2859
2860 /* Any phi in the same block will have its arguments in the
2861 same edge order, because of how we store phi nodes. */
2862 int i;
2863 tree phi1op;
2864 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2865 {
2866 tree phi2op = vp2->phiargs[i];
2867 if (phi1op == VN_TOP || phi2op == VN_TOP)
2868 continue;
2869 if (!expressions_equal_p (phi1op, phi2op))
2870 return false;
2871 }
2872
2873 return true;
2874 }
2875
2876 static vec<tree> shared_lookup_phiargs;
2877
2878 /* Lookup PHI in the current hash table, and return the resulting
2879 value number if it exists in the hash table. Return NULL_TREE if
2880 it does not exist in the hash table. */
2881
2882 static tree
2883 vn_phi_lookup (gimple *phi)
2884 {
2885 vn_phi_s **slot;
2886 struct vn_phi_s vp1;
2887 edge e;
2888 edge_iterator ei;
2889
2890 shared_lookup_phiargs.truncate (0);
2891 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2892
2893 /* Canonicalize the SSA_NAME's to their value number. */
2894 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2895 {
2896 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2897 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2898 shared_lookup_phiargs[e->dest_idx] = def;
2899 }
2900 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2901 vp1.phiargs = shared_lookup_phiargs;
2902 vp1.block = gimple_bb (phi);
2903 vp1.hashcode = vn_phi_compute_hash (&vp1);
2904 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2905 NO_INSERT);
2906 if (!slot && current_info == optimistic_info)
2907 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2908 NO_INSERT);
2909 if (!slot)
2910 return NULL_TREE;
2911 return (*slot)->result;
2912 }
2913
2914 /* Insert PHI into the current hash table with a value number of
2915 RESULT. */
2916
2917 static vn_phi_t
2918 vn_phi_insert (gimple *phi, tree result)
2919 {
2920 vn_phi_s **slot;
2921 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2922 vec<tree> args = vNULL;
2923 edge e;
2924 edge_iterator ei;
2925
2926 args.safe_grow (gimple_phi_num_args (phi));
2927
2928 /* Canonicalize the SSA_NAME's to their value number. */
2929 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2930 {
2931 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2932 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2933 args[e->dest_idx] = def;
2934 }
2935 vp1->value_id = VN_INFO (result)->value_id;
2936 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2937 vp1->phiargs = args;
2938 vp1->block = gimple_bb (phi);
2939 vp1->result = result;
2940 vp1->hashcode = vn_phi_compute_hash (vp1);
2941
2942 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2943
2944 /* Because we iterate over phi operations more than once, it's
2945 possible the slot might already exist here, hence no assert. */
2946 *slot = vp1;
2947 return vp1;
2948 }
2949
2950
2951 /* Print set of components in strongly connected component SCC to OUT. */
2952
2953 static void
2954 print_scc (FILE *out, vec<tree> scc)
2955 {
2956 tree var;
2957 unsigned int i;
2958
2959 fprintf (out, "SCC consists of:");
2960 FOR_EACH_VEC_ELT (scc, i, var)
2961 {
2962 fprintf (out, " ");
2963 print_generic_expr (out, var, 0);
2964 }
2965 fprintf (out, "\n");
2966 }
2967
2968 /* Set the value number of FROM to TO, return true if it has changed
2969 as a result. */
2970
2971 static inline bool
2972 set_ssa_val_to (tree from, tree to)
2973 {
2974 tree currval = SSA_VAL (from);
2975 HOST_WIDE_INT toff, coff;
2976
2977 /* The only things we allow as value numbers are ssa_names
2978 and invariants. So assert that here. We don't allow VN_TOP
2979 as visiting a stmt should produce a value-number other than
2980 that.
2981 ??? Still VN_TOP can happen for unreachable code, so force
2982 it to varying in that case. Not all code is prepared to
2983 get VN_TOP on valueization. */
2984 if (to == VN_TOP)
2985 {
2986 if (dump_file && (dump_flags & TDF_DETAILS))
2987 fprintf (dump_file, "Forcing value number to varying on "
2988 "receiving VN_TOP\n");
2989 to = from;
2990 }
2991
2992 gcc_assert (to != NULL_TREE
2993 && ((TREE_CODE (to) == SSA_NAME
2994 && (to == from || SSA_VAL (to) == to))
2995 || is_gimple_min_invariant (to)));
2996
2997 if (from != to)
2998 {
2999 if (currval == from)
3000 {
3001 if (dump_file && (dump_flags & TDF_DETAILS))
3002 {
3003 fprintf (dump_file, "Not changing value number of ");
3004 print_generic_expr (dump_file, from, 0);
3005 fprintf (dump_file, " from VARYING to ");
3006 print_generic_expr (dump_file, to, 0);
3007 fprintf (dump_file, "\n");
3008 }
3009 return false;
3010 }
3011 else if (TREE_CODE (to) == SSA_NAME
3012 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3013 to = from;
3014 }
3015
3016 if (dump_file && (dump_flags & TDF_DETAILS))
3017 {
3018 fprintf (dump_file, "Setting value number of ");
3019 print_generic_expr (dump_file, from, 0);
3020 fprintf (dump_file, " to ");
3021 print_generic_expr (dump_file, to, 0);
3022 }
3023
3024 if (currval != to
3025 && !operand_equal_p (currval, to, 0)
3026 /* ??? For addresses involving volatile objects or types operand_equal_p
3027 does not reliably detect ADDR_EXPRs as equal. We know we are only
3028 getting invariant gimple addresses here, so can use
3029 get_addr_base_and_unit_offset to do this comparison. */
3030 && !(TREE_CODE (currval) == ADDR_EXPR
3031 && TREE_CODE (to) == ADDR_EXPR
3032 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3033 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3034 && coff == toff))
3035 {
3036 VN_INFO (from)->valnum = to;
3037 if (dump_file && (dump_flags & TDF_DETAILS))
3038 fprintf (dump_file, " (changed)\n");
3039 return true;
3040 }
3041 if (dump_file && (dump_flags & TDF_DETAILS))
3042 fprintf (dump_file, "\n");
3043 return false;
3044 }
3045
3046 /* Mark as processed all the definitions in the defining stmt of USE, or
3047 the USE itself. */
3048
3049 static void
3050 mark_use_processed (tree use)
3051 {
3052 ssa_op_iter iter;
3053 def_operand_p defp;
3054 gimple *stmt = SSA_NAME_DEF_STMT (use);
3055
3056 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3057 {
3058 VN_INFO (use)->use_processed = true;
3059 return;
3060 }
3061
3062 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3063 {
3064 tree def = DEF_FROM_PTR (defp);
3065
3066 VN_INFO (def)->use_processed = true;
3067 }
3068 }
3069
3070 /* Value number all definitions in STMT to themselves.
3071 Return true if a value number changed. */
3072
3073 static bool
3074 defs_to_varying (gimple *stmt)
3075 {
3076 bool changed = false;
3077 ssa_op_iter iter;
3078 def_operand_p defp;
3079
3080 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3081 {
3082 tree def = DEF_FROM_PTR (defp);
3083 changed |= set_ssa_val_to (def, def);
3084 }
3085 return changed;
3086 }
3087
3088 /* Visit a copy between LHS and RHS, return true if the value number
3089 changed. */
3090
3091 static bool
3092 visit_copy (tree lhs, tree rhs)
3093 {
3094 /* Valueize. */
3095 rhs = SSA_VAL (rhs);
3096
3097 return set_ssa_val_to (lhs, rhs);
3098 }
3099
3100 /* Visit a nary operator RHS, value number it, and return true if the
3101 value number of LHS has changed as a result. */
3102
3103 static bool
3104 visit_nary_op (tree lhs, gimple *stmt)
3105 {
3106 bool changed = false;
3107 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3108
3109 if (result)
3110 changed = set_ssa_val_to (lhs, result);
3111 else
3112 {
3113 changed = set_ssa_val_to (lhs, lhs);
3114 vn_nary_op_insert_stmt (stmt, lhs);
3115 }
3116
3117 return changed;
3118 }
3119
3120 /* Visit a call STMT storing into LHS. Return true if the value number
3121 of the LHS has changed as a result. */
3122
3123 static bool
3124 visit_reference_op_call (tree lhs, gcall *stmt)
3125 {
3126 bool changed = false;
3127 struct vn_reference_s vr1;
3128 vn_reference_t vnresult = NULL;
3129 tree vdef = gimple_vdef (stmt);
3130
3131 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3132 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3133 lhs = NULL_TREE;
3134
3135 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3136 if (vnresult)
3137 {
3138 if (vnresult->result_vdef && vdef)
3139 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3140
3141 if (!vnresult->result && lhs)
3142 vnresult->result = lhs;
3143
3144 if (vnresult->result && lhs)
3145 changed |= set_ssa_val_to (lhs, vnresult->result);
3146 }
3147 else
3148 {
3149 vn_reference_t vr2;
3150 vn_reference_s **slot;
3151 if (vdef)
3152 changed |= set_ssa_val_to (vdef, vdef);
3153 if (lhs)
3154 changed |= set_ssa_val_to (lhs, lhs);
3155 vr2 = current_info->references_pool->allocate ();
3156 vr2->vuse = vr1.vuse;
3157 /* As we are not walking the virtual operand chain we know the
3158 shared_lookup_references are still original so we can re-use
3159 them here. */
3160 vr2->operands = vr1.operands.copy ();
3161 vr2->type = vr1.type;
3162 vr2->set = vr1.set;
3163 vr2->hashcode = vr1.hashcode;
3164 vr2->result = lhs;
3165 vr2->result_vdef = vdef;
3166 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3167 INSERT);
3168 gcc_assert (!*slot);
3169 *slot = vr2;
3170 }
3171
3172 return changed;
3173 }
3174
3175 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3176 and return true if the value number of the LHS has changed as a result. */
3177
3178 static bool
3179 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3180 {
3181 bool changed = false;
3182 tree last_vuse;
3183 tree result;
3184
3185 last_vuse = gimple_vuse (stmt);
3186 last_vuse_ptr = &last_vuse;
3187 result = vn_reference_lookup (op, gimple_vuse (stmt),
3188 default_vn_walk_kind, NULL);
3189 last_vuse_ptr = NULL;
3190
3191 /* We handle type-punning through unions by value-numbering based
3192 on offset and size of the access. Be prepared to handle a
3193 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3194 if (result
3195 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3196 {
3197 /* We will be setting the value number of lhs to the value number
3198 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3199 So first simplify and lookup this expression to see if it
3200 is already available. */
3201 mprts_hook = vn_lookup_simplify_result;
3202 code_helper rcode = VIEW_CONVERT_EXPR;
3203 tree ops[3] = { result };
3204 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3205 vn_valueize);
3206 mprts_hook = NULL;
3207 gimple *new_stmt = NULL;
3208 if (res
3209 && gimple_simplified_result_is_gimple_val (rcode, ops))
3210 /* The expression is already available. */
3211 result = ops[0];
3212 else
3213 {
3214 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3215 if (!val)
3216 {
3217 gimple_seq stmts = NULL;
3218 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3219 &stmts);
3220 gcc_assert (result && gimple_seq_singleton_p (stmts));
3221 new_stmt = gimple_seq_first_stmt (stmts);
3222 }
3223 else
3224 /* The expression is already available. */
3225 result = val;
3226 }
3227 if (new_stmt)
3228 {
3229 /* The expression is not yet available, value-number lhs to
3230 the new SSA_NAME we created. */
3231 /* Initialize value-number information properly. */
3232 VN_INFO_GET (result)->valnum = result;
3233 VN_INFO (result)->value_id = get_next_value_id ();
3234 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3235 new_stmt);
3236 VN_INFO (result)->needs_insertion = true;
3237 /* As all "inserted" statements are singleton SCCs, insert
3238 to the valid table. This is strictly needed to
3239 avoid re-generating new value SSA_NAMEs for the same
3240 expression during SCC iteration over and over (the
3241 optimistic table gets cleared after each iteration).
3242 We do not need to insert into the optimistic table, as
3243 lookups there will fall back to the valid table. */
3244 if (current_info == optimistic_info)
3245 {
3246 current_info = valid_info;
3247 vn_nary_op_insert_stmt (new_stmt, result);
3248 current_info = optimistic_info;
3249 }
3250 else
3251 vn_nary_op_insert_stmt (new_stmt, result);
3252 if (dump_file && (dump_flags & TDF_DETAILS))
3253 {
3254 fprintf (dump_file, "Inserting name ");
3255 print_generic_expr (dump_file, result, 0);
3256 fprintf (dump_file, " for expression ");
3257 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3258 fprintf (dump_file, "\n");
3259 }
3260 }
3261 }
3262
3263 if (result)
3264 changed = set_ssa_val_to (lhs, result);
3265 else
3266 {
3267 changed = set_ssa_val_to (lhs, lhs);
3268 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3269 }
3270
3271 return changed;
3272 }
3273
3274
3275 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3276 and return true if the value number of the LHS has changed as a result. */
3277
3278 static bool
3279 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3280 {
3281 bool changed = false;
3282 vn_reference_t vnresult = NULL;
3283 tree result, assign;
3284 bool resultsame = false;
3285 tree vuse = gimple_vuse (stmt);
3286 tree vdef = gimple_vdef (stmt);
3287
3288 if (TREE_CODE (op) == SSA_NAME)
3289 op = SSA_VAL (op);
3290
3291 /* First we want to look up using the *vuses* from the store and see
3292 whether the last store to this location with the same address
3293 had the same value.
3294
3295 The vuses represent the memory state before the store. If the
3296 memory state, address, and value of the store is the same as the
3297 last store to this location, then this store will produce the
3298 same memory state as that store.
3299
3300 In this case the vdef versions for this store are value numbered to those
3301 vuse versions, since they represent the same memory state after
3302 this store.
3303
3304 Otherwise, the vdefs for the store are used when inserting into
3305 the table, since the store generates a new memory state. */
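/* For example, the second of two identical stores to the same location
   with no intervening definition of memory gets its vdef value numbered
   to its vuse, exposing it as redundant.  */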
3306
3307 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3308
3309 if (result)
3310 {
3311 if (TREE_CODE (result) == SSA_NAME)
3312 result = SSA_VAL (result);
3313 resultsame = expressions_equal_p (result, op);
3314 }
3315
3316 if ((!result || !resultsame)
3317 /* Only perform the following when being called from PRE
3318 which embeds tail merging. */
3319 && default_vn_walk_kind == VN_WALK)
3320 {
3321 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3322 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3323 if (vnresult)
3324 {
3325 VN_INFO (vdef)->use_processed = true;
3326 return set_ssa_val_to (vdef, vnresult->result_vdef);
3327 }
3328 }
3329
3330 if (!result || !resultsame)
3331 {
3332 if (dump_file && (dump_flags & TDF_DETAILS))
3333 {
3334 fprintf (dump_file, "No store match\n");
3335 fprintf (dump_file, "Value numbering store ");
3336 print_generic_expr (dump_file, lhs, 0);
3337 fprintf (dump_file, " to ");
3338 print_generic_expr (dump_file, op, 0);
3339 fprintf (dump_file, "\n");
3340 }
3341 /* Have to set value numbers before insert, since insert is
3342 going to valueize the references in-place. */
3343 if (vdef)
3344 {
3345 changed |= set_ssa_val_to (vdef, vdef);
3346 }
3347
3348 /* Do not insert structure copies into the tables. */
3349 if (is_gimple_min_invariant (op)
3350 || is_gimple_reg (op))
3351 vn_reference_insert (lhs, op, vdef, NULL);
3352
3353 /* Only perform the following when being called from PRE
3354 which embeds tail merging. */
3355 if (default_vn_walk_kind == VN_WALK)
3356 {
3357 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3358 vn_reference_insert (assign, lhs, vuse, vdef);
3359 }
3360 }
3361 else
3362 {
3363 /* We had a match, so value number the vdef to have the value
3364 number of the vuse it came from. */
3365
3366 if (dump_file && (dump_flags & TDF_DETAILS))
3367 fprintf (dump_file, "Store matched earlier value,"
3368 "value numbering store vdefs to matching vuses.\n");
3369
3370 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3371 }
3372
3373 return changed;
3374 }
3375
3376 /* Visit and value number PHI, return true if the value number
3377 changed. */
3378
3379 static bool
3380 visit_phi (gimple *phi)
3381 {
3382 bool changed = false;
3383 tree result;
3384 tree sameval = VN_TOP;
3385 bool allsame = true;
3386 unsigned n_executable = 0;
3387
3388 /* TODO: We could check for this in init_sccvn, and replace this
3389 with a gcc_assert. */
3390 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3391 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3392
3393 /* See if all non-TOP arguments have the same value. TOP is
3394 equivalent to everything, so we can ignore it. */
3395 edge_iterator ei;
3396 edge e;
3397 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3398 if (e->flags & EDGE_EXECUTABLE)
3399 {
3400 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3401
3402 ++n_executable;
3403 if (TREE_CODE (def) == SSA_NAME)
3404 def = SSA_VAL (def);
3405 if (def == VN_TOP)
3406 continue;
3407 if (sameval == VN_TOP)
3408 sameval = def;
3409 else if (!expressions_equal_p (def, sameval))
3410 {
3411 allsame = false;
3412 break;
3413 }
3414 }
3415
3416 /* If none of the edges was executable or all incoming values are
3417 undefined, keep the value-number at VN_TOP. If only a single edge
3418 is executable, use its value. */
3419 if (sameval == VN_TOP
3420 || n_executable == 1)
3421 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3422
3423 /* First see if it is equivalent to a phi node in this block. We prefer
3424 this as it allows IV elimination - see PRs 66502 and 67167. */
3425 result = vn_phi_lookup (phi);
3426 if (result)
3427 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3428 /* Otherwise all value numbered to the same value, the phi node has that
3429 value. */
3430 else if (allsame)
3431 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3432 else
3433 {
3434 vn_phi_insert (phi, PHI_RESULT (phi));
3435 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3436 }
3437
3438 return changed;
3439 }
3440
3441 /* Try to simplify RHS using equivalences and constant folding. */
3442
3443 static tree
3444 try_to_simplify (gassign *stmt)
3445 {
3446 enum tree_code code = gimple_assign_rhs_code (stmt);
3447 tree tem;
3448
3449 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3450 in this case; there is no point in doing extra work. */
3451 if (code == SSA_NAME)
3452 return NULL_TREE;
3453
3454 /* First try constant folding based on our current lattice. */
3455 mprts_hook = vn_lookup_simplify_result;
3456 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3457 mprts_hook = NULL;
3458 if (tem
3459 && (TREE_CODE (tem) == SSA_NAME
3460 || is_gimple_min_invariant (tem)))
3461 return tem;
3462
3463 return NULL_TREE;
3464 }
3465
3466 /* Visit and value number USE, return true if the value number
3467 changed. */
3468
3469 static bool
3470 visit_use (tree use)
3471 {
3472 bool changed = false;
3473 gimple *stmt = SSA_NAME_DEF_STMT (use);
3474
3475 mark_use_processed (use);
3476
3477 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3478 if (dump_file && (dump_flags & TDF_DETAILS)
3479 && !SSA_NAME_IS_DEFAULT_DEF (use))
3480 {
3481 fprintf (dump_file, "Value numbering ");
3482 print_generic_expr (dump_file, use, 0);
3483 fprintf (dump_file, " stmt = ");
3484 print_gimple_stmt (dump_file, stmt, 0, 0);
3485 }
3486
3487 /* Handle uninitialized uses. */
3488 if (SSA_NAME_IS_DEFAULT_DEF (use))
3489 changed = set_ssa_val_to (use, use);
3490 else if (gimple_code (stmt) == GIMPLE_PHI)
3491 changed = visit_phi (stmt);
3492 else if (gimple_has_volatile_ops (stmt))
3493 changed = defs_to_varying (stmt);
3494 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3495 {
3496 enum tree_code code = gimple_assign_rhs_code (ass);
3497 tree lhs = gimple_assign_lhs (ass);
3498 tree rhs1 = gimple_assign_rhs1 (ass);
3499 tree simplified;
3500
3501 /* Shortcut for copies. Simplifying copies is pointless,
3502 since we copy the expression and value they represent. */
3503 if (code == SSA_NAME
3504 && TREE_CODE (lhs) == SSA_NAME)
3505 {
3506 changed = visit_copy (lhs, rhs1);
3507 goto done;
3508 }
3509 simplified = try_to_simplify (ass);
3510 if (simplified)
3511 {
3512 if (dump_file && (dump_flags & TDF_DETAILS))
3513 {
3514 fprintf (dump_file, "RHS ");
3515 print_gimple_expr (dump_file, ass, 0, 0);
3516 fprintf (dump_file, " simplified to ");
3517 print_generic_expr (dump_file, simplified, 0);
3518 fprintf (dump_file, "\n");
3519 }
3520 }
3521 /* Setting value numbers to constants will occasionally
3522 screw up phi congruence because constants are not
3523 uniquely associated with a single ssa name that can be
3524 looked up. */
3525 if (simplified
3526 && is_gimple_min_invariant (simplified)
3527 && TREE_CODE (lhs) == SSA_NAME)
3528 {
3529 changed = set_ssa_val_to (lhs, simplified);
3530 goto done;
3531 }
3532 else if (simplified
3533 && TREE_CODE (simplified) == SSA_NAME
3534 && TREE_CODE (lhs) == SSA_NAME)
3535 {
3536 changed = visit_copy (lhs, simplified);
3537 goto done;
3538 }
3539
3540 if ((TREE_CODE (lhs) == SSA_NAME
3541 /* We can substitute SSA_NAMEs that are live over
3542 abnormal edges with their constant value. */
3543 && !(gimple_assign_copy_p (ass)
3544 && is_gimple_min_invariant (rhs1))
3545 && !(simplified
3546 && is_gimple_min_invariant (simplified))
3547 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3548 /* Stores or copies from SSA_NAMEs that are live over
3549 abnormal edges are a problem. */
3550 || (code == SSA_NAME
3551 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3552 changed = defs_to_varying (ass);
3553 else if (REFERENCE_CLASS_P (lhs)
3554 || DECL_P (lhs))
3555 changed = visit_reference_op_store (lhs, rhs1, ass);
3556 else if (TREE_CODE (lhs) == SSA_NAME)
3557 {
3558 if ((gimple_assign_copy_p (ass)
3559 && is_gimple_min_invariant (rhs1))
3560 || (simplified
3561 && is_gimple_min_invariant (simplified)))
3562 {
3563 if (simplified)
3564 changed = set_ssa_val_to (lhs, simplified);
3565 else
3566 changed = set_ssa_val_to (lhs, rhs1);
3567 }
3568 else
3569 {
3570 /* Visit the original statement. */
3571 switch (vn_get_stmt_kind (ass))
3572 {
3573 case VN_NARY:
3574 changed = visit_nary_op (lhs, ass);
3575 break;
3576 case VN_REFERENCE:
3577 changed = visit_reference_op_load (lhs, rhs1, ass);
3578 break;
3579 default:
3580 changed = defs_to_varying (ass);
3581 break;
3582 }
3583 }
3584 }
3585 else
3586 changed = defs_to_varying (ass);
3587 }
3588 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3589 {
3590 tree lhs = gimple_call_lhs (call_stmt);
3591 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3592 {
3593 /* Try constant folding based on our current lattice. */
3594 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3595 vn_valueize);
3596 if (simplified)
3597 {
3598 if (dump_file && (dump_flags & TDF_DETAILS))
3599 {
3600 fprintf (dump_file, "call ");
3601 print_gimple_expr (dump_file, call_stmt, 0, 0);
3602 fprintf (dump_file, " simplified to ");
3603 print_generic_expr (dump_file, simplified, 0);
3604 fprintf (dump_file, "\n");
3605 }
3606 }
3607 /* Setting value numbers to constants will occasionally
3608 screw up phi congruence because constants are not
3609 uniquely associated with a single ssa name that can be
3610 looked up. */
3611 if (simplified
3612 && is_gimple_min_invariant (simplified))
3613 {
3614 changed = set_ssa_val_to (lhs, simplified);
3615 if (gimple_vdef (call_stmt))
3616 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3617 SSA_VAL (gimple_vuse (call_stmt)));
3618 goto done;
3619 }
3620 else if (simplified
3621 && TREE_CODE (simplified) == SSA_NAME)
3622 {
3623 changed = visit_copy (lhs, simplified);
3624 if (gimple_vdef (call_stmt))
3625 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3626 SSA_VAL (gimple_vuse (call_stmt)));
3627 goto done;
3628 }
3629 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3630 {
3631 changed = defs_to_varying (call_stmt);
3632 goto done;
3633 }
3634 }
3635
3636 if (!gimple_call_internal_p (call_stmt)
3637 && (/* Calls to the same function with the same vuse
3638 and the same operands do not necessarily return the same
3639 value, unless they're pure or const. */
3640 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3641 /* If calls have a vdef, subsequent calls won't have
3642 the same incoming vuse. So, if 2 calls with vdef have the
3643 same vuse, we know they're not subsequent.
3644 We can value number 2 calls to the same function with the
3645 same vuse and the same operands which are not subsequent
3646 as the same, because there is no code in the program that can
3647 compare the 2 values... */
3648 || (gimple_vdef (call_stmt)
3649 /* ... unless the call returns a pointer which does
3650 not alias with anything else. In which case the
3651 information that the values are distinct is encoded
3652 in the IL. */
3653 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3654 /* Only perform the following when being called from PRE
3655 which embeds tail merging. */
3656 && default_vn_walk_kind == VN_WALK)))
3657 changed = visit_reference_op_call (lhs, call_stmt);
3658 else
3659 changed = defs_to_varying (call_stmt);
3660 }
3661 else
3662 changed = defs_to_varying (stmt);
3663 done:
3664 return changed;
3665 }
3666
3667 /* Compare two operands by reverse postorder index */
3668
3669 static int
3670 compare_ops (const void *pa, const void *pb)
3671 {
3672 const tree opa = *((const tree *)pa);
3673 const tree opb = *((const tree *)pb);
3674 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3675 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3676 basic_block bba;
3677 basic_block bbb;
3678
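/* Default definitions sort first, then definitions whose statement is
   not in a basic block; within one block PHIs precede other statements,
   which are ordered by their UIDs; different blocks are ordered by
   their RPO numbers.  SSA name versions break the remaining ties.  */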
3679 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3680 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3681 else if (gimple_nop_p (opstmta))
3682 return -1;
3683 else if (gimple_nop_p (opstmtb))
3684 return 1;
3685
3686 bba = gimple_bb (opstmta);
3687 bbb = gimple_bb (opstmtb);
3688
3689 if (!bba && !bbb)
3690 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3691 else if (!bba)
3692 return -1;
3693 else if (!bbb)
3694 return 1;
3695
3696 if (bba == bbb)
3697 {
3698 if (gimple_code (opstmta) == GIMPLE_PHI
3699 && gimple_code (opstmtb) == GIMPLE_PHI)
3700 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3701 else if (gimple_code (opstmta) == GIMPLE_PHI)
3702 return -1;
3703 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3704 return 1;
3705 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3706 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3707 else
3708 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3709 }
3710 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3711 }
3712
3713 /* Sort an array containing members of a strongly connected component
3714 SCC so that the members are ordered by RPO number.
3715 This means that when the sort is complete, iterating through the
3716 array will give you the members in RPO order. */
3717
3718 static void
3719 sort_scc (vec<tree> scc)
3720 {
3721 scc.qsort (compare_ops);
3722 }
3723
3724 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
3725
3726 static void
3727 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3728 {
3729 size_t size = sizeof_vn_nary_op (onary->length);
3730 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3731 &info->nary_obstack);
3732 memcpy (nary, onary, size);
3733 vn_nary_op_insert_into (nary, info->nary, false);
3734 }
3735
3736 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
3737
3738 static void
3739 copy_phi (vn_phi_t ophi, vn_tables_t info)
3740 {
3741 vn_phi_t phi = info->phis_pool->allocate ();
3742 vn_phi_s **slot;
3743 memcpy (phi, ophi, sizeof (*phi));
3744 ophi->phiargs.create (0);
3745 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3746 gcc_assert (!*slot);
3747 *slot = phi;
3748 }
3749
3750 /* Insert the no longer used reference OREF into the hash tables of INFO. */
3751
3752 static void
3753 copy_reference (vn_reference_t oref, vn_tables_t info)
3754 {
3755 vn_reference_t ref;
3756 vn_reference_s **slot;
3757 ref = info->references_pool->allocate ();
3758 memcpy (ref, oref, sizeof (*ref));
3759 oref->operands.create (0);
3760 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3761 if (*slot)
3762 free_reference (*slot);
3763 *slot = ref;
3764 }
3765
3766 /* Process a strongly connected component in the SSA graph. */
3767
3768 static void
3769 process_scc (vec<tree> scc)
3770 {
3771 tree var;
3772 unsigned int i;
3773 unsigned int iterations = 0;
3774 bool changed = true;
3775 vn_nary_op_iterator_type hin;
3776 vn_phi_iterator_type hip;
3777 vn_reference_iterator_type hir;
3778 vn_nary_op_t nary;
3779 vn_phi_t phi;
3780 vn_reference_t ref;
3781
3782 /* If the SCC has a single member, just visit it. */
3783 if (scc.length () == 1)
3784 {
3785 tree use = scc[0];
3786 if (VN_INFO (use)->use_processed)
3787 return;
3788 /* We need to make sure it doesn't form a cycle itself, which can
3789 happen for self-referential PHI nodes. In that case we would
3790 end up inserting an expression with VN_TOP operands into the
3791 valid table which makes us derive bogus equivalences later.
3792 The cheapest way to check this is to assume it for all PHI nodes. */
3793 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3794 /* Fallthru to iteration. */ ;
3795 else
3796 {
3797 visit_use (use);
3798 return;
3799 }
3800 }
3801
3802 if (dump_file && (dump_flags & TDF_DETAILS))
3803 print_scc (dump_file, scc);
3804
3805 /* Iterate over the SCC with the optimistic table until it stops
3806 changing. */
3807 current_info = optimistic_info;
3808 while (changed)
3809 {
3810 changed = false;
3811 iterations++;
3812 if (dump_file && (dump_flags & TDF_DETAILS))
3813 fprintf (dump_file, "Starting iteration %d\n", iterations);
3814 /* As we are value-numbering optimistically we have to
3815 clear the expression tables and the simplified expressions
3816 in each iteration until we converge. */
3817 optimistic_info->nary->empty ();
3818 optimistic_info->phis->empty ();
3819 optimistic_info->references->empty ();
3820 obstack_free (&optimistic_info->nary_obstack, NULL);
3821 gcc_obstack_init (&optimistic_info->nary_obstack);
3822 optimistic_info->phis_pool->release ();
3823 optimistic_info->references_pool->release ();
3824 FOR_EACH_VEC_ELT (scc, i, var)
3825 gcc_assert (!VN_INFO (var)->needs_insertion
3826 && VN_INFO (var)->expr == NULL);
3827 FOR_EACH_VEC_ELT (scc, i, var)
3828 changed |= visit_use (var);
3829 }
3830
3831 if (dump_file && (dump_flags & TDF_DETAILS))
3832 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3833 statistics_histogram_event (cfun, "SCC iterations", iterations);
3834
3835 /* Finally, copy the contents of the no longer used optimistic
3836 table to the valid table. */
3837 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3838 copy_nary (nary, valid_info);
3839 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3840 copy_phi (phi, valid_info);
3841 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3842 ref, vn_reference_t, hir)
3843 copy_reference (ref, valid_info);
3844
3845 current_info = valid_info;
3846 }
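/* For illustration only (a hypothetical GIMPLE fragment, not from any
   particular testcase): the cycle

       # i_1 = PHI <0(preheader), i_2(latch)>
       i_2 = i_1 + 0;

   forms a two-member SCC.  Value numbering it optimistically lets both
   names collapse to the constant 0: the PHI first ignores the
   not-yet-visited (VN_TOP) argument and gets the value 0, the addition
   then simplifies to that same value, and one more iteration verifies
   that nothing changes. */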
3847
3848
3849 /* Pop the components of the found SCC for NAME off the SCC stack
3850 and process them. Returns true if all went well, false if
3851 we run into resource limits. */
3852
3853 static bool
3854 extract_and_process_scc_for_name (tree name)
3855 {
3856 auto_vec<tree> scc;
3857 tree x;
3858
3859 /* Found an SCC, pop the components off the SCC stack and
3860 process them. */
3861 do
3862 {
3863 x = sccstack.pop ();
3864
3865 VN_INFO (x)->on_sccstack = false;
3866 scc.safe_push (x);
3867 } while (x != name);
3868
3869 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3870 if (scc.length ()
3871 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3872 {
3873 if (dump_file)
3874 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3875 "SCC size %u exceeding %u\n", scc.length (),
3876 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3877
3878 return false;
3879 }
3880
3881 if (scc.length () > 1)
3882 sort_scc (scc);
3883
3884 process_scc (scc);
3885
3886 return true;
3887 }
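/* A usage note (the exact spelling of the knob lives in params.def): the
   SCC size limit checked above corresponds to --param sccvn-max-scc-size,
   so a hypothetical invocation like

       gcc -O2 --param sccvn-max-scc-size=100000 big-file.c

   raises the limit when SCCVN would otherwise give up on an unusually
   large component. */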
3888
3889 /* Depth first search on NAME to discover and process SCC's in the SSA
3890 graph.
3891 Execution of this algorithm relies on the fact that the SCC's are
3892 popped off the stack in topological order.
3893 Returns true if successful, false if we stopped processing SCC's due
3894 to resource constraints. */
3895
3896 static bool
3897 DFS (tree name)
3898 {
3899 vec<ssa_op_iter> itervec = vNULL;
3900 vec<tree> namevec = vNULL;
3901 use_operand_p usep = NULL;
3902 gimple *defstmt;
3903 tree use;
3904 ssa_op_iter iter;
3905
3906 start_over:
3907 /* SCC info */
3908 VN_INFO (name)->dfsnum = next_dfs_num++;
3909 VN_INFO (name)->visited = true;
3910 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3911
3912 sccstack.safe_push (name);
3913 VN_INFO (name)->on_sccstack = true;
3914 defstmt = SSA_NAME_DEF_STMT (name);
3915
3916 /* Recursively DFS on our operands, looking for SCC's. */
3917 if (!gimple_nop_p (defstmt))
3918 {
3919 /* Push a new iterator. */
3920 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3921 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
3922 else
3923 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3924 }
3925 else
3926 clear_and_done_ssa_iter (&iter);
3927
3928 while (1)
3929 {
3930 /* If we are done processing uses of a name, go up the stack
3931 of iterators and process SCCs as we found them. */
3932 if (op_iter_done (&iter))
3933 {
3934 /* See if we found an SCC. */
3935 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3936 if (!extract_and_process_scc_for_name (name))
3937 {
3938 namevec.release ();
3939 itervec.release ();
3940 return false;
3941 }
3942
3943 /* Check if we are done. */
3944 if (namevec.is_empty ())
3945 {
3946 namevec.release ();
3947 itervec.release ();
3948 return true;
3949 }
3950
3951 /* Restore the last use walker and continue walking there. */
3952 use = name;
3953 name = namevec.pop ();
3954 memcpy (&iter, &itervec.last (),
3955 sizeof (ssa_op_iter));
3956 itervec.pop ();
3957 goto continue_walking;
3958 }
3959
3960 use = USE_FROM_PTR (usep);
3961
3962 /* Since we handle phi nodes, we will sometimes get
3963 invariants in the use expression. */
3964 if (TREE_CODE (use) == SSA_NAME)
3965 {
3966 if (! (VN_INFO (use)->visited))
3967 {
3968 /* Recurse by pushing the current use walking state on
3969 the stack and starting over. */
3970 itervec.safe_push (iter);
3971 namevec.safe_push (name);
3972 name = use;
3973 goto start_over;
3974
3975 continue_walking:
3976 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3977 VN_INFO (use)->low);
3978 }
3979 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3980 && VN_INFO (use)->on_sccstack)
3981 {
3982 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3983 VN_INFO (name)->low);
3984 }
3985 }
3986
3987 usep = op_iter_next_use (&iter);
3988 }
3989 }
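/* A rough sketch, for illustration only: the explicit namevec/itervec
   stacks above emulate the following recursive formulation of the SCC
   walk without risking deep C stack recursion on large SSA graphs:

       dfs (name):
         dfsnum (name) = low (name) = next_dfs_num++; push name on the stack
         for each SSA_NAME use USE of name's defining stmt:
           if USE is not yet visited:
             dfs (USE)
             low (name) = min (low (name), low (USE))
           if dfsnum (USE) < dfsnum (name) and USE is still on the stack:
             low (name) = min (low (name), dfsnum (USE))
         if low (name) == dfsnum (name):
           pop the SCC that ends at name and process it
*/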
3990
3991 /* Allocate a value number table. */
3992
3993 static void
3994 allocate_vn_table (vn_tables_t table)
3995 {
3996 table->phis = new vn_phi_table_type (23);
3997 table->nary = new vn_nary_op_table_type (23);
3998 table->references = new vn_reference_table_type (23);
3999
4000 gcc_obstack_init (&table->nary_obstack);
4001 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4002 table->references_pool = new object_allocator<vn_reference_s>
4003 ("VN references");
4004 }
4005
4006 /* Free a value number table. */
4007
4008 static void
4009 free_vn_table (vn_tables_t table)
4010 {
4011 delete table->phis;
4012 table->phis = NULL;
4013 delete table->nary;
4014 table->nary = NULL;
4015 delete table->references;
4016 table->references = NULL;
4017 obstack_free (&table->nary_obstack, NULL);
4018 delete table->phis_pool;
4019 delete table->references_pool;
4020 }
4021
4022 static void
4023 init_scc_vn (void)
4024 {
4025 size_t i;
4026 int j;
4027 int *rpo_numbers_temp;
4028
4029 calculate_dominance_info (CDI_DOMINATORS);
4030 mark_dfs_back_edges ();
4031
4032 sccstack.create (0);
4033 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4034
4035 constant_value_ids = BITMAP_ALLOC (NULL);
4036
4037 next_dfs_num = 1;
4038 next_value_id = 1;
4039
4040 vn_ssa_aux_table.create (num_ssa_names + 1);
4041 /* The create call doesn't actually grow the vector to the right size,
4042 it just preallocates the space to do so. */
4043 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4044 gcc_obstack_init (&vn_ssa_aux_obstack);
4045
4046 shared_lookup_phiargs.create (0);
4047 shared_lookup_references.create (0);
4048 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4049 rpo_numbers_temp =
4050 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4051 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4052
4053 /* RPO_NUMBERS_TEMP is indexed by RPO position: rpo_numbers_temp[i] == b
4054 means the i'th block in RPO order is the block with index b. We want
4055 to map block indices to RPO numbers, so we invert this array. */
4056 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4057 rpo_numbers[rpo_numbers_temp[j]] = j;
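/* For example (hypothetical block indices): if rpo_numbers_temp is
   { 2, 4, 3 }, block 2 is first in RPO order, so after this loop
   rpo_numbers[2] == 0, rpo_numbers[4] == 1 and rpo_numbers[3] == 2. */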
4058
4059 XDELETE (rpo_numbers_temp);
4060
4061 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4062
4063 renumber_gimple_stmt_uids ();
4064
4065 /* Create the valid and optimistic value numbering tables. */
4066 valid_info = XCNEW (struct vn_tables_s);
4067 allocate_vn_table (valid_info);
4068 optimistic_info = XCNEW (struct vn_tables_s);
4069 allocate_vn_table (optimistic_info);
4070 current_info = valid_info;
4071
4072 /* Create the VN_INFO structures, and initialize value numbers to
4073 TOP or VARYING for parameters. */
4074 for (i = 1; i < num_ssa_names; i++)
4075 {
4076 tree name = ssa_name (i);
4077 if (!name)
4078 continue;
4079
4080 VN_INFO_GET (name)->valnum = VN_TOP;
4081 VN_INFO (name)->needs_insertion = false;
4082 VN_INFO (name)->expr = NULL;
4083 VN_INFO (name)->value_id = 0;
4084
4085 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4086 continue;
4087
4088 switch (TREE_CODE (SSA_NAME_VAR (name)))
4089 {
4090 case VAR_DECL:
4091 /* Undefined vars keep TOP. */
4092 break;
4093
4094 case PARM_DECL:
4095 /* Parameters are VARYING but we can record a condition
4096 if we know it is a non-NULL pointer. */
4097 VN_INFO (name)->visited = true;
4098 VN_INFO (name)->valnum = name;
4099 if (POINTER_TYPE_P (TREE_TYPE (name))
4100 && nonnull_arg_p (SSA_NAME_VAR (name)))
4101 {
4102 tree ops[2];
4103 ops[0] = name;
4104 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4105 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4106 boolean_true_node, 0);
4107 if (dump_file && (dump_flags & TDF_DETAILS))
4108 {
4109 fprintf (dump_file, "Recording ");
4110 print_generic_expr (dump_file, name, TDF_SLIM);
4111 fprintf (dump_file, " != 0\n");
4112 }
4113 }
4114 break;
4115
4116 case RESULT_DECL:
4117 /* If the result is passed by invisible reference, the default
4118 def is initialized, otherwise it's uninitialized. */
4119 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4120 {
4121 VN_INFO (name)->visited = true;
4122 VN_INFO (name)->valnum = name;
4123 }
4124 break;
4125
4126 default:
4127 gcc_unreachable ();
4128 }
4129 }
4130 }
4131
4132 void
4133 free_scc_vn (void)
4134 {
4135 size_t i;
4136
4137 delete constant_to_value_id;
4138 constant_to_value_id = NULL;
4139 BITMAP_FREE (constant_value_ids);
4140 shared_lookup_phiargs.release ();
4141 shared_lookup_references.release ();
4142 XDELETEVEC (rpo_numbers);
4143
4144 for (i = 0; i < num_ssa_names; i++)
4145 {
4146 tree name = ssa_name (i);
4147 if (name
4148 && has_VN_INFO (name)
4149 && VN_INFO (name)->needs_insertion)
4150 release_ssa_name (name);
4151 }
4152 obstack_free (&vn_ssa_aux_obstack, NULL);
4153 vn_ssa_aux_table.release ();
4154
4155 sccstack.release ();
4156 free_vn_table (valid_info);
4157 XDELETE (valid_info);
4158 free_vn_table (optimistic_info);
4159 XDELETE (optimistic_info);
4160
4161 BITMAP_FREE (const_parms);
4162 }
4163
4164 /* Set *ID according to RESULT. */
4165
4166 static void
4167 set_value_id_for_result (tree result, unsigned int *id)
4168 {
4169 if (result && TREE_CODE (result) == SSA_NAME)
4170 *id = VN_INFO (result)->value_id;
4171 else if (result && is_gimple_min_invariant (result))
4172 *id = get_or_alloc_constant_value_id (result);
4173 else
4174 *id = get_next_value_id ();
4175 }
4176
4177 /* Set the value ids in the valid hash tables. */
4178
4179 static void
4180 set_hashtable_value_ids (void)
4181 {
4182 vn_nary_op_iterator_type hin;
4183 vn_phi_iterator_type hip;
4184 vn_reference_iterator_type hir;
4185 vn_nary_op_t vno;
4186 vn_reference_t vr;
4187 vn_phi_t vp;
4188
4189 /* Now set the value ids of the things we had put in the hash
4190 tables. */
4191
4192 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4193 set_value_id_for_result (vno->result, &vno->value_id);
4194
4195 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4196 set_value_id_for_result (vp->result, &vp->value_id);
4197
4198 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4199 hir)
4200 set_value_id_for_result (vr->result, &vr->value_id);
4201 }
4202
4203 class sccvn_dom_walker : public dom_walker
4204 {
4205 public:
4206 sccvn_dom_walker ()
4207 : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}
4208 ~sccvn_dom_walker ();
4209
4210 virtual void before_dom_children (basic_block);
4211 virtual void after_dom_children (basic_block);
4212
4213 void record_cond (basic_block,
4214 enum tree_code code, tree lhs, tree rhs, bool value);
4215 void record_conds (basic_block,
4216 enum tree_code code, tree lhs, tree rhs, bool value);
4217
4218 bool fail;
4219 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4220 cond_stack;
4221 };
4222
4223 sccvn_dom_walker::~sccvn_dom_walker ()
4224 {
4225 cond_stack.release ();
4226 }
4227
4228 /* Record a temporary condition for the BB and its dominated blocks. */
4229
4230 void
4231 sccvn_dom_walker::record_cond (basic_block bb,
4232 enum tree_code code, tree lhs, tree rhs,
4233 bool value)
4234 {
4235 tree ops[2] = { lhs, rhs };
4236 vn_nary_op_t old = NULL;
4237 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4238 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4239 vn_nary_op_t cond
4240 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4241 value
4242 ? boolean_true_node
4243 : boolean_false_node, 0);
4244 if (dump_file && (dump_flags & TDF_DETAILS))
4245 {
4246 fprintf (dump_file, "Recording temporarily ");
4247 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4248 fprintf (dump_file, " %s ", get_tree_code_name (code));
4249 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4250 fprintf (dump_file, " == %s%s\n",
4251 value ? "true" : "false",
4252 old ? " (old entry saved)" : "");
4253 }
4254 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4255 }
4256
4257 /* Record the temporary condition LHS CODE RHS == VALUE, together with
4258 the conditions it implies, for the BB and its dominated blocks. */
4259
4260 void
4261 sccvn_dom_walker::record_conds (basic_block bb,
4262 enum tree_code code, tree lhs, tree rhs,
4263 bool value)
4264 {
4265 /* Record the original condition. */
4266 record_cond (bb, code, lhs, rhs, value);
4267
4268 if (!value)
4269 return;
4270
4271 /* Record dominated conditions if the condition is true. Note that
4272 the inversion is already recorded. */
4273 switch (code)
4274 {
4275 case LT_EXPR:
4276 case GT_EXPR:
4277 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4278 record_cond (bb, NE_EXPR, lhs, rhs, true);
4279 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4280 break;
4281
4282 case EQ_EXPR:
4283 record_cond (bb, LE_EXPR, lhs, rhs, true);
4284 record_cond (bb, GE_EXPR, lhs, rhs, true);
4285 record_cond (bb, LT_EXPR, lhs, rhs, false);
4286 record_cond (bb, GT_EXPR, lhs, rhs, false);
4287 break;
4288
4289 default:
4290 break;
4291 }
4292 }
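/* For illustration (hypothetical operands A and B):
   record_conds (bb, LT_EXPR, a, b, true) records a < b == true and in
   addition a <= b == true, a != b == true and a == b == false for BB and
   the blocks it dominates; the caller separately records the inverted
   comparison for the other edge of the condition. */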
4293
4294 /* Restore expressions and values derived from conditionals. */
4295
4296 void
4297 sccvn_dom_walker::after_dom_children (basic_block bb)
4298 {
4299 while (!cond_stack.is_empty ()
4300 && cond_stack.last ().first == bb)
4301 {
4302 vn_nary_op_t cond = cond_stack.last ().second.first;
4303 vn_nary_op_t old = cond_stack.last ().second.second;
4304 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4305 if (old)
4306 vn_nary_op_insert_into (old, current_info->nary, false);
4307 cond_stack.pop ();
4308 }
4309 }
4310
4311 /* Value number all statements in BB. */
4312
4313 void
4314 sccvn_dom_walker::before_dom_children (basic_block bb)
4315 {
4316 edge e;
4317 edge_iterator ei;
4318
4319 if (fail)
4320 return;
4321
4322 /* If any of the predecessor edges that do not come from blocks dominated
4323 by us are still marked as possibly executable, consider this block
4324 reachable. */
4325 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4326 FOR_EACH_EDGE (e, ei, bb->preds)
4327 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4328 reachable |= (e->flags & EDGE_EXECUTABLE);
4329
4330 /* If the block is not reachable, all outgoing edges are not
4331 executable. Neither are incoming edges with src dominated by us. */
4332 if (!reachable)
4333 {
4334 if (dump_file && (dump_flags & TDF_DETAILS))
4335 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4336 "BB %d as not executable\n", bb->index);
4337
4338 FOR_EACH_EDGE (e, ei, bb->succs)
4339 e->flags &= ~EDGE_EXECUTABLE;
4340
4341 FOR_EACH_EDGE (e, ei, bb->preds)
4342 {
4343 if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
4344 {
4345 if (dump_file && (dump_flags & TDF_DETAILS))
4346 fprintf (dump_file, "Marking backedge from BB %d into "
4347 "unreachable BB %d as not executable\n",
4348 e->src->index, bb->index);
4349 e->flags &= ~EDGE_EXECUTABLE;
4350 }
4351 }
4352 return;
4353 }
4354
4355 if (dump_file && (dump_flags & TDF_DETAILS))
4356 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4357
4358 /* If we have a single predecessor, record the equivalence from a
4359 possible condition on the predecessor edge. */
4360 if (single_pred_p (bb))
4361 {
4362 edge e = single_pred_edge (bb);
4363 /* Check if there are multiple executable successor edges in
4364 the source block. Otherwise there is no additional info
4365 to be recorded. */
4366 edge e2;
4367 FOR_EACH_EDGE (e2, ei, e->src->succs)
4368 if (e2 != e
4369 && e2->flags & EDGE_EXECUTABLE)
4370 break;
4371 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4372 {
4373 gimple *stmt = last_stmt (e->src);
4374 if (stmt
4375 && gimple_code (stmt) == GIMPLE_COND)
4376 {
4377 enum tree_code code = gimple_cond_code (stmt);
4378 tree lhs = gimple_cond_lhs (stmt);
4379 tree rhs = gimple_cond_rhs (stmt);
4380 record_conds (bb, code, lhs, rhs,
4381 (e->flags & EDGE_TRUE_VALUE) != 0);
4382 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4383 if (code != ERROR_MARK)
4384 record_conds (bb, code, lhs, rhs,
4385 (e->flags & EDGE_TRUE_VALUE) == 0);
4386 }
4387 }
4388 }
4389
4390 /* Value-number all defs in the basic-block. */
4391 for (gphi_iterator gsi = gsi_start_phis (bb);
4392 !gsi_end_p (gsi); gsi_next (&gsi))
4393 {
4394 gphi *phi = gsi.phi ();
4395 tree res = PHI_RESULT (phi);
4396 if (!VN_INFO (res)->visited
4397 && !DFS (res))
4398 {
4399 fail = true;
4400 return;
4401 }
4402 }
4403 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4404 !gsi_end_p (gsi); gsi_next (&gsi))
4405 {
4406 ssa_op_iter i;
4407 tree op;
4408 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4409 if (!VN_INFO (op)->visited
4410 && !DFS (op))
4411 {
4412 fail = true;
4413 return;
4414 }
4415 }
4416
4417 /* Finally look at the last stmt. */
4418 gimple *stmt = last_stmt (bb);
4419 if (!stmt)
4420 return;
4421
4422 enum gimple_code code = gimple_code (stmt);
4423 if (code != GIMPLE_COND
4424 && code != GIMPLE_SWITCH
4425 && code != GIMPLE_GOTO)
4426 return;
4427
4428 if (dump_file && (dump_flags & TDF_DETAILS))
4429 {
4430 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4431 print_gimple_stmt (dump_file, stmt, 0, 0);
4432 }
4433
4434 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4435 if value-numbering can prove they are not reachable. Handling
4436 computed gotos is also possible. */
4437 tree val;
4438 switch (code)
4439 {
4440 case GIMPLE_COND:
4441 {
4442 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4443 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4444 val = gimple_simplify (gimple_cond_code (stmt),
4445 boolean_type_node, lhs, rhs,
4446 NULL, vn_valueize);
4447 /* If that didn't simplify to a constant see if we have recorded
4448 temporary expressions from taken edges. */
4449 if (!val || TREE_CODE (val) != INTEGER_CST)
4450 {
4451 tree ops[2];
4452 ops[0] = lhs;
4453 ops[1] = rhs;
4454 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4455 boolean_type_node, ops, NULL);
4456 }
4457 break;
4458 }
4459 case GIMPLE_SWITCH:
4460 val = gimple_switch_index (as_a <gswitch *> (stmt));
4461 break;
4462 case GIMPLE_GOTO:
4463 val = gimple_goto_dest (stmt);
4464 break;
4465 default:
4466 gcc_unreachable ();
4467 }
4468 if (!val)
4469 return;
4470
4471 edge taken = find_taken_edge (bb, vn_valueize (val));
4472 if (!taken)
4473 return;
4474
4475 if (dump_file && (dump_flags & TDF_DETAILS))
4476 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4477 "not executable\n", bb->index, bb->index, taken->dest->index);
4478
4479 FOR_EACH_EDGE (e, ei, bb->succs)
4480 if (e != taken)
4481 e->flags &= ~EDGE_EXECUTABLE;
4482 }
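/* For illustration (hypothetical GIMPLE, not from a testcase): if value
   numbering determines that x_1 is the constant 0 in

       if (x_1 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

   then find_taken_edge picks the edge to <bb 4>, the edge to <bb 3> loses
   EDGE_EXECUTABLE, and blocks reachable only through it are treated as
   unreachable by the rest of the walk. */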
4483
4484 /* Do SCCVN. Returns true if it finished, false if we bailed out
4485 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4486 how we use the alias oracle walking during the VN process. */
4487
4488 bool
4489 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4490 {
4491 basic_block bb;
4492 size_t i;
4493
4494 default_vn_walk_kind = default_vn_walk_kind_;
4495
4496 init_scc_vn ();
4497
4498 /* Collect pointers we know point to readonly memory. */
4499 const_parms = BITMAP_ALLOC (NULL);
4500 tree fnspec = lookup_attribute ("fn spec",
4501 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4502 if (fnspec)
4503 {
4504 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4505 i = 1;
4506 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4507 arg; arg = DECL_CHAIN (arg), ++i)
4508 {
4509 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4510 break;
4511 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4512 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4513 {
4514 tree name = ssa_default_def (cfun, arg);
4515 if (name)
4516 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4517 }
4518 }
4519 }
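/* A hedged note on the "fn spec" string scanned above (see the attribute
   handling elsewhere in GCC for the authoritative meaning of the letters):
   character 0 describes the function or its return value and character I
   describes the I'th parameter.  With a hypothetical string ".R." only the
   first parameter's default definition would be added to CONST_PARMS,
   recording that the pointer is known to point to read-only memory. */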
4520
4521 /* Mark all edges as possibly executable. */
4522 FOR_ALL_BB_FN (bb, cfun)
4523 {
4524 edge_iterator ei;
4525 edge e;
4526 FOR_EACH_EDGE (e, ei, bb->succs)
4527 e->flags |= EDGE_EXECUTABLE;
4528 }
4529
4530 /* Walk all blocks in dominator order, value-numbering stmts
4531 SSA defs and decide whether outgoing edges are not executable. */
4532 sccvn_dom_walker walker;
4533 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4534 if (walker.fail)
4535 {
4536 free_scc_vn ();
4537 return false;
4538 }
4539
4540 /* Initialize the value ids and prune out remaining VN_TOPs
4541 from dead code. */
4542 for (i = 1; i < num_ssa_names; ++i)
4543 {
4544 tree name = ssa_name (i);
4545 vn_ssa_aux_t info;
4546 if (!name)
4547 continue;
4548 info = VN_INFO (name);
4549 if (!info->visited)
4550 info->valnum = name;
4551 if (info->valnum == name
4552 || info->valnum == VN_TOP)
4553 info->value_id = get_next_value_id ();
4554 else if (is_gimple_min_invariant (info->valnum))
4555 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4556 }
4557
4558 /* Propagate. */
4559 for (i = 1; i < num_ssa_names; ++i)
4560 {
4561 tree name = ssa_name (i);
4562 vn_ssa_aux_t info;
4563 if (!name)
4564 continue;
4565 info = VN_INFO (name);
4566 if (TREE_CODE (info->valnum) == SSA_NAME
4567 && info->valnum != name
4568 && info->value_id != VN_INFO (info->valnum)->value_id)
4569 info->value_id = VN_INFO (info->valnum)->value_id;
4570 }
4571
4572 set_hashtable_value_ids ();
4573
4574 if (dump_file && (dump_flags & TDF_DETAILS))
4575 {
4576 fprintf (dump_file, "Value numbers:\n");
4577 for (i = 0; i < num_ssa_names; i++)
4578 {
4579 tree name = ssa_name (i);
4580 if (name
4581 && VN_INFO (name)->visited
4582 && SSA_VAL (name) != name)
4583 {
4584 print_generic_expr (dump_file, name, 0);
4585 fprintf (dump_file, " = ");
4586 print_generic_expr (dump_file, SSA_VAL (name), 0);
4587 fprintf (dump_file, "\n");
4588 }
4589 }
4590 }
4591
4592 return true;
4593 }
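/* A rough sketch of the calling convention, for illustration only (the
   real consumers are the FRE and PRE passes):

       if (!run_scc_vn (VN_WALKREWRITE))
         return 0;   // bailed out due to resource limits, nothing to do
       ... inspect SSA_VAL () and the valid tables, e.g. to eliminate ...
       free_scc_vn ();

   where VN_WALKREWRITE, VN_WALK or VN_NOWALK selects how aggressively the
   alias-oracle walk may look through memory references during lookups. */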
4594
4595 /* Return the maximum value id we have ever seen. */
4596
4597 unsigned int
4598 get_max_value_id (void)
4599 {
4600 return next_value_id;
4601 }
4602
4603 /* Return the next unique value id. */
4604
4605 unsigned int
4606 get_next_value_id (void)
4607 {
4608 return next_value_id++;
4609 }
4610
4611
4612 /* Compare two expressions E1 and E2 and return true if they are equal. */
4613
4614 bool
4615 expressions_equal_p (tree e1, tree e2)
4616 {
4617 /* The obvious case. */
4618 if (e1 == e2)
4619 return true;
4620
4621 /* If either one is VN_TOP consider them equal. */
4622 if (e1 == VN_TOP || e2 == VN_TOP)
4623 return true;
4624
4625 /* If only one of them is null, they cannot be equal. */
4626 if (!e1 || !e2)
4627 return false;
4628
4629 /* Now perform the actual comparison. */
4630 if (TREE_CODE (e1) == TREE_CODE (e2)
4631 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4632 return true;
4633
4634 return false;
4635 }
4636
4637
4638 /* Return true if the nary operation NARY may trap. This is a copy
4639 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4640
4641 bool
4642 vn_nary_may_trap (vn_nary_op_t nary)
4643 {
4644 tree type;
4645 tree rhs2 = NULL_TREE;
4646 bool honor_nans = false;
4647 bool honor_snans = false;
4648 bool fp_operation = false;
4649 bool honor_trapv = false;
4650 bool handled, ret;
4651 unsigned i;
4652
4653 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4654 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4655 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4656 {
4657 type = nary->type;
4658 fp_operation = FLOAT_TYPE_P (type);
4659 if (fp_operation)
4660 {
4661 honor_nans = flag_trapping_math && !flag_finite_math_only;
4662 honor_snans = flag_signaling_nans != 0;
4663 }
4664 else if (INTEGRAL_TYPE_P (type)
4665 && TYPE_OVERFLOW_TRAPS (type))
4666 honor_trapv = true;
4667 }
4668 if (nary->length >= 2)
4669 rhs2 = nary->op[1];
4670 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4671 honor_trapv,
4672 honor_nans, honor_snans, rhs2,
4673 &handled);
4674 if (handled
4675 && ret)
4676 return true;
4677
4678 for (i = 0; i < nary->length; ++i)
4679 if (tree_could_trap_p (nary->op[i]))
4680 return true;
4681
4682 return false;
4683 }