[gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
48
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
54
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
58
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
63
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
68
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
71 some nice properties.
72
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
77
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
81
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
84
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
90
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
95 TODO:
96
97 1. We can iterate only the changing portions of the SCCs, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly whole
103 structure copies.
104 */
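
/* As an illustrative sketch (the SSA names here are hypothetical), the
   optimistic iteration is what lets the SCC walk discover that in

     x_1 = PHI <0 (entry), x_2 (latch)>
     x_2 = x_1 + 0;

   both names are really the constant zero: x_1 is optimistically
   assumed equal to the value flowing in from outside the SCC, x_2 is
   value numbered to the same value under that assumption, and the SCC
   {x_1, x_2} is re-iterated until nothing changes.  A single
   non-iterating hash-based walk would have to treat the back edge
   value as unknown instead.  */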
105
106 /* The set of hashtables and alloc_pool's for their items. */
107
108 typedef struct vn_tables_s
109 {
110 htab_t nary;
111 htab_t phis;
112 htab_t references;
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
116 } *vn_tables_t;
117
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
120
121
122 /* Valid hashtables storing information we have proven to be
123 correct. */
124
125 static vn_tables_t valid_info;
126
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
129
130 static vn_tables_t optimistic_info;
131
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
134 valid_info. */
135
136 static vn_tables_t current_info;
137
138
139 /* Reverse post order index for each basic block. */
140
141 static int *rpo_numbers;
142
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144
145 /* This represents the top of the VN lattice, which is the universal
146 value. */
147
148 tree VN_TOP;
149
150 /* Unique counter for our value ids. */
151
152 static unsigned int next_value_id;
153
154 /* Next DFS number and the stack for strongly connected component
155 detection. */
156
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
159
160
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
163
164 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
165 are allocated on an obstack for locality reasons, and to free them
166 without looping over the VEC. */
167
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
170
171 /* Return the value numbering information for a given SSA name. */
172
173 vn_ssa_aux_t
174 VN_INFO (tree name)
175 {
176 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 SSA_NAME_VERSION (name));
178 gcc_checking_assert (res);
179 return res;
180 }
181
182 /* Set the value numbering info for a given SSA name to a given
183 value. */
184
185 static inline void
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
187 {
188 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 SSA_NAME_VERSION (name), value);
190 }
191
192 /* Initialize the value numbering info for a given SSA name.
193 This should be called just once for every SSA name. */
194
195 vn_ssa_aux_t
196 VN_INFO_GET (tree name)
197 {
198 vn_ssa_aux_t newinfo;
199
200 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 SSA_NAME_VERSION (name) + 1);
205 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name), newinfo);
207 return newinfo;
208 }
209
210
211 /* Get the representative expression for the SSA_NAME NAME. Returns
212 the representative SSA_NAME if there is no expression associated with it. */
213
214 tree
215 vn_get_expr_for (tree name)
216 {
217 vn_ssa_aux_t vn = VN_INFO (name);
218 gimple def_stmt;
219 tree expr = NULL_TREE;
220 enum tree_code code;
221
222 if (vn->valnum == VN_TOP)
223 return name;
224
225 /* If the value-number is a constant it is the representative
226 expression. */
227 if (TREE_CODE (vn->valnum) != SSA_NAME)
228 return vn->valnum;
229
230 /* Get to the information of the value of this SSA_NAME. */
231 vn = VN_INFO (vn->valnum);
232
233 /* If the value-number is a constant it is the representative
234 expression. */
235 if (TREE_CODE (vn->valnum) != SSA_NAME)
236 return vn->valnum;
237
238 /* Else if we have an expression, return it. */
239 if (vn->expr != NULL_TREE)
240 return vn->expr;
241
242 /* Otherwise use the defining statement to build the expression. */
243 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
244
245 /* If the value number is not an assignment use it directly. */
246 if (!is_gimple_assign (def_stmt))
247 return vn->valnum;
248
249 /* FIXME tuples. This is incomplete and likely will miss some
250 simplifications. */
251 code = gimple_assign_rhs_code (def_stmt);
252 switch (TREE_CODE_CLASS (code))
253 {
254 case tcc_reference:
255 if ((code == REALPART_EXPR
256 || code == IMAGPART_EXPR
257 || code == VIEW_CONVERT_EXPR)
258 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
259 0)) == SSA_NAME)
260 expr = fold_build1 (code,
261 gimple_expr_type (def_stmt),
262 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
263 break;
264
265 case tcc_unary:
266 expr = fold_build1 (code,
267 gimple_expr_type (def_stmt),
268 gimple_assign_rhs1 (def_stmt));
269 break;
270
271 case tcc_binary:
272 expr = fold_build2 (code,
273 gimple_expr_type (def_stmt),
274 gimple_assign_rhs1 (def_stmt),
275 gimple_assign_rhs2 (def_stmt));
276 break;
277
278 default:;
279 }
280 if (expr == NULL_TREE)
281 return vn->valnum;
282
283 /* Cache the expression. */
284 vn->expr = expr;
285
286 return expr;
287 }
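
/* As a small (hypothetical) example of the above: if a_1 has value
   number b_2 and b_2 is defined by b_2 = c_3 + d_4, then
   vn_get_expr_for (a_1) builds and caches the expression c_3 + d_4,
   which callers can hand to further simplification.  */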
288
289
290 /* Free a phi operation structure VP. */
291
292 static void
293 free_phi (void *vp)
294 {
295 vn_phi_t phi = (vn_phi_t) vp;
296 VEC_free (tree, heap, phi->phiargs);
297 }
298
299 /* Free a reference operation structure VP. */
300
301 static void
302 free_reference (void *vp)
303 {
304 vn_reference_t vr = (vn_reference_t) vp;
305 VEC_free (vn_reference_op_s, heap, vr->operands);
306 }
307
308 /* Hash table equality function for vn_constant_t. */
309
310 static int
311 vn_constant_eq (const void *p1, const void *p2)
312 {
313 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
314 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
315
316 if (vc1->hashcode != vc2->hashcode)
317 return false;
318
319 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
320 }
321
322 /* Hash table hash function for vn_constant_t. */
323
324 static hashval_t
325 vn_constant_hash (const void *p1)
326 {
327 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
328 return vc1->hashcode;
329 }
330
331 /* Look up a value id for CONSTANT and return it.  If it does not
332 exist, return 0.  */
333
334 unsigned int
335 get_constant_value_id (tree constant)
336 {
337 void **slot;
338 struct vn_constant_s vc;
339
340 vc.hashcode = vn_hash_constant_with_type (constant);
341 vc.constant = constant;
342 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
343 vc.hashcode, NO_INSERT);
344 if (slot)
345 return ((vn_constant_t)*slot)->value_id;
346 return 0;
347 }
348
349 /* Look up a value id for CONSTANT, creating a new one if none
350 exists, and return it either way.  */
351
352 unsigned int
353 get_or_alloc_constant_value_id (tree constant)
354 {
355 void **slot;
356 struct vn_constant_s vc;
357 vn_constant_t vcp;
358
359 vc.hashcode = vn_hash_constant_with_type (constant);
360 vc.constant = constant;
361 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
362 vc.hashcode, INSERT);
363 if (*slot)
364 return ((vn_constant_t)*slot)->value_id;
365
366 vcp = XNEW (struct vn_constant_s);
367 vcp->hashcode = vc.hashcode;
368 vcp->constant = constant;
369 vcp->value_id = get_next_value_id ();
370 *slot = (void *) vcp;
371 bitmap_set_bit (constant_value_ids, vcp->value_id);
372 return vcp->value_id;
373 }
374
375 /* Return true if V is a value id for a constant. */
376
377 bool
378 value_id_constant_p (unsigned int v)
379 {
380 return bitmap_bit_p (constant_value_ids, v);
381 }
382
383 /* Compare two reference operands P1 and P2 for equality. Return true if
384 they are equal, and false otherwise. */
385
386 static int
387 vn_reference_op_eq (const void *p1, const void *p2)
388 {
389 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
390 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
391
392 return (vro1->opcode == vro2->opcode
393 /* We do not care about differences in type qualification. */
394 && (vro1->type == vro2->type
395 || (vro1->type && vro2->type
396 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
397 TYPE_MAIN_VARIANT (vro2->type))))
398 && expressions_equal_p (vro1->op0, vro2->op0)
399 && expressions_equal_p (vro1->op1, vro2->op1)
400 && expressions_equal_p (vro1->op2, vro2->op2));
401 }
402
403 /* Compute the hash for a reference operand VRO1. */
404
405 static hashval_t
406 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
407 {
408 result = iterative_hash_hashval_t (vro1->opcode, result);
409 if (vro1->op0)
410 result = iterative_hash_expr (vro1->op0, result);
411 if (vro1->op1)
412 result = iterative_hash_expr (vro1->op1, result);
413 if (vro1->op2)
414 result = iterative_hash_expr (vro1->op2, result);
415 return result;
416 }
417
418 /* Return the hashcode for a given reference operation P1. */
419
420 static hashval_t
421 vn_reference_hash (const void *p1)
422 {
423 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
424 return vr1->hashcode;
425 }
426
427 /* Compute a hash for the reference operation VR1 and return it. */
428
429 hashval_t
430 vn_reference_compute_hash (const vn_reference_t vr1)
431 {
432 hashval_t result = 0;
433 int i;
434 vn_reference_op_t vro;
435 HOST_WIDE_INT off = -1;
436 bool deref = false;
437
438 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
439 {
440 if (vro->opcode == MEM_REF)
441 deref = true;
442 else if (vro->opcode != ADDR_EXPR)
443 deref = false;
444 if (vro->off != -1)
445 {
446 if (off == -1)
447 off = 0;
448 off += vro->off;
449 }
450 else
451 {
452 if (off != -1
453 && off != 0)
454 result = iterative_hash_hashval_t (off, result);
455 off = -1;
456 if (deref
457 && vro->opcode == ADDR_EXPR)
458 {
459 if (vro->op0)
460 {
461 tree op = TREE_OPERAND (vro->op0, 0);
462 result = iterative_hash_hashval_t (TREE_CODE (op), result);
463 result = iterative_hash_expr (op, result);
464 }
465 }
466 else
467 result = vn_reference_op_compute_hash (vro, result);
468 }
469 }
470 if (vr1->vuse)
471 result += SSA_NAME_VERSION (vr1->vuse);
472
473 return result;
474 }
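
/* Note that the hash accumulates the constant offsets of adjacent
   operands instead of hashing each operand, and hashes a dereferenced
   ADDR_EXPR by the object whose address is taken.  As a hypothetical
   example, a.b.c and the equivalent MEM[&a, 8] thus land in the same
   bucket whenever the recorded offsets of .b and .c sum to 8, leaving
   the precise decision to vn_reference_eq below.  */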
475
476 /* Return true if reference operations P1 and P2 are equivalent. This
477 means they have the same set of operands and vuses. */
478
479 int
480 vn_reference_eq (const void *p1, const void *p2)
481 {
482 unsigned i, j;
483
484 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
485 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
486 /* Early out if this is not a hash collision.  */
487 if (vr1->hashcode != vr2->hashcode)
488 return false;
492
493 /* The VOP needs to be the same. */
494 if (vr1->vuse != vr2->vuse)
495 return false;
496
497 /* If the operands are the same we are done. */
498 if (vr1->operands == vr2->operands)
499 return true;
500
501 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
502 return false;
503
504 if (INTEGRAL_TYPE_P (vr1->type)
505 && INTEGRAL_TYPE_P (vr2->type))
506 {
507 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
508 return false;
509 }
510 else if (INTEGRAL_TYPE_P (vr1->type)
511 && (TYPE_PRECISION (vr1->type)
512 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
513 return false;
514 else if (INTEGRAL_TYPE_P (vr2->type)
515 && (TYPE_PRECISION (vr2->type)
516 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
517 return false;
518
519 i = 0;
520 j = 0;
521 do
522 {
523 HOST_WIDE_INT off1 = 0, off2 = 0;
524 vn_reference_op_t vro1, vro2;
525 vn_reference_op_s tem1, tem2;
526 bool deref1 = false, deref2 = false;
527 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
528 {
529 if (vro1->opcode == MEM_REF)
530 deref1 = true;
531 if (vro1->off == -1)
532 break;
533 off1 += vro1->off;
534 }
535 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
536 {
537 if (vro2->opcode == MEM_REF)
538 deref2 = true;
539 if (vro2->off == -1)
540 break;
541 off2 += vro2->off;
542 }
543 if (off1 != off2)
544 return false;
545 if (deref1 && vro1->opcode == ADDR_EXPR)
546 {
547 memset (&tem1, 0, sizeof (tem1));
548 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
549 tem1.type = TREE_TYPE (tem1.op0);
550 tem1.opcode = TREE_CODE (tem1.op0);
551 vro1 = &tem1;
552 }
553 if (deref2 && vro2->opcode == ADDR_EXPR)
554 {
555 memset (&tem2, 0, sizeof (tem2));
556 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
557 tem2.type = TREE_TYPE (tem2.op0);
558 tem2.opcode = TREE_CODE (tem2.op0);
559 vro2 = &tem2;
560 }
561 if (!vn_reference_op_eq (vro1, vro2))
562 return false;
563 ++j;
564 ++i;
565 }
566 while (VEC_length (vn_reference_op_s, vr1->operands) != i
567 || VEC_length (vn_reference_op_s, vr2->operands) != j);
568
569 return true;
570 }
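
/* A sketch of what the comparison above accepts (the field offset is
   hypothetical): a load of a.f with .f at byte offset 4 and a load of
   MEM[&a, 4] flatten to operand strings whose constant offsets both
   sum to 4 before the final ADDR_EXPR, and that ADDR_EXPR is unwrapped
   to the underlying decl on both sides, so the two references compare
   equal provided the access types agree per the checks above.  */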
571
572 /* Copy the operations present in load/store REF into RESULT, a vector of
573 vn_reference_op_s's. */
574
575 void
576 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
577 {
578 if (TREE_CODE (ref) == TARGET_MEM_REF)
579 {
580 vn_reference_op_s temp;
581
582 memset (&temp, 0, sizeof (temp));
583 temp.type = TREE_TYPE (ref);
584 temp.opcode = TREE_CODE (ref);
585 temp.op0 = TMR_INDEX (ref);
586 temp.op1 = TMR_STEP (ref);
587 temp.op2 = TMR_OFFSET (ref);
588 temp.off = -1;
589 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
590
591 memset (&temp, 0, sizeof (temp));
592 temp.type = NULL_TREE;
593 temp.opcode = ERROR_MARK;
594 temp.op0 = TMR_INDEX2 (ref);
595 temp.off = -1;
596 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
597
598 memset (&temp, 0, sizeof (temp));
599 temp.type = NULL_TREE;
600 temp.opcode = TREE_CODE (TMR_BASE (ref));
601 temp.op0 = TMR_BASE (ref);
602 temp.off = -1;
603 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
604 return;
605 }
606
607 /* For non-calls, store the information that makes up the address. */
608
609 while (ref)
610 {
611 vn_reference_op_s temp;
612
613 memset (&temp, 0, sizeof (temp));
614 temp.type = TREE_TYPE (ref);
615 temp.opcode = TREE_CODE (ref);
616 temp.off = -1;
617
618 switch (temp.opcode)
619 {
620 case MEM_REF:
621 /* The base address gets its own vn_reference_op_s structure. */
622 temp.op0 = TREE_OPERAND (ref, 1);
623 if (host_integerp (TREE_OPERAND (ref, 1), 0))
624 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
625 break;
626 case BIT_FIELD_REF:
627 /* Record bits and position. */
628 temp.op0 = TREE_OPERAND (ref, 1);
629 temp.op1 = TREE_OPERAND (ref, 2);
630 break;
631 case COMPONENT_REF:
632 /* The field decl is enough to unambiguously specify the field,
633 a matching type is not necessary and a mismatching type
634 is always a spurious difference. */
635 temp.type = NULL_TREE;
636 temp.op0 = TREE_OPERAND (ref, 1);
637 temp.op1 = TREE_OPERAND (ref, 2);
638 {
639 tree this_offset = component_ref_field_offset (ref);
640 if (this_offset
641 && TREE_CODE (this_offset) == INTEGER_CST)
642 {
643 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
644 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
645 {
646 double_int off
647 = double_int_add (tree_to_double_int (this_offset),
648 double_int_rshift
649 (tree_to_double_int (bit_offset),
650 BITS_PER_UNIT == 8
651 ? 3 : exact_log2 (BITS_PER_UNIT),
652 HOST_BITS_PER_DOUBLE_INT, true));
653 if (double_int_fits_in_shwi_p (off))
654 temp.off = off.low;
655 }
656 }
657 }
658 break;
659 case ARRAY_RANGE_REF:
660 case ARRAY_REF:
661 /* Record index as operand. */
662 temp.op0 = TREE_OPERAND (ref, 1);
663 /* Always record lower bounds and element size. */
664 temp.op1 = array_ref_low_bound (ref);
665 temp.op2 = array_ref_element_size (ref);
666 if (TREE_CODE (temp.op0) == INTEGER_CST
667 && TREE_CODE (temp.op1) == INTEGER_CST
668 && TREE_CODE (temp.op2) == INTEGER_CST)
669 {
670 double_int off = tree_to_double_int (temp.op0);
671 off = double_int_add (off,
672 double_int_neg
673 (tree_to_double_int (temp.op1)));
674 off = double_int_mul (off, tree_to_double_int (temp.op2));
675 if (double_int_fits_in_shwi_p (off))
676 temp.off = off.low;
677 }
678 break;
679 case VAR_DECL:
680 if (DECL_HARD_REGISTER (ref))
681 {
682 temp.op0 = ref;
683 break;
684 }
685 /* Fallthru. */
686 case PARM_DECL:
687 case CONST_DECL:
688 case RESULT_DECL:
689 /* Canonicalize decls to MEM[&decl] which is what we end up with
690 when valueizing MEM[ptr] with ptr = &decl. */
691 temp.opcode = MEM_REF;
692 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
693 temp.off = 0;
694 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
695 temp.opcode = ADDR_EXPR;
696 temp.op0 = build_fold_addr_expr (ref);
697 temp.type = TREE_TYPE (temp.op0);
698 temp.off = -1;
699 break;
700 case STRING_CST:
701 case INTEGER_CST:
702 case COMPLEX_CST:
703 case VECTOR_CST:
704 case REAL_CST:
705 case FIXED_CST:
706 case CONSTRUCTOR:
707 case SSA_NAME:
708 temp.op0 = ref;
709 break;
710 case ADDR_EXPR:
711 if (is_gimple_min_invariant (ref))
712 {
713 temp.op0 = ref;
714 break;
715 }
716 /* Fallthrough. */
717 /* These are only interesting for their operands, their
718 existence, and their type.  They will never be the last
719 ref in the chain of references (i.e., they require an
720 operand), so we don't have to put anything
721 for op* as it will be handled by the iteration.  */
722 case REALPART_EXPR:
723 case VIEW_CONVERT_EXPR:
724 temp.off = 0;
725 break;
726 case IMAGPART_EXPR:
727 /* This is only interesting for its constant offset. */
728 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
729 break;
730 default:
731 gcc_unreachable ();
732 }
733 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
734
735 if (REFERENCE_CLASS_P (ref)
736 || (TREE_CODE (ref) == ADDR_EXPR
737 && !is_gimple_min_invariant (ref)))
738 ref = TREE_OPERAND (ref, 0);
739 else
740 ref = NULL_TREE;
741 }
742 }
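
/* For instance (a hypothetical reference), a.b[i_1] is flattened
   outermost-first into roughly

     ARRAY_REF      op0 = i_1, op1 = low bound, op2 = element size
     COMPONENT_REF  op0 = FIELD_DECL b
     MEM_REF        op0 = 0, off = 0
     ADDR_EXPR      op0 = &a

   where the trailing MEM_REF/ADDR_EXPR pair is the decl
   canonicalization performed above.  */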
743
744 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
745 operands in *OPS, the reference alias set SET and the reference type TYPE.
746 Return true if something useful was produced. */
747
748 bool
749 ao_ref_init_from_vn_reference (ao_ref *ref,
750 alias_set_type set, tree type,
751 VEC (vn_reference_op_s, heap) *ops)
752 {
753 vn_reference_op_t op;
754 unsigned i;
755 tree base = NULL_TREE;
756 tree *op0_p = &base;
757 HOST_WIDE_INT offset = 0;
758 HOST_WIDE_INT max_size;
759 HOST_WIDE_INT size = -1;
760 tree size_tree = NULL_TREE;
761 alias_set_type base_alias_set = -1;
762
763 /* First get the final access size from just the outermost expression. */
764 op = VEC_index (vn_reference_op_s, ops, 0);
765 if (op->opcode == COMPONENT_REF)
766 size_tree = DECL_SIZE (op->op0);
767 else if (op->opcode == BIT_FIELD_REF)
768 size_tree = op->op0;
769 else
770 {
771 enum machine_mode mode = TYPE_MODE (type);
772 if (mode == BLKmode)
773 size_tree = TYPE_SIZE (type);
774 else
775 size = GET_MODE_BITSIZE (mode);
776 }
777 if (size_tree != NULL_TREE)
778 {
779 if (!host_integerp (size_tree, 1))
780 size = -1;
781 else
782 size = TREE_INT_CST_LOW (size_tree);
783 }
784
785 /* Initially, maxsize is the same as the accessed element size.
786 In the following it will only grow (or become -1). */
787 max_size = size;
788
789 /* Compute cumulative bit-offset for nested component-refs and array-refs,
790 and find the ultimate containing object. */
791 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
792 {
793 switch (op->opcode)
794 {
795 /* These may be in the reference ops, but we cannot do anything
796 sensible with them here. */
797 case ADDR_EXPR:
798 /* Apart from ADDR_EXPR arguments to MEM_REF. */
799 if (base != NULL_TREE
800 && TREE_CODE (base) == MEM_REF
801 && op->op0
802 && DECL_P (TREE_OPERAND (op->op0, 0)))
803 {
804 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
805 base = TREE_OPERAND (op->op0, 0);
806 if (pop->off == -1)
807 {
808 max_size = -1;
809 offset = 0;
810 }
811 else
812 offset += pop->off * BITS_PER_UNIT;
813 op0_p = NULL;
814 break;
815 }
816 /* Fallthru. */
817 case CALL_EXPR:
818 return false;
819
820 /* Record the base objects. */
821 case MEM_REF:
822 base_alias_set = get_deref_alias_set (op->op0);
823 *op0_p = build2 (MEM_REF, op->type,
824 NULL_TREE, op->op0);
825 op0_p = &TREE_OPERAND (*op0_p, 0);
826 break;
827
828 case VAR_DECL:
829 case PARM_DECL:
830 case RESULT_DECL:
831 case SSA_NAME:
832 *op0_p = op->op0;
833 op0_p = NULL;
834 break;
835
836 /* And now the usual component-reference style ops. */
837 case BIT_FIELD_REF:
838 offset += tree_low_cst (op->op1, 0);
839 break;
840
841 case COMPONENT_REF:
842 {
843 tree field = op->op0;
844 /* We do not have a complete COMPONENT_REF tree here so we
845 cannot use component_ref_field_offset. Do the interesting
846 parts manually. */
847
848 if (op->op1
849 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
850 max_size = -1;
851 else
852 {
853 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
854 * BITS_PER_UNIT);
855 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
856 }
857 break;
858 }
859
860 case ARRAY_RANGE_REF:
861 case ARRAY_REF:
862 /* We recorded the lower bound and the element size. */
863 if (!host_integerp (op->op0, 0)
864 || !host_integerp (op->op1, 0)
865 || !host_integerp (op->op2, 0))
866 max_size = -1;
867 else
868 {
869 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
870 hindex -= TREE_INT_CST_LOW (op->op1);
871 hindex *= TREE_INT_CST_LOW (op->op2);
872 hindex *= BITS_PER_UNIT;
873 offset += hindex;
874 }
875 break;
876
877 case REALPART_EXPR:
878 break;
879
880 case IMAGPART_EXPR:
881 offset += size;
882 break;
883
884 case VIEW_CONVERT_EXPR:
885 break;
886
887 case STRING_CST:
888 case INTEGER_CST:
889 case COMPLEX_CST:
890 case VECTOR_CST:
891 case REAL_CST:
892 case CONSTRUCTOR:
893 case CONST_DECL:
894 return false;
895
896 default:
897 return false;
898 }
899 }
900
901 if (base == NULL_TREE)
902 return false;
903
904 ref->ref = NULL_TREE;
905 ref->base = base;
906 ref->offset = offset;
907 ref->size = size;
908 ref->max_size = max_size;
909 ref->ref_alias_set = set;
910 if (base_alias_set != -1)
911 ref->base_alias_set = base_alias_set;
912 else
913 ref->base_alias_set = get_alias_set (base);
914
915 return true;
916 }
917
918 /* Copy the operations present in the call statement CALL into RESULT,
919 a vector of vn_reference_op_s's.  */
920
921 void
922 copy_reference_ops_from_call (gimple call,
923 VEC(vn_reference_op_s, heap) **result)
924 {
925 vn_reference_op_s temp;
926 unsigned i;
927
928 /* Copy the type, opcode, function being called and static chain. */
929 memset (&temp, 0, sizeof (temp));
930 temp.type = gimple_call_return_type (call);
931 temp.opcode = CALL_EXPR;
932 temp.op0 = gimple_call_fn (call);
933 temp.op1 = gimple_call_chain (call);
934 temp.off = -1;
935 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
936
937 /* Copy the call arguments. As they can be references as well,
938 just chain them together. */
939 for (i = 0; i < gimple_call_num_args (call); ++i)
940 {
941 tree callarg = gimple_call_arg (call, i);
942 copy_reference_ops_from_ref (callarg, result);
943 }
944 }
945
946 /* Create a vector of vn_reference_op_s structures from REF, a
947 REFERENCE_CLASS_P tree. The vector is not shared. */
948
949 static VEC(vn_reference_op_s, heap) *
950 create_reference_ops_from_ref (tree ref)
951 {
952 VEC (vn_reference_op_s, heap) *result = NULL;
953
954 copy_reference_ops_from_ref (ref, &result);
955 return result;
956 }
957
958 /* Create a vector of vn_reference_op_s structures from CALL, a
959 call statement. The vector is not shared. */
960
961 static VEC(vn_reference_op_s, heap) *
962 create_reference_ops_from_call (gimple call)
963 {
964 VEC (vn_reference_op_s, heap) *result = NULL;
965
966 copy_reference_ops_from_call (call, &result);
967 return result;
968 }
969
970 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
971 *I_P to point to the last element of the replacement. */
972 void
973 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
974 unsigned int *i_p)
975 {
976 unsigned int i = *i_p;
977 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
978 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
979 tree addr_base;
980 HOST_WIDE_INT addr_offset;
981
982 /* The only thing we have to do is to add the offset of .foo.bar in
983 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
984 address with &OBJ.  */
985 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
986 &addr_offset);
987 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
988 if (addr_base != op->op0)
989 {
990 double_int off = tree_to_double_int (mem_op->op0);
991 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
992 off = double_int_add (off, shwi_to_double_int (addr_offset));
993 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
994 op->op0 = build_fold_addr_expr (addr_base);
995 if (host_integerp (mem_op->op0, 0))
996 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
997 else
998 mem_op->off = -1;
999 }
1000 }
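
/* A (hypothetical) example: for an operand pair MEM_REF off 4 followed
   by ADDR_EXPR &a.b, where .b starts at byte 8 of a,
   get_addr_base_and_unit_offset returns base a and offset 8, and the
   pair is rewritten to MEM_REF off 12 followed by ADDR_EXPR &a.  */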
1001
1002 /* Forward-propagate the definition of the SSA name address at position
1003 *I_P in *OPS into the preceding MEM_REF.  Updates *I_P likewise.  */
1004 static void
1005 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1006 unsigned int *i_p)
1007 {
1008 unsigned int i = *i_p;
1009 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1010 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1011 gimple def_stmt;
1012 enum tree_code code;
1013 double_int off;
1014
1015 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1016 if (!is_gimple_assign (def_stmt))
1017 return;
1018
1019 code = gimple_assign_rhs_code (def_stmt);
1020 if (code != ADDR_EXPR
1021 && code != POINTER_PLUS_EXPR)
1022 return;
1023
1024 off = tree_to_double_int (mem_op->op0);
1025 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1026
1027 /* The only thing we have to do is to add the offset of .foo.bar in
1028 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1029 address with &OBJ.  */
1030 if (code == ADDR_EXPR)
1031 {
1032 tree addr, addr_base;
1033 HOST_WIDE_INT addr_offset;
1034
1035 addr = gimple_assign_rhs1 (def_stmt);
1036 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1037 &addr_offset);
1038 if (!addr_base
1039 || TREE_CODE (addr_base) != MEM_REF)
1040 return;
1041
1042 off = double_int_add (off, shwi_to_double_int (addr_offset));
1043 off = double_int_add (off, mem_ref_offset (addr_base));
1044 op->op0 = TREE_OPERAND (addr_base, 0);
1045 }
1046 else
1047 {
1048 tree ptr, ptroff;
1049 ptr = gimple_assign_rhs1 (def_stmt);
1050 ptroff = gimple_assign_rhs2 (def_stmt);
1051 if (TREE_CODE (ptr) != SSA_NAME
1052 || TREE_CODE (ptroff) != INTEGER_CST)
1053 return;
1054
1055 off = double_int_add (off, tree_to_double_int (ptroff));
1056 op->op0 = ptr;
1057 }
1058
1059 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1060 if (host_integerp (mem_op->op0, 0))
1061 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1062 else
1063 mem_op->off = -1;
1064 if (TREE_CODE (op->op0) == SSA_NAME)
1065 op->op0 = SSA_VAL (op->op0);
1066 if (TREE_CODE (op->op0) != SSA_NAME)
1067 op->opcode = TREE_CODE (op->op0);
1068
1069 /* And recurse. */
1070 if (TREE_CODE (op->op0) == SSA_NAME)
1071 vn_reference_maybe_forwprop_address (ops, i_p);
1072 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1073 vn_reference_fold_indirect (ops, i_p);
1074 }
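
/* A sketch of the POINTER_PLUS_EXPR case above (SSA names are
   hypothetical): given p_2 = q_1 + 4 and an operand pair MEM_REF off 8
   followed by p_2, the pair becomes MEM_REF off 12 followed by q_1,
   and we recurse in case q_1 is itself defined by an ADDR_EXPR or
   another pointer addition.  */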
1075
1076 /* Optimize the reference REF to a constant if possible or return
1077 NULL_TREE if not. */
1078
1079 tree
1080 fully_constant_vn_reference_p (vn_reference_t ref)
1081 {
1082 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1083 vn_reference_op_t op;
1084
1085 /* Try to simplify the translated expression if it is
1086 a call to a builtin function with at most two arguments. */
1087 op = VEC_index (vn_reference_op_s, operands, 0);
1088 if (op->opcode == CALL_EXPR
1089 && TREE_CODE (op->op0) == ADDR_EXPR
1090 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1091 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1092 && VEC_length (vn_reference_op_s, operands) >= 2
1093 && VEC_length (vn_reference_op_s, operands) <= 3)
1094 {
1095 vn_reference_op_t arg0, arg1 = NULL;
1096 bool anyconst = false;
1097 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1098 if (VEC_length (vn_reference_op_s, operands) > 2)
1099 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1100 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1101 || (arg0->opcode == ADDR_EXPR
1102 && is_gimple_min_invariant (arg0->op0)))
1103 anyconst = true;
1104 if (arg1
1105 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1106 || (arg1->opcode == ADDR_EXPR
1107 && is_gimple_min_invariant (arg1->op0))))
1108 anyconst = true;
1109 if (anyconst)
1110 {
1111 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1112 arg1 ? 2 : 1,
1113 arg0->op0,
1114 arg1 ? arg1->op0 : NULL);
1115 if (folded
1116 && TREE_CODE (folded) == NOP_EXPR)
1117 folded = TREE_OPERAND (folded, 0);
1118 if (folded
1119 && is_gimple_min_invariant (folded))
1120 return folded;
1121 }
1122 }
1123
1124 /* Simplify reads from constant strings. */
1125 else if (op->opcode == ARRAY_REF
1126 && TREE_CODE (op->op0) == INTEGER_CST
1127 && integer_zerop (op->op1)
1128 && VEC_length (vn_reference_op_s, operands) == 2)
1129 {
1130 vn_reference_op_t arg0;
1131 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1132 if (arg0->opcode == STRING_CST
1133 && (TYPE_MODE (op->type)
1134 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1135 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1136 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1137 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1138 return build_int_cst_type (op->type,
1139 (TREE_STRING_POINTER (arg0->op0)
1140 [TREE_INT_CST_LOW (op->op0)]));
1141 }
1142
1143 return NULL_TREE;
1144 }
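
/* For example, a one-character load like "abc"[1] arrives here as an
   ARRAY_REF of a STRING_CST with a constant index and folds to the
   character constant 'b'; likewise a builtin call whose arguments
   turned out to be constant, say strlen ("abc"), may fold to the
   constant 3 via build_call_expr.  */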
1145
1146 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1147 structures into their value numbers. This is done in-place, and
1148 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1149 whether any operands were valueized. */
1150
1151 static VEC (vn_reference_op_s, heap) *
1152 valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
1153 {
1154 vn_reference_op_t vro;
1155 unsigned int i;
1156
1157 *valueized_anything = false;
1158
1159 FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1160 {
1161 if (vro->opcode == SSA_NAME
1162 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1163 {
1164 tree tem = SSA_VAL (vro->op0);
1165 if (tem != vro->op0)
1166 {
1167 *valueized_anything = true;
1168 vro->op0 = tem;
1169 }
1170 /* If it transforms from an SSA_NAME to a constant, update
1171 the opcode. */
1172 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1173 vro->opcode = TREE_CODE (vro->op0);
1174 }
1175 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1176 {
1177 tree tem = SSA_VAL (vro->op1);
1178 if (tem != vro->op1)
1179 {
1180 *valueized_anything = true;
1181 vro->op1 = tem;
1182 }
1183 }
1184 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1185 {
1186 tree tem = SSA_VAL (vro->op2);
1187 if (tem != vro->op2)
1188 {
1189 *valueized_anything = true;
1190 vro->op2 = tem;
1191 }
1192 }
1193 /* If it transforms from an SSA_NAME to an address, fold with
1194 a preceding indirect reference. */
1195 if (i > 0
1196 && vro->op0
1197 && TREE_CODE (vro->op0) == ADDR_EXPR
1198 && VEC_index (vn_reference_op_s,
1199 orig, i - 1)->opcode == MEM_REF)
1200 vn_reference_fold_indirect (&orig, &i);
1201 else if (i > 0
1202 && vro->opcode == SSA_NAME
1203 && VEC_index (vn_reference_op_s,
1204 orig, i - 1)->opcode == MEM_REF)
1205 vn_reference_maybe_forwprop_address (&orig, &i);
1206 /* If it transforms a non-constant ARRAY_REF into a constant
1207 one, adjust the constant offset. */
1208 else if (vro->opcode == ARRAY_REF
1209 && vro->off == -1
1210 && TREE_CODE (vro->op0) == INTEGER_CST
1211 && TREE_CODE (vro->op1) == INTEGER_CST
1212 && TREE_CODE (vro->op2) == INTEGER_CST)
1213 {
1214 double_int off = tree_to_double_int (vro->op0);
1215 off = double_int_add (off,
1216 double_int_neg
1217 (tree_to_double_int (vro->op1)));
1218 off = double_int_mul (off, tree_to_double_int (vro->op2));
1219 if (double_int_fits_in_shwi_p (off))
1220 vro->off = off.low;
1221 }
1222 }
1223
1224 return orig;
1225 }
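
/* For instance (hypothetical names), if x_1 currently has value number
   5, an operand recorded as { SSA_NAME, op0 = x_1 } is rewritten in
   place to { INTEGER_CST, op0 = 5 }, and an ARRAY_REF whose operands
   all became constants gets its cached byte offset recomputed.  */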
1226
1227 static VEC (vn_reference_op_s, heap) *
1228 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1229 {
1230 bool tem;
1231 return valueize_refs_1 (orig, &tem);
1232 }
1233
1234 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1235
1236 /* Create a vector of vn_reference_op_s structures from REF, a
1237 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1238 this function. *VALUEIZED_ANYTHING will specify whether any
1239 operands were valueized. */
1240
1241 static VEC(vn_reference_op_s, heap) *
1242 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1243 {
1244 if (!ref)
1245 return NULL;
1246 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1247 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1248 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1249 valueized_anything);
1250 return shared_lookup_references;
1251 }
1252
1253 /* Create a vector of vn_reference_op_s structures from CALL, a
1254 call statement. The vector is shared among all callers of
1255 this function. */
1256
1257 static VEC(vn_reference_op_s, heap) *
1258 valueize_shared_reference_ops_from_call (gimple call)
1259 {
1260 if (!call)
1261 return NULL;
1262 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1263 copy_reference_ops_from_call (call, &shared_lookup_references);
1264 shared_lookup_references = valueize_refs (shared_lookup_references);
1265 return shared_lookup_references;
1266 }
1267
1268 /* Look up an SCCVN reference operation VR in the current hash table.
1269 Returns the resulting value number if it exists in the hash table,
1270 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1271 vn_reference_t stored in the hashtable if something is found. */
1272
1273 static tree
1274 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1275 {
1276 void **slot;
1277 hashval_t hash;
1278
1279 hash = vr->hashcode;
1280 slot = htab_find_slot_with_hash (current_info->references, vr,
1281 hash, NO_INSERT);
1282 if (!slot && current_info == optimistic_info)
1283 slot = htab_find_slot_with_hash (valid_info->references, vr,
1284 hash, NO_INSERT);
1285 if (slot)
1286 {
1287 if (vnresult)
1288 *vnresult = (vn_reference_t)*slot;
1289 return ((vn_reference_t)*slot)->result;
1290 }
1291
1292 return NULL_TREE;
1293 }
1294
1295 static tree *last_vuse_ptr;
1296 static vn_lookup_kind vn_walk_kind;
1297 static vn_lookup_kind default_vn_walk_kind;
1298
1299 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1300 with the current VUSE and performs the expression lookup. */
1301
1302 static void *
1303 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1304 {
1305 vn_reference_t vr = (vn_reference_t)vr_;
1306 void **slot;
1307 hashval_t hash;
1308
1309 if (last_vuse_ptr)
1310 *last_vuse_ptr = vuse;
1311
1312 /* Fixup vuse and hash. */
1313 if (vr->vuse)
1314 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1315 vr->vuse = SSA_VAL (vuse);
1316 if (vr->vuse)
1317 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1318
1319 hash = vr->hashcode;
1320 slot = htab_find_slot_with_hash (current_info->references, vr,
1321 hash, NO_INSERT);
1322 if (!slot && current_info == optimistic_info)
1323 slot = htab_find_slot_with_hash (valid_info->references, vr,
1324 hash, NO_INSERT);
1325 if (slot)
1326 return *slot;
1327
1328 return NULL;
1329 }
1330
1331 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1332 from the statement defining VUSE and if not successful tries to
1333 translate *REF and VR_ through an aggregate copy at the definition
1334 of VUSE.  */
1335
1336 static void *
1337 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1338 {
1339 vn_reference_t vr = (vn_reference_t)vr_;
1340 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1341 tree base;
1342 HOST_WIDE_INT offset, maxsize;
1343 static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1344 ao_ref lhs_ref;
1345 bool lhs_ref_ok = false;
1346
1347 /* First try to disambiguate after value-replacing in the definition's LHS.  */
1348 if (is_gimple_assign (def_stmt))
1349 {
1350 VEC (vn_reference_op_s, heap) *tem;
1351 tree lhs = gimple_assign_lhs (def_stmt);
1352 bool valueized_anything = false;
1353 /* Avoid re-allocation overhead. */
1354 VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1355 copy_reference_ops_from_ref (lhs, &lhs_ops);
1356 tem = lhs_ops;
1357 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1358 gcc_assert (lhs_ops == tem);
1359 if (valueized_anything)
1360 {
1361 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1362 get_alias_set (lhs),
1363 TREE_TYPE (lhs), lhs_ops);
1364 if (lhs_ref_ok
1365 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1366 return NULL;
1367 }
1368 else
1369 {
1370 ao_ref_init (&lhs_ref, lhs);
1371 lhs_ref_ok = true;
1372 }
1373 }
1374
1375 base = ao_ref_base (ref);
1376 offset = ref->offset;
1377 maxsize = ref->max_size;
1378
1379 /* If we cannot constrain the size of the reference we cannot
1380 test if anything kills it. */
1381 if (maxsize == -1)
1382 return (void *)-1;
1383
1384 /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1385 from that definition.
1386 1) Memset.  */
1387 if (is_gimple_reg_type (vr->type)
1388 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1389 && integer_zerop (gimple_call_arg (def_stmt, 1))
1390 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1391 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1392 {
1393 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1394 tree base2;
1395 HOST_WIDE_INT offset2, size2, maxsize2;
1396 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1397 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1398 if ((unsigned HOST_WIDE_INT)size2 / 8
1399 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1400 && maxsize2 != -1
1401 && operand_equal_p (base, base2, 0)
1402 && offset2 <= offset
1403 && offset2 + size2 >= offset + maxsize)
1404 {
1405 tree val = build_zero_cst (vr->type);
1406 unsigned int value_id = get_or_alloc_constant_value_id (val);
1407 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1408 VEC_copy (vn_reference_op_s,
1409 heap, vr->operands),
1410 val, value_id);
1411 }
1412 }
1413
1414 /* 2) Assignment from an empty CONSTRUCTOR. */
1415 else if (is_gimple_reg_type (vr->type)
1416 && gimple_assign_single_p (def_stmt)
1417 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1418 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1419 {
1420 tree base2;
1421 HOST_WIDE_INT offset2, size2, maxsize2;
1422 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1423 &offset2, &size2, &maxsize2);
1424 if (maxsize2 != -1
1425 && operand_equal_p (base, base2, 0)
1426 && offset2 <= offset
1427 && offset2 + size2 >= offset + maxsize)
1428 {
1429 tree val = build_zero_cst (vr->type);
1430 unsigned int value_id = get_or_alloc_constant_value_id (val);
1431 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1432 VEC_copy (vn_reference_op_s,
1433 heap, vr->operands),
1434 val, value_id);
1435 }
1436 }
1437
1438 /* 3) For aggregate copies translate the reference through them if
1439 the copy kills ref. */
1440 else if (vn_walk_kind == VN_WALKREWRITE
1441 && gimple_assign_single_p (def_stmt)
1442 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1443 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1444 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1445 {
1446 tree base2;
1447 HOST_WIDE_INT offset2, size2, maxsize2;
1448 int i, j;
1449 VEC (vn_reference_op_s, heap) *rhs = NULL;
1450 vn_reference_op_t vro;
1451 ao_ref r;
1452
1453 if (!lhs_ref_ok)
1454 return (void *)-1;
1455
1456 /* See if the assignment kills REF. */
1457 base2 = ao_ref_base (&lhs_ref);
1458 offset2 = lhs_ref.offset;
1459 size2 = lhs_ref.size;
1460 maxsize2 = lhs_ref.max_size;
1461 if (maxsize2 == -1
1462 || (base != base2 && !operand_equal_p (base, base2, 0))
1463 || offset2 > offset
1464 || offset2 + size2 < offset + maxsize)
1465 return (void *)-1;
1466
1467 /* Find the common base of ref and the lhs. lhs_ops already
1468 contains valueized operands for the lhs. */
1469 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1470 j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
1471 while (j >= 0 && i >= 0
1472 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1473 vr->operands, i),
1474 VEC_index (vn_reference_op_s, lhs_ops, j)))
1475 {
1476 i--;
1477 j--;
1478 }
1479
1480 /* ??? The innermost op should always be a MEM_REF and we already
1481 checked that the assignment to the lhs kills vr. Thus for
1482 aggregate copies using char[] types the vn_reference_op_eq
1483 may fail when comparing types for compatibility. But we really
1484 don't care here - further lookups with the rewritten operands
1485 will simply fail if we messed up types too badly. */
1486 if (j == 0 && i >= 0
1487 && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1488 && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1489 && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1490 == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1491 i--, j--;
1492
1493 /* i now points to the first additional op.
1494 ??? LHS may not be completely contained in VR, one or more
1495 VIEW_CONVERT_EXPRs could be in its way. We could at least
1496 try handling outermost VIEW_CONVERT_EXPRs. */
1497 if (j != -1)
1498 return (void *)-1;
1499
1500 /* Now re-write REF to be based on the rhs of the assignment. */
1501 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1502 /* We need to pre-pend vr->operands[0..i] to rhs. */
1503 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1504 > VEC_length (vn_reference_op_s, vr->operands))
1505 {
1506 VEC (vn_reference_op_s, heap) *old = vr->operands;
1507 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1508 i + 1 + VEC_length (vn_reference_op_s, rhs));
1509 if (old == shared_lookup_references
1510 && vr->operands != old)
1511 shared_lookup_references = NULL;
1512 }
1513 else
1514 VEC_truncate (vn_reference_op_s, vr->operands,
1515 i + 1 + VEC_length (vn_reference_op_s, rhs));
1516 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1517 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1518 VEC_free (vn_reference_op_s, heap, rhs);
1519 vr->hashcode = vn_reference_compute_hash (vr);
1520
1521 /* Adjust *ref from the new operands. */
1522 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1523 return (void *)-1;
1524 /* This can happen with bitfields. */
1525 if (ref->size != r.size)
1526 return (void *)-1;
1527 *ref = r;
1528
1529 /* Do not update last seen VUSE after translating. */
1530 last_vuse_ptr = NULL;
1531
1532 /* Keep looking for the adjusted *REF / VR pair. */
1533 return NULL;
1534 }
1535
1536 /* 4) For memcpy copies translate the reference through them if
1537 the copy kills ref. */
1538 else if (vn_walk_kind == VN_WALKREWRITE
1539 && is_gimple_reg_type (vr->type)
1540 /* ??? Handle BCOPY as well. */
1541 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1542 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1543 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1544 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1545 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1546 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1547 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1548 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1549 {
1550 tree lhs, rhs;
1551 ao_ref r;
1552 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1553 vn_reference_op_s op;
1554 HOST_WIDE_INT at;
1555
1556
1557 /* Only handle non-variable, addressable refs. */
1558 if (ref->size != maxsize
1559 || offset % BITS_PER_UNIT != 0
1560 || ref->size % BITS_PER_UNIT != 0)
1561 return (void *)-1;
1562
1563 /* Extract a pointer base and an offset for the destination. */
1564 lhs = gimple_call_arg (def_stmt, 0);
1565 lhs_offset = 0;
1566 if (TREE_CODE (lhs) == SSA_NAME)
1567 lhs = SSA_VAL (lhs);
1568 if (TREE_CODE (lhs) == ADDR_EXPR)
1569 {
1570 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1571 &lhs_offset);
1572 if (!tem)
1573 return (void *)-1;
1574 if (TREE_CODE (tem) == MEM_REF
1575 && host_integerp (TREE_OPERAND (tem, 1), 1))
1576 {
1577 lhs = TREE_OPERAND (tem, 0);
1578 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1579 }
1580 else if (DECL_P (tem))
1581 lhs = build_fold_addr_expr (tem);
1582 else
1583 return (void *)-1;
1584 }
1585 if (TREE_CODE (lhs) != SSA_NAME
1586 && TREE_CODE (lhs) != ADDR_EXPR)
1587 return (void *)-1;
1588
1589 /* Extract a pointer base and an offset for the source. */
1590 rhs = gimple_call_arg (def_stmt, 1);
1591 rhs_offset = 0;
1592 if (TREE_CODE (rhs) == SSA_NAME)
1593 rhs = SSA_VAL (rhs);
1594 if (TREE_CODE (rhs) == ADDR_EXPR)
1595 {
1596 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1597 &rhs_offset);
1598 if (!tem)
1599 return (void *)-1;
1600 if (TREE_CODE (tem) == MEM_REF
1601 && host_integerp (TREE_OPERAND (tem, 1), 1))
1602 {
1603 rhs = TREE_OPERAND (tem, 0);
1604 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1605 }
1606 else if (DECL_P (tem))
1607 rhs = build_fold_addr_expr (tem);
1608 else
1609 return (void *)-1;
1610 }
1611 if (TREE_CODE (rhs) != SSA_NAME
1612 && TREE_CODE (rhs) != ADDR_EXPR)
1613 return (void *)-1;
1614
1615 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1616
1617 /* The bases of the destination and the references have to agree. */
1618 if ((TREE_CODE (base) != MEM_REF
1619 && !DECL_P (base))
1620 || (TREE_CODE (base) == MEM_REF
1621 && (TREE_OPERAND (base, 0) != lhs
1622 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1623 || (DECL_P (base)
1624 && (TREE_CODE (lhs) != ADDR_EXPR
1625 || TREE_OPERAND (lhs, 0) != base)))
1626 return (void *)-1;
1627
1628 /* And the access has to be contained within the memcpy destination. */
1629 at = offset / BITS_PER_UNIT;
1630 if (TREE_CODE (base) == MEM_REF)
1631 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1632 if (lhs_offset > at
1633 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1634 return (void *)-1;
1635
1636 /* Make room for 2 operands in the new reference. */
1637 if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1638 {
1639 VEC (vn_reference_op_s, heap) *old = vr->operands;
1640 VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
1641 if (old == shared_lookup_references
1642 && vr->operands != old)
1643 shared_lookup_references = NULL;
1644 }
1645 else
1646 VEC_truncate (vn_reference_op_s, vr->operands, 2);
1647
1648 /* The looked-through reference is a simple MEM_REF. */
1649 memset (&op, 0, sizeof (op));
1650 op.type = vr->type;
1651 op.opcode = MEM_REF;
1652 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
1653 op.off = at - lhs_offset + rhs_offset;
1654 VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1655 op.type = TREE_TYPE (rhs);
1656 op.opcode = TREE_CODE (rhs);
1657 op.op0 = rhs;
1658 op.off = -1;
1659 VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1660 vr->hashcode = vn_reference_compute_hash (vr);
1661
1662 /* Adjust *ref from the new operands. */
1663 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1664 return (void *)-1;
1665 /* This can happen with bitfields. */
1666 if (ref->size != r.size)
1667 return (void *)-1;
1668 *ref = r;
1669
1670 /* Do not update last seen VUSE after translating. */
1671 last_vuse_ptr = NULL;
1672
1673 /* Keep looking for the adjusted *REF / VR pair. */
1674 return NULL;
1675 }
1676
1677 /* Bail out and stop walking. */
1678 return (void *)-1;
1679 }
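
/* Taken together, the cases above let a load of, say, a.x (a
   hypothetical member access) be value numbered to zero across a
   dominating memset (&a, 0, sizeof (a)) or empty-CONSTRUCTOR
   assignment, and let it be rewritten into b.x across an aggregate
   copy a = b or a memcpy (&a, &b, sizeof (a)), after which the vuse
   walk simply continues with the translated reference.  */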
1680
1681 /* Look up a reference operation by its parts in the current hash table.
1682 Returns the resulting value number if it exists in the hash table,
1683 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1684 vn_reference_t stored in the hashtable if something is found. */
1685
1686 tree
1687 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1688 VEC (vn_reference_op_s, heap) *operands,
1689 vn_reference_t *vnresult, vn_lookup_kind kind)
1690 {
1691 struct vn_reference_s vr1;
1692 vn_reference_t tmp;
1693 tree cst;
1694
1695 if (!vnresult)
1696 vnresult = &tmp;
1697 *vnresult = NULL;
1698
1699 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1700 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1701 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1702 VEC_length (vn_reference_op_s, operands));
1703 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1704 VEC_address (vn_reference_op_s, operands),
1705 sizeof (vn_reference_op_s)
1706 * VEC_length (vn_reference_op_s, operands));
1707 vr1.operands = operands = shared_lookup_references
1708 = valueize_refs (shared_lookup_references);
1709 vr1.type = type;
1710 vr1.set = set;
1711 vr1.hashcode = vn_reference_compute_hash (&vr1);
1712 if ((cst = fully_constant_vn_reference_p (&vr1)))
1713 return cst;
1714
1715 vn_reference_lookup_1 (&vr1, vnresult);
1716 if (!*vnresult
1717 && kind != VN_NOWALK
1718 && vr1.vuse)
1719 {
1720 ao_ref r;
1721 vn_walk_kind = kind;
1722 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1723 *vnresult =
1724 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1725 vn_reference_lookup_2,
1726 vn_reference_lookup_3, &vr1);
1727 if (vr1.operands != operands)
1728 VEC_free (vn_reference_op_s, heap, vr1.operands);
1729 }
1730
1731 if (*vnresult)
1732 return (*vnresult)->result;
1733
1734 return NULL_TREE;
1735 }
1736
1737 /* Look up OP in the current hash table, and return the resulting value
1738 number if it exists in the hash table.  Return NULL_TREE if it does
1739 not exist in the hash table or if the result field of the structure
1740 was NULL.  VNRESULT will be filled in with the vn_reference_t
1741 stored in the hashtable if one exists.  */
1742
1743 tree
1744 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1745 vn_reference_t *vnresult)
1746 {
1747 VEC (vn_reference_op_s, heap) *operands;
1748 struct vn_reference_s vr1;
1749 tree cst;
1750 bool valueized_anything;
1751
1752 if (vnresult)
1753 *vnresult = NULL;
1754
1755 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1756 vr1.operands = operands
1757 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1758 vr1.type = TREE_TYPE (op);
1759 vr1.set = get_alias_set (op);
1760 vr1.hashcode = vn_reference_compute_hash (&vr1);
1761 if ((cst = fully_constant_vn_reference_p (&vr1)))
1762 return cst;
1763
1764 if (kind != VN_NOWALK
1765 && vr1.vuse)
1766 {
1767 vn_reference_t wvnresult;
1768 ao_ref r;
1769 /* Make sure to use a valueized reference if we valueized anything.
1770 Otherwise preserve the full reference for advanced TBAA. */
1771 if (!valueized_anything
1772 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1773 vr1.operands))
1774 ao_ref_init (&r, op);
1775 vn_walk_kind = kind;
1776 wvnresult =
1777 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1778 vn_reference_lookup_2,
1779 vn_reference_lookup_3, &vr1);
1780 if (vr1.operands != operands)
1781 VEC_free (vn_reference_op_s, heap, vr1.operands);
1782 if (wvnresult)
1783 {
1784 if (vnresult)
1785 *vnresult = wvnresult;
1786 return wvnresult->result;
1787 }
1788
1789 return NULL_TREE;
1790 }
1791
1792 return vn_reference_lookup_1 (&vr1, vnresult);
1793 }
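
/* A typical (hypothetical) use, as from a load visit in this pass:

     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt),
                                     VN_WALKREWRITE, NULL);

   returns the value number of an equivalent, already visited load
   (walking and possibly translating through intervening stores), or
   NULL_TREE if none is known.  */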
1794
1795
1796 /* Insert OP into the current hash table with a value number of
1797 RESULT, and return the resulting reference structure we created. */
1798
1799 vn_reference_t
1800 vn_reference_insert (tree op, tree result, tree vuse)
1801 {
1802 void **slot;
1803 vn_reference_t vr1;
1804
1805 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1806 if (TREE_CODE (result) == SSA_NAME)
1807 vr1->value_id = VN_INFO (result)->value_id;
1808 else
1809 vr1->value_id = get_or_alloc_constant_value_id (result);
1810 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1811 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1812 vr1->type = TREE_TYPE (op);
1813 vr1->set = get_alias_set (op);
1814 vr1->hashcode = vn_reference_compute_hash (vr1);
1815 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1816
1817 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1818 INSERT);
1819
1820 /* Because we look up stores using vuses, and value number failures
1821 using the vdefs (see visit_reference_op_store for how and why),
1822 it's possible that on failure we may try to insert an already
1823 inserted store. This is not wrong; there is no ssa name for a
1824 store that we could use as a differentiator anyway. Thus, unlike
1825 the other lookup functions, you cannot gcc_assert (!*slot)
1826 here. */
1827
1828 /* But free the old slot in case of a collision. */
1829 if (*slot)
1830 free_reference (*slot);
1831
1832 *slot = vr1;
1833 return vr1;
1834 }
1835
1836 /* Insert a reference by its pieces into the current hash table with
1837 a value number of RESULT. Return the resulting reference
1838 structure we created. */
1839
1840 vn_reference_t
1841 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1842 VEC (vn_reference_op_s, heap) *operands,
1843 tree result, unsigned int value_id)
1845 {
1846 void **slot;
1847 vn_reference_t vr1;
1848
1849 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1850 vr1->value_id = value_id;
1851 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1852 vr1->operands = valueize_refs (operands);
1853 vr1->type = type;
1854 vr1->set = set;
1855 vr1->hashcode = vn_reference_compute_hash (vr1);
1856 if (result && TREE_CODE (result) == SSA_NAME)
1857 result = SSA_VAL (result);
1858 vr1->result = result;
1859
1860 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1861 INSERT);
1862
1863 /* At this point we should have inserted all the things we have
1864 seen before, and we should never try inserting something that
1865 already exists. */
1866 gcc_assert (!*slot);
1867 if (*slot)
1868 free_reference (*slot);
1869
1870 *slot = vr1;
1871 return vr1;
1872 }
1873
1874 /* Compute and return the hash value for nary operation VNO1. */
1875
1876 hashval_t
1877 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1878 {
1879 hashval_t hash;
1880 unsigned i;
1881
1882 for (i = 0; i < vno1->length; ++i)
1883 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1884 vno1->op[i] = SSA_VAL (vno1->op[i]);
1885
1886 if (vno1->length == 2
1887 && commutative_tree_code (vno1->opcode)
1888 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1889 {
1890 tree temp = vno1->op[0];
1891 vno1->op[0] = vno1->op[1];
1892 vno1->op[1] = temp;
1893 }
1894
1895 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1896 for (i = 0; i < vno1->length; ++i)
1897 hash = iterative_hash_expr (vno1->op[i], hash);
1898
1899 return hash;
1900 }
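
/* For example (with made-up SSA names), given

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   the operand swap above canonicalizes both right-hand sides to the
   same operand order, so they receive the same hashcode and compare
   equal in vn_nary_op_eq.  */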
1901
1902 /* Return the computed hashcode for nary operation P1. */
1903
1904 static hashval_t
1905 vn_nary_op_hash (const void *p1)
1906 {
1907 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1908 return vno1->hashcode;
1909 }
1910
1911 /* Compare nary operations P1 and P2 and return true if they are
1912 equivalent. */
1913
1914 int
1915 vn_nary_op_eq (const void *p1, const void *p2)
1916 {
1917 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1918 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1919 unsigned i;
1920
1921 if (vno1->hashcode != vno2->hashcode)
1922 return false;
1923
1924 if (vno1->length != vno2->length)
1925 return false;
1926
1927 if (vno1->opcode != vno2->opcode
1928 || !types_compatible_p (vno1->type, vno2->type))
1929 return false;
1930
1931 for (i = 0; i < vno1->length; ++i)
1932 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1933 return false;
1934
1935 return true;
1936 }
1937
1938 /* Initialize VNO from the pieces provided. */
1939
1940 static void
1941 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1942 enum tree_code code, tree type, tree *ops)
1943 {
1944 vno->opcode = code;
1945 vno->length = length;
1946 vno->type = type;
1947 memcpy (&vno->op[0], ops, sizeof (tree) * length);
1948 }
1949
1950 /* Initialize VNO from OP. */
1951
1952 static void
1953 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1954 {
1955 unsigned i;
1956
1957 vno->opcode = TREE_CODE (op);
1958 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1959 vno->type = TREE_TYPE (op);
1960 for (i = 0; i < vno->length; ++i)
1961 vno->op[i] = TREE_OPERAND (op, i);
1962 }
1963
1964 /* Return the number of operands for a vn_nary ops structure from STMT. */
1965
1966 static unsigned int
1967 vn_nary_length_from_stmt (gimple stmt)
1968 {
1969 switch (gimple_assign_rhs_code (stmt))
1970 {
1971 case REALPART_EXPR:
1972 case IMAGPART_EXPR:
1973 case VIEW_CONVERT_EXPR:
1974 return 1;
1975
1976 case CONSTRUCTOR:
1977 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
1978
1979 default:
1980 return gimple_num_ops (stmt) - 1;
1981 }
1982 }
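
/* For example (made-up statements): "x_1 = a_2 + b_3" yields 2,
   "x_1 = REALPART_EXPR <c_2>" yields 1 (only the operand of the
   wrapping reference counts), and a CONSTRUCTOR right-hand side
   yields one operand per constructor element.  */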
1983
1984 /* Initialize VNO from STMT. */
1985
1986 static void
1987 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1988 {
1989 unsigned i;
1990
1991 vno->opcode = gimple_assign_rhs_code (stmt);
1992 vno->type = gimple_expr_type (stmt);
1993 switch (vno->opcode)
1994 {
1995 case REALPART_EXPR:
1996 case IMAGPART_EXPR:
1997 case VIEW_CONVERT_EXPR:
1998 vno->length = 1;
1999 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2000 break;
2001
2002 case CONSTRUCTOR:
2003 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2004 for (i = 0; i < vno->length; ++i)
2005 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2006 break;
2007
2008 default:
2009 vno->length = gimple_num_ops (stmt) - 1;
2010 for (i = 0; i < vno->length; ++i)
2011 vno->op[i] = gimple_op (stmt, i + 1);
2012 }
2013 }
2014
2015 /* Compute the hashcode for VNO and look for it in the hash table;
2016 return the resulting value number if it exists in the hash table.
2017 Return NULL_TREE if it does not exist in the hash table or if the
2018 result field of the operation is NULL. VNRESULT will contain the
2019 vn_nary_op_t from the hashtable if it exists. */
2020
2021 static tree
2022 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2023 {
2024 void **slot;
2025
2026 if (vnresult)
2027 *vnresult = NULL;
2028
2029 vno->hashcode = vn_nary_op_compute_hash (vno);
2030 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2031 NO_INSERT);
2032 if (!slot && current_info == optimistic_info)
2033 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2034 NO_INSERT);
2035 if (!slot)
2036 return NULL_TREE;
2037 if (vnresult)
2038 *vnresult = (vn_nary_op_t)*slot;
2039 return ((vn_nary_op_t)*slot)->result;
2040 }
2041
2042 /* Lookup an n-ary operation by its pieces and return the resulting value
2043 number if it exists in the hash table. Return NULL_TREE if it does
2044 not exist in the hash table or if the result field of the operation
2045 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2046 if it exists. */
2047
2048 tree
2049 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2050 tree type, tree *ops, vn_nary_op_t *vnresult)
2051 {
2052 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2053 sizeof_vn_nary_op (length));
2054 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2055 return vn_nary_op_lookup_1 (vno1, vnresult);
2056 }
2057
2058 /* Lookup OP in the current hash table, and return the resulting value
2059 number if it exists in the hash table. Return NULL_TREE if it does
2060 not exist in the hash table or if the result field of the operation
2061 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2062 if it exists. */
2063
2064 tree
2065 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2066 {
2067 vn_nary_op_t vno1
2068 = XALLOCAVAR (struct vn_nary_op_s,
2069 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2070 init_vn_nary_op_from_op (vno1, op);
2071 return vn_nary_op_lookup_1 (vno1, vnresult);
2072 }
2073
2074 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2075 value number if it exists in the hash table. Return NULL_TREE if
2076 it does not exist in the hash table. VNRESULT will contain the
2077 vn_nary_op_t from the hashtable if it exists. */
2078
2079 tree
2080 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2081 {
2082 vn_nary_op_t vno1
2083 = XALLOCAVAR (struct vn_nary_op_s,
2084 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2085 init_vn_nary_op_from_stmt (vno1, stmt);
2086 return vn_nary_op_lookup_1 (vno1, vnresult);
2087 }
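
/* The usual pairing of this lookup with an insertion, sketched from
   visit_nary_op below (omitting the value number updates), is

     tree result = vn_nary_op_lookup_stmt (stmt, NULL);
     if (!result)
       vn_nary_op_insert_stmt (stmt, lhs);

   i.e. a failed lookup is followed by recording the statement with
   its own lhs as the value number.  */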
2088
2089 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2090
2091 static vn_nary_op_t
2092 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2093 {
2094 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2095 }
2096
2097 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2098 obstack. */
2099
2100 static vn_nary_op_t
2101 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2102 {
2103 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2104 &current_info->nary_obstack);
2105
2106 vno1->value_id = value_id;
2107 vno1->length = length;
2108 vno1->result = result;
2109
2110 return vno1;
2111 }
2112
2113 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2114 VNO->HASHCODE first. */
2115
2116 static vn_nary_op_t
2117 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2118 {
2119 void **slot;
2120
2121 if (compute_hash)
2122 vno->hashcode = vn_nary_op_compute_hash (vno);
2123
2124 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2125 gcc_assert (!*slot);
2126
2127 *slot = vno;
2128 return vno;
2129 }
2130
2131 /* Insert an n-ary operation into the current hash table using its
2132 pieces. Return the vn_nary_op_t structure we created and put in
2133 the hashtable. */
2134
2135 vn_nary_op_t
2136 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2137 tree type, tree *ops,
2138 tree result, unsigned int value_id)
2139 {
2140 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2141 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2142 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2143 }
2144
2145 /* Insert OP into the current hash table with a value number of
2146 RESULT. Return the vn_nary_op_t structure we created and put in
2147 the hashtable. */
2148
2149 vn_nary_op_t
2150 vn_nary_op_insert (tree op, tree result)
2151 {
2152 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2153 vn_nary_op_t vno1;
2154
2155 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2156 init_vn_nary_op_from_op (vno1, op);
2157 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2158 }
2159
2160 /* Insert the rhs of STMT into the current hash table with a value number of
2161 RESULT. */
2162
2163 vn_nary_op_t
2164 vn_nary_op_insert_stmt (gimple stmt, tree result)
2165 {
2166 vn_nary_op_t vno1
2167 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2168 result, VN_INFO (result)->value_id);
2169 init_vn_nary_op_from_stmt (vno1, stmt);
2170 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2171 }
2172
2173 /* Compute a hashcode for PHI operation VP1 and return it. */
2174
2175 static inline hashval_t
2176 vn_phi_compute_hash (vn_phi_t vp1)
2177 {
2178 hashval_t result;
2179 int i;
2180 tree phi1op;
2181 tree type;
2182
2183 result = vp1->block->index;
2184
2185 /* If all PHI arguments are constants we need to distinguish
2186 the PHI node via its type. */
2187 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2188 result += (INTEGRAL_TYPE_P (type)
2189 + (INTEGRAL_TYPE_P (type)
2190 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2191
2192 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2193 {
2194 if (phi1op == VN_TOP)
2195 continue;
2196 result = iterative_hash_expr (phi1op, result);
2197 }
2198
2199 return result;
2200 }
2201
2202 /* Return the computed hashcode for phi operation P1. */
2203
2204 static hashval_t
2205 vn_phi_hash (const void *p1)
2206 {
2207 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2208 return vp1->hashcode;
2209 }
2210
2211 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2212
2213 static int
2214 vn_phi_eq (const void *p1, const void *p2)
2215 {
2216 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2217 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2218
2219 if (vp1->hashcode != vp2->hashcode)
2220 return false;
2221
2222 if (vp1->block == vp2->block)
2223 {
2224 int i;
2225 tree phi1op;
2226
2227 /* If the PHI nodes do not have compatible types
2228 they are not the same. */
2229 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2230 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2231 return false;
2232
2233 /* Any phi in the same block will have its arguments in the
2234 same edge order, because of how we store phi nodes. */
2235 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2236 {
2237 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2238 if (phi1op == VN_TOP || phi2op == VN_TOP)
2239 continue;
2240 if (!expressions_equal_p (phi1op, phi2op))
2241 return false;
2242 }
2243 return true;
2244 }
2245 return false;
2246 }
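
/* For example (made-up names), within one block the PHIs

     x_1 = PHI <a_4(2), b_5(3)>
     y_2 = PHI <a_4(2), b_5(3)>

   are equivalent here, so once the first has been entered into the
   table a lookup of the second yields x_1 and y_2 can be
   value-numbered to x_1.  */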
2247
2248 static VEC(tree, heap) *shared_lookup_phiargs;
2249
2250 /* Lookup PHI in the current hash table, and return the resulting
2251 value number if it exists in the hash table. Return NULL_TREE if
2252 it does not exist in the hash table. */
2253
2254 static tree
2255 vn_phi_lookup (gimple phi)
2256 {
2257 void **slot;
2258 struct vn_phi_s vp1;
2259 unsigned i;
2260
2261 VEC_truncate (tree, shared_lookup_phiargs, 0);
2262
2263 /* Canonicalize the SSA_NAME's to their value number. */
2264 for (i = 0; i < gimple_phi_num_args (phi); i++)
2265 {
2266 tree def = PHI_ARG_DEF (phi, i);
2267 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2268 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2269 }
2270 vp1.phiargs = shared_lookup_phiargs;
2271 vp1.block = gimple_bb (phi);
2272 vp1.hashcode = vn_phi_compute_hash (&vp1);
2273 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2274 NO_INSERT);
2275 if (!slot && current_info == optimistic_info)
2276 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2277 NO_INSERT);
2278 if (!slot)
2279 return NULL_TREE;
2280 return ((vn_phi_t)*slot)->result;
2281 }
2282
2283 /* Insert PHI into the current hash table with a value number of
2284 RESULT. */
2285
2286 static vn_phi_t
2287 vn_phi_insert (gimple phi, tree result)
2288 {
2289 void **slot;
2290 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2291 unsigned i;
2292 VEC (tree, heap) *args = NULL;
2293
2294 /* Canonicalize the SSA_NAME's to their value number. */
2295 for (i = 0; i < gimple_phi_num_args (phi); i++)
2296 {
2297 tree def = PHI_ARG_DEF (phi, i);
2298 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2299 VEC_safe_push (tree, heap, args, def);
2300 }
2301 vp1->value_id = VN_INFO (result)->value_id;
2302 vp1->phiargs = args;
2303 vp1->block = gimple_bb (phi);
2304 vp1->result = result;
2305 vp1->hashcode = vn_phi_compute_hash (vp1);
2306
2307 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2308 INSERT);
2309
2310 /* Because we iterate over phi operations more than once, it's
2311 possible the slot might already exist here, hence no assert. */
2312 *slot = vp1;
2313 return vp1;
2314 }
2315
2316
2317 /* Print set of components in strongly connected component SCC to OUT. */
2318
2319 static void
2320 print_scc (FILE *out, VEC (tree, heap) *scc)
2321 {
2322 tree var;
2323 unsigned int i;
2324
2325 fprintf (out, "SCC consists of: ");
2326 FOR_EACH_VEC_ELT (tree, scc, i, var)
2327 {
2328 print_generic_expr (out, var, 0);
2329 fprintf (out, " ");
2330 }
2331 fprintf (out, "\n");
2332 }
2333
2334 /* Set the value number of FROM to TO, return true if it has changed
2335 as a result. */
2336
2337 static inline bool
2338 set_ssa_val_to (tree from, tree to)
2339 {
2340 tree currval = SSA_VAL (from);
2341
2342 if (from != to)
2343 {
2344 if (currval == from)
2345 {
2346 if (dump_file && (dump_flags & TDF_DETAILS))
2347 {
2348 fprintf (dump_file, "Not changing value number of ");
2349 print_generic_expr (dump_file, from, 0);
2350 fprintf (dump_file, " from VARYING to ");
2351 print_generic_expr (dump_file, to, 0);
2352 fprintf (dump_file, "\n");
2353 }
2354 return false;
2355 }
2356 else if (TREE_CODE (to) == SSA_NAME
2357 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2358 to = from;
2359 }
2360
2361 /* The only things we allow as value numbers are VN_TOP, ssa_names
2362 and invariants. So assert that here. */
2363 gcc_assert (to != NULL_TREE
2364 && (to == VN_TOP
2365 || TREE_CODE (to) == SSA_NAME
2366 || is_gimple_min_invariant (to)));
2367
2368 if (dump_file && (dump_flags & TDF_DETAILS))
2369 {
2370 fprintf (dump_file, "Setting value number of ");
2371 print_generic_expr (dump_file, from, 0);
2372 fprintf (dump_file, " to ");
2373 print_generic_expr (dump_file, to, 0);
2374 }
2375
2376 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2377 {
2378 VN_INFO (from)->valnum = to;
2379 if (dump_file && (dump_flags & TDF_DETAILS))
2380 fprintf (dump_file, " (changed)\n");
2381 return true;
2382 }
2383 if (dump_file && (dump_flags & TDF_DETAILS))
2384 fprintf (dump_file, "\n");
2385 return false;
2386 }
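
/* For example (a sketch with made-up names), during optimistic
   iteration a name may move down the lattice from VN_TOP to a
   constant and finally to itself, i.e. VARYING:

     set_ssa_val_to (x_1, integer_zero_node);
     set_ssa_val_to (x_1, x_1);

   where the first call records the optimistic constant 0 and the
   second records VARYING, each returning true exactly when the
   stored value number changed.  */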
2387
2388 /* Value number all definitions in STMT to themselves.
2389 Return true if a value number changed. */
2390
2391 static bool
2392 defs_to_varying (gimple stmt)
2393 {
2394 bool changed = false;
2395 ssa_op_iter iter;
2396 def_operand_p defp;
2397
2398 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2399 {
2400 tree def = DEF_FROM_PTR (defp);
2401
2402 VN_INFO (def)->use_processed = true;
2403 changed |= set_ssa_val_to (def, def);
2404 }
2405 return changed;
2406 }
2407
2408 static bool expr_has_constants (tree expr);
2409 static tree valueize_expr (tree expr);
2410
2411 /* Visit a copy between LHS and RHS, return true if the value number
2412 changed. */
2413
2414 static bool
2415 visit_copy (tree lhs, tree rhs)
2416 {
2417 /* Follow chains of copies to their destination. */
2418 while (TREE_CODE (rhs) == SSA_NAME
2419 && SSA_VAL (rhs) != rhs)
2420 rhs = SSA_VAL (rhs);
2421
2422 /* The copy may have a more interesting constant-filled expression
2423 (we don't, since we know our RHS is just an SSA name). */
2424 if (TREE_CODE (rhs) == SSA_NAME)
2425 {
2426 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2427 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2428 }
2429
2430 return set_ssa_val_to (lhs, rhs);
2431 }
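
/* For example (made-up names), for the copies

     b_2 = a_1;
     c_3 = b_2;

   visiting c_3 follows the chain via SSA_VAL (b_2) == a_1, so c_3 is
   value-numbered directly to a_1 rather than to b_2.  */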
2432
2433 /* Visit a nary operator RHS, value number it, and return true if the
2434 value number of LHS has changed as a result. */
2435
2436 static bool
2437 visit_nary_op (tree lhs, gimple stmt)
2438 {
2439 bool changed = false;
2440 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2441
2442 if (result)
2443 changed = set_ssa_val_to (lhs, result);
2444 else
2445 {
2446 changed = set_ssa_val_to (lhs, lhs);
2447 vn_nary_op_insert_stmt (stmt, lhs);
2448 }
2449
2450 return changed;
2451 }
2452
2453 /* Visit a call STMT storing into LHS. Return true if the value number
2454 of the LHS has changed as a result. */
2455
2456 static bool
2457 visit_reference_op_call (tree lhs, gimple stmt)
2458 {
2459 bool changed = false;
2460 struct vn_reference_s vr1;
2461 tree result;
2462 tree vuse = gimple_vuse (stmt);
2463
2464 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2465 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2466 vr1.type = gimple_expr_type (stmt);
2467 vr1.set = 0;
2468 vr1.hashcode = vn_reference_compute_hash (&vr1);
2469 result = vn_reference_lookup_1 (&vr1, NULL);
2470 if (result)
2471 {
2472 changed = set_ssa_val_to (lhs, result);
2473 if (TREE_CODE (result) == SSA_NAME
2474 && VN_INFO (result)->has_constants)
2475 VN_INFO (lhs)->has_constants = true;
2476 }
2477 else
2478 {
2479 void **slot;
2480 vn_reference_t vr2;
2481 changed = set_ssa_val_to (lhs, lhs);
2482 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2483 vr2->vuse = vr1.vuse;
2484 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2485 vr2->type = vr1.type;
2486 vr2->set = vr1.set;
2487 vr2->hashcode = vr1.hashcode;
2488 vr2->result = lhs;
2489 slot = htab_find_slot_with_hash (current_info->references,
2490 vr2, vr2->hashcode, INSERT);
2491 if (*slot)
2492 free_reference (*slot);
2493 *slot = vr2;
2494 }
2495
2496 return changed;
2497 }
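
/* For example (made-up names), for a const or pure function f the
   two calls

     x_1 = f (a_2);
     y_3 = f (a_2);

   hash to the same reference when their VUSEs value-number the same,
   so the lookup for the second call finds the entry inserted for the
   first and y_3 is value-numbered to x_1.  */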
2498
2499 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2500 and return true if the value number of the LHS has changed as a result. */
2501
2502 static bool
2503 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2504 {
2505 bool changed = false;
2506 tree last_vuse;
2507 tree result;
2508
2509 last_vuse = gimple_vuse (stmt);
2510 last_vuse_ptr = &last_vuse;
2511 result = vn_reference_lookup (op, gimple_vuse (stmt),
2512 default_vn_walk_kind, NULL);
2513 last_vuse_ptr = NULL;
2514
2515 /* If we have a VCE, try looking up its operand as it might be stored in
2516 a different type. */
2517 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2518 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2519 default_vn_walk_kind, NULL);
2520
2521 /* We handle type-punning through unions by value-numbering based
2522 on offset and size of the access. Be prepared to handle a
2523 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2524 if (result
2525 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2526 {
2527 /* We will be setting the value number of lhs to the value number
2528 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2529 So first simplify and lookup this expression to see if it
2530 is already available. */
2531 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2532 if ((CONVERT_EXPR_P (val)
2533 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2534 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2535 {
2536 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2537 if ((CONVERT_EXPR_P (tem)
2538 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2539 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2540 TREE_TYPE (val), tem)))
2541 val = tem;
2542 }
2543 result = val;
2544 if (!is_gimple_min_invariant (val)
2545 && TREE_CODE (val) != SSA_NAME)
2546 result = vn_nary_op_lookup (val, NULL);
2547 /* If the expression is not yet available, value-number lhs to
2548 a new SSA_NAME we create. */
2549 if (!result)
2550 {
2551 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2552 /* Initialize value-number information properly. */
2553 VN_INFO_GET (result)->valnum = result;
2554 VN_INFO (result)->value_id = get_next_value_id ();
2555 VN_INFO (result)->expr = val;
2556 VN_INFO (result)->has_constants = expr_has_constants (val);
2557 VN_INFO (result)->needs_insertion = true;
2558 /* As all "inserted" statements are singleton SCCs, insert
2559 to the valid table. This is strictly needed to
2560 avoid re-generating new value SSA_NAMEs for the same
2561 expression during SCC iteration over and over (the
2562 optimistic table gets cleared after each iteration).
2563 We do not need to insert into the optimistic table, as
2564 lookups there will fall back to the valid table. */
2565 if (current_info == optimistic_info)
2566 {
2567 current_info = valid_info;
2568 vn_nary_op_insert (val, result);
2569 current_info = optimistic_info;
2570 }
2571 else
2572 vn_nary_op_insert (val, result);
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 {
2575 fprintf (dump_file, "Inserting name ");
2576 print_generic_expr (dump_file, result, 0);
2577 fprintf (dump_file, " for expression ");
2578 print_generic_expr (dump_file, val, 0);
2579 fprintf (dump_file, "\n");
2580 }
2581 }
2582 }
2583
2584 if (result)
2585 {
2586 changed = set_ssa_val_to (lhs, result);
2587 if (TREE_CODE (result) == SSA_NAME
2588 && VN_INFO (result)->has_constants)
2589 {
2590 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2591 VN_INFO (lhs)->has_constants = true;
2592 }
2593 }
2594 else
2595 {
2596 changed = set_ssa_val_to (lhs, lhs);
2597 vn_reference_insert (op, lhs, last_vuse);
2598 }
2599
2600 return changed;
2601 }
2602
2603
2604 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2605 and return true if the value number of the LHS has changed as a result. */
2606
2607 static bool
2608 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2609 {
2610 bool changed = false;
2611 tree result;
2612 bool resultsame = false;
2613
2614 /* First we want to look up using the *vuses* from the store and see
2615 if the last store to this location with the same address
2616 had the same value.
2617
2618 The vuses represent the memory state before the store. If the
2619 memory state, address, and value of the store are the same as the
2620 last store to this location, then this store will produce the
2621 same memory state as that store.
2622
2623 In this case the vdef versions for this store are value numbered to those
2624 vuse versions, since they represent the same memory state after
2625 this store.
2626
2627 Otherwise, the vdefs for the store are used when inserting into
2628 the table, since the store generates a new memory state. */
2629
2630 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2631
2632 if (result)
2633 {
2634 if (TREE_CODE (result) == SSA_NAME)
2635 result = SSA_VAL (result);
2636 if (TREE_CODE (op) == SSA_NAME)
2637 op = SSA_VAL (op);
2638 resultsame = expressions_equal_p (result, op);
2639 }
2640
2641 if (!result || !resultsame)
2642 {
2643 tree vdef;
2644
2645 if (dump_file && (dump_flags & TDF_DETAILS))
2646 {
2647 fprintf (dump_file, "No store match\n");
2648 fprintf (dump_file, "Value numbering store ");
2649 print_generic_expr (dump_file, lhs, 0);
2650 fprintf (dump_file, " to ");
2651 print_generic_expr (dump_file, op, 0);
2652 fprintf (dump_file, "\n");
2653 }
2654 /* Have to set value numbers before insert, since insert is
2655 going to valueize the references in-place. */
2656 if ((vdef = gimple_vdef (stmt)))
2657 {
2658 VN_INFO (vdef)->use_processed = true;
2659 changed |= set_ssa_val_to (vdef, vdef);
2660 }
2661
2662 /* Do not insert structure copies into the tables. */
2663 if (is_gimple_min_invariant (op)
2664 || is_gimple_reg (op))
2665 vn_reference_insert (lhs, op, vdef);
2666 }
2667 else
2668 {
2669 /* We had a match, so value number the vdef to have the value
2670 number of the vuse it came from. */
2671 tree def, use;
2672
2673 if (dump_file && (dump_flags & TDF_DETAILS))
2674 fprintf (dump_file, "Store matched earlier value, "
2675 "value numbering store vdefs to matching vuses.\n");
2676
2677 def = gimple_vdef (stmt);
2678 use = gimple_vuse (stmt);
2679
2680 VN_INFO (def)->use_processed = true;
2681 changed |= set_ssa_val_to (def, SSA_VAL (use));
2682 }
2683
2684 return changed;
2685 }
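
/* For example (made-up names), in

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_4;

   the second store finds the first via the lookup above and stores
   the same value, so .MEM_5 is value-numbered to SSA_VAL (.MEM_3),
   expressing that it leaves the memory state unchanged.  */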
2686
2687 /* Visit and value number PHI, return true if the value number
2688 changed. */
2689
2690 static bool
2691 visit_phi (gimple phi)
2692 {
2693 bool changed = false;
2694 tree result;
2695 tree sameval = VN_TOP;
2696 bool allsame = true;
2697 unsigned i;
2698
2699 /* TODO: We could check for this in init_scc_vn, and replace this
2700 with a gcc_assert. */
2701 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2702 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2703
2704 /* See if all non-TOP arguments have the same value. TOP is
2705 equivalent to everything, so we can ignore it. */
2706 for (i = 0; i < gimple_phi_num_args (phi); i++)
2707 {
2708 tree def = PHI_ARG_DEF (phi, i);
2709
2710 if (TREE_CODE (def) == SSA_NAME)
2711 def = SSA_VAL (def);
2712 if (def == VN_TOP)
2713 continue;
2714 if (sameval == VN_TOP)
2715 {
2716 sameval = def;
2717 }
2718 else
2719 {
2720 if (!expressions_equal_p (def, sameval))
2721 {
2722 allsame = false;
2723 break;
2724 }
2725 }
2726 }
2727
2728 /* If all arguments value numbered to the same value, the phi node has that
2729 value. */
2730 if (allsame)
2731 {
2732 if (is_gimple_min_invariant (sameval))
2733 {
2734 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2735 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2736 }
2737 else
2738 {
2739 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2740 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2741 }
2742
2743 if (TREE_CODE (sameval) == SSA_NAME)
2744 return visit_copy (PHI_RESULT (phi), sameval);
2745
2746 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2747 }
2748
2749 /* Otherwise, see if it is equivalent to a phi node in this block. */
2750 result = vn_phi_lookup (phi);
2751 if (result)
2752 {
2753 if (TREE_CODE (result) == SSA_NAME)
2754 changed = visit_copy (PHI_RESULT (phi), result);
2755 else
2756 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2757 }
2758 else
2759 {
2760 vn_phi_insert (phi, PHI_RESULT (phi));
2761 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2762 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2763 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2764 }
2765
2766 return changed;
2767 }
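
/* For example (made-up names), if after valueizing the arguments we
   see

     x_1 = PHI <5(2), 5(3)>

   all arguments have the same value and x_1 is value-numbered to the
   constant 5, whereas

     x_1 = PHI <a_2(2), b_3(3)>

   with differing argument values is instead looked up (and possibly
   entered) in the PHI table.  */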
2768
2769 /* Return true if EXPR contains constants. */
2770
2771 static bool
2772 expr_has_constants (tree expr)
2773 {
2774 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2775 {
2776 case tcc_unary:
2777 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2778
2779 case tcc_binary:
2780 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2781 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2782 /* Constants inside reference ops are rarely interesting, but
2783 it can take a lot of looking to find them. */
2784 case tcc_reference:
2785 case tcc_declaration:
2786 return false;
2787 default:
2788 return is_gimple_min_invariant (expr);
2789 }
2790 return false;
2791 }
2792
2793 /* Return true if STMT contains constants. */
2794
2795 static bool
2796 stmt_has_constants (gimple stmt)
2797 {
2798 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2799 return false;
2800
2801 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2802 {
2803 case GIMPLE_UNARY_RHS:
2804 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2805
2806 case GIMPLE_BINARY_RHS:
2807 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2808 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2809 case GIMPLE_TERNARY_RHS:
2810 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2811 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2812 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2813 case GIMPLE_SINGLE_RHS:
2814 /* Constants inside reference ops are rarely interesting, but
2815 it can take a lot of looking to find them. */
2816 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2817 default:
2818 gcc_unreachable ();
2819 }
2820 return false;
2821 }
2822
2823 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2824 result.
2825 This is performed in place. */
2826
2827 static tree
2828 valueize_expr (tree expr)
2829 {
2830 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2831 {
2832 case tcc_binary:
2833 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2834 /* Fallthru. */
2835 case tcc_unary:
2836 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2837 break;
2838 default:;
2839 }
2840 return expr;
2841 }
2842
2843 /* Simplify the binary expression on the rhs of STMT, and return the result if
2844 simplified. */
2845
2846 static tree
2847 simplify_binary_expression (gimple stmt)
2848 {
2849 tree result = NULL_TREE;
2850 tree op0 = gimple_assign_rhs1 (stmt);
2851 tree op1 = gimple_assign_rhs2 (stmt);
2852 enum tree_code code = gimple_assign_rhs_code (stmt);
2853
2854 /* This will not catch every single case we could combine, but will
2855 catch those with constants. The goal here is to combine
2856 constants between expressions while avoiding infinite
2857 expansion of expressions during simplification. */
2858 if (TREE_CODE (op0) == SSA_NAME)
2859 {
2860 if (VN_INFO (op0)->has_constants
2861 || TREE_CODE_CLASS (code) == tcc_comparison
2862 || code == COMPLEX_EXPR)
2863 op0 = valueize_expr (vn_get_expr_for (op0));
2864 else
2865 op0 = vn_valueize (op0);
2866 }
2867
2868 if (TREE_CODE (op1) == SSA_NAME)
2869 {
2870 if (VN_INFO (op1)->has_constants
2871 || code == COMPLEX_EXPR)
2872 op1 = valueize_expr (vn_get_expr_for (op1));
2873 else
2874 op1 = vn_valueize (op1);
2875 }
2876
2877 /* Pointer plus constant can be represented as invariant address.
2878 Do so to allow further propagation; see also tree forwprop. */
2879 if (code == POINTER_PLUS_EXPR
2880 && host_integerp (op1, 1)
2881 && TREE_CODE (op0) == ADDR_EXPR
2882 && is_gimple_min_invariant (op0))
2883 return build_invariant_address (TREE_TYPE (op0),
2884 TREE_OPERAND (op0, 0),
2885 TREE_INT_CST_LOW (op1));
2886
2887 /* Avoid folding if nothing changed. */
2888 if (op0 == gimple_assign_rhs1 (stmt)
2889 && op1 == gimple_assign_rhs2 (stmt))
2890 return NULL_TREE;
2891
2892 fold_defer_overflow_warnings ();
2893
2894 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
2895 if (result)
2896 STRIP_USELESS_TYPE_CONVERSION (result);
2897
2898 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2899 stmt, 0);
2900
2901 /* Make sure result is not a complex expression consisting
2902 of operators of operators (i.e. (a + b) + (a + c)).
2903 Otherwise, we will end up with unbounded expressions if
2904 fold does anything at all. */
2905 if (result && valid_gimple_rhs_p (result))
2906 return result;
2907
2908 return NULL_TREE;
2909 }
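
/* For example (made-up names), given

     y_2 = x_1 + 1;
     z_3 = y_2 + 2;

   the expression recorded for y_2 is x_1 + 1, so when visiting z_3
   the rhs seen by fold_binary is (x_1 + 1) + 2, which it combines to
   x_1 + 3.  */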
2910
2911 /* Simplify the unary expression on the rhs of STMT, and return the result if
2912 simplified. */
2913
2914 static tree
2915 simplify_unary_expression (gimple stmt)
2916 {
2917 tree result = NULL_TREE;
2918 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2919 enum tree_code code = gimple_assign_rhs_code (stmt);
2920
2921 /* We handle some tcc_reference codes here that are all
2922 GIMPLE_ASSIGN_SINGLE codes. */
2923 if (code == REALPART_EXPR
2924 || code == IMAGPART_EXPR
2925 || code == VIEW_CONVERT_EXPR)
2926 op0 = TREE_OPERAND (op0, 0);
2927
2928 if (TREE_CODE (op0) != SSA_NAME)
2929 return NULL_TREE;
2930
2931 orig_op0 = op0;
2932 if (VN_INFO (op0)->has_constants)
2933 op0 = valueize_expr (vn_get_expr_for (op0));
2934 else if (CONVERT_EXPR_CODE_P (code)
2935 || code == REALPART_EXPR
2936 || code == IMAGPART_EXPR
2937 || code == VIEW_CONVERT_EXPR)
2938 {
2939 /* We want to do tree-combining on conversion-like expressions.
2940 Make sure we feed only SSA_NAMEs or constants to fold though. */
2941 tree tem = valueize_expr (vn_get_expr_for (op0));
2942 if (UNARY_CLASS_P (tem)
2943 || BINARY_CLASS_P (tem)
2944 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2945 || TREE_CODE (tem) == SSA_NAME
2946 || is_gimple_min_invariant (tem))
2947 op0 = tem;
2948 }
2949
2950 /* Avoid folding if nothing changed. */
2951 if (op0 == orig_op0)
2952 return NULL_TREE;
2953
2954 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
2955 if (result)
2956 {
2957 STRIP_USELESS_TYPE_CONVERSION (result);
2958 if (valid_gimple_rhs_p (result))
2959 return result;
2960 }
2961
2962 return NULL_TREE;
2963 }
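
/* For example (made-up names), given

     c_2 = COMPLEX_EXPR <a_1, b_3>;
     r_4 = REALPART_EXPR <c_2>;

   the expression recorded for c_2 is substituted for the operand,
   and fold_unary_ignore_overflow reduces
   REALPART_EXPR <COMPLEX_EXPR <a_1, b_3>> to a_1.  */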
2964
2965 /* Try to simplify the rhs of STMT using equivalences and constant folding. */
2966
2967 static tree
2968 try_to_simplify (gimple stmt)
2969 {
2970 enum tree_code code = gimple_assign_rhs_code (stmt);
2971 tree tem;
2972
2973 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2974 in this case, there is no point in doing extra work. */
2975 if (code == SSA_NAME)
2976 return NULL_TREE;
2977
2978 /* First try constant folding based on our current lattice. */
2979 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
2980 if (tem
2981 && (TREE_CODE (tem) == SSA_NAME
2982 || is_gimple_min_invariant (tem)))
2983 return tem;
2984
2985 /* If that didn't work try combining multiple statements. */
2986 switch (TREE_CODE_CLASS (code))
2987 {
2988 case tcc_reference:
2989 /* Fallthrough for some unary codes that can operate on registers. */
2990 if (!(code == REALPART_EXPR
2991 || code == IMAGPART_EXPR
2992 || code == VIEW_CONVERT_EXPR))
2993 break;
2994 /* We could do a little more with unary ops, if they expand
2995 into binary ops, but it's debatable whether it is worth it. */
2996 case tcc_unary:
2997 return simplify_unary_expression (stmt);
2998
2999 case tcc_comparison:
3000 case tcc_binary:
3001 return simplify_binary_expression (stmt);
3002
3003 default:
3004 break;
3005 }
3006
3007 return NULL_TREE;
3008 }
3009
3010 /* Visit and value number USE, return true if the value number
3011 changed. */
3012
3013 static bool
3014 visit_use (tree use)
3015 {
3016 bool changed = false;
3017 gimple stmt = SSA_NAME_DEF_STMT (use);
3018
3019 VN_INFO (use)->use_processed = true;
3020
3021 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3022 if (dump_file && (dump_flags & TDF_DETAILS)
3023 && !SSA_NAME_IS_DEFAULT_DEF (use))
3024 {
3025 fprintf (dump_file, "Value numbering ");
3026 print_generic_expr (dump_file, use, 0);
3027 fprintf (dump_file, " stmt = ");
3028 print_gimple_stmt (dump_file, stmt, 0, 0);
3029 }
3030
3031 /* Handle uninitialized uses. */
3032 if (SSA_NAME_IS_DEFAULT_DEF (use))
3033 changed = set_ssa_val_to (use, use);
3034 else
3035 {
3036 if (gimple_code (stmt) == GIMPLE_PHI)
3037 changed = visit_phi (stmt);
3038 else if (!gimple_has_lhs (stmt)
3039 || gimple_has_volatile_ops (stmt)
3040 || stmt_could_throw_p (stmt))
3041 changed = defs_to_varying (stmt);
3042 else if (is_gimple_assign (stmt))
3043 {
3044 enum tree_code code = gimple_assign_rhs_code (stmt);
3045 tree lhs = gimple_assign_lhs (stmt);
3046 tree rhs1 = gimple_assign_rhs1 (stmt);
3047 tree simplified;
3048
3049 /* Shortcut for copies. Simplifying copies is pointless,
3050 since we copy the expression and value they represent. */
3051 if (code == SSA_NAME
3052 && TREE_CODE (lhs) == SSA_NAME)
3053 {
3054 changed = visit_copy (lhs, rhs1);
3055 goto done;
3056 }
3057 simplified = try_to_simplify (stmt);
3058 if (simplified)
3059 {
3060 if (dump_file && (dump_flags & TDF_DETAILS))
3061 {
3062 fprintf (dump_file, "RHS ");
3063 print_gimple_expr (dump_file, stmt, 0, 0);
3064 fprintf (dump_file, " simplified to ");
3065 print_generic_expr (dump_file, simplified, 0);
3066 if (TREE_CODE (lhs) == SSA_NAME)
3067 fprintf (dump_file, " has constants %d\n",
3068 expr_has_constants (simplified));
3069 else
3070 fprintf (dump_file, "\n");
3071 }
3072 }
3073 /* Setting value numbers to constants will occasionally
3074 screw up phi congruence because constants are not
3075 uniquely associated with a single ssa name that can be
3076 looked up. */
3077 if (simplified
3078 && is_gimple_min_invariant (simplified)
3079 && TREE_CODE (lhs) == SSA_NAME)
3080 {
3081 VN_INFO (lhs)->expr = simplified;
3082 VN_INFO (lhs)->has_constants = true;
3083 changed = set_ssa_val_to (lhs, simplified);
3084 goto done;
3085 }
3086 else if (simplified
3087 && TREE_CODE (simplified) == SSA_NAME
3088 && TREE_CODE (lhs) == SSA_NAME)
3089 {
3090 changed = visit_copy (lhs, simplified);
3091 goto done;
3092 }
3093 else if (simplified)
3094 {
3095 if (TREE_CODE (lhs) == SSA_NAME)
3096 {
3097 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3098 /* We have to unshare the expression or else
3099 valueizing may change the IL stream. */
3100 VN_INFO (lhs)->expr = unshare_expr (simplified);
3101 }
3102 }
3103 else if (stmt_has_constants (stmt)
3104 && TREE_CODE (lhs) == SSA_NAME)
3105 VN_INFO (lhs)->has_constants = true;
3106 else if (TREE_CODE (lhs) == SSA_NAME)
3107 {
3108 /* We reset expr and constantness here because we may
3109 have been value numbering optimistically, and
3110 iterating. They may become non-constant in this case,
3111 even if they were optimistically constant. */
3112
3113 VN_INFO (lhs)->has_constants = false;
3114 VN_INFO (lhs)->expr = NULL_TREE;
3115 }
3116
3117 if ((TREE_CODE (lhs) == SSA_NAME
3118 /* We can substitute SSA_NAMEs that are live over
3119 abnormal edges with their constant value. */
3120 && !(gimple_assign_copy_p (stmt)
3121 && is_gimple_min_invariant (rhs1))
3122 && !(simplified
3123 && is_gimple_min_invariant (simplified))
3124 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3125 /* Stores or copies from SSA_NAMEs that are live over
3126 abnormal edges are a problem. */
3127 || (code == SSA_NAME
3128 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3129 changed = defs_to_varying (stmt);
3130 else if (REFERENCE_CLASS_P (lhs)
3131 || DECL_P (lhs))
3132 changed = visit_reference_op_store (lhs, rhs1, stmt);
3133 else if (TREE_CODE (lhs) == SSA_NAME)
3134 {
3135 if ((gimple_assign_copy_p (stmt)
3136 && is_gimple_min_invariant (rhs1))
3137 || (simplified
3138 && is_gimple_min_invariant (simplified)))
3139 {
3140 VN_INFO (lhs)->has_constants = true;
3141 if (simplified)
3142 changed = set_ssa_val_to (lhs, simplified);
3143 else
3144 changed = set_ssa_val_to (lhs, rhs1);
3145 }
3146 else
3147 {
3148 switch (get_gimple_rhs_class (code))
3149 {
3150 case GIMPLE_UNARY_RHS:
3151 case GIMPLE_BINARY_RHS:
3152 case GIMPLE_TERNARY_RHS:
3153 changed = visit_nary_op (lhs, stmt);
3154 break;
3155 case GIMPLE_SINGLE_RHS:
3156 switch (TREE_CODE_CLASS (code))
3157 {
3158 case tcc_reference:
3159 /* VOP-less references can go through the unary case. */
3160 if ((code == REALPART_EXPR
3161 || code == IMAGPART_EXPR
3162 || code == VIEW_CONVERT_EXPR)
3163 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3164 {
3165 changed = visit_nary_op (lhs, stmt);
3166 break;
3167 }
3168 /* Fallthrough. */
3169 case tcc_declaration:
3170 changed = visit_reference_op_load (lhs, rhs1, stmt);
3171 break;
3172 default:
3173 if (code == ADDR_EXPR)
3174 {
3175 changed = visit_nary_op (lhs, stmt);
3176 break;
3177 }
3178 else if (code == CONSTRUCTOR)
3179 {
3180 changed = visit_nary_op (lhs, stmt);
3181 break;
3182 }
3183 changed = defs_to_varying (stmt);
3184 }
3185 break;
3186 default:
3187 changed = defs_to_varying (stmt);
3188 break;
3189 }
3190 }
3191 }
3192 else
3193 changed = defs_to_varying (stmt);
3194 }
3195 else if (is_gimple_call (stmt))
3196 {
3197 tree lhs = gimple_call_lhs (stmt);
3198
3199 /* ??? We could try to simplify calls. */
3200
3201 if (stmt_has_constants (stmt)
3202 && TREE_CODE (lhs) == SSA_NAME)
3203 VN_INFO (lhs)->has_constants = true;
3204 else if (TREE_CODE (lhs) == SSA_NAME)
3205 {
3206 /* We reset expr and constantness here because we may
3207 have been value numbering optimistically, and
3208 iterating. They may become non-constant in this case,
3209 even if they were optimistically constant. */
3210 VN_INFO (lhs)->has_constants = false;
3211 VN_INFO (lhs)->expr = NULL_TREE;
3212 }
3213
3214 if (TREE_CODE (lhs) == SSA_NAME
3215 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3216 changed = defs_to_varying (stmt);
3217 /* ??? We should handle stores from calls. */
3218 else if (TREE_CODE (lhs) == SSA_NAME)
3219 {
3220 if (!gimple_call_internal_p (stmt)
3221 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3222 changed = visit_reference_op_call (lhs, stmt);
3223 else
3224 changed = defs_to_varying (stmt);
3225 }
3226 else
3227 changed = defs_to_varying (stmt);
3228 }
3229 }
3230 done:
3231 return changed;
3232 }
3233
3234 /* Compare two operands by reverse postorder index. */
3235
3236 static int
3237 compare_ops (const void *pa, const void *pb)
3238 {
3239 const tree opa = *((const tree *)pa);
3240 const tree opb = *((const tree *)pb);
3241 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3242 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3243 basic_block bba;
3244 basic_block bbb;
3245
3246 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3247 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3248 else if (gimple_nop_p (opstmta))
3249 return -1;
3250 else if (gimple_nop_p (opstmtb))
3251 return 1;
3252
3253 bba = gimple_bb (opstmta);
3254 bbb = gimple_bb (opstmtb);
3255
3256 if (!bba && !bbb)
3257 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3258 else if (!bba)
3259 return -1;
3260 else if (!bbb)
3261 return 1;
3262
3263 if (bba == bbb)
3264 {
3265 if (gimple_code (opstmta) == GIMPLE_PHI
3266 && gimple_code (opstmtb) == GIMPLE_PHI)
3267 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3268 else if (gimple_code (opstmta) == GIMPLE_PHI)
3269 return -1;
3270 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3271 return 1;
3272 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3273 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3274 else
3275 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3276 }
3277 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3278 }
3279
3280 /* Sort an array containing members of a strongly connected component
3281 SCC so that the members are ordered by RPO number.
3282 This means that when the sort is complete, iterating through the
3283 array will give you the members in RPO order. */
3284
3285 static void
3286 sort_scc (VEC (tree, heap) *scc)
3287 {
3288 VEC_qsort (tree, scc, compare_ops);
3289 }
3290
3291 /* Insert the no longer used nary ONARY into the hash INFO. */
3292
3293 static void
3294 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3295 {
3296 size_t size = sizeof_vn_nary_op (onary->length);
3297 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3298 &info->nary_obstack);
3299 memcpy (nary, onary, size);
3300 vn_nary_op_insert_into (nary, info->nary, false);
3301 }
3302
3303 /* Insert the no longer used phi OPHI into the hash INFO. */
3304
3305 static void
3306 copy_phi (vn_phi_t ophi, vn_tables_t info)
3307 {
3308 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3309 void **slot;
3310 memcpy (phi, ophi, sizeof (*phi));
3311 ophi->phiargs = NULL;
3312 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3313 gcc_assert (!*slot);
3314 *slot = phi;
3315 }
3316
3317 /* Insert the no longer used reference OREF into the hash INFO. */
3318
3319 static void
3320 copy_reference (vn_reference_t oref, vn_tables_t info)
3321 {
3322 vn_reference_t ref;
3323 void **slot;
3324 ref = (vn_reference_t) pool_alloc (info->references_pool);
3325 memcpy (ref, oref, sizeof (*ref));
3326 oref->operands = NULL;
3327 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3328 INSERT);
3329 if (*slot)
3330 free_reference (*slot);
3331 *slot = ref;
3332 }
3333
3334 /* Process a strongly connected component in the SSA graph. */
3335
3336 static void
3337 process_scc (VEC (tree, heap) *scc)
3338 {
3339 tree var;
3340 unsigned int i;
3341 unsigned int iterations = 0;
3342 bool changed = true;
3343 htab_iterator hi;
3344 vn_nary_op_t nary;
3345 vn_phi_t phi;
3346 vn_reference_t ref;
3347
3348 /* If the SCC has a single member, just visit it. */
3349 if (VEC_length (tree, scc) == 1)
3350 {
3351 tree use = VEC_index (tree, scc, 0);
3352 if (VN_INFO (use)->use_processed)
3353 return;
3354 /* We need to make sure it doesn't form a cycle itself, which can
3355 happen for self-referential PHI nodes. In that case we would
3356 end up inserting an expression with VN_TOP operands into the
3357 valid table which makes us derive bogus equivalences later.
3358 The cheapest way to check this is to assume it for all PHI nodes. */
3359 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3360 /* Fallthru to iteration. */ ;
3361 else
3362 {
3363 visit_use (use);
3364 return;
3365 }
3366 }
3367
3368 /* Iterate over the SCC with the optimistic table until it stops
3369 changing. */
3370 current_info = optimistic_info;
3371 while (changed)
3372 {
3373 changed = false;
3374 iterations++;
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3376 fprintf (dump_file, "Starting iteration %d\n", iterations);
3377 /* As we are value-numbering optimistically we have to
3378 clear the expression tables and the simplified expressions
3379 in each iteration until we converge. */
3380 htab_empty (optimistic_info->nary);
3381 htab_empty (optimistic_info->phis);
3382 htab_empty (optimistic_info->references);
3383 obstack_free (&optimistic_info->nary_obstack, NULL);
3384 gcc_obstack_init (&optimistic_info->nary_obstack);
3385 empty_alloc_pool (optimistic_info->phis_pool);
3386 empty_alloc_pool (optimistic_info->references_pool);
3387 FOR_EACH_VEC_ELT (tree, scc, i, var)
3388 VN_INFO (var)->expr = NULL_TREE;
3389 FOR_EACH_VEC_ELT (tree, scc, i, var)
3390 changed |= visit_use (var);
3391 }
3392
3393 statistics_histogram_event (cfun, "SCC iterations", iterations);
3394
3395 /* Finally, copy the contents of the no longer used optimistic
3396 table to the valid table. */
3397 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3398 copy_nary (nary, valid_info);
3399 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3400 copy_phi (phi, valid_info);
3401 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3402 copy_reference (ref, valid_info);
3403
3404 current_info = valid_info;
3405 }
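
/* For example (made-up names), for the cycle

     i_1 = PHI <0(2), i_3(4)>
     i_3 = i_1 + 0;

   the optimistic assumption that the back edge value is VN_TOP lets
   the first iteration value-number i_1 and then i_3 to the constant
   0; the second iteration reproduces that result, nothing changes,
   and the converged optimistic table is copied to the valid one.  */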
3406
3407 DEF_VEC_O(ssa_op_iter);
3408 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3409
3410 /* Pop the components of the found SCC for NAME off the SCC stack
3411 and process them. Returns true if all went well, false if
3412 we ran into resource limits. */
3413
3414 static bool
3415 extract_and_process_scc_for_name (tree name)
3416 {
3417 VEC (tree, heap) *scc = NULL;
3418 tree x;
3419
3420 /* Found an SCC, pop the components off the SCC stack and
3421 process them. */
3422 do
3423 {
3424 x = VEC_pop (tree, sccstack);
3425
3426 VN_INFO (x)->on_sccstack = false;
3427 VEC_safe_push (tree, heap, scc, x);
3428 } while (x != name);
3429
3430 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3431 if (VEC_length (tree, scc)
3432 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3433 {
3434 if (dump_file)
3435 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3436 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3437 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3438 return false;
3439 }
3440
3441 if (VEC_length (tree, scc) > 1)
3442 sort_scc (scc);
3443
3444 if (dump_file && (dump_flags & TDF_DETAILS))
3445 print_scc (dump_file, scc);
3446
3447 process_scc (scc);
3448
3449 VEC_free (tree, heap, scc);
3450
3451 return true;
3452 }
3453
3454 /* Depth first search on NAME to discover and process SCC's in the SSA
3455 graph.
3456 Execution of this algorithm relies on the fact that the SCC's are
3457 popped off the stack in topological order.
3458 Returns true if successful, false if we stopped processing SCC's due
3459 to resource constraints. */
3460
3461 static bool
3462 DFS (tree name)
3463 {
3464 VEC(ssa_op_iter, heap) *itervec = NULL;
3465 VEC(tree, heap) *namevec = NULL;
3466 use_operand_p usep = NULL;
3467 gimple defstmt;
3468 tree use;
3469 ssa_op_iter iter;
3470
3471 start_over:
3472 /* SCC info */
3473 VN_INFO (name)->dfsnum = next_dfs_num++;
3474 VN_INFO (name)->visited = true;
3475 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3476
3477 VEC_safe_push (tree, heap, sccstack, name);
3478 VN_INFO (name)->on_sccstack = true;
3479 defstmt = SSA_NAME_DEF_STMT (name);
3480
3481 /* Recursively DFS on our operands, looking for SCC's. */
3482 if (!gimple_nop_p (defstmt))
3483 {
3484 /* Push a new iterator. */
3485 if (gimple_code (defstmt) == GIMPLE_PHI)
3486 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3487 else
3488 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3489 }
3490 else
3491 clear_and_done_ssa_iter (&iter);
3492
3493 while (1)
3494 {
3495 /* If we are done processing uses of a name, go up the stack
3496 of iterators and process SCCs as we found them. */
3497 if (op_iter_done (&iter))
3498 {
3499 /* See if we found an SCC. */
3500 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3501 if (!extract_and_process_scc_for_name (name))
3502 {
3503 VEC_free (tree, heap, namevec);
3504 VEC_free (ssa_op_iter, heap, itervec);
3505 return false;
3506 }
3507
3508 /* Check if we are done. */
3509 if (VEC_empty (tree, namevec))
3510 {
3511 VEC_free (tree, heap, namevec);
3512 VEC_free (ssa_op_iter, heap, itervec);
3513 return true;
3514 }
3515
3516 /* Restore the last use walker and continue walking there. */
3517 use = name;
3518 name = VEC_pop (tree, namevec);
3519 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3520 sizeof (ssa_op_iter));
3521 VEC_pop (ssa_op_iter, itervec);
3522 goto continue_walking;
3523 }
3524
3525 use = USE_FROM_PTR (usep);
3526
3527 /* Since we handle phi nodes, we will sometimes get
3528 invariants in the use expression. */
3529 if (TREE_CODE (use) == SSA_NAME)
3530 {
3531 if (! (VN_INFO (use)->visited))
3532 {
3533 /* Recurse by pushing the current use walking state on
3534 the stack and starting over. */
3535 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3536 VEC_safe_push(tree, heap, namevec, name);
3537 name = use;
3538 goto start_over;
3539
3540 continue_walking:
3541 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3542 VN_INFO (use)->low);
3543 }
3544 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3545 && VN_INFO (use)->on_sccstack)
3546 {
3547 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3548 VN_INFO (name)->low);
3549 }
3550 }
3551
3552 usep = op_iter_next_use (&iter);
3553 }
3554 }
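
/* For example (made-up names), for

     x_1 = PHI <y_2(3), 0(2)>
     y_2 = x_1 + 1;

   a DFS started at y_2 visits x_1, sees the use of y_2 which is
   still on the SCC stack, and propagates its dfsnum into the low
   values; back at y_2, low equals dfsnum, so {x_1, y_2} is popped as
   one SCC and handed to process_scc.  */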
3555
3556 /* Allocate a value number table. */
3557
3558 static void
3559 allocate_vn_table (vn_tables_t table)
3560 {
3561 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3562 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3563 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3564 free_reference);
3565
3566 gcc_obstack_init (&table->nary_obstack);
3567 table->phis_pool = create_alloc_pool ("VN phis",
3568 sizeof (struct vn_phi_s),
3569 30);
3570 table->references_pool = create_alloc_pool ("VN references",
3571 sizeof (struct vn_reference_s),
3572 30);
3573 }
3574
3575 /* Free a value number table. */
3576
3577 static void
3578 free_vn_table (vn_tables_t table)
3579 {
3580 htab_delete (table->phis);
3581 htab_delete (table->nary);
3582 htab_delete (table->references);
3583 obstack_free (&table->nary_obstack, NULL);
3584 free_alloc_pool (table->phis_pool);
3585 free_alloc_pool (table->references_pool);
3586 }
3587
3588 static void
3589 init_scc_vn (void)
3590 {
3591 size_t i;
3592 int j;
3593 int *rpo_numbers_temp;
3594
3595 calculate_dominance_info (CDI_DOMINATORS);
3596 sccstack = NULL;
3597 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3598 free);
3599
3600 constant_value_ids = BITMAP_ALLOC (NULL);
3601
3602 next_dfs_num = 1;
3603 next_value_id = 1;
3604
3605 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3606 /* VEC_alloc doesn't actually grow it to the right size; it just
3607 preallocates the space to do so. */
3608 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3609 gcc_obstack_init (&vn_ssa_aux_obstack);
3610
3611 shared_lookup_phiargs = NULL;
3612 shared_lookup_references = NULL;
3613 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3614 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3615 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3616
3617 /* RPO_NUMBERS_TEMP is an array in RPO order: rpo_numbers_temp[i] = bb means that
3618 the i'th block in RPO order is bb. We want to map bb's to RPO
3619 numbers, so we need to rearrange this array. */
3620 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3621 rpo_numbers[rpo_numbers_temp[j]] = j;
3622
3623 XDELETE (rpo_numbers_temp);
3624
3625 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3626
3627 /* Create the VN_INFO structures, and initialize value numbers to
3628 TOP. */
3629 for (i = 0; i < num_ssa_names; i++)
3630 {
3631 tree name = ssa_name (i);
3632 if (name)
3633 {
3634 VN_INFO_GET (name)->valnum = VN_TOP;
3635 VN_INFO (name)->expr = NULL_TREE;
3636 VN_INFO (name)->value_id = 0;
3637 }
3638 }
3639
3640 renumber_gimple_stmt_uids ();
3641
3642 /* Create the valid and optimistic value numbering tables. */
3643 valid_info = XCNEW (struct vn_tables_s);
3644 allocate_vn_table (valid_info);
3645 optimistic_info = XCNEW (struct vn_tables_s);
3646 allocate_vn_table (optimistic_info);
3647 }
3648
3649 void
3650 free_scc_vn (void)
3651 {
3652 size_t i;
3653
3654 htab_delete (constant_to_value_id);
3655 BITMAP_FREE (constant_value_ids);
3656 VEC_free (tree, heap, shared_lookup_phiargs);
3657 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3658 XDELETEVEC (rpo_numbers);
3659
3660 for (i = 0; i < num_ssa_names; i++)
3661 {
3662 tree name = ssa_name (i);
3663 if (name
3664 && VN_INFO (name)->needs_insertion)
3665 release_ssa_name (name);
3666 }
3667 obstack_free (&vn_ssa_aux_obstack, NULL);
3668 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3669
3670 VEC_free (tree, heap, sccstack);
3671 free_vn_table (valid_info);
3672 XDELETE (valid_info);
3673 free_vn_table (optimistic_info);
3674 XDELETE (optimistic_info);
3675 }

/* Set *ID according to RESULT if RESULT is an SSA name or a constant;
   otherwise leave *ID untouched.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
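
/* For instance, with RESULT being an SSA name x_3, *ID becomes
   VN_INFO (x_3)->value_id; with RESULT being the constant 42 it
   becomes the shared value id for 42 from the constant_to_value_id
   table.  The name x_3 is made up for illustration.  */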

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}
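
/* Note that this runs once, after the value id propagation in
   run_scc_vn has reached its fixpoint, so the ids recorded in the
   hash tables are the final ones.  */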

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies how
   the alias oracle is used when walking memory statements during
   the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

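  /* A parameter's default definition has no defining statement, so
     there is nothing to value number it against; it can only be
     value numbered to itself.  */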
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = gimple_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }
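
  /* Iteration is needed because the loop above copies ids in SSA
     name order, which need not follow the valnum chains: if chains
     are not fully collapsed (say a_1's valnum is b_2 and b_2's
     valnum is c_3), the first pass may copy b_2's id to a_1 before
     b_2 itself receives c_3's id.  The names here are made up for
     illustration.  */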

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
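
/* Illustrative caller sketch (not part of this file): a client pass
   such as FRE/PRE in tree-ssa-pre.c does roughly

     if (!run_scc_vn (VN_WALK))
       return 0;
     ... use the value numbers and value ids ...
     free_scc_vn ();

   bailing out when SCCVN hits its resource limits.  VN_WALK stands
   in for one of the vn_lookup_kind values from tree-ssa-sccvn.h.  */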

/* Return the maximum value id we have ever seen.  Since value ids
   are handed out starting from 1, this is one more than the largest
   id returned by get_next_value_id so far, which makes it suitable
   for sizing structures indexed by value id.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
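
/* Usage sketch (illustrative): a client keeping per-value-id data
   would size its array with the first function and mint fresh ids
   with the second:

     unsigned int *data = XCNEWVEC (unsigned int, get_max_value_id () + 1);
     unsigned int id = get_next_value_id ();

   The variable names here are made up for illustration.  */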


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
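
/* For example, two distinct INTEGER_CST nodes both holding 42
   compare equal via operand_equal_p, a NULL_TREE against a non-null
   tree returns false, and two NULL_TREEs hit the e1 == e2 case and
   return true.  */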

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
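
/* For example, a signed PLUS_EXPR in a TYPE_OVERFLOW_TRAPS type
   (e.g. with -ftrapv) may trap, and a TRUNC_DIV_EXPR whose second
   operand is not known to be nonzero may trap, while an unsigned
   addition never does.  These cases follow from
   operation_could_trap_helper_p; the operand loop at the end
   additionally catches operands that may themselves trap, such as
   memory references that can fault.  */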