/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
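
/* As an illustrative sketch (not part of the implementation): in a
   loop with the cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   the names i_1 and i_2 form one SCC in the SSA graph.  The SCC walk
   optimistically assumes i_2 has the same value as i_1 and iterates
   the cycle with the optimistic hashtable until the value numbers no
   longer change, which here proves both names have the value 0.  */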

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

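/* Usage sketch (illustrative, not code from this file): after
   VN_INFO_GET has been called for an SSA name, the pass reads and
   writes its current value number through the aux table, e.g.

     VN_INFO_GET (name)->valnum = VN_TOP;
     ...
     tree val = SSA_VAL (name);   (same as VN_INFO (name)->valnum)

   so SSA_VAL is the canonical shorthand used throughout this file to
   ask what value a name currently has.  */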

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
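
/* For example (an illustration, not tested code): if x_3 has value
   number y_2 and y_2's defining statement is y_2 = a_1 + 1, then
   vn_get_expr_for (x_3) builds the expression a_1 + 1 through the
   tcc_binary case above and caches it, so later simplification can
   fold against it.  */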

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if none exists, allocate a new one.
   Return the value id in either case.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
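
/* Usage sketch (hypothetical): constants share one value-id space with
   SSA names, so e.g.

     unsigned int vid = get_or_alloc_constant_value_id (integer_zero_node);
     gcc_assert (value_id_constant_p (vid));

   lets later phases test with value_id_constant_p whether a value id
   denotes a constant without inspecting any tree.  */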

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care about differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
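
/* The offset accumulation above means syntactically different paths to
   the same bytes hash alike.  For instance (illustrative only), for a
   struct with field b at byte offset 8, the operand strings for a.b
   and MEM[(struct s *)&a + 8] both contribute the accumulated constant
   offset 8 rather than their individual component operands, so the two
   references can land in the same bucket and be unified by
   vn_reference_eq.  */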

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
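
/* As a sketch of the resulting encoding (illustrative only): for the
   reference a.b[i_1] this produces, outermost first,

     { ARRAY_REF, op0 = i_1, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { MEM_REF, op0 = 0 }
     { ADDR_EXPR, op0 = &a }

   where the decl a has been canonicalized to MEM[&a + 0] as described
   in the VAR_DECL case above.  */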

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
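
/* For example (illustrative only): given the operand pair encoding
   MEM[&s.f + 4] where field f starts at byte 8 in s, the folding above
   rewrites it to MEM[&s + 12], moving the constant field offset into
   the MEM_REF offset so equal addresses get equal operand strings.  */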

/* Fold an address SSA name at position *I_P in a vn_reference_op_s
   vector *OPS by looking through its defining statement.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
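
/* As a sketch (illustrative only): with

     p_1 = &a;
     q_2 = p_1 + 4;
     ... = MEM[q_2];

   the operands for MEM[q_2] are first rewritten to MEM[p_1 + 4] via
   the POINTER_PLUS_EXPR case; valueizing p_1 then yields the address
   &a, and the recursion into vn_reference_fold_indirect produces
   MEM[&a + 4], matching direct references to the same bytes.  */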

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
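
/* Two concrete instances (illustrative only): a lookup whose operands
   encode __builtin_strlen ("abc") folds to 3 through the builtin path,
   and a lookup of "abc"[1] folds to the character constant 'b' through
   the constant-string path above.  */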

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 4) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  The source
         byte is the destination byte translated from the lhs pointer
         to the rhs pointer.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
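
/* As a sketch of case 3 above (illustrative only): for

     a = b;          <- def_stmt, an aggregate copy that kills a
     x_1 = a.f;      <- the reference being looked up

   the lookup of a.f is rewritten into a lookup of b.f and the walk
   continues from the copy's VUSE, so an earlier store to b.f (or a
   load from it) can supply the value for x_1.  */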

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
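
/* Usage sketch (hypothetical, not code from this file): a client pass
   eliminating a redundant load would typically do

     vn_reference_t vres;
     tree val = vn_reference_lookup (rhs, gimple_vuse (stmt),
                                     VN_WALKREWRITE, &vres);
     if (val)
       ... replace the load with VAL ...

   where VN_NOWALK instead restricts the lookup to the hash tables and
   skips the alias walk entirely.  */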
1801
1802
1803 /* Insert OP into the current hash table with a value number of
1804 RESULT, and return the resulting reference structure we created. */
1805
1806 vn_reference_t
1807 vn_reference_insert (tree op, tree result, tree vuse)
1808 {
1809 void **slot;
1810 vn_reference_t vr1;
1811
1812 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1813 if (TREE_CODE (result) == SSA_NAME)
1814 vr1->value_id = VN_INFO (result)->value_id;
1815 else
1816 vr1->value_id = get_or_alloc_constant_value_id (result);
1817 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1818 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1819 vr1->type = TREE_TYPE (op);
1820 vr1->set = get_alias_set (op);
1821 vr1->hashcode = vn_reference_compute_hash (vr1);
1822 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1823
1824 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1825 INSERT);
1826
1827 /* Because we lookup stores using vuses, and value number failures
1828 using the vdefs (see visit_reference_op_store for how and why),
1829 it's possible that on failure we may try to insert an already
1830 inserted store.  This is not wrong; there is no SSA name for a
1831 store that we could use as a differentiator anyway. Thus, unlike
1832 the other lookup functions, you cannot gcc_assert (!*slot)
1833 here. */
1834
1835 /* But free the old slot in case of a collision. */
1836 if (*slot)
1837 free_reference (*slot);
1838
1839 *slot = vr1;
1840 return vr1;
1841 }
1842
1843 /* Insert a reference by its pieces into the current hash table with
1844 a value number of RESULT. Return the resulting reference
1845 structure we created. */
1846
1847 vn_reference_t
1848 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1849 VEC (vn_reference_op_s, heap) *operands,
1850 tree result, unsigned int value_id)
1851
1852 {
1853 void **slot;
1854 vn_reference_t vr1;
1855
1856 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1857 vr1->value_id = value_id;
1858 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1859 vr1->operands = valueize_refs (operands);
1860 vr1->type = type;
1861 vr1->set = set;
1862 vr1->hashcode = vn_reference_compute_hash (vr1);
1863 if (result && TREE_CODE (result) == SSA_NAME)
1864 result = SSA_VAL (result);
1865 vr1->result = result;
1866
1867 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1868 INSERT);
1869
1870 /* At this point we should have all the things inserted that we have
1871 seen before, and we should never try inserting something that
1872 already exists. */
1873 gcc_assert (!*slot);
1874 if (*slot)
1875 free_reference (*slot);
1876
1877 *slot = vr1;
1878 return vr1;
1879 }
1880
1881 /* Compute and return the hash value for nary operation VNO1. */
1882
1883 hashval_t
1884 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1885 {
1886 hashval_t hash;
1887 unsigned i;
1888
1889 for (i = 0; i < vno1->length; ++i)
1890 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1891 vno1->op[i] = SSA_VAL (vno1->op[i]);
1892
1893 if (vno1->length == 2
1894 && commutative_tree_code (vno1->opcode)
1895 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1896 {
1897 tree temp = vno1->op[0];
1898 vno1->op[0] = vno1->op[1];
1899 vno1->op[1] = temp;
1900 }
1901
1902 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1903 for (i = 0; i < vno1->length; ++i)
1904 hash = iterative_hash_expr (vno1->op[i], hash);
1905
1906 return hash;
1907 }
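
/* The commutative canonicalization above ensures that, for example,

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   end up with identical operand arrays and hence identical hashes,
   so the second expression is found in the table and y_4 receives
   the value number of x_1.  */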
1908
1909 /* Return the computed hashcode for nary operation P1. */
1910
1911 static hashval_t
1912 vn_nary_op_hash (const void *p1)
1913 {
1914 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1915 return vno1->hashcode;
1916 }
1917
1918 /* Compare nary operations P1 and P2 and return true if they are
1919 equivalent. */
1920
1921 int
1922 vn_nary_op_eq (const void *p1, const void *p2)
1923 {
1924 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1925 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1926 unsigned i;
1927
1928 if (vno1->hashcode != vno2->hashcode)
1929 return false;
1930
1931 if (vno1->length != vno2->length)
1932 return false;
1933
1934 if (vno1->opcode != vno2->opcode
1935 || !types_compatible_p (vno1->type, vno2->type))
1936 return false;
1937
1938 for (i = 0; i < vno1->length; ++i)
1939 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1940 return false;
1941
1942 return true;
1943 }
1944
1945 /* Initialize VNO from the pieces provided. */
1946
1947 static void
1948 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1949 enum tree_code code, tree type, tree *ops)
1950 {
1951 vno->opcode = code;
1952 vno->length = length;
1953 vno->type = type;
1954 memcpy (&vno->op[0], ops, sizeof (tree) * length);
1955 }
1956
1957 /* Initialize VNO from OP. */
1958
1959 static void
1960 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1961 {
1962 unsigned i;
1963
1964 vno->opcode = TREE_CODE (op);
1965 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1966 vno->type = TREE_TYPE (op);
1967 for (i = 0; i < vno->length; ++i)
1968 vno->op[i] = TREE_OPERAND (op, i);
1969 }
1970
1971 /* Return the number of operands for a vn_nary ops structure from STMT. */
1972
1973 static unsigned int
1974 vn_nary_length_from_stmt (gimple stmt)
1975 {
1976 switch (gimple_assign_rhs_code (stmt))
1977 {
1978 case REALPART_EXPR:
1979 case IMAGPART_EXPR:
1980 case VIEW_CONVERT_EXPR:
1981 return 1;
1982
1983 case CONSTRUCTOR:
1984 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
1985
1986 default:
1987 return gimple_num_ops (stmt) - 1;
1988 }
1989 }
1990
1991 /* Initialize VNO from STMT. */
1992
1993 static void
1994 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1995 {
1996 unsigned i;
1997
1998 vno->opcode = gimple_assign_rhs_code (stmt);
1999 vno->type = gimple_expr_type (stmt);
2000 switch (vno->opcode)
2001 {
2002 case REALPART_EXPR:
2003 case IMAGPART_EXPR:
2004 case VIEW_CONVERT_EXPR:
2005 vno->length = 1;
2006 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2007 break;
2008
2009 case CONSTRUCTOR:
2010 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2011 for (i = 0; i < vno->length; ++i)
2012 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2013 break;
2014
2015 default:
2016 vno->length = gimple_num_ops (stmt) - 1;
2017 for (i = 0; i < vno->length; ++i)
2018 vno->op[i] = gimple_op (stmt, i + 1);
2019 }
2020 }
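
/* For example, the assignment

     x_1 = a_2 * b_3;

   is encoded as opcode MULT_EXPR with op[] = { a_2, b_3 }, while

     x_1 = REALPART_EXPR <c_4>;

   strips the wrapper and records just c_4 as the single operand.  */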
2021
2022 /* Compute the hashcode for VNO and look for it in the hash table;
2023 return the resulting value number if it exists in the hash table.
2024 Return NULL_TREE if it does not exist in the hash table or if the
2025 result field of the operation is NULL. VNRESULT will contain the
2026 vn_nary_op_t from the hashtable if it exists. */
2027
2028 static tree
2029 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2030 {
2031 void **slot;
2032
2033 if (vnresult)
2034 *vnresult = NULL;
2035
2036 vno->hashcode = vn_nary_op_compute_hash (vno);
2037 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2038 NO_INSERT);
2039 if (!slot && current_info == optimistic_info)
2040 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2041 NO_INSERT);
2042 if (!slot)
2043 return NULL_TREE;
2044 if (vnresult)
2045 *vnresult = (vn_nary_op_t)*slot;
2046 return ((vn_nary_op_t)*slot)->result;
2047 }
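
/* Note the fallback above: while iterating an SCC with the
   optimistic table, a miss there retries the valid table, so
   expressions value numbered by earlier, already finished SCCs
   are still found from within the current iteration.  */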
2048
2049 /* Lookup an n-ary operation by its pieces and return the resulting value
2050 number if it exists in the hash table. Return NULL_TREE if it does
2051 not exist in the hash table or if the result field of the operation
2052 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2053 if it exists. */
2054
2055 tree
2056 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2057 tree type, tree *ops, vn_nary_op_t *vnresult)
2058 {
2059 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2060 sizeof_vn_nary_op (length));
2061 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2062 return vn_nary_op_lookup_1 (vno1, vnresult);
2063 }
2064
2065 /* Lookup OP in the current hash table, and return the resulting value
2066 number if it exists in the hash table. Return NULL_TREE if it does
2067 not exist in the hash table or if the result field of the operation
2068 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2069 if it exists. */
2070
2071 tree
2072 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2073 {
2074 vn_nary_op_t vno1
2075 = XALLOCAVAR (struct vn_nary_op_s,
2076 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2077 init_vn_nary_op_from_op (vno1, op);
2078 return vn_nary_op_lookup_1 (vno1, vnresult);
2079 }
2080
2081 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2082 value number if it exists in the hash table. Return NULL_TREE if
2083 it does not exist in the hash table. VNRESULT will contain the
2084 vn_nary_op_t from the hashtable if it exists. */
2085
2086 tree
2087 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2088 {
2089 vn_nary_op_t vno1
2090 = XALLOCAVAR (struct vn_nary_op_s,
2091 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2092 init_vn_nary_op_from_stmt (vno1, stmt);
2093 return vn_nary_op_lookup_1 (vno1, vnresult);
2094 }
2095
2096 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2097
2098 static vn_nary_op_t
2099 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2100 {
2101 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2102 }
2103
2104 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2105 obstack. */
2106
2107 static vn_nary_op_t
2108 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2109 {
2110 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2111 &current_info->nary_obstack);
2112
2113 vno1->value_id = value_id;
2114 vno1->length = length;
2115 vno1->result = result;
2116
2117 return vno1;
2118 }
2119
2120 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2121 VNO->HASHCODE first. */
2122
2123 static vn_nary_op_t
2124 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2125 {
2126 void **slot;
2127
2128 if (compute_hash)
2129 vno->hashcode = vn_nary_op_compute_hash (vno);
2130
2131 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2132 gcc_assert (!*slot);
2133
2134 *slot = vno;
2135 return vno;
2136 }
2137
2138 /* Insert an n-ary operation into the current hash table using its
2139 pieces. Return the vn_nary_op_t structure we created and put in
2140 the hashtable. */
2141
2142 vn_nary_op_t
2143 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2144 tree type, tree *ops,
2145 tree result, unsigned int value_id)
2146 {
2147 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2148 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2149 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2150 }
2151
2152 /* Insert OP into the current hash table with a value number of
2153 RESULT. Return the vn_nary_op_t structure we created and put in
2154 the hashtable. */
2155
2156 vn_nary_op_t
2157 vn_nary_op_insert (tree op, tree result)
2158 {
2159 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2160 vn_nary_op_t vno1;
2161
2162 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2163 init_vn_nary_op_from_op (vno1, op);
2164 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2165 }
2166
2167 /* Insert the rhs of STMT into the current hash table with a value number of
2168 RESULT. */
2169
2170 vn_nary_op_t
2171 vn_nary_op_insert_stmt (gimple stmt, tree result)
2172 {
2173 vn_nary_op_t vno1
2174 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2175 result, VN_INFO (result)->value_id);
2176 init_vn_nary_op_from_stmt (vno1, stmt);
2177 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2178 }
2179
2180 /* Compute a hashcode for PHI operation VP1 and return it. */
2181
2182 static inline hashval_t
2183 vn_phi_compute_hash (vn_phi_t vp1)
2184 {
2185 hashval_t result;
2186 int i;
2187 tree phi1op;
2188 tree type;
2189
2190 result = vp1->block->index;
2191
2192 /* If all PHI arguments are constants we need to distinguish
2193 the PHI node via its type. */
2194 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2195 result += (INTEGRAL_TYPE_P (type)
2196 + (INTEGRAL_TYPE_P (type)
2197 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2198
2199 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2200 {
2201 if (phi1op == VN_TOP)
2202 continue;
2203 result = iterative_hash_expr (phi1op, result);
2204 }
2205
2206 return result;
2207 }
2208
2209 /* Return the computed hashcode for phi operation P1. */
2210
2211 static hashval_t
2212 vn_phi_hash (const void *p1)
2213 {
2214 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2215 return vp1->hashcode;
2216 }
2217
2218 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2219
2220 static int
2221 vn_phi_eq (const void *p1, const void *p2)
2222 {
2223 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2224 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2225
2226 if (vp1->hashcode != vp2->hashcode)
2227 return false;
2228
2229 if (vp1->block == vp2->block)
2230 {
2231 int i;
2232 tree phi1op;
2233
2234 /* If the PHI nodes do not have compatible types
2235 they are not the same. */
2236 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2237 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2238 return false;
2239
2240 /* Any phi in the same block will have its arguments in the
2241 same edge order, because of how we store phi nodes. */
2242 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2243 {
2244 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2245 if (phi1op == VN_TOP || phi2op == VN_TOP)
2246 continue;
2247 if (!expressions_equal_p (phi1op, phi2op))
2248 return false;
2249 }
2250 return true;
2251 }
2252 return false;
2253 }
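
/* Since VN_TOP arguments are skipped, e.g. the two PHIs

     x_1 = PHI <a_2 (3), VN_TOP (4)>
     y_5 = PHI <a_2 (3), b_6 (4)>

   in the same block compare equal: VN_TOP stands for a not yet
   visited argument and is optimistically assumed to match b_6.  */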
2254
2255 static VEC(tree, heap) *shared_lookup_phiargs;
2256
2257 /* Lookup PHI in the current hash table, and return the resulting
2258 value number if it exists in the hash table. Return NULL_TREE if
2259 it does not exist in the hash table. */
2260
2261 static tree
2262 vn_phi_lookup (gimple phi)
2263 {
2264 void **slot;
2265 struct vn_phi_s vp1;
2266 unsigned i;
2267
2268 VEC_truncate (tree, shared_lookup_phiargs, 0);
2269
2270 /* Canonicalize the SSA_NAMEs to their value numbers. */
2271 for (i = 0; i < gimple_phi_num_args (phi); i++)
2272 {
2273 tree def = PHI_ARG_DEF (phi, i);
2274 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2275 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2276 }
2277 vp1.phiargs = shared_lookup_phiargs;
2278 vp1.block = gimple_bb (phi);
2279 vp1.hashcode = vn_phi_compute_hash (&vp1);
2280 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2281 NO_INSERT);
2282 if (!slot && current_info == optimistic_info)
2283 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2284 NO_INSERT);
2285 if (!slot)
2286 return NULL_TREE;
2287 return ((vn_phi_t)*slot)->result;
2288 }
2289
2290 /* Insert PHI into the current hash table with a value number of
2291 RESULT. */
2292
2293 static vn_phi_t
2294 vn_phi_insert (gimple phi, tree result)
2295 {
2296 void **slot;
2297 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2298 unsigned i;
2299 VEC (tree, heap) *args = NULL;
2300
2301 /* Canonicalize the SSA_NAMEs to their value numbers. */
2302 for (i = 0; i < gimple_phi_num_args (phi); i++)
2303 {
2304 tree def = PHI_ARG_DEF (phi, i);
2305 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2306 VEC_safe_push (tree, heap, args, def);
2307 }
2308 vp1->value_id = VN_INFO (result)->value_id;
2309 vp1->phiargs = args;
2310 vp1->block = gimple_bb (phi);
2311 vp1->result = result;
2312 vp1->hashcode = vn_phi_compute_hash (vp1);
2313
2314 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2315 INSERT);
2316
2317 /* Because we iterate over phi operations more than once, it's
2318 possible the slot might already exist here, hence no assert.  */
2319 *slot = vp1;
2320 return vp1;
2321 }
2322
2323
2324 /* Print set of components in strongly connected component SCC to OUT. */
2325
2326 static void
2327 print_scc (FILE *out, VEC (tree, heap) *scc)
2328 {
2329 tree var;
2330 unsigned int i;
2331
2332 fprintf (out, "SCC consists of: ");
2333 FOR_EACH_VEC_ELT (tree, scc, i, var)
2334 {
2335 print_generic_expr (out, var, 0);
2336 fprintf (out, " ");
2337 }
2338 fprintf (out, "\n");
2339 }
2340
2341 /* Set the value number of FROM to TO, return true if it has changed
2342 as a result. */
2343
2344 static inline bool
2345 set_ssa_val_to (tree from, tree to)
2346 {
2347 tree currval = SSA_VAL (from);
2348
2349 if (from != to)
2350 {
2351 if (currval == from)
2352 {
2353 if (dump_file && (dump_flags & TDF_DETAILS))
2354 {
2355 fprintf (dump_file, "Not changing value number of ");
2356 print_generic_expr (dump_file, from, 0);
2357 fprintf (dump_file, " from VARYING to ");
2358 print_generic_expr (dump_file, to, 0);
2359 fprintf (dump_file, "\n");
2360 }
2361 return false;
2362 }
2363 else if (TREE_CODE (to) == SSA_NAME
2364 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2365 to = from;
2366 }
2367
2368 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2369 and invariants. So assert that here. */
2370 gcc_assert (to != NULL_TREE
2371 && (to == VN_TOP
2372 || TREE_CODE (to) == SSA_NAME
2373 || is_gimple_min_invariant (to)));
2374
2375 if (dump_file && (dump_flags & TDF_DETAILS))
2376 {
2377 fprintf (dump_file, "Setting value number of ");
2378 print_generic_expr (dump_file, from, 0);
2379 fprintf (dump_file, " to ");
2380 print_generic_expr (dump_file, to, 0);
2381 }
2382
2383 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2384 {
2385 VN_INFO (from)->valnum = to;
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2387 fprintf (dump_file, " (changed)\n");
2388 return true;
2389 }
2390 if (dump_file && (dump_flags & TDF_DETAILS))
2391 fprintf (dump_file, "\n");
2392 return false;
2393 }
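
/* A name's value thus only moves down the lattice, e.g.

     SSA_VAL (x_1):  VN_TOP  ->  y_2  ->  x_1 (varying)

   and the early return above keeps a name that already values to
   itself (the varying state) from being raised again.  */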
2394
2395 /* Value number all definitions in STMT to themselves.
2396 Return true if a value number changed. */
2397
2398 static bool
2399 defs_to_varying (gimple stmt)
2400 {
2401 bool changed = false;
2402 ssa_op_iter iter;
2403 def_operand_p defp;
2404
2405 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2406 {
2407 tree def = DEF_FROM_PTR (defp);
2408
2409 VN_INFO (def)->use_processed = true;
2410 changed |= set_ssa_val_to (def, def);
2411 }
2412 return changed;
2413 }
2414
2415 static bool expr_has_constants (tree expr);
2416 static tree valueize_expr (tree expr);
2417
2418 /* Visit a copy between LHS and RHS, return true if the value number
2419 changed. */
2420
2421 static bool
2422 visit_copy (tree lhs, tree rhs)
2423 {
2424 /* Follow chains of copies to their destination. */
2425 while (TREE_CODE (rhs) == SSA_NAME
2426 && SSA_VAL (rhs) != rhs)
2427 rhs = SSA_VAL (rhs);
2428
2429 /* The copy may have a more interesting constant-filled expression
2430 (we don't, since we know our RHS is just an SSA name). */
2431 if (TREE_CODE (rhs) == SSA_NAME)
2432 {
2433 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2434 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2435 }
2436
2437 return set_ssa_val_to (lhs, rhs);
2438 }
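
/* E.g. for the copy chain

     b_2 = a_1;
     c_3 = b_2;

   visiting c_3 follows SSA_VAL (b_2) to a_1, so both copies are
   valued directly to a_1 instead of to each other.  */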
2439
2440 /* Visit a nary operator RHS, value number it, and return true if the
2441 value number of LHS has changed as a result. */
2442
2443 static bool
2444 visit_nary_op (tree lhs, gimple stmt)
2445 {
2446 bool changed = false;
2447 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2448
2449 if (result)
2450 changed = set_ssa_val_to (lhs, result);
2451 else
2452 {
2453 changed = set_ssa_val_to (lhs, lhs);
2454 vn_nary_op_insert_stmt (stmt, lhs);
2455 }
2456
2457 return changed;
2458 }
2459
2460 /* Visit a call STMT storing into LHS. Return true if the value number
2461 of the LHS has changed as a result. */
2462
2463 static bool
2464 visit_reference_op_call (tree lhs, gimple stmt)
2465 {
2466 bool changed = false;
2467 struct vn_reference_s vr1;
2468 tree result;
2469 tree vuse = gimple_vuse (stmt);
2470
2471 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2472 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2473 vr1.type = gimple_expr_type (stmt);
2474 vr1.set = 0;
2475 vr1.hashcode = vn_reference_compute_hash (&vr1);
2476 result = vn_reference_lookup_1 (&vr1, NULL);
2477 if (result)
2478 {
2479 changed = set_ssa_val_to (lhs, result);
2480 if (TREE_CODE (result) == SSA_NAME
2481 && VN_INFO (result)->has_constants)
2482 VN_INFO (lhs)->has_constants = true;
2483 }
2484 else
2485 {
2486 void **slot;
2487 vn_reference_t vr2;
2488 changed = set_ssa_val_to (lhs, lhs);
2489 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2490 vr2->vuse = vr1.vuse;
2491 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2492 vr2->type = vr1.type;
2493 vr2->set = vr1.set;
2494 vr2->hashcode = vr1.hashcode;
2495 vr2->result = lhs;
2496 slot = htab_find_slot_with_hash (current_info->references,
2497 vr2, vr2->hashcode, INSERT);
2498 if (*slot)
2499 free_reference (*slot);
2500 *slot = vr2;
2501 }
2502
2503 return changed;
2504 }
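
/* Two calls to a const or pure function with the same arguments and
   the same incoming memory state hash identically, e.g. (sketch)

     x_1 = f (a_2);   <-- VUSE .MEM_5
     y_3 = f (a_2);   <-- VUSE .MEM_5

   so the second lookup succeeds and y_3 is valued to x_1.  Only
   ECF_CONST/ECF_PURE calls get here, see visit_use.  */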
2505
2506 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2507 and return true if the value number of the LHS has changed as a result. */
2508
2509 static bool
2510 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2511 {
2512 bool changed = false;
2513 tree last_vuse;
2514 tree result;
2515
2516 last_vuse = gimple_vuse (stmt);
2517 last_vuse_ptr = &last_vuse;
2518 result = vn_reference_lookup (op, gimple_vuse (stmt),
2519 default_vn_walk_kind, NULL);
2520 last_vuse_ptr = NULL;
2521
2522 /* If we have a VCE, try looking up its operand as it might be stored in
2523 a different type. */
2524 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2525 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2526 default_vn_walk_kind, NULL);
2527
2528 /* We handle type-punning through unions by value-numbering based
2529 on offset and size of the access. Be prepared to handle a
2530 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2531 if (result
2532 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2533 {
2534 /* We will be setting the value number of lhs to the value number
2535 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2536 So first simplify and lookup this expression to see if it
2537 is already available. */
2538 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2539 if ((CONVERT_EXPR_P (val)
2540 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2541 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2542 {
2543 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2544 if ((CONVERT_EXPR_P (tem)
2545 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2546 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2547 TREE_TYPE (val), tem)))
2548 val = tem;
2549 }
2550 result = val;
2551 if (!is_gimple_min_invariant (val)
2552 && TREE_CODE (val) != SSA_NAME)
2553 result = vn_nary_op_lookup (val, NULL);
2554 /* If the expression is not yet available, value-number lhs to
2555 a new SSA_NAME we create. */
2556 if (!result)
2557 {
2558 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2559 /* Initialize value-number information properly. */
2560 VN_INFO_GET (result)->valnum = result;
2561 VN_INFO (result)->value_id = get_next_value_id ();
2562 VN_INFO (result)->expr = val;
2563 VN_INFO (result)->has_constants = expr_has_constants (val);
2564 VN_INFO (result)->needs_insertion = true;
2565 /* As all "inserted" statements are singleton SCCs, insert
2566 to the valid table. This is strictly needed to
2567 avoid re-generating new value SSA_NAMEs for the same
2568 expression during SCC iteration over and over (the
2569 optimistic table gets cleared after each iteration).
2570 We do not need to insert into the optimistic table, as
2571 lookups there will fall back to the valid table. */
2572 if (current_info == optimistic_info)
2573 {
2574 current_info = valid_info;
2575 vn_nary_op_insert (val, result);
2576 current_info = optimistic_info;
2577 }
2578 else
2579 vn_nary_op_insert (val, result);
2580 if (dump_file && (dump_flags & TDF_DETAILS))
2581 {
2582 fprintf (dump_file, "Inserting name ");
2583 print_generic_expr (dump_file, result, 0);
2584 fprintf (dump_file, " for expression ");
2585 print_generic_expr (dump_file, val, 0);
2586 fprintf (dump_file, "\n");
2587 }
2588 }
2589 }
2590
2591 if (result)
2592 {
2593 changed = set_ssa_val_to (lhs, result);
2594 if (TREE_CODE (result) == SSA_NAME
2595 && VN_INFO (result)->has_constants)
2596 {
2597 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2598 VN_INFO (lhs)->has_constants = true;
2599 }
2600 }
2601 else
2602 {
2603 changed = set_ssa_val_to (lhs, lhs);
2604 vn_reference_insert (op, lhs, last_vuse);
2605 }
2606
2607 return changed;
2608 }
2609
2610
2611 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2612 and return true if the value number of the LHS has changed as a result. */
2613
2614 static bool
2615 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2616 {
2617 bool changed = false;
2618 tree result;
2619 bool resultsame = false;
2620
2621 /* First we want to lookup using the *vuses* from the store and see
2622 if the last store to this location with the same address
2623 had the same value.
2624
2625 The vuses represent the memory state before the store. If the
2626 memory state, address, and value of the store is the same as the
2627 last store to this location, then this store will produce the
2628 same memory state as that store.
2629
2630 In this case the vdef versions for this store are value numbered to those
2631 vuse versions, since they represent the same memory state after
2632 this store.
2633
2634 Otherwise, the vdefs for the store are used when inserting into
2635 the table, since the store generates a new memory state. */
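
/* A sketch of the redundant store case:

     *p_1 = x_2;   <-- VUSE .MEM_3, VDEF .MEM_4
     ... no intervening store ...
     *p_1 = x_2;   <-- VUSE .MEM_4, VDEF .MEM_5

   The first store was inserted keyed on its vdef .MEM_4, so the
   second lookup (using its vuse .MEM_4) finds value x_2 and .MEM_5
   is value numbered to .MEM_4 below.  */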
2636
2637 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2638
2639 if (result)
2640 {
2641 if (TREE_CODE (result) == SSA_NAME)
2642 result = SSA_VAL (result);
2643 if (TREE_CODE (op) == SSA_NAME)
2644 op = SSA_VAL (op);
2645 resultsame = expressions_equal_p (result, op);
2646 }
2647
2648 if (!result || !resultsame)
2649 {
2650 tree vdef;
2651
2652 if (dump_file && (dump_flags & TDF_DETAILS))
2653 {
2654 fprintf (dump_file, "No store match\n");
2655 fprintf (dump_file, "Value numbering store ");
2656 print_generic_expr (dump_file, lhs, 0);
2657 fprintf (dump_file, " to ");
2658 print_generic_expr (dump_file, op, 0);
2659 fprintf (dump_file, "\n");
2660 }
2661 /* Have to set value numbers before insert, since insert is
2662 going to valueize the references in-place. */
2663 if ((vdef = gimple_vdef (stmt)))
2664 {
2665 VN_INFO (vdef)->use_processed = true;
2666 changed |= set_ssa_val_to (vdef, vdef);
2667 }
2668
2669 /* Do not insert structure copies into the tables. */
2670 if (is_gimple_min_invariant (op)
2671 || is_gimple_reg (op))
2672 vn_reference_insert (lhs, op, vdef);
2673 }
2674 else
2675 {
2676 /* We had a match, so value number the vdef to have the value
2677 number of the vuse it came from. */
2678 tree def, use;
2679
2680 if (dump_file && (dump_flags & TDF_DETAILS))
2681 fprintf (dump_file, "Store matched earlier value,"
2682 "value numbering store vdefs to matching vuses.\n");
2683
2684 def = gimple_vdef (stmt);
2685 use = gimple_vuse (stmt);
2686
2687 VN_INFO (def)->use_processed = true;
2688 changed |= set_ssa_val_to (def, SSA_VAL (use));
2689 }
2690
2691 return changed;
2692 }
2693
2694 /* Visit and value number PHI, return true if the value number
2695 changed. */
2696
2697 static bool
2698 visit_phi (gimple phi)
2699 {
2700 bool changed = false;
2701 tree result;
2702 tree sameval = VN_TOP;
2703 bool allsame = true;
2704 unsigned i;
2705
2706 /* TODO: We could check for this in init_scc_vn, and replace this
2707 with a gcc_assert. */
2708 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2709 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2710
2711 /* See if all non-TOP arguments have the same value. TOP is
2712 equivalent to everything, so we can ignore it. */
2713 for (i = 0; i < gimple_phi_num_args (phi); i++)
2714 {
2715 tree def = PHI_ARG_DEF (phi, i);
2716
2717 if (TREE_CODE (def) == SSA_NAME)
2718 def = SSA_VAL (def);
2719 if (def == VN_TOP)
2720 continue;
2721 if (sameval == VN_TOP)
2722 {
2723 sameval = def;
2724 }
2725 else
2726 {
2727 if (!expressions_equal_p (def, sameval))
2728 {
2729 allsame = false;
2730 break;
2731 }
2732 }
2733 }
2734
2735 /* If all value numbered to the same value, the phi node has that
2736 value. */
2737 if (allsame)
2738 {
2739 if (is_gimple_min_invariant (sameval))
2740 {
2741 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2742 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2743 }
2744 else
2745 {
2746 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2747 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2748 }
2749
2750 if (TREE_CODE (sameval) == SSA_NAME)
2751 return visit_copy (PHI_RESULT (phi), sameval);
2752
2753 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2754 }
2755
2756 /* Otherwise, see if it is equivalent to a phi node in this block. */
2757 result = vn_phi_lookup (phi);
2758 if (result)
2759 {
2760 if (TREE_CODE (result) == SSA_NAME)
2761 changed = visit_copy (PHI_RESULT (phi), result);
2762 else
2763 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2764 }
2765 else
2766 {
2767 vn_phi_insert (phi, PHI_RESULT (phi));
2768 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2769 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2770 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2771 }
2772
2773 return changed;
2774 }
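
/* E.g. the PHI

     x_1 = PHI <a_2 (3), a_2 (4), VN_TOP (5)>

   has all non-TOP arguments equal, so x_1 is valued directly to
   a_2; only PHIs whose arguments genuinely differ fall through to
   the vn_phi_lookup congruence test.  */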
2775
2776 /* Return true if EXPR contains constants. */
2777
2778 static bool
2779 expr_has_constants (tree expr)
2780 {
2781 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2782 {
2783 case tcc_unary:
2784 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2785
2786 case tcc_binary:
2787 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2788 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2789 /* Constants inside reference ops are rarely interesting, but
2790 it can take a lot of looking to find them. */
2791 case tcc_reference:
2792 case tcc_declaration:
2793 return false;
2794 default:
2795 return is_gimple_min_invariant (expr);
2796 }
2797 return false;
2798 }
2799
2800 /* Return true if STMT contains constants. */
2801
2802 static bool
2803 stmt_has_constants (gimple stmt)
2804 {
2805 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2806 return false;
2807
2808 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2809 {
2810 case GIMPLE_UNARY_RHS:
2811 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2812
2813 case GIMPLE_BINARY_RHS:
2814 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2815 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2816 case GIMPLE_TERNARY_RHS:
2817 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2818 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2819 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2820 case GIMPLE_SINGLE_RHS:
2821 /* Constants inside reference ops are rarely interesting, but
2822 it can take a lot of looking to find them. */
2823 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2824 default:
2825 gcc_unreachable ();
2826 }
2827 return false;
2828 }
2829
2830 /* Replace SSA_NAMES in expr with their value numbers, and return the
2831 result.
2832 This is performed in place. */
2833
2834 static tree
2835 valueize_expr (tree expr)
2836 {
2837 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2838 {
2839 case tcc_binary:
2840 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2841 /* Fallthru. */
2842 case tcc_unary:
2843 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2844 break;
2845 default:;
2846 }
2847 return expr;
2848 }
2849
2850 /* Simplify the binary expression RHS, and return the result if
2851 simplified. */
2852
2853 static tree
2854 simplify_binary_expression (gimple stmt)
2855 {
2856 tree result = NULL_TREE;
2857 tree op0 = gimple_assign_rhs1 (stmt);
2858 tree op1 = gimple_assign_rhs2 (stmt);
2859 enum tree_code code = gimple_assign_rhs_code (stmt);
2860
2861 /* This will not catch every single case we could combine, but will
2862 catch those with constants. The goal here is to simultaneously
2863 combine constants between expressions, but avoid infinite
2864 expansion of expressions during simplification. */
2865 if (TREE_CODE (op0) == SSA_NAME)
2866 {
2867 if (VN_INFO (op0)->has_constants
2868 || TREE_CODE_CLASS (code) == tcc_comparison
2869 || code == COMPLEX_EXPR)
2870 op0 = valueize_expr (vn_get_expr_for (op0));
2871 else
2872 op0 = vn_valueize (op0);
2873 }
2874
2875 if (TREE_CODE (op1) == SSA_NAME)
2876 {
2877 if (VN_INFO (op1)->has_constants
2878 || code == COMPLEX_EXPR)
2879 op1 = valueize_expr (vn_get_expr_for (op1));
2880 else
2881 op1 = vn_valueize (op1);
2882 }
2883
2884 /* Pointer plus constant can be represented as invariant address.
2885 Do so to allow further propagation, see also tree forwprop. */
2886 if (code == POINTER_PLUS_EXPR
2887 && host_integerp (op1, 1)
2888 && TREE_CODE (op0) == ADDR_EXPR
2889 && is_gimple_min_invariant (op0))
2890 return build_invariant_address (TREE_TYPE (op0),
2891 TREE_OPERAND (op0, 0),
2892 TREE_INT_CST_LOW (op1));
2893
2894 /* Avoid folding if nothing changed. */
2895 if (op0 == gimple_assign_rhs1 (stmt)
2896 && op1 == gimple_assign_rhs2 (stmt))
2897 return NULL_TREE;
2898
2899 fold_defer_overflow_warnings ();
2900
2901 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
2902 if (result)
2903 STRIP_USELESS_TYPE_CONVERSION (result);
2904
2905 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2906 stmt, 0);
2907
2908 /* Make sure result is not a complex expression consisting
2909 of operators of operators (i.e. (a + b) + (a + c)).
2910 Otherwise, we will end up with unbounded expressions if
2911 fold does anything at all. */
2912 if (result && valid_gimple_rhs_p (result))
2913 return result;
2914
2915 return NULL_TREE;
2916 }
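
/* The POINTER_PLUS_EXPR case above turns, for instance,

     q_1 = &a p+ 4;

   into an invariant address of the form &MEM[(void *)&a + 4B]
   (dump syntax approximate), which can then propagate like any
   other constant.  */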
2917
2918 /* Simplify the unary expression RHS, and return the result if
2919 simplified. */
2920
2921 static tree
2922 simplify_unary_expression (gimple stmt)
2923 {
2924 tree result = NULL_TREE;
2925 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2926 enum tree_code code = gimple_assign_rhs_code (stmt);
2927
2928 /* We handle some tcc_reference codes here that are all
2929 GIMPLE_ASSIGN_SINGLE codes. */
2930 if (code == REALPART_EXPR
2931 || code == IMAGPART_EXPR
2932 || code == VIEW_CONVERT_EXPR
2933 || code == BIT_FIELD_REF)
2934 op0 = TREE_OPERAND (op0, 0);
2935
2936 if (TREE_CODE (op0) != SSA_NAME)
2937 return NULL_TREE;
2938
2939 orig_op0 = op0;
2940 if (VN_INFO (op0)->has_constants)
2941 op0 = valueize_expr (vn_get_expr_for (op0));
2942 else if (CONVERT_EXPR_CODE_P (code)
2943 || code == REALPART_EXPR
2944 || code == IMAGPART_EXPR
2945 || code == VIEW_CONVERT_EXPR
2946 || code == BIT_FIELD_REF)
2947 {
2948 /* We want to do tree-combining on conversion-like expressions.
2949 Make sure we feed only SSA_NAMEs or constants to fold though. */
2950 tree tem = valueize_expr (vn_get_expr_for (op0));
2951 if (UNARY_CLASS_P (tem)
2952 || BINARY_CLASS_P (tem)
2953 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2954 || TREE_CODE (tem) == SSA_NAME
2955 || TREE_CODE (tem) == CONSTRUCTOR
2956 || is_gimple_min_invariant (tem))
2957 op0 = tem;
2958 }
2959
2960 /* Avoid folding if nothing changed, but remember the expression. */
2961 if (op0 == orig_op0)
2962 return NULL_TREE;
2963
2964 if (code == BIT_FIELD_REF)
2965 {
2966 tree rhs = gimple_assign_rhs1 (stmt);
2967 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
2968 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
2969 }
2970 else
2971 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
2972 if (result)
2973 {
2974 STRIP_USELESS_TYPE_CONVERSION (result);
2975 if (valid_gimple_rhs_p (result))
2976 return result;
2977 }
2978
2979 return NULL_TREE;
2980 }
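
/* The conversion combining above handles chains such as

     t_2 = (short int) x_1;
     y_3 = (int) t_2;

   where looking through vn_get_expr_for (t_2) lets fold see the
   composed conversion (int) (short int) x_1 and possibly reduce
   it to a single conversion.  */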
2981
2982 /* Try to simplify RHS using equivalences and constant folding. */
2983
2984 static tree
2985 try_to_simplify (gimple stmt)
2986 {
2987 enum tree_code code = gimple_assign_rhs_code (stmt);
2988 tree tem;
2989
2990 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2991 in this case, there is no point in doing extra work. */
2992 if (code == SSA_NAME)
2993 return NULL_TREE;
2994
2995 /* First try constant folding based on our current lattice. */
2996 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
2997 if (tem
2998 && (TREE_CODE (tem) == SSA_NAME
2999 || is_gimple_min_invariant (tem)))
3000 return tem;
3001
3002 /* If that didn't work try combining multiple statements. */
3003 switch (TREE_CODE_CLASS (code))
3004 {
3005 case tcc_reference:
3006 /* Fallthrough for some unary codes that can operate on registers. */
3007 if (!(code == REALPART_EXPR
3008 || code == IMAGPART_EXPR
3009 || code == VIEW_CONVERT_EXPR
3010 || code == BIT_FIELD_REF))
3011 break;
3012 /* We could do a little more with unary ops, if they expand
3013 into binary ops, but it's debatable whether it is worth it. */
3014 case tcc_unary:
3015 return simplify_unary_expression (stmt);
3016
3017 case tcc_comparison:
3018 case tcc_binary:
3019 return simplify_binary_expression (stmt);
3020
3021 default:
3022 break;
3023 }
3024
3025 return NULL_TREE;
3026 }
3027
3028 /* Visit and value number USE, return true if the value number
3029 changed. */
3030
3031 static bool
3032 visit_use (tree use)
3033 {
3034 bool changed = false;
3035 gimple stmt = SSA_NAME_DEF_STMT (use);
3036
3037 VN_INFO (use)->use_processed = true;
3038
3039 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3040 if (dump_file && (dump_flags & TDF_DETAILS)
3041 && !SSA_NAME_IS_DEFAULT_DEF (use))
3042 {
3043 fprintf (dump_file, "Value numbering ");
3044 print_generic_expr (dump_file, use, 0);
3045 fprintf (dump_file, " stmt = ");
3046 print_gimple_stmt (dump_file, stmt, 0, 0);
3047 }
3048
3049 /* Handle uninitialized uses. */
3050 if (SSA_NAME_IS_DEFAULT_DEF (use))
3051 changed = set_ssa_val_to (use, use);
3052 else
3053 {
3054 if (gimple_code (stmt) == GIMPLE_PHI)
3055 changed = visit_phi (stmt);
3056 else if (!gimple_has_lhs (stmt)
3057 || gimple_has_volatile_ops (stmt)
3058 || stmt_could_throw_p (stmt))
3059 changed = defs_to_varying (stmt);
3060 else if (is_gimple_assign (stmt))
3061 {
3062 enum tree_code code = gimple_assign_rhs_code (stmt);
3063 tree lhs = gimple_assign_lhs (stmt);
3064 tree rhs1 = gimple_assign_rhs1 (stmt);
3065 tree simplified;
3066
3067 /* Shortcut for copies. Simplifying copies is pointless,
3068 since we copy the expression and value they represent. */
3069 if (code == SSA_NAME
3070 && TREE_CODE (lhs) == SSA_NAME)
3071 {
3072 changed = visit_copy (lhs, rhs1);
3073 goto done;
3074 }
3075 simplified = try_to_simplify (stmt);
3076 if (simplified)
3077 {
3078 if (dump_file && (dump_flags & TDF_DETAILS))
3079 {
3080 fprintf (dump_file, "RHS ");
3081 print_gimple_expr (dump_file, stmt, 0, 0);
3082 fprintf (dump_file, " simplified to ");
3083 print_generic_expr (dump_file, simplified, 0);
3084 if (TREE_CODE (lhs) == SSA_NAME)
3085 fprintf (dump_file, " has constants %d\n",
3086 expr_has_constants (simplified));
3087 else
3088 fprintf (dump_file, "\n");
3089 }
3090 }
3091 /* Setting value numbers to constants will occasionally
3092 screw up phi congruence because constants are not
3093 uniquely associated with a single ssa name that can be
3094 looked up. */
3095 if (simplified
3096 && is_gimple_min_invariant (simplified)
3097 && TREE_CODE (lhs) == SSA_NAME)
3098 {
3099 VN_INFO (lhs)->expr = simplified;
3100 VN_INFO (lhs)->has_constants = true;
3101 changed = set_ssa_val_to (lhs, simplified);
3102 goto done;
3103 }
3104 else if (simplified
3105 && TREE_CODE (simplified) == SSA_NAME
3106 && TREE_CODE (lhs) == SSA_NAME)
3107 {
3108 changed = visit_copy (lhs, simplified);
3109 goto done;
3110 }
3111 else if (simplified)
3112 {
3113 if (TREE_CODE (lhs) == SSA_NAME)
3114 {
3115 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3116 /* We have to unshare the expression or else
3117 valueizing may change the IL stream. */
3118 VN_INFO (lhs)->expr = unshare_expr (simplified);
3119 }
3120 }
3121 else if (stmt_has_constants (stmt)
3122 && TREE_CODE (lhs) == SSA_NAME)
3123 VN_INFO (lhs)->has_constants = true;
3124 else if (TREE_CODE (lhs) == SSA_NAME)
3125 {
3126 /* We reset expr and constantness here because we may
3127 have been value numbering optimistically, and
3128 iterating. They may become non-constant in this case,
3129 even if they were optimistically constant. */
3130
3131 VN_INFO (lhs)->has_constants = false;
3132 VN_INFO (lhs)->expr = NULL_TREE;
3133 }
3134
3135 if ((TREE_CODE (lhs) == SSA_NAME
3136 /* We can substitute SSA_NAMEs that are live over
3137 abnormal edges with their constant value. */
3138 && !(gimple_assign_copy_p (stmt)
3139 && is_gimple_min_invariant (rhs1))
3140 && !(simplified
3141 && is_gimple_min_invariant (simplified))
3142 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3143 /* Stores or copies from SSA_NAMEs that are live over
3144 abnormal edges are a problem. */
3145 || (code == SSA_NAME
3146 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3147 changed = defs_to_varying (stmt);
3148 else if (REFERENCE_CLASS_P (lhs)
3149 || DECL_P (lhs))
3150 changed = visit_reference_op_store (lhs, rhs1, stmt);
3151 else if (TREE_CODE (lhs) == SSA_NAME)
3152 {
3153 if ((gimple_assign_copy_p (stmt)
3154 && is_gimple_min_invariant (rhs1))
3155 || (simplified
3156 && is_gimple_min_invariant (simplified)))
3157 {
3158 VN_INFO (lhs)->has_constants = true;
3159 if (simplified)
3160 changed = set_ssa_val_to (lhs, simplified);
3161 else
3162 changed = set_ssa_val_to (lhs, rhs1);
3163 }
3164 else
3165 {
3166 switch (get_gimple_rhs_class (code))
3167 {
3168 case GIMPLE_UNARY_RHS:
3169 case GIMPLE_BINARY_RHS:
3170 case GIMPLE_TERNARY_RHS:
3171 changed = visit_nary_op (lhs, stmt);
3172 break;
3173 case GIMPLE_SINGLE_RHS:
3174 switch (TREE_CODE_CLASS (code))
3175 {
3176 case tcc_reference:
3177 /* VOP-less references can go through unary case. */
3178 if ((code == REALPART_EXPR
3179 || code == IMAGPART_EXPR
3180 || code == VIEW_CONVERT_EXPR
3181 || code == BIT_FIELD_REF)
3182 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3183 {
3184 changed = visit_nary_op (lhs, stmt);
3185 break;
3186 }
3187 /* Fallthrough. */
3188 case tcc_declaration:
3189 changed = visit_reference_op_load (lhs, rhs1, stmt);
3190 break;
3191 default:
3192 if (code == ADDR_EXPR)
3193 {
3194 changed = visit_nary_op (lhs, stmt);
3195 break;
3196 }
3197 else if (code == CONSTRUCTOR)
3198 {
3199 changed = visit_nary_op (lhs, stmt);
3200 break;
3201 }
3202 changed = defs_to_varying (stmt);
3203 }
3204 break;
3205 default:
3206 changed = defs_to_varying (stmt);
3207 break;
3208 }
3209 }
3210 }
3211 else
3212 changed = defs_to_varying (stmt);
3213 }
3214 else if (is_gimple_call (stmt))
3215 {
3216 tree lhs = gimple_call_lhs (stmt);
3217
3218 /* ??? We could try to simplify calls. */
3219
3220 if (stmt_has_constants (stmt)
3221 && TREE_CODE (lhs) == SSA_NAME)
3222 VN_INFO (lhs)->has_constants = true;
3223 else if (TREE_CODE (lhs) == SSA_NAME)
3224 {
3225 /* We reset expr and constantness here because we may
3226 have been value numbering optimistically, and
3227 iterating. They may become non-constant in this case,
3228 even if they were optimistically constant. */
3229 VN_INFO (lhs)->has_constants = false;
3230 VN_INFO (lhs)->expr = NULL_TREE;
3231 }
3232
3233 if (TREE_CODE (lhs) == SSA_NAME
3234 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3235 changed = defs_to_varying (stmt);
3236 /* ??? We should handle stores from calls. */
3237 else if (TREE_CODE (lhs) == SSA_NAME)
3238 {
3239 if (!gimple_call_internal_p (stmt)
3240 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3241 changed = visit_reference_op_call (lhs, stmt);
3242 else
3243 changed = defs_to_varying (stmt);
3244 }
3245 else
3246 changed = defs_to_varying (stmt);
3247 }
3248 }
3249 done:
3250 return changed;
3251 }
3252
3253 /* Compare two operands by reverse postorder index. */
3254
3255 static int
3256 compare_ops (const void *pa, const void *pb)
3257 {
3258 const tree opa = *((const tree *)pa);
3259 const tree opb = *((const tree *)pb);
3260 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3261 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3262 basic_block bba;
3263 basic_block bbb;
3264
3265 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3266 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3267 else if (gimple_nop_p (opstmta))
3268 return -1;
3269 else if (gimple_nop_p (opstmtb))
3270 return 1;
3271
3272 bba = gimple_bb (opstmta);
3273 bbb = gimple_bb (opstmtb);
3274
3275 if (!bba && !bbb)
3276 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3277 else if (!bba)
3278 return -1;
3279 else if (!bbb)
3280 return 1;
3281
3282 if (bba == bbb)
3283 {
3284 if (gimple_code (opstmta) == GIMPLE_PHI
3285 && gimple_code (opstmtb) == GIMPLE_PHI)
3286 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3287 else if (gimple_code (opstmta) == GIMPLE_PHI)
3288 return -1;
3289 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3290 return 1;
3291 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3292 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3293 else
3294 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3295 }
3296 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3297 }
3298
3299 /* Sort an array containing members of a strongly connected component
3300 SCC so that the members are ordered by RPO number.
3301 This means that when the sort is complete, iterating through the
3302 array will give you the members in RPO order. */
3303
3304 static void
3305 sort_scc (VEC (tree, heap) *scc)
3306 {
3307 VEC_qsort (tree, scc, compare_ops);
3308 }
3309
3310 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3311
3312 static void
3313 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3314 {
3315 size_t size = sizeof_vn_nary_op (onary->length);
3316 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3317 &info->nary_obstack);
3318 memcpy (nary, onary, size);
3319 vn_nary_op_insert_into (nary, info->nary, false);
3320 }
3321
3322 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3323
3324 static void
3325 copy_phi (vn_phi_t ophi, vn_tables_t info)
3326 {
3327 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3328 void **slot;
3329 memcpy (phi, ophi, sizeof (*phi));
3330 ophi->phiargs = NULL;
3331 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3332 gcc_assert (!*slot);
3333 *slot = phi;
3334 }
3335
3336 /* Insert the no longer used reference OREF into the hash table of INFO. */
3337
3338 static void
3339 copy_reference (vn_reference_t oref, vn_tables_t info)
3340 {
3341 vn_reference_t ref;
3342 void **slot;
3343 ref = (vn_reference_t) pool_alloc (info->references_pool);
3344 memcpy (ref, oref, sizeof (*ref));
3345 oref->operands = NULL;
3346 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3347 INSERT);
3348 if (*slot)
3349 free_reference (*slot);
3350 *slot = ref;
3351 }
3352
3353 /* Process a strongly connected component in the SSA graph. */
3354
3355 static void
3356 process_scc (VEC (tree, heap) *scc)
3357 {
3358 tree var;
3359 unsigned int i;
3360 unsigned int iterations = 0;
3361 bool changed = true;
3362 htab_iterator hi;
3363 vn_nary_op_t nary;
3364 vn_phi_t phi;
3365 vn_reference_t ref;
3366
3367 /* If the SCC has a single member, just visit it. */
3368 if (VEC_length (tree, scc) == 1)
3369 {
3370 tree use = VEC_index (tree, scc, 0);
3371 if (VN_INFO (use)->use_processed)
3372 return;
3373 /* We need to make sure it doesn't form a cycle itself, which can
3374 happen for self-referential PHI nodes. In that case we would
3375 end up inserting an expression with VN_TOP operands into the
3376 valid table which makes us derive bogus equivalences later.
3377 The cheapest way to check this is to assume it for all PHI nodes. */
3378 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3379 /* Fallthru to iteration. */ ;
3380 else
3381 {
3382 visit_use (use);
3383 return;
3384 }
3385 }
3386
3387 /* Iterate over the SCC with the optimistic table until it stops
3388 changing. */
3389 current_info = optimistic_info;
3390 while (changed)
3391 {
3392 changed = false;
3393 iterations++;
3394 if (dump_file && (dump_flags & TDF_DETAILS))
3395 fprintf (dump_file, "Starting iteration %d\n", iterations);
3396 /* As we are value-numbering optimistically we have to
3397 clear the expression tables and the simplified expressions
3398 in each iteration until we converge. */
3399 htab_empty (optimistic_info->nary);
3400 htab_empty (optimistic_info->phis);
3401 htab_empty (optimistic_info->references);
3402 obstack_free (&optimistic_info->nary_obstack, NULL);
3403 gcc_obstack_init (&optimistic_info->nary_obstack);
3404 empty_alloc_pool (optimistic_info->phis_pool);
3405 empty_alloc_pool (optimistic_info->references_pool);
3406 FOR_EACH_VEC_ELT (tree, scc, i, var)
3407 VN_INFO (var)->expr = NULL_TREE;
3408 FOR_EACH_VEC_ELT (tree, scc, i, var)
3409 changed |= visit_use (var);
3410 }
3411
3412 statistics_histogram_event (cfun, "SCC iterations", iterations);
3413
3414 /* Finally, copy the contents of the no longer used optimistic
3415 table to the valid table. */
3416 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3417 copy_nary (nary, valid_info);
3418 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3419 copy_phi (phi, valid_info);
3420 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3421 copy_reference (ref, valid_info);
3422
3423 current_info = valid_info;
3424 }
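
/* A sketch of why the optimistic iteration converges to better
   results than a single pass:

     i_1 = PHI <1 (entry), i_2 (latch)>
     i_2 = i_1 * 1;

   Iteration 1 sees SSA_VAL (i_2) == VN_TOP, so the PHI
   optimistically gets value 1 and then i_2 = 1 * 1 also values
   to 1.  Iteration 2 re-checks the PHI as <1, 1>, nothing changes,
   and the fixpoint i_1 == i_2 == 1 lands in the valid table.  */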
3425
3426 DEF_VEC_O(ssa_op_iter);
3427 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3428
3429 /* Pop the components of the found SCC for NAME off the SCC stack
3430 and process them. Returns true if all went well, false if
3431 we run into resource limits. */
3432
3433 static bool
3434 extract_and_process_scc_for_name (tree name)
3435 {
3436 VEC (tree, heap) *scc = NULL;
3437 tree x;
3438
3439 /* Found an SCC, pop the components off the SCC stack and
3440 process them. */
3441 do
3442 {
3443 x = VEC_pop (tree, sccstack);
3444
3445 VN_INFO (x)->on_sccstack = false;
3446 VEC_safe_push (tree, heap, scc, x);
3447 } while (x != name);
3448
3449 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3450 if (VEC_length (tree, scc)
3451 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3452 {
3453 if (dump_file)
3454 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3455 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3456 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3457 return false;
3458 }
3459
3460 if (VEC_length (tree, scc) > 1)
3461 sort_scc (scc);
3462
3463 if (dump_file && (dump_flags & TDF_DETAILS))
3464 print_scc (dump_file, scc);
3465
3466 process_scc (scc);
3467
3468 VEC_free (tree, heap, scc);
3469
3470 return true;
3471 }
3472
3473 /* Depth first search on NAME to discover and process SCC's in the SSA
3474 graph.
3475 Execution of this algorithm relies on the fact that the SCC's are
3476 popped off the stack in topological order.
3477 Returns true if successful, false if we stopped processing SCC's due
3478 to resource constraints. */
3479
3480 static bool
3481 DFS (tree name)
3482 {
3483 VEC(ssa_op_iter, heap) *itervec = NULL;
3484 VEC(tree, heap) *namevec = NULL;
3485 use_operand_p usep = NULL;
3486 gimple defstmt;
3487 tree use;
3488 ssa_op_iter iter;
3489
3490 start_over:
3491 /* SCC info */
3492 VN_INFO (name)->dfsnum = next_dfs_num++;
3493 VN_INFO (name)->visited = true;
3494 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3495
3496 VEC_safe_push (tree, heap, sccstack, name);
3497 VN_INFO (name)->on_sccstack = true;
3498 defstmt = SSA_NAME_DEF_STMT (name);
3499
3500 /* Recursively DFS on our operands, looking for SCC's. */
3501 if (!gimple_nop_p (defstmt))
3502 {
3503 /* Push a new iterator. */
3504 if (gimple_code (defstmt) == GIMPLE_PHI)
3505 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3506 else
3507 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3508 }
3509 else
3510 clear_and_done_ssa_iter (&iter);
3511
3512 while (1)
3513 {
3514 /* If we are done processing uses of a name, go up the stack
3515 of iterators and process SCCs as we found them. */
3516 if (op_iter_done (&iter))
3517 {
3518 /* See if we found an SCC. */
3519 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3520 if (!extract_and_process_scc_for_name (name))
3521 {
3522 VEC_free (tree, heap, namevec);
3523 VEC_free (ssa_op_iter, heap, itervec);
3524 return false;
3525 }
3526
3527 /* Check if we are done. */
3528 if (VEC_empty (tree, namevec))
3529 {
3530 VEC_free (tree, heap, namevec);
3531 VEC_free (ssa_op_iter, heap, itervec);
3532 return true;
3533 }
3534
3535 /* Restore the last use walker and continue walking there. */
3536 use = name;
3537 name = VEC_pop (tree, namevec);
3538 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3539 sizeof (ssa_op_iter));
3540 VEC_pop (ssa_op_iter, itervec);
3541 goto continue_walking;
3542 }
3543
3544 use = USE_FROM_PTR (usep);
3545
3546 /* Since we handle phi nodes, we will sometimes get
3547 invariants in the use expression. */
3548 if (TREE_CODE (use) == SSA_NAME)
3549 {
3550 if (! (VN_INFO (use)->visited))
3551 {
3552 /* Recurse by pushing the current use walking state on
3553 the stack and starting over. */
3554 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3555 VEC_safe_push(tree, heap, namevec, name);
3556 name = use;
3557 goto start_over;
3558
3559 continue_walking:
3560 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3561 VN_INFO (use)->low);
3562 }
3563 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3564 && VN_INFO (use)->on_sccstack)
3565 {
3566 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3567 VN_INFO (name)->low);
3568 }
3569 }
3570
3571 usep = op_iter_next_use (&iter);
3572 }
3573 }
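
/* For instance, in

     i_1 = PHI <0 (2), i_2 (3)>
     i_2 = i_1 + 1;

   the walk from i_2 reaches i_1, whose PHI argument leads back to
   i_2; low == dfsnum therefore only holds again at the root, and
   {i_1, i_2} is popped and handed to process_scc as one SCC.  */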
3574
3575 /* Allocate a value number table. */
3576
3577 static void
3578 allocate_vn_table (vn_tables_t table)
3579 {
3580 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3581 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3582 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3583 free_reference);
3584
3585 gcc_obstack_init (&table->nary_obstack);
3586 table->phis_pool = create_alloc_pool ("VN phis",
3587 sizeof (struct vn_phi_s),
3588 30);
3589 table->references_pool = create_alloc_pool ("VN references",
3590 sizeof (struct vn_reference_s),
3591 30);
3592 }
3593
3594 /* Free a value number table. */
3595
3596 static void
3597 free_vn_table (vn_tables_t table)
3598 {
3599 htab_delete (table->phis);
3600 htab_delete (table->nary);
3601 htab_delete (table->references);
3602 obstack_free (&table->nary_obstack, NULL);
3603 free_alloc_pool (table->phis_pool);
3604 free_alloc_pool (table->references_pool);
3605 }
3606
3607 static void
3608 init_scc_vn (void)
3609 {
3610 size_t i;
3611 int j;
3612 int *rpo_numbers_temp;
3613
3614 calculate_dominance_info (CDI_DOMINATORS);
3615 sccstack = NULL;
3616 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3617 free);
3618
3619 constant_value_ids = BITMAP_ALLOC (NULL);
3620
3621 next_dfs_num = 1;
3622 next_value_id = 1;
3623
3624 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3625 /* VEC_alloc doesn't actually grow it to the right size; it just
3626 preallocates the space to do so. */
3627 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3628 gcc_obstack_init (&vn_ssa_aux_obstack);
3629
3630 shared_lookup_phiargs = NULL;
3631 shared_lookup_references = NULL;
3632 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3633 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3634 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3635
3636 /* rpo_numbers_temp holds the RPO order: rpo_numbers_temp[i] == bb->index
3637 means that the i'th block in RPO order is bb.  We want the inverse
3638 map from block index to RPO number, so rearrange into rpo_numbers. */
3639 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3640 rpo_numbers[rpo_numbers_temp[j]] = j;
3641
3642 XDELETE (rpo_numbers_temp);
3643
3644 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3645
3646 /* Create the VN_INFO structures, and initialize value numbers to
3647 TOP. */
3648 for (i = 0; i < num_ssa_names; i++)
3649 {
3650 tree name = ssa_name (i);
3651 if (name)
3652 {
3653 VN_INFO_GET (name)->valnum = VN_TOP;
3654 VN_INFO (name)->expr = NULL_TREE;
3655 VN_INFO (name)->value_id = 0;
3656 }
3657 }
3658
3659 renumber_gimple_stmt_uids ();
3660
3661 /* Create the valid and optimistic value numbering tables. */
3662 valid_info = XCNEW (struct vn_tables_s);
3663 allocate_vn_table (valid_info);
3664 optimistic_info = XCNEW (struct vn_tables_s);
3665 allocate_vn_table (optimistic_info);
3666 }

/* Free the SCC value numbering datastructures allocated by
   init_scc_vn.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* If RESULT is an SSA name or a constant, set *ID to its value id;
   otherwise leave *ID untouched.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	*id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
	*id = get_or_alloc_constant_value_id (result);
    }
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how the alias oracle should be used when walking memory
   references during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Default definitions of parameters have no defining statement,
     so they simply value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = gimple_default_def (cfun, param);
      if (def)
	VN_INFO (def)->valnum = def;
    }
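
  /* For example (illustrative), in

       int f (int n) { return n + n; }

     the parameter N is only known through its default definition
     n_1(D), which therefore gets itself as its value number.  */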

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && !VN_INFO (name)->visited
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids along the valnum chains until they stop
     changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }
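
  /* Illustrative example: given the chain c_3 -> b_2 -> a_1, where
     each name's valnum points at the next and a_1 received a fresh
     value id above, every pass of the loop copies ids at least one
     step along the chain, so the fixpoint is reached after at most
     as many iterations as the longest such chain.  */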

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
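
/* Illustrative usage sketch (the walk kind chosen here is just an
   example): a client pass typically drives SCCVN as

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;
     ... query SSA_VAL () and the vn_*_lookup routines ...
     free_scc_vn ();

   which mirrors how the PRE/FRE passes use these entry points.  */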

/* Return the maximum value id we have ever seen.  Since ids are
   handed out from 1 upwards, this is one more than the largest id
   currently in use and hence a safe array bound.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
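
/* Illustrative sketch (client-side, hypothetical variable name): a
   consumer can size per-value-id tables off the counter above, e.g.

     bitmap_set_t *value_exprs
       = XCNEWVEC (bitmap_set_t, get_max_value_id () + 1);

   so that every id handed out so far indexes safely.  */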


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
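
/* For example (illustrative), two distinct INTEGER_CST nodes both
   holding the value 42 compare equal here through operand_equal_p,
   whereas two different SSA names are never considered equal; an SSA
   name is only equal to itself.  */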


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
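
/* Illustrative note: a division x_1 / y_2 in a type whose division
   can trap (e.g. integer division with a possibly-zero divisor) is
   flagged here, which is what lets clients such as PRE avoid
   inserting the expression on paths where it was not originally
   evaluated.  */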