gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
34
35 /* Return true when DECL can be referenced from the current unit.
36    We can get declarations that cannot be referenced for
37    various reasons:
38
39      1) When analyzing C++ virtual tables.
40         C++ virtual tables do have known constructors even
41         when they are keyed to another compilation unit.
42         Those tables can contain pointers to methods and vars
43         in other units.  Those methods have both STATIC and EXTERNAL
44         set.
45      2) In WHOPR mode devirtualization might lead to a reference
46         to a method that was partitioned elsewhere.
47         In this case we have a static VAR_DECL or FUNCTION_DECL
48         that has no corresponding callgraph/varpool node
49         declaring the body.
50      3) COMDAT functions referred to by external vtables that
51         we devirtualize only during the final compilation stage.
52         At this time we have already decided that we will not output
53         the function body and thus we can't reference the symbol
54         directly.  */
55
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
58 {
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64   /* The external flag is set, so we are dealing with a C++ reference
65      to a static object from another file.  */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
68 {
69       /* Just be sure it is not a bug in the frontend setting
70          flags incorrectly.  Those variables should never
71          be finalized.  */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
75 }
76   /* When a function is public, we can always introduce a new reference.
77      The exception is COMDAT functions, where introducing a direct
78      reference implies the need to include the function body in the current unit.  */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81   /* We are not at the ltrans stage, so don't worry about WHOPR.
82      Also, while still gimplifying, all referred COMDAT functions will be
83      produced.
84      ??? As observed in PR20991, for already-optimized-out COMDAT virtual functions
85      we need not necessarily give up, because the copy will be output elsewhere when the
86      corresponding vtable is output.  */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
93 {
94 node = cgraph_get_node (decl);
95       /* Check that we still have the function body and that we haven't yet
96          decided to eliminate the offline copy of the function.
97          The second point is important when devirtualization happens during the
98          final compilation stage, when making a new reference no longer causes
99          the callee to be compiled.  */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
102 }
103 else if (TREE_CODE (decl) == VAR_DECL)
104 {
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
108 }
109 return true;
110 }
111
112 /* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to transform
113    it into a form acceptable to is_gimple_min_invariant.  */
114
115 tree
116 canonicalize_constructor_val (tree cval)
117 {
118 STRIP_NOPS (cval);
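      /* An invariant pointer plus a constant offset is rewritten below as the
         address of a MEM_REF, which is the canonical form expected by
         is_gimple_min_invariant.  */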
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
121 {
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
130 }
131 if (TREE_CODE (cval) == ADDR_EXPR)
132 {
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
134
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
145 }
146 return cval;
147 }
148
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
151
152 tree
153 get_symbol_constant_value (tree sym)
154 {
155 if (const_value_known_p (sym))
156 {
157 tree val = DECL_INITIAL (sym);
158 if (val)
159 {
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
162 return val;
163 else
164 return NULL_TREE;
165 }
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
169 if (!val
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
173 }
174
175 return NULL_TREE;
176 }
177
178
179
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
184
185 static tree
186 maybe_fold_reference (tree expr, bool is_lhs)
187 {
188 tree *t = &expr;
189 tree result;
190
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
196 TREE_CODE (expr),
197 TREE_TYPE (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
202 TREE_CODE (expr),
203 TREE_TYPE (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
207
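       /* Otherwise strip the outer handled components so *T points at the
          innermost base of the reference tree.  */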
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
210
211   /* Canonicalize a MEM_REF's invariant address operand.  Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
215 {
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
220 if (tem)
221 {
222 TREE_THIS_VOLATILE (tem) = volatile_p;
223 *t = tem;
224 tem = maybe_fold_reference (expr, is_lhs);
225 if (tem)
226 return tem;
227 return expr;
228 }
229 }
230
231 if (!is_lhs
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
234 return result;
235
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245          /* We have to be careful here not to drop a required conversion
246             from the rhs to the lhs if is_lhs, but we don't have the
247             rhs here to verify that.  Thus require strict type
248             compatibility.  */
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
252 {
253 tree tem;
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
256 if (tem)
257 return tem;
258 return expr;
259 }
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
261 {
262 tree tem = maybe_fold_tmr (*t);
263 if (tem)
264 {
265 *t = tem;
266 tem = maybe_fold_reference (expr, is_lhs);
267 if (tem)
268 return tem;
269 return expr;
270 }
271 }
272
273 return NULL_TREE;
274 }
275
276
277 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
280 folded. */
281
282 static tree
283 fold_gimple_assign (gimple_stmt_iterator *si)
284 {
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
288
289 tree result = NULL_TREE;
290
291 switch (get_gimple_rhs_class (subcode))
292 {
293 case GIMPLE_SINGLE_RHS:
294 {
295 tree rhs = gimple_assign_rhs1 (stmt);
296
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
299
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
301 {
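           /* Fold the pointed-to reference as an lvalue; the address of a
              plain MEM_REF[p, 0] then simplifies to P itself.  */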
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
304 if (tem
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
308 else if (tem)
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
314 }
315
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
320 {
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
322 unsigned i;
323 tree val;
324
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
329 return NULL_TREE;
330
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
333 }
334
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
337
338 /* If we couldn't fold the RHS, hand over to the generic
339 fold routines. */
340 if (result == NULL_TREE)
341 result = fold (rhs);
342
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
347
348 if (result != rhs && valid_gimple_rhs_p (result))
349 return result;
350
351 return NULL_TREE;
352 }
353 break;
354
355 case GIMPLE_UNARY_RHS:
356 {
357 tree rhs = gimple_assign_rhs1 (stmt);
358
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
360 if (result)
361 {
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
371
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
374 return result;
375 }
376 }
377 break;
378
379 case GIMPLE_BINARY_RHS:
380 /* Try to canonicalize for boolean-typed X the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
384 {
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
389
390           /* Check whether the comparison operands are of the same boolean
391              type as the result type.
392              Check that the second operand is an integer constant with value
393              one or zero.  */
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
397 {
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
400
401               /* X == 0 and X != 1 are a logical-not of X;
402                  X == 1 and X != 0 are X.  */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
406
407 if (is_logical_not == false)
408 result = op1;
409               /* Only for X of one-bit precision is the transformation
410                  !X -> ~X valid.  */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
413 type, op1);
414 /* Otherwise we use !X -> X ^ 1. */
415 else
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
418
419 }
420 }
421
422 if (!result)
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
427
428 if (result)
429 {
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
432 return result;
433 }
434 break;
435
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
439 {
440 tree op0 = gimple_assign_rhs1 (stmt);
441 tree tem;
442 bool set = false;
443 location_t cond_loc = gimple_location (stmt);
444
445 if (COMPARISON_CLASS_P (op0))
446 {
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
458 }
459 else if (is_gimple_min_invariant (op0))
460 {
461 tem = op0;
462 set = true;
463 }
464 else
465 return NULL_TREE;
466
467 if (set)
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
472 }
473
474 if (!result)
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
480
481 if (result)
482 {
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
485 return result;
486 }
487 break;
488
489 case GIMPLE_INVALID_RHS:
490 gcc_unreachable ();
491 }
492
493 return NULL_TREE;
494 }
495
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
500
501 static bool
502 fold_gimple_cond (gimple stmt)
503 {
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
506 boolean_type_node,
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
509
510 if (result)
511 {
512 STRIP_USELESS_TYPE_CONVERSION (result);
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
514 {
515 gimple_cond_set_condition_from_tree (stmt, result);
516 return true;
517 }
518 }
519
520 return false;
521 }
522
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
526    is replaced.  If the call is expected to produce a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
532
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 {
536 tree lhs;
537 tree tmp = NULL_TREE; /* Silence warning. */
538 gimple stmt, new_stmt;
539 gimple_stmt_iterator i;
540   gimple_seq stmts = gimple_seq_alloc ();
541 struct gimplify_ctx gctx;
542 gimple last = NULL;
543 gimple laststore = NULL;
544 tree reaching_vuse;
545
546 stmt = gsi_stmt (*si_p);
547
548 gcc_assert (is_gimple_call (stmt));
549
550 lhs = gimple_call_lhs (stmt);
551 reaching_vuse = gimple_vuse (stmt);
552
553 push_gimplify_context (&gctx);
554
555 if (lhs == NULL_TREE)
556 {
557 gimplify_and_add (expr, &stmts);
558 /* We can end up with folding a memcpy of an empty class assignment
559 which gets optimized away by C++ gimplification. */
560 if (gimple_seq_empty_p (stmts))
561 {
562 pop_gimplify_context (NULL);
563 if (gimple_in_ssa_p (cfun))
564 {
565 unlink_stmt_vdef (stmt);
566 release_defs (stmt);
567 }
568 gsi_remove (si_p, true);
569 return;
570 }
571 }
572 else
573 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
574
575 pop_gimplify_context (NULL);
576
577 if (gimple_has_location (stmt))
578 annotate_all_with_location (stmts, gimple_location (stmt));
579
580 /* The replacement can expose previously unreferenced variables. */
581 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
582 {
583 if (last)
584 {
585 gsi_insert_before (si_p, last, GSI_NEW_STMT);
586 gsi_next (si_p);
587 }
588 new_stmt = gsi_stmt (i);
589 if (gimple_in_ssa_p (cfun))
590 {
591 find_new_referenced_vars (new_stmt);
592 mark_symbols_for_renaming (new_stmt);
593 }
594       /* If the new statement has a VUSE, update it with the exact SSA name
595          we know will reach this one.  */
596 if (gimple_vuse (new_stmt))
597 {
598           /* If we've also seen a previous store, create a new VDEF for
599              it, and make that the new reaching VUSE.  */
600 if (laststore)
601 {
602 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
603 gimple_set_vdef (laststore, reaching_vuse);
604 update_stmt (laststore);
605 laststore = NULL;
606 }
607 gimple_set_vuse (new_stmt, reaching_vuse);
608 gimple_set_modified (new_stmt, true);
609 }
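       /* Remember the last store of the sequence; its VDEF is fixed up
          later, once we know what (if anything) follows it.  */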
610 if (gimple_assign_single_p (new_stmt)
611 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
612 {
613 laststore = new_stmt;
614 }
615 last = new_stmt;
616 }
617
618 if (lhs == NULL_TREE)
619 {
620 /* If we replace a call without LHS that has a VDEF and our new
621 sequence ends with a store we must make that store have the same
622 vdef in order not to break the sequencing. This can happen
623 for instance when folding memcpy calls into assignments. */
624 if (gimple_vdef (stmt) && laststore)
625 {
626 gimple_set_vdef (laststore, gimple_vdef (stmt));
627 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
628 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
629 update_stmt (laststore);
630 }
631 else if (gimple_in_ssa_p (cfun))
632 {
633 unlink_stmt_vdef (stmt);
634 release_defs (stmt);
635 }
636 new_stmt = last;
637 }
638 else
639 {
640 if (last)
641 {
642 gsi_insert_before (si_p, last, GSI_NEW_STMT);
643 gsi_next (si_p);
644 }
645 if (laststore && is_gimple_reg (lhs))
646 {
647 gimple_set_vdef (laststore, gimple_vdef (stmt));
648 update_stmt (laststore);
649 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
650 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
651 laststore = NULL;
652 }
653 else if (laststore)
654 {
655 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
656 gimple_set_vdef (laststore, reaching_vuse);
657 update_stmt (laststore);
658 laststore = NULL;
659 }
660 new_stmt = gimple_build_assign (lhs, tmp);
661 if (!is_gimple_reg (tmp))
662 gimple_set_vuse (new_stmt, reaching_vuse);
663 if (!is_gimple_reg (lhs))
664 {
665 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
666 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
667 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
668 }
669 else if (reaching_vuse == gimple_vuse (stmt))
670 unlink_stmt_vdef (stmt);
671 }
672
673 gimple_set_location (new_stmt, gimple_location (stmt));
674 gsi_replace (si_p, new_stmt, false);
675 }
676
677 /* Return the string length, maximum string length or maximum value of
678 ARG in LENGTH.
679 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
680 is not NULL and, for TYPE == 0, its value is not equal to the length
681 we determine or if we are unable to determine the length or value,
682 return false. VISITED is a bitmap of visited variables.
683 TYPE is 0 if string length should be returned, 1 for maximum string
684 length and 2 for maximum value ARG can have. */
685
686 static bool
687 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
688 {
689 tree var, val;
690 gimple def_stmt;
691
692 if (TREE_CODE (arg) != SSA_NAME)
693 {
694 if (TREE_CODE (arg) == COND_EXPR)
695 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
696 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
697 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
698 else if (TREE_CODE (arg) == ADDR_EXPR
699 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
700 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
701 {
702 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
703 if (TREE_CODE (aop0) == INDIRECT_REF
704 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
705 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
706 length, visited, type);
707 }
708
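       /* For TYPE == 2 the argument itself is the value; it must be a
          non-negative integer constant.  */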
709 if (type == 2)
710 {
711 val = arg;
712 if (TREE_CODE (val) != INTEGER_CST
713 || tree_int_cst_sgn (val) < 0)
714 return false;
715 }
716 else
717 val = c_strlen (arg, 1);
718 if (!val)
719 return false;
720
721 if (*length)
722 {
723 if (type > 0)
724 {
725 if (TREE_CODE (*length) != INTEGER_CST
726 || TREE_CODE (val) != INTEGER_CST)
727 return false;
728
729 if (tree_int_cst_lt (*length, val))
730 *length = val;
731 return true;
732 }
733 else if (simple_cst_equal (val, *length) != 1)
734 return false;
735 }
736
737 *length = val;
738 return true;
739 }
740
741 /* If we were already here, break the infinite cycle. */
742 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
743 return true;
744
745 var = arg;
746 def_stmt = SSA_NAME_DEF_STMT (var);
747
748 switch (gimple_code (def_stmt))
749 {
750 case GIMPLE_ASSIGN:
751 /* The RHS of the statement defining VAR must either have a
752 constant length or come from another SSA_NAME with a constant
753 length. */
754 if (gimple_assign_single_p (def_stmt)
755 || gimple_assign_unary_nop_p (def_stmt))
756 {
757 tree rhs = gimple_assign_rhs1 (def_stmt);
758 return get_maxval_strlen (rhs, length, visited, type);
759 }
760 return false;
761
762 case GIMPLE_PHI:
763 {
764 /* All the arguments of the PHI node must have the same constant
765 length. */
766 unsigned i;
767
768 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
769 {
770 tree arg = gimple_phi_arg (def_stmt, i)->def;
771
772 /* If this PHI has itself as an argument, we cannot
773 determine the string length of this argument. However,
774 if we can find a constant string length for the other
775 PHI args then we can still be sure that this is a
776 constant string length. So be optimistic and just
777 continue with the next argument. */
778 if (arg == gimple_phi_result (def_stmt))
779 continue;
780
781 if (!get_maxval_strlen (arg, length, visited, type))
782 return false;
783 }
784 }
785 return true;
786
787 default:
788 return false;
789 }
790 }
791
792
793 /* Fold a builtin call in statement STMT.  Return a simplified tree.
794 We may return a non-constant expression, including another call
795 to a different function and with different arguments, e.g.,
796 substituting memcpy for strcpy when the string length is known.
797 Note that some builtins expand into inline code that may not
798 be valid in GIMPLE. Callers must take care. */
799
800 tree
801 gimple_fold_builtin (gimple stmt)
802 {
803 tree result, val[3];
804 tree callee, a;
805 int arg_idx, type;
806 bitmap visited;
807 bool ignore;
808 int nargs;
809 location_t loc = gimple_location (stmt);
810
811 gcc_assert (is_gimple_call (stmt));
812
813 ignore = (gimple_call_lhs (stmt) == NULL);
814
815 /* First try the generic builtin folder. If that succeeds, return the
816 result directly. */
817 result = fold_call_stmt (stmt, ignore);
818 if (result)
819 {
820 if (ignore)
821 STRIP_NOPS (result);
822 return result;
823 }
824
825 /* Ignore MD builtins. */
826 callee = gimple_call_fndecl (stmt);
827 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
828 return NULL_TREE;
829
830 /* If the builtin could not be folded, and it has no argument list,
831 we're done. */
832 nargs = gimple_call_num_args (stmt);
833 if (nargs == 0)
834 return NULL_TREE;
835
836 /* Limit the work only for builtins we know how to simplify. */
837 switch (DECL_FUNCTION_CODE (callee))
838 {
839 case BUILT_IN_STRLEN:
840 case BUILT_IN_FPUTS:
841 case BUILT_IN_FPUTS_UNLOCKED:
842 arg_idx = 0;
843 type = 0;
844 break;
845 case BUILT_IN_STRCPY:
846 case BUILT_IN_STRNCPY:
847 arg_idx = 1;
848 type = 0;
849 break;
850 case BUILT_IN_MEMCPY_CHK:
851 case BUILT_IN_MEMPCPY_CHK:
852 case BUILT_IN_MEMMOVE_CHK:
853 case BUILT_IN_MEMSET_CHK:
854 case BUILT_IN_STRNCPY_CHK:
855 arg_idx = 2;
856 type = 2;
857 break;
858 case BUILT_IN_STRCPY_CHK:
859 case BUILT_IN_STPCPY_CHK:
860 arg_idx = 1;
861 type = 1;
862 break;
863 case BUILT_IN_SNPRINTF_CHK:
864 case BUILT_IN_VSNPRINTF_CHK:
865 arg_idx = 1;
866 type = 2;
867 break;
868 default:
869 return NULL_TREE;
870 }
871
872 if (arg_idx >= nargs)
873 return NULL_TREE;
874
875 /* Try to use the dataflow information gathered by the CCP process. */
876 visited = BITMAP_ALLOC (NULL);
877 bitmap_clear (visited);
878
879 memset (val, 0, sizeof (val));
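    /* VAL[ARG_IDX] will hold the string length or maximum value of the
       interesting argument, if it can be determined; the other slots stay
       NULL_TREE.  */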
880 a = gimple_call_arg (stmt, arg_idx);
881 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
882 val[arg_idx] = NULL_TREE;
883
884 BITMAP_FREE (visited);
885
886 result = NULL_TREE;
887 switch (DECL_FUNCTION_CODE (callee))
888 {
889 case BUILT_IN_STRLEN:
890 if (val[0] && nargs == 1)
891 {
892 tree new_val =
893 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
894
895 /* If the result is not a valid gimple value, or not a cast
896 of a valid gimple value, then we cannot use the result. */
897 if (is_gimple_val (new_val)
898 || (CONVERT_EXPR_P (new_val)
899 && is_gimple_val (TREE_OPERAND (new_val, 0))))
900 return new_val;
901 }
902 break;
903
904 case BUILT_IN_STRCPY:
905 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
906 result = fold_builtin_strcpy (loc, callee,
907 gimple_call_arg (stmt, 0),
908 gimple_call_arg (stmt, 1),
909 val[1]);
910 break;
911
912 case BUILT_IN_STRNCPY:
913 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
914 result = fold_builtin_strncpy (loc, callee,
915 gimple_call_arg (stmt, 0),
916 gimple_call_arg (stmt, 1),
917 gimple_call_arg (stmt, 2),
918 val[1]);
919 break;
920
921 case BUILT_IN_FPUTS:
922 if (nargs == 2)
923 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
924 gimple_call_arg (stmt, 1),
925 ignore, false, val[0]);
926 break;
927
928 case BUILT_IN_FPUTS_UNLOCKED:
929 if (nargs == 2)
930 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
931 gimple_call_arg (stmt, 1),
932 ignore, true, val[0]);
933 break;
934
935 case BUILT_IN_MEMCPY_CHK:
936 case BUILT_IN_MEMPCPY_CHK:
937 case BUILT_IN_MEMMOVE_CHK:
938 case BUILT_IN_MEMSET_CHK:
939 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
940 result = fold_builtin_memory_chk (loc, callee,
941 gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 gimple_call_arg (stmt, 2),
944 gimple_call_arg (stmt, 3),
945 val[2], ignore,
946 DECL_FUNCTION_CODE (callee));
947 break;
948
949 case BUILT_IN_STRCPY_CHK:
950 case BUILT_IN_STPCPY_CHK:
951 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
952 result = fold_builtin_stxcpy_chk (loc, callee,
953 gimple_call_arg (stmt, 0),
954 gimple_call_arg (stmt, 1),
955 gimple_call_arg (stmt, 2),
956 val[1], ignore,
957 DECL_FUNCTION_CODE (callee));
958 break;
959
960 case BUILT_IN_STRNCPY_CHK:
961 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
962 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
963 gimple_call_arg (stmt, 1),
964 gimple_call_arg (stmt, 2),
965 gimple_call_arg (stmt, 3),
966 val[2]);
967 break;
968
969 case BUILT_IN_SNPRINTF_CHK:
970 case BUILT_IN_VSNPRINTF_CHK:
971 if (val[1] && is_gimple_val (val[1]))
972 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
973 DECL_FUNCTION_CODE (callee));
974 break;
975
976 default:
977 gcc_unreachable ();
978 }
979
980 if (result && ignore)
981 result = fold_ignored_result (result);
982 return result;
983 }
984
985 /* Return the declaration of the function which an OBJ_TYPE_REF references.
986    TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
987    expression.  KNOWN_BINFO carries the binfo describing the true type of
988    OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
989    by a this-pointer adjustment, the constant which should be added to the
990    this pointer is stored in *DELTA.  Return NULL_TREE if the method cannot
991    be found or cannot be referenced from the current unit.  */
992
993 tree
994 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
995 tree *delta)
996 {
997 HOST_WIDE_INT i;
998 tree v, fndecl;
999
1000 v = BINFO_VIRTUALS (known_binfo);
1001   /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
1002 if (!v)
1003 return NULL_TREE;
1004 i = 0;
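    /* Step through BINFO_VIRTUALS to the entry for TOKEN; when the target
       uses vtable descriptors, each virtual slot occupies
       TARGET_VTABLE_USES_DESCRIPTORS list entries.  */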
1005 while (i != token)
1006 {
1007 i += (TARGET_VTABLE_USES_DESCRIPTORS
1008 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1009 v = TREE_CHAIN (v);
1010 }
1011
1012 /* If BV_VCALL_INDEX is non-NULL, give up. */
1013 if (TREE_TYPE (v))
1014 return NULL_TREE;
1015
1016 fndecl = TREE_VALUE (v);
1017
1018   /* When the cgraph node is missing and the function is not public, we
1019      cannot devirtualize.  This can happen in WHOPR when the actual method
1020      ends up in another partition because we found the devirtualization
1021      possibility too late.  */
1022 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1023 return NULL_TREE;
1024
1025 *delta = TREE_PURPOSE (v);
1026 gcc_checking_assert (host_integerp (*delta, 0));
1027 return fndecl;
1028 }
1029
1030 /* Generate code adjusting the first parameter of a call statement determined
1031 by GSI by DELTA. */
1032
1033 void
1034 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1035 {
1036 gimple call_stmt = gsi_stmt (*gsi);
1037 tree parm, tmp;
1038 gimple new_stmt;
1039
1040 delta = convert_to_ptrofftype (delta);
1041 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1042 parm = gimple_call_arg (call_stmt, 0);
1043 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1044 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1045 add_referenced_var (tmp);
1046
1047 tmp = make_ssa_name (tmp, NULL);
1048 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1049 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1050 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1051 gimple_call_set_arg (call_stmt, 0, tmp);
1052 }
1053
1054 /* Return a binfo to be used for devirtualization of calls based on an object
1055 represented by a declaration (i.e. a global or automatically allocated one)
1056    or NULL if it cannot be found or is not safe.  CST is expected to be an
1057    ADDR_EXPR of such an object or the function will return NULL.  Currently it is
1058 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1059
1060 tree
1061 gimple_extract_devirt_binfo_from_cst (tree cst)
1062 {
1063 HOST_WIDE_INT offset, size, max_size;
1064 tree base, type, expected_type, binfo;
1065 bool last_artificial = false;
1066
1067 if (!flag_devirtualize
1068 || TREE_CODE (cst) != ADDR_EXPR
1069 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1070 return NULL_TREE;
1071
1072 cst = TREE_OPERAND (cst, 0);
1073 expected_type = TREE_TYPE (cst);
1074 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1075 type = TREE_TYPE (base);
1076 if (!DECL_P (base)
1077 || max_size == -1
1078 || max_size != size
1079 || TREE_CODE (type) != RECORD_TYPE)
1080 return NULL_TREE;
1081
1082 /* Find the sub-object the constant actually refers to and mark whether it is
1083 an artificial one (as opposed to a user-defined one). */
1084 while (true)
1085 {
1086 HOST_WIDE_INT pos, size;
1087 tree fld;
1088
1089 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1090 break;
1091 if (offset < 0)
1092 return NULL_TREE;
1093
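        /* Find the field of TYPE whose bit range contains OFFSET.  */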
1094 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1095 {
1096 if (TREE_CODE (fld) != FIELD_DECL)
1097 continue;
1098
1099 pos = int_bit_position (fld);
1100 size = tree_low_cst (DECL_SIZE (fld), 1);
1101 if (pos <= offset && (pos + size) > offset)
1102 break;
1103 }
1104 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1105 return NULL_TREE;
1106
1107 last_artificial = DECL_ARTIFICIAL (fld);
1108 type = TREE_TYPE (fld);
1109 offset -= pos;
1110 }
1111   /* Artificial sub-objects are ancestors; we do not want to use them for
1112      devirtualization, at least not here.  */
1113 if (last_artificial)
1114 return NULL_TREE;
1115 binfo = TYPE_BINFO (type);
1116 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1117 return NULL_TREE;
1118 else
1119 return binfo;
1120 }
1121
1122 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1123 The statement may be replaced by another statement, e.g., if the call
1124 simplifies to a constant value. Return true if any changes were made.
1125 It is assumed that the operands have been previously folded. */
1126
1127 bool
1128 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1129 {
1130 gimple stmt = gsi_stmt (*gsi);
1131 tree callee;
1132
1133 /* Check for builtins that CCP can handle using information not
1134 available in the generic fold routines. */
1135 callee = gimple_call_fndecl (stmt);
1136 if (!inplace && callee && DECL_BUILT_IN (callee))
1137 {
1138 tree result = gimple_fold_builtin (stmt);
1139
1140 if (result)
1141 {
1142 if (!update_call_from_tree (gsi, result))
1143 gimplify_and_update_call_from_tree (gsi, result);
1144 return true;
1145 }
1146 }
1147
1148 /* Check for virtual calls that became direct calls. */
1149 callee = gimple_call_fn (stmt);
1150 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1151 {
1152 tree binfo, fndecl, delta, obj;
1153 HOST_WIDE_INT token;
1154
1155 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1156 {
1157 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1158 return true;
1159 }
1160
1161 obj = OBJ_TYPE_REF_OBJECT (callee);
1162 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1163 if (!binfo)
1164 return false;
1165 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1166 fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta);
1167 if (!fndecl)
1168 return false;
1169 gcc_assert (integer_zerop (delta));
1170 gimple_call_set_fndecl (stmt, fndecl);
1171 return true;
1172 }
1173
1174 return false;
1175 }
1176
1177 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1178 distinguishes both cases. */
1179
1180 static bool
1181 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1182 {
1183 bool changed = false;
1184 gimple stmt = gsi_stmt (*gsi);
1185 unsigned i;
1186 gimple_stmt_iterator gsinext = *gsi;
1187 gimple next_stmt;
1188
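    /* Remember the statement following STMT so we can tell afterwards
       whether folding removed STMT from the sequence entirely.  */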
1189 gsi_next (&gsinext);
1190 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1191
1192 /* Fold the main computation performed by the statement. */
1193 switch (gimple_code (stmt))
1194 {
1195 case GIMPLE_ASSIGN:
1196 {
1197 unsigned old_num_ops = gimple_num_ops (stmt);
1198 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1199 tree lhs = gimple_assign_lhs (stmt);
1200 tree new_rhs;
1201 /* First canonicalize operand order. This avoids building new
1202 trees if this is the only thing fold would later do. */
1203 if ((commutative_tree_code (subcode)
1204 || commutative_ternary_tree_code (subcode))
1205 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1206 gimple_assign_rhs2 (stmt), false))
1207 {
1208 tree tem = gimple_assign_rhs1 (stmt);
1209 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1210 gimple_assign_set_rhs2 (stmt, tem);
1211 changed = true;
1212 }
1213 new_rhs = fold_gimple_assign (gsi);
1214 if (new_rhs
1215 && !useless_type_conversion_p (TREE_TYPE (lhs),
1216 TREE_TYPE (new_rhs)))
1217 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1218 if (new_rhs
1219 && (!inplace
1220 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1221 {
1222 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1223 changed = true;
1224 }
1225 break;
1226 }
1227
1228 case GIMPLE_COND:
1229 changed |= fold_gimple_cond (stmt);
1230 break;
1231
1232 case GIMPLE_CALL:
1233 /* Fold *& in call arguments. */
1234 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1235 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1236 {
1237 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1238 if (tmp)
1239 {
1240 gimple_call_set_arg (stmt, i, tmp);
1241 changed = true;
1242 }
1243 }
1244 changed |= gimple_fold_call (gsi, inplace);
1245 break;
1246
1247 case GIMPLE_ASM:
1248 /* Fold *& in asm operands. */
1249 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1250 {
1251 tree link = gimple_asm_output_op (stmt, i);
1252 tree op = TREE_VALUE (link);
1253 if (REFERENCE_CLASS_P (op)
1254 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1255 {
1256 TREE_VALUE (link) = op;
1257 changed = true;
1258 }
1259 }
1260 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1261 {
1262 tree link = gimple_asm_input_op (stmt, i);
1263 tree op = TREE_VALUE (link);
1264 if (REFERENCE_CLASS_P (op)
1265 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1266 {
1267 TREE_VALUE (link) = op;
1268 changed = true;
1269 }
1270 }
1271 break;
1272
1273 case GIMPLE_DEBUG:
1274 if (gimple_debug_bind_p (stmt))
1275 {
1276 tree val = gimple_debug_bind_get_value (stmt);
1277 if (val
1278 && REFERENCE_CLASS_P (val))
1279 {
1280 tree tem = maybe_fold_reference (val, false);
1281 if (tem)
1282 {
1283 gimple_debug_bind_set_value (stmt, tem);
1284 changed = true;
1285 }
1286 }
1287 }
1288 break;
1289
1290 default:;
1291 }
1292
1293 /* If stmt folds into nothing and it was the last stmt in a bb,
1294 don't call gsi_stmt. */
1295 if (gsi_end_p (*gsi))
1296 {
1297 gcc_assert (next_stmt == NULL);
1298 return changed;
1299 }
1300
1301 stmt = gsi_stmt (*gsi);
1302
1303   /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
1304      as we'd be changing the next stmt.  */
1305 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1306 {
1307 tree lhs = gimple_get_lhs (stmt);
1308 if (lhs && REFERENCE_CLASS_P (lhs))
1309 {
1310 tree new_lhs = maybe_fold_reference (lhs, true);
1311 if (new_lhs)
1312 {
1313 gimple_set_lhs (stmt, new_lhs);
1314 changed = true;
1315 }
1316 }
1317 }
1318
1319 return changed;
1320 }
1321
1322 /* Fold the statement pointed to by GSI. In some cases, this function may
1323 replace the whole statement with a new one. Returns true iff folding
1324 makes any changes.
1325 The statement pointed to by GSI should be in valid gimple form but may
1326 be in unfolded state as resulting from for example constant propagation
1327 which can produce *&x = 0. */
1328
1329 bool
1330 fold_stmt (gimple_stmt_iterator *gsi)
1331 {
1332 return fold_stmt_1 (gsi, false);
1333 }
1334
1335 /* Perform the minimal folding on statement STMT. Only operations like
1336 *&x created by constant propagation are handled. The statement cannot
1337 be replaced with a new one. Return true if the statement was
1338 changed, false otherwise.
1339 The statement STMT should be in valid gimple form but may
1340 be in unfolded state as resulting from for example constant propagation
1341 which can produce *&x = 0. */
1342
1343 bool
1344 fold_stmt_inplace (gimple stmt)
1345 {
1346 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1347 bool changed = fold_stmt_1 (&gsi, true);
1348 gcc_assert (gsi_stmt (gsi) == stmt);
1349 return changed;
1350 }
1351
1352 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1353 if EXPR is null or we don't know how.
1354 If non-null, the result always has boolean type. */
1355
1356 static tree
1357 canonicalize_bool (tree expr, bool invert)
1358 {
1359 if (!expr)
1360 return NULL_TREE;
1361 else if (invert)
1362 {
1363 if (integer_nonzerop (expr))
1364 return boolean_false_node;
1365 else if (integer_zerop (expr))
1366 return boolean_true_node;
1367 else if (TREE_CODE (expr) == SSA_NAME)
1368 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1369 build_int_cst (TREE_TYPE (expr), 0));
1370 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1371 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1372 boolean_type_node,
1373 TREE_OPERAND (expr, 0),
1374 TREE_OPERAND (expr, 1));
1375 else
1376 return NULL_TREE;
1377 }
1378 else
1379 {
1380 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1381 return expr;
1382 if (integer_nonzerop (expr))
1383 return boolean_true_node;
1384 else if (integer_zerop (expr))
1385 return boolean_false_node;
1386 else if (TREE_CODE (expr) == SSA_NAME)
1387 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1388 build_int_cst (TREE_TYPE (expr), 0));
1389 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1390 return fold_build2 (TREE_CODE (expr),
1391 boolean_type_node,
1392 TREE_OPERAND (expr, 0),
1393 TREE_OPERAND (expr, 1));
1394 else
1395 return NULL_TREE;
1396 }
1397 }
1398
1399 /* Check to see if a boolean expression EXPR is logically equivalent to the
1400 comparison (OP1 CODE OP2). Check for various identities involving
1401 SSA_NAMEs. */
1402
1403 static bool
1404 same_bool_comparison_p (const_tree expr, enum tree_code code,
1405 const_tree op1, const_tree op2)
1406 {
1407 gimple s;
1408
1409 /* The obvious case. */
1410 if (TREE_CODE (expr) == code
1411 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1412 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1413 return true;
1414
1415 /* Check for comparing (name, name != 0) and the case where expr
1416 is an SSA_NAME with a definition matching the comparison. */
1417 if (TREE_CODE (expr) == SSA_NAME
1418 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1419 {
1420 if (operand_equal_p (expr, op1, 0))
1421 return ((code == NE_EXPR && integer_zerop (op2))
1422 || (code == EQ_EXPR && integer_nonzerop (op2)));
1423 s = SSA_NAME_DEF_STMT (expr);
1424 if (is_gimple_assign (s)
1425 && gimple_assign_rhs_code (s) == code
1426 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1427 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1428 return true;
1429 }
1430
1431 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1432 of name is a comparison, recurse. */
1433 if (TREE_CODE (op1) == SSA_NAME
1434 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1435 {
1436 s = SSA_NAME_DEF_STMT (op1);
1437 if (is_gimple_assign (s)
1438 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1439 {
1440 enum tree_code c = gimple_assign_rhs_code (s);
1441 if ((c == NE_EXPR && integer_zerop (op2))
1442 || (c == EQ_EXPR && integer_nonzerop (op2)))
1443 return same_bool_comparison_p (expr, c,
1444 gimple_assign_rhs1 (s),
1445 gimple_assign_rhs2 (s));
1446 if ((c == EQ_EXPR && integer_zerop (op2))
1447 || (c == NE_EXPR && integer_nonzerop (op2)))
1448 return same_bool_comparison_p (expr,
1449 invert_tree_comparison (c, false),
1450 gimple_assign_rhs1 (s),
1451 gimple_assign_rhs2 (s));
1452 }
1453 }
1454 return false;
1455 }
1456
1457 /* Check to see if two boolean expressions OP1 and OP2 are logically
1458 equivalent. */
1459
1460 static bool
1461 same_bool_result_p (const_tree op1, const_tree op2)
1462 {
1463 /* Simple cases first. */
1464 if (operand_equal_p (op1, op2, 0))
1465 return true;
1466
1467 /* Check the cases where at least one of the operands is a comparison.
1468 These are a bit smarter than operand_equal_p in that they apply some
1469      identities on SSA_NAMEs.  */
1470 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1471 && same_bool_comparison_p (op1, TREE_CODE (op2),
1472 TREE_OPERAND (op2, 0),
1473 TREE_OPERAND (op2, 1)))
1474 return true;
1475 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1476 && same_bool_comparison_p (op2, TREE_CODE (op1),
1477 TREE_OPERAND (op1, 0),
1478 TREE_OPERAND (op1, 1)))
1479 return true;
1480
1481 /* Default case. */
1482 return false;
1483 }
1484
1485 /* Forward declarations for some mutually recursive functions. */
1486
1487 static tree
1488 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1489 enum tree_code code2, tree op2a, tree op2b);
1490 static tree
1491 and_var_with_comparison (tree var, bool invert,
1492 enum tree_code code2, tree op2a, tree op2b);
1493 static tree
1494 and_var_with_comparison_1 (gimple stmt,
1495 enum tree_code code2, tree op2a, tree op2b);
1496 static tree
1497 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1498 enum tree_code code2, tree op2a, tree op2b);
1499 static tree
1500 or_var_with_comparison (tree var, bool invert,
1501 enum tree_code code2, tree op2a, tree op2b);
1502 static tree
1503 or_var_with_comparison_1 (gimple stmt,
1504 enum tree_code code2, tree op2a, tree op2b);
1505
1506 /* Helper function for and_comparisons_1: try to simplify the AND of the
1507 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1508 If INVERT is true, invert the value of the VAR before doing the AND.
1509    Return NULL_TREE if we can't simplify this to a single expression.  */
1510
1511 static tree
1512 and_var_with_comparison (tree var, bool invert,
1513 enum tree_code code2, tree op2a, tree op2b)
1514 {
1515 tree t;
1516 gimple stmt = SSA_NAME_DEF_STMT (var);
1517
1518 /* We can only deal with variables whose definitions are assignments. */
1519 if (!is_gimple_assign (stmt))
1520 return NULL_TREE;
1521
1522 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1523 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1524 Then we only have to consider the simpler non-inverted cases. */
1525 if (invert)
1526 t = or_var_with_comparison_1 (stmt,
1527 invert_tree_comparison (code2, false),
1528 op2a, op2b);
1529 else
1530 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1531 return canonicalize_bool (t, invert);
1532 }
1533
1534 /* Try to simplify the AND of the ssa variable defined by the assignment
1535 STMT with the comparison specified by (OP2A CODE2 OP2B).
1536    Return NULL_TREE if we can't simplify this to a single expression.  */
1537
1538 static tree
1539 and_var_with_comparison_1 (gimple stmt,
1540 enum tree_code code2, tree op2a, tree op2b)
1541 {
1542 tree var = gimple_assign_lhs (stmt);
1543 tree true_test_var = NULL_TREE;
1544 tree false_test_var = NULL_TREE;
1545 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1546
1547 /* Check for identities like (var AND (var == 0)) => false. */
1548 if (TREE_CODE (op2a) == SSA_NAME
1549 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1550 {
1551 if ((code2 == NE_EXPR && integer_zerop (op2b))
1552 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1553 {
1554 true_test_var = op2a;
1555 if (var == true_test_var)
1556 return var;
1557 }
1558 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1559 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1560 {
1561 false_test_var = op2a;
1562 if (var == false_test_var)
1563 return boolean_false_node;
1564 }
1565 }
1566
1567 /* If the definition is a comparison, recurse on it. */
1568 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1569 {
1570 tree t = and_comparisons_1 (innercode,
1571 gimple_assign_rhs1 (stmt),
1572 gimple_assign_rhs2 (stmt),
1573 code2,
1574 op2a,
1575 op2b);
1576 if (t)
1577 return t;
1578 }
1579
1580 /* If the definition is an AND or OR expression, we may be able to
1581 simplify by reassociating. */
1582 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1583 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1584 {
1585 tree inner1 = gimple_assign_rhs1 (stmt);
1586 tree inner2 = gimple_assign_rhs2 (stmt);
1587 gimple s;
1588 tree t;
1589 tree partial = NULL_TREE;
1590 bool is_and = (innercode == BIT_AND_EXPR);
1591
1592 /* Check for boolean identities that don't require recursive examination
1593 of inner1/inner2:
1594 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1595 inner1 AND (inner1 OR inner2) => inner1
1596 !inner1 AND (inner1 AND inner2) => false
1597 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1598 Likewise for similar cases involving inner2. */
1599 if (inner1 == true_test_var)
1600 return (is_and ? var : inner1);
1601 else if (inner2 == true_test_var)
1602 return (is_and ? var : inner2);
1603 else if (inner1 == false_test_var)
1604 return (is_and
1605 ? boolean_false_node
1606 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1607 else if (inner2 == false_test_var)
1608 return (is_and
1609 ? boolean_false_node
1610 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1611
1612 /* Next, redistribute/reassociate the AND across the inner tests.
1613 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1614 if (TREE_CODE (inner1) == SSA_NAME
1615 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1616 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1617 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1618 gimple_assign_rhs1 (s),
1619 gimple_assign_rhs2 (s),
1620 code2, op2a, op2b)))
1621 {
1622 /* Handle the AND case, where we are reassociating:
1623 (inner1 AND inner2) AND (op2a code2 op2b)
1624 => (t AND inner2)
1625 If the partial result t is a constant, we win. Otherwise
1626 continue on to try reassociating with the other inner test. */
1627 if (is_and)
1628 {
1629 if (integer_onep (t))
1630 return inner2;
1631 else if (integer_zerop (t))
1632 return boolean_false_node;
1633 }
1634
1635 /* Handle the OR case, where we are redistributing:
1636 (inner1 OR inner2) AND (op2a code2 op2b)
1637 => (t OR (inner2 AND (op2a code2 op2b))) */
1638 else if (integer_onep (t))
1639 return boolean_true_node;
1640
1641 /* Save partial result for later. */
1642 partial = t;
1643 }
1644
1645 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1646 if (TREE_CODE (inner2) == SSA_NAME
1647 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1648 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1649 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1650 gimple_assign_rhs1 (s),
1651 gimple_assign_rhs2 (s),
1652 code2, op2a, op2b)))
1653 {
1654 /* Handle the AND case, where we are reassociating:
1655 (inner1 AND inner2) AND (op2a code2 op2b)
1656 => (inner1 AND t) */
1657 if (is_and)
1658 {
1659 if (integer_onep (t))
1660 return inner1;
1661 else if (integer_zerop (t))
1662 return boolean_false_node;
1663 /* If both are the same, we can apply the identity
1664 (x AND x) == x. */
1665 else if (partial && same_bool_result_p (t, partial))
1666 return t;
1667 }
1668
1669           /* Handle the OR case, where we are redistributing:
1670 (inner1 OR inner2) AND (op2a code2 op2b)
1671 => (t OR (inner1 AND (op2a code2 op2b)))
1672 => (t OR partial) */
1673 else
1674 {
1675 if (integer_onep (t))
1676 return boolean_true_node;
1677 else if (partial)
1678 {
1679 /* We already got a simplification for the other
1680 operand to the redistributed OR expression. The
1681 interesting case is when at least one is false.
1682 Or, if both are the same, we can apply the identity
1683 (x OR x) == x. */
1684 if (integer_zerop (partial))
1685 return t;
1686 else if (integer_zerop (t))
1687 return partial;
1688 else if (same_bool_result_p (t, partial))
1689 return t;
1690 }
1691 }
1692 }
1693 }
1694 return NULL_TREE;
1695 }
1696
1697 /* Try to simplify the AND of two comparisons defined by
1698 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1699 If this can be done without constructing an intermediate value,
1700 return the resulting tree; otherwise NULL_TREE is returned.
1701 This function is deliberately asymmetric as it recurses on SSA_DEFs
1702 in the first comparison but not the second. */
1703
1704 static tree
1705 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1706 enum tree_code code2, tree op2a, tree op2b)
1707 {
1708 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1709 if (operand_equal_p (op1a, op2a, 0)
1710 && operand_equal_p (op1b, op2b, 0))
1711 {
1712 /* Result will be either NULL_TREE, or a combined comparison. */
1713 tree t = combine_comparisons (UNKNOWN_LOCATION,
1714 TRUTH_ANDIF_EXPR, code1, code2,
1715 boolean_type_node, op1a, op1b);
1716 if (t)
1717 return t;
1718 }
1719
1720 /* Likewise the swapped case of the above. */
1721 if (operand_equal_p (op1a, op2b, 0)
1722 && operand_equal_p (op1b, op2a, 0))
1723 {
1724 /* Result will be either NULL_TREE, or a combined comparison. */
1725 tree t = combine_comparisons (UNKNOWN_LOCATION,
1726 TRUTH_ANDIF_EXPR, code1,
1727 swap_tree_comparison (code2),
1728 boolean_type_node, op1a, op1b);
1729 if (t)
1730 return t;
1731 }
1732
1733 /* If both comparisons are of the same value against constants, we might
1734 be able to merge them. */
1735 if (operand_equal_p (op1a, op2a, 0)
1736 && TREE_CODE (op1b) == INTEGER_CST
1737 && TREE_CODE (op2b) == INTEGER_CST)
1738 {
1739 int cmp = tree_int_cst_compare (op1b, op2b);
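        /* CMP orders the two constants: negative when op1b < op2b, zero when
           they are equal, positive when op1b > op2b; the cases below key off
           this value.  */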
1740
1741 /* If we have (op1a == op1b), we should either be able to
1742 return that or FALSE, depending on whether the constant op1b
1743 also satisfies the other comparison against op2b. */
1744 if (code1 == EQ_EXPR)
1745 {
1746 bool done = true;
1747 bool val;
1748 switch (code2)
1749 {
1750 case EQ_EXPR: val = (cmp == 0); break;
1751 case NE_EXPR: val = (cmp != 0); break;
1752 case LT_EXPR: val = (cmp < 0); break;
1753 case GT_EXPR: val = (cmp > 0); break;
1754 case LE_EXPR: val = (cmp <= 0); break;
1755 case GE_EXPR: val = (cmp >= 0); break;
1756 default: done = false;
1757 }
1758 if (done)
1759 {
1760 if (val)
1761 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1762 else
1763 return boolean_false_node;
1764 }
1765 }
1766 /* Likewise if the second comparison is an == comparison. */
1767 else if (code2 == EQ_EXPR)
1768 {
1769 bool done = true;
1770 bool val;
1771 switch (code1)
1772 {
1773 case EQ_EXPR: val = (cmp == 0); break;
1774 case NE_EXPR: val = (cmp != 0); break;
1775 case LT_EXPR: val = (cmp > 0); break;
1776 case GT_EXPR: val = (cmp < 0); break;
1777 case LE_EXPR: val = (cmp >= 0); break;
1778 case GE_EXPR: val = (cmp <= 0); break;
1779 default: done = false;
1780 }
1781 if (done)
1782 {
1783 if (val)
1784 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1785 else
1786 return boolean_false_node;
1787 }
1788 }
1789
1790 /* Same business with inequality tests. */
1791 else if (code1 == NE_EXPR)
1792 {
1793 bool val;
1794 switch (code2)
1795 {
1796 case EQ_EXPR: val = (cmp != 0); break;
1797 case NE_EXPR: val = (cmp == 0); break;
1798 case LT_EXPR: val = (cmp >= 0); break;
1799 case GT_EXPR: val = (cmp <= 0); break;
1800 case LE_EXPR: val = (cmp > 0); break;
1801 case GE_EXPR: val = (cmp < 0); break;
1802 default:
1803 val = false;
1804 }
1805 if (val)
1806 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1807 }
1808 else if (code2 == NE_EXPR)
1809 {
1810 bool val;
1811 switch (code1)
1812 {
1813 case EQ_EXPR: val = (cmp == 0); break;
1814 case NE_EXPR: val = (cmp != 0); break;
1815 case LT_EXPR: val = (cmp <= 0); break;
1816 case GT_EXPR: val = (cmp >= 0); break;
1817 case LE_EXPR: val = (cmp < 0); break;
1818 case GE_EXPR: val = (cmp > 0); break;
1819 default:
1820 val = false;
1821 }
1822 if (val)
1823 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1824 }
1825
1826       /* Choose the more restrictive of two < or <= comparisons.  */
1827 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1828 && (code2 == LT_EXPR || code2 == LE_EXPR))
1829 {
1830 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1831 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1832 else
1833 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1834 }
1835
1836 /* Likewise choose the more restrictive of two > or >= comparisons. */
1837 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1838 && (code2 == GT_EXPR || code2 == GE_EXPR))
1839 {
1840 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1841 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1842 else
1843 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1844 }
1845
1846 /* Check for singleton ranges. */
1847 else if (cmp == 0
1848 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1849 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1850 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1851
1852 /* Check for disjoint ranges. */
1853 else if (cmp <= 0
1854 && (code1 == LT_EXPR || code1 == LE_EXPR)
1855 && (code2 == GT_EXPR || code2 == GE_EXPR))
1856 return boolean_false_node;
1857 else if (cmp >= 0
1858 && (code1 == GT_EXPR || code1 == GE_EXPR)
1859 && (code2 == LT_EXPR || code2 == LE_EXPR))
1860 return boolean_false_node;
1861 }
1862
1863 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1864 NAME's definition is a truth value. See if there are any simplifications
1865 that can be done against the NAME's definition. */
1866 if (TREE_CODE (op1a) == SSA_NAME
1867 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1868 && (integer_zerop (op1b) || integer_onep (op1b)))
1869 {
1870 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1871 || (code1 == NE_EXPR && integer_onep (op1b)));
1872 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1873 switch (gimple_code (stmt))
1874 {
1875 case GIMPLE_ASSIGN:
1876 /* Try to simplify by copy-propagating the definition. */
1877 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1878
1879 case GIMPLE_PHI:
1880 /* If every argument to the PHI produces the same result when
1881 ANDed with the second comparison, we win.
1882 Do not do this unless the type is bool since we need a bool
1883 result here anyway. */
1884 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1885 {
1886 tree result = NULL_TREE;
1887 unsigned i;
1888 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1889 {
1890 tree arg = gimple_phi_arg_def (stmt, i);
1891
1892 /* If this PHI has itself as an argument, ignore it.
1893 If all the other args produce the same result,
1894 we're still OK. */
1895 if (arg == gimple_phi_result (stmt))
1896 continue;
1897 else if (TREE_CODE (arg) == INTEGER_CST)
1898 {
1899 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1900 {
1901 if (!result)
1902 result = boolean_false_node;
1903 else if (!integer_zerop (result))
1904 return NULL_TREE;
1905 }
1906 else if (!result)
1907 result = fold_build2 (code2, boolean_type_node,
1908 op2a, op2b);
1909 else if (!same_bool_comparison_p (result,
1910 code2, op2a, op2b))
1911 return NULL_TREE;
1912 }
1913 else if (TREE_CODE (arg) == SSA_NAME
1914 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1915 {
1916 tree temp;
1917 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1918 /* In simple cases we can look through PHI nodes,
1919 but we have to be careful with loops.
1920 See PR49073. */
1921 if (! dom_info_available_p (CDI_DOMINATORS)
1922 || gimple_bb (def_stmt) == gimple_bb (stmt)
1923 || dominated_by_p (CDI_DOMINATORS,
1924 gimple_bb (def_stmt),
1925 gimple_bb (stmt)))
1926 return NULL_TREE;
1927 temp = and_var_with_comparison (arg, invert, code2,
1928 op2a, op2b);
1929 if (!temp)
1930 return NULL_TREE;
1931 else if (!result)
1932 result = temp;
1933 else if (!same_bool_result_p (result, temp))
1934 return NULL_TREE;
1935 }
1936 else
1937 return NULL_TREE;
1938 }
1939 return result;
1940 }
1941
1942 default:
1943 break;
1944 }
1945 }
1946 return NULL_TREE;
1947 }
1948
1949 /* Try to simplify the AND of two comparisons, specified by
1950 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1951 If this can be simplified to a single expression (without
1952 introducing more SSA variables to hold intermediate values),
1953 return the resulting tree. Otherwise return NULL_TREE.
1954 If the result expression is non-null, it has boolean type. */
1955
1956 tree
1957 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1958 enum tree_code code2, tree op2a, tree op2b)
1959 {
1960 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1961 if (t)
1962 return t;
1963 else
1964 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1965 }
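
/* Illustrative sketch, not part of the original file: one way a caller
   might use maybe_fold_and_comparisons to fold the conjunction
   (x > 0) && (x > 10) for an integer-typed tree X. The constant-merging
   logic in and_comparisons_1 keeps the more restrictive bound, so this
   is expected to return the tree for (x > 10), or NULL_TREE if nothing
   applies. The helper name is hypothetical. */
static tree ATTRIBUTE_UNUSED
example_fold_and_range (tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  tree ten = build_int_cst (TREE_TYPE (x), 10);
  /* Expected to fold to (x > 10), the intersection of the two ranges. */
  return maybe_fold_and_comparisons (GT_EXPR, x, zero,
                                     GT_EXPR, x, ten);
}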
1966
1967 /* Helper function for or_comparisons_1: try to simplify the OR of the
1968 SSA variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1969 If INVERT is true, invert the value of VAR before doing the OR.
1970 Return NULL_TREE if we can't simplify this to a single expression. */
1971
1972 static tree
1973 or_var_with_comparison (tree var, bool invert,
1974 enum tree_code code2, tree op2a, tree op2b)
1975 {
1976 tree t;
1977 gimple stmt = SSA_NAME_DEF_STMT (var);
1978
1979 /* We can only deal with variables whose definitions are assignments. */
1980 if (!is_gimple_assign (stmt))
1981 return NULL_TREE;
1982
1983 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1984 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1985 Then we only have to consider the simpler non-inverted cases. */
1986 if (invert)
1987 t = and_var_with_comparison_1 (stmt,
1988 invert_tree_comparison (code2, false),
1989 op2a, op2b);
1990 else
1991 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1992 return canonicalize_bool (t, invert);
1993 }
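
/* For illustration (values assumed, not from the original): with
   VAR = a, INVERT = true and the comparison (b < 5), the code above
   computes t = a AND (b >= 5), since invert_tree_comparison turns
   LT_EXPR into GE_EXPR; canonicalize_bool then negates t, giving
   !(a AND (b >= 5)), which by DeMorgan equals !a OR (b < 5). */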
1994
1995 /* Try to simplify the OR of the ssa variable defined by the assignment
1996 STMT with the comparison specified by (OP2A CODE2 OP2B).
1997 Return NULL_TREE if we can't simplify this to a single expression. */
1998
1999 static tree
2000 or_var_with_comparison_1 (gimple stmt,
2001 enum tree_code code2, tree op2a, tree op2b)
2002 {
2003 tree var = gimple_assign_lhs (stmt);
2004 tree true_test_var = NULL_TREE;
2005 tree false_test_var = NULL_TREE;
2006 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2007
2008 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
2009 if (TREE_CODE (op2a) == SSA_NAME
2010 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2011 {
2012 if ((code2 == NE_EXPR && integer_zerop (op2b))
2013 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2014 {
2015 true_test_var = op2a;
2016 if (var == true_test_var)
2017 return var;
2018 }
2019 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2020 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2021 {
2022 false_test_var = op2a;
2023 if (var == false_test_var)
2024 return boolean_true_node;
2025 }
2026 }
2027
2028 /* If the definition is a comparison, recurse on it. */
2029 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2030 {
2031 tree t = or_comparisons_1 (innercode,
2032 gimple_assign_rhs1 (stmt),
2033 gimple_assign_rhs2 (stmt),
2034 code2,
2035 op2a,
2036 op2b);
2037 if (t)
2038 return t;
2039 }
2040
2041 /* If the definition is an AND or OR expression, we may be able to
2042 simplify by reassociating. */
2043 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2044 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2045 {
2046 tree inner1 = gimple_assign_rhs1 (stmt);
2047 tree inner2 = gimple_assign_rhs2 (stmt);
2048 gimple s;
2049 tree t;
2050 tree partial = NULL_TREE;
2051 bool is_or = (innercode == BIT_IOR_EXPR);
2052
2053 /* Check for boolean identities that don't require recursive examination
2054 of inner1/inner2:
2055 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2056 inner1 OR (inner1 AND inner2) => inner1
2057 !inner1 OR (inner1 OR inner2) => true
2058 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2059 */
2060 if (inner1 == true_test_var)
2061 return (is_or ? var : inner1);
2062 else if (inner2 == true_test_var)
2063 return (is_or ? var : inner2);
2064 else if (inner1 == false_test_var)
2065 return (is_or
2066 ? boolean_true_node
2067 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2068 else if (inner2 == false_test_var)
2069 return (is_or
2070 ? boolean_true_node
2071 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2072
2073 /* Next, redistribute/reassociate the OR across the inner tests.
2074 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
2075 if (TREE_CODE (inner1) == SSA_NAME
2076 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2077 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2078 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2079 gimple_assign_rhs1 (s),
2080 gimple_assign_rhs2 (s),
2081 code2, op2a, op2b)))
2082 {
2083 /* Handle the OR case, where we are reassociating:
2084 (inner1 OR inner2) OR (op2a code2 op2b)
2085 => (t OR inner2)
2086 If the partial result t is a constant, we win. Otherwise
2087 continue on to try reassociating with the other inner test. */
2088 if (is_or)
2089 {
2090 if (integer_onep (t))
2091 return boolean_true_node;
2092 else if (integer_zerop (t))
2093 return inner2;
2094 }
2095
2096 /* Handle the AND case, where we are redistributing:
2097 (inner1 AND inner2) OR (op2a code2 op2b)
2098 => (t AND (inner2 OR (op2a code2 op2b))) */
2099 else if (integer_zerop (t))
2100 return boolean_false_node;
2101
2102 /* Save partial result for later. */
2103 partial = t;
2104 }
2105
2106 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
2107 if (TREE_CODE (inner2) == SSA_NAME
2108 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2109 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2110 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2111 gimple_assign_rhs1 (s),
2112 gimple_assign_rhs2 (s),
2113 code2, op2a, op2b)))
2114 {
2115 /* Handle the OR case, where we are reassociating:
2116 (inner1 OR inner2) OR (op2a code2 op2b)
2117 => (inner1 OR t)
2118 => (t OR partial) */
2119 if (is_or)
2120 {
2121 if (integer_zerop (t))
2122 return inner1;
2123 else if (integer_onep (t))
2124 return boolean_true_node;
2125 /* If both are the same, we can apply the identity
2126 (x OR x) == x. */
2127 else if (partial && same_bool_result_p (t, partial))
2128 return t;
2129 }
2130
2131 /* Handle the AND case, where we are redistributing:
2132 (inner1 AND inner2) OR (op2a code2 op2b)
2133 => (t AND (inner1 OR (op2a code2 op2b)))
2134 => (t AND partial) */
2135 else
2136 {
2137 if (integer_zerop (t))
2138 return boolean_false_node;
2139 else if (partial)
2140 {
2141 /* We already got a simplification for the other
2142 operand to the redistributed AND expression. The
2143 interesting case is when at least one is true.
2144 Or, if both are the same, we can apply the identity
2145 (x AND x) == x. */
2146 if (integer_onep (partial))
2147 return t;
2148 else if (integer_onep (t))
2149 return partial;
2150 else if (same_bool_result_p (t, partial))
2151 return t;
2152 }
2153 }
2154 }
2155 }
2156 return NULL_TREE;
2157 }
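
/* A worked example of the reassociation above (illustrative only): if
   VAR is defined as a | b with a = (x != 0), then folding
   VAR OR (x == 0) first computes the partial result
   t = (x != 0) OR (x == 0), which is true, so the whole expression
   folds to boolean_true_node without creating any new SSA names. */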
2158
2159 /* Try to simplify the OR of two comparisons defined by
2160 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2161 If this can be done without constructing an intermediate value,
2162 return the resulting tree; otherwise NULL_TREE is returned.
2163 This function is deliberately asymmetric as it recurses on SSA_DEFs
2164 in the first comparison but not the second. */
2165
2166 static tree
2167 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2168 enum tree_code code2, tree op2a, tree op2b)
2169 {
2170 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2171 if (operand_equal_p (op1a, op2a, 0)
2172 && operand_equal_p (op1b, op2b, 0))
2173 {
2174 /* Result will be either NULL_TREE, or a combined comparison. */
2175 tree t = combine_comparisons (UNKNOWN_LOCATION,
2176 TRUTH_ORIF_EXPR, code1, code2,
2177 boolean_type_node, op1a, op1b);
2178 if (t)
2179 return t;
2180 }
2181
2182 /* Likewise the swapped case of the above. */
2183 if (operand_equal_p (op1a, op2b, 0)
2184 && operand_equal_p (op1b, op2a, 0))
2185 {
2186 /* Result will be either NULL_TREE, or a combined comparison. */
2187 tree t = combine_comparisons (UNKNOWN_LOCATION,
2188 TRUTH_ORIF_EXPR, code1,
2189 swap_tree_comparison (code2),
2190 boolean_type_node, op1a, op1b);
2191 if (t)
2192 return t;
2193 }
2194
2195 /* If both comparisons test the same value against integer constants,
2196 we might be able to merge them. */
2197 if (operand_equal_p (op1a, op2a, 0)
2198 && TREE_CODE (op1b) == INTEGER_CST
2199 && TREE_CODE (op2b) == INTEGER_CST)
2200 {
2201 int cmp = tree_int_cst_compare (op1b, op2b);
2202
2203 /* If we have (op1a != op1b), we should either be able to
2204 return that or TRUE, depending on whether the constant op1b
2205 also satisfies the other comparison against op2b. */
2206 if (code1 == NE_EXPR)
2207 {
2208 bool done = true;
2209 bool val;
2210 switch (code2)
2211 {
2212 case EQ_EXPR: val = (cmp == 0); break;
2213 case NE_EXPR: val = (cmp != 0); break;
2214 case LT_EXPR: val = (cmp < 0); break;
2215 case GT_EXPR: val = (cmp > 0); break;
2216 case LE_EXPR: val = (cmp <= 0); break;
2217 case GE_EXPR: val = (cmp >= 0); break;
2218 default: done = false;
2219 }
2220 if (done)
2221 {
2222 if (val)
2223 return boolean_true_node;
2224 else
2225 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2226 }
2227 }
2228 /* Likewise if the second comparison is a != comparison. */
2229 else if (code2 == NE_EXPR)
2230 {
2231 bool done = true;
2232 bool val;
2233 switch (code1)
2234 {
2235 case EQ_EXPR: val = (cmp == 0); break;
2236 case NE_EXPR: val = (cmp != 0); break;
2237 case LT_EXPR: val = (cmp > 0); break;
2238 case GT_EXPR: val = (cmp < 0); break;
2239 case LE_EXPR: val = (cmp >= 0); break;
2240 case GE_EXPR: val = (cmp <= 0); break;
2241 default: done = false;
2242 }
2243 if (done)
2244 {
2245 if (val)
2246 return boolean_true_node;
2247 else
2248 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2249 }
2250 }
2251
2252 /* See if an equality test is redundant with the other comparison. */
2253 else if (code1 == EQ_EXPR)
2254 {
2255 bool val;
2256 switch (code2)
2257 {
2258 case EQ_EXPR: val = (cmp == 0); break;
2259 case NE_EXPR: val = (cmp != 0); break;
2260 case LT_EXPR: val = (cmp < 0); break;
2261 case GT_EXPR: val = (cmp > 0); break;
2262 case LE_EXPR: val = (cmp <= 0); break;
2263 case GE_EXPR: val = (cmp >= 0); break;
2264 default:
2265 val = false;
2266 }
2267 if (val)
2268 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2269 }
2270 else if (code2 == EQ_EXPR)
2271 {
2272 bool val;
2273 switch (code1)
2274 {
2275 case EQ_EXPR: val = (cmp == 0); break;
2276 case NE_EXPR: val = (cmp != 0); break;
2277 case LT_EXPR: val = (cmp > 0); break;
2278 case GT_EXPR: val = (cmp < 0); break;
2279 case LE_EXPR: val = (cmp >= 0); break;
2280 case GE_EXPR: val = (cmp <= 0); break;
2281 default:
2282 val = false;
2283 }
2284 if (val)
2285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2286 }
2287
2288 /* Choose the less restrictive of two < or <= comparisons. */
2289 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2290 && (code2 == LT_EXPR || code2 == LE_EXPR))
2291 {
2292 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2293 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2294 else
2295 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2296 }
2297
2298 /* Likewise choose the less restrictive of two > or >= comparisons. */
2299 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2300 && (code2 == GT_EXPR || code2 == GE_EXPR))
2301 {
2302 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2303 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2304 else
2305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2306 }
2307
2308 /* Check for singleton ranges. */
2309 else if (cmp == 0
2310 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2311 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2312 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2313
2314 /* Check for less/greater pairs that don't restrict the range at all. */
2315 else if (cmp >= 0
2316 && (code1 == LT_EXPR || code1 == LE_EXPR)
2317 && (code2 == GT_EXPR || code2 == GE_EXPR))
2318 return boolean_true_node;
2319 else if (cmp <= 0
2320 && (code1 == GT_EXPR || code1 == GE_EXPR)
2321 && (code2 == LT_EXPR || code2 == LE_EXPR))
2322 return boolean_true_node;
2323 }
2324
2325 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2326 NAME's definition is a truth value. See if there are any simplifications
2327 that can be done against the NAME's definition. */
2328 if (TREE_CODE (op1a) == SSA_NAME
2329 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2330 && (integer_zerop (op1b) || integer_onep (op1b)))
2331 {
2332 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2333 || (code1 == NE_EXPR && integer_onep (op1b)));
2334 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2335 switch (gimple_code (stmt))
2336 {
2337 case GIMPLE_ASSIGN:
2338 /* Try to simplify by copy-propagating the definition. */
2339 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2340
2341 case GIMPLE_PHI:
2342 /* If every argument to the PHI produces the same result when
2343 ORed with the second comparison, we win.
2344 Do not do this unless the type is bool since we need a bool
2345 result here anyway. */
2346 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2347 {
2348 tree result = NULL_TREE;
2349 unsigned i;
2350 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2351 {
2352 tree arg = gimple_phi_arg_def (stmt, i);
2353
2354 /* If this PHI has itself as an argument, ignore it.
2355 If all the other args produce the same result,
2356 we're still OK. */
2357 if (arg == gimple_phi_result (stmt))
2358 continue;
2359 else if (TREE_CODE (arg) == INTEGER_CST)
2360 {
2361 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2362 {
2363 if (!result)
2364 result = boolean_true_node;
2365 else if (!integer_onep (result))
2366 return NULL_TREE;
2367 }
2368 else if (!result)
2369 result = fold_build2 (code2, boolean_type_node,
2370 op2a, op2b);
2371 else if (!same_bool_comparison_p (result,
2372 code2, op2a, op2b))
2373 return NULL_TREE;
2374 }
2375 else if (TREE_CODE (arg) == SSA_NAME
2376 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2377 {
2378 tree temp;
2379 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2380 /* In simple cases we can look through PHI nodes,
2381 but we have to be careful with loops.
2382 See PR49073. */
2383 if (! dom_info_available_p (CDI_DOMINATORS)
2384 || gimple_bb (def_stmt) == gimple_bb (stmt)
2385 || dominated_by_p (CDI_DOMINATORS,
2386 gimple_bb (def_stmt),
2387 gimple_bb (stmt)))
2388 return NULL_TREE;
2389 temp = or_var_with_comparison (arg, invert, code2,
2390 op2a, op2b);
2391 if (!temp)
2392 return NULL_TREE;
2393 else if (!result)
2394 result = temp;
2395 else if (!same_bool_result_p (result, temp))
2396 return NULL_TREE;
2397 }
2398 else
2399 return NULL_TREE;
2400 }
2401 return result;
2402 }
2403
2404 default:
2405 break;
2406 }
2407 }
2408 return NULL_TREE;
2409 }
2410
2411 /* Try to simplify the OR of two comparisons, specified by
2412 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2413 If this can be simplified to a single expression (without
2414 introducing more SSA variables to hold intermediate values),
2415 return the resulting tree. Otherwise return NULL_TREE.
2416 If the result expression is non-null, it has boolean type. */
2417
2418 tree
2419 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2420 enum tree_code code2, tree op2a, tree op2b)
2421 {
2422 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2423 if (t)
2424 return t;
2425 else
2426 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2427 }
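
/* For illustration (example constants assumed): or_comparisons_1
   recognizes that (x <= 3) OR (x >= 2) does not restrict the range at
   all, because tree_int_cst_compare (3, 2) >= 0 for the <= / >= pair,
   so maybe_fold_or_comparisons returns boolean_true_node. */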
2428
2429
2430 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2431
2432 Either NULL_TREE, a simplified but non-constant expression,
2433 or a constant is returned.
2434
2435 ??? This should go into a gimple-fold-inline.h file to be eventually
2436 privatized with the single valueize function used in the various TUs
2437 to avoid the indirect function call overhead. */
2438
2439 tree
2440 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2441 {
2442 location_t loc = gimple_location (stmt);
2443 switch (gimple_code (stmt))
2444 {
2445 case GIMPLE_ASSIGN:
2446 {
2447 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2448
2449 switch (get_gimple_rhs_class (subcode))
2450 {
2451 case GIMPLE_SINGLE_RHS:
2452 {
2453 tree rhs = gimple_assign_rhs1 (stmt);
2454 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2455
2456 if (TREE_CODE (rhs) == SSA_NAME)
2457 {
2458 /* If the RHS is an SSA_NAME, return its known constant value,
2459 if any. */
2460 return (*valueize) (rhs);
2461 }
2462 /* Handle propagating invariant addresses into address
2463 operations. */
2464 else if (TREE_CODE (rhs) == ADDR_EXPR
2465 && !is_gimple_min_invariant (rhs))
2466 {
2467 HOST_WIDE_INT offset;
2468 tree base;
2469 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2470 &offset,
2471 valueize);
2472 if (base
2473 && (CONSTANT_CLASS_P (base)
2474 || decl_address_invariant_p (base)))
2475 return build_invariant_address (TREE_TYPE (rhs),
2476 base, offset);
2477 }
2478 else if (TREE_CODE (rhs) == CONSTRUCTOR
2479 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2480 && (CONSTRUCTOR_NELTS (rhs)
2481 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2482 {
2483 unsigned i;
2484 tree val, list;
2485
2486 list = NULL_TREE;
2487 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2488 {
2489 val = (*valueize) (val);
2490 if (TREE_CODE (val) == INTEGER_CST
2491 || TREE_CODE (val) == REAL_CST
2492 || TREE_CODE (val) == FIXED_CST)
2493 list = tree_cons (NULL_TREE, val, list);
2494 else
2495 return NULL_TREE;
2496 }
2497
2498 return build_vector (TREE_TYPE (rhs), nreverse (list));
2499 }
2500
2501 if (kind == tcc_reference)
2502 {
2503 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2504 || TREE_CODE (rhs) == REALPART_EXPR
2505 || TREE_CODE (rhs) == IMAGPART_EXPR)
2506 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2507 {
2508 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2509 return fold_unary_loc (EXPR_LOCATION (rhs),
2510 TREE_CODE (rhs),
2511 TREE_TYPE (rhs), val);
2512 }
2513 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2514 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2515 {
2516 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2517 return fold_ternary_loc (EXPR_LOCATION (rhs),
2518 TREE_CODE (rhs),
2519 TREE_TYPE (rhs), val,
2520 TREE_OPERAND (rhs, 1),
2521 TREE_OPERAND (rhs, 2));
2522 }
2523 else if (TREE_CODE (rhs) == MEM_REF
2524 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2525 {
2526 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2527 if (TREE_CODE (val) == ADDR_EXPR
2528 && is_gimple_min_invariant (val))
2529 {
2530 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2531 unshare_expr (val),
2532 TREE_OPERAND (rhs, 1));
2533 if (tem)
2534 rhs = tem;
2535 }
2536 }
2537 return fold_const_aggregate_ref_1 (rhs, valueize);
2538 }
2539 else if (kind == tcc_declaration)
2540 return get_symbol_constant_value (rhs);
2541 return rhs;
2542 }
2543
2544 case GIMPLE_UNARY_RHS:
2545 {
2546 /* Handle unary operators that can appear in GIMPLE form.
2547 Note that we know the single operand must be a constant,
2548 so this should almost always return a simplified RHS. */
2549 tree lhs = gimple_assign_lhs (stmt);
2550 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2551
2552 /* Conversions are useless for CCP purposes if they are
2553 value-preserving. Thus the restrictions that
2554 useless_type_conversion_p places for restrict qualification
2555 of pointer types should not apply here.
2556 Substitution later will only substitute to allowed places. */
2557 if (CONVERT_EXPR_CODE_P (subcode)
2558 && POINTER_TYPE_P (TREE_TYPE (lhs))
2559 && POINTER_TYPE_P (TREE_TYPE (op0))
2560 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2561 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2562 return op0;
2563
2564 return
2565 fold_unary_ignore_overflow_loc (loc, subcode,
2566 gimple_expr_type (stmt), op0);
2567 }
2568
2569 case GIMPLE_BINARY_RHS:
2570 {
2571 /* Handle binary operators that can appear in GIMPLE form. */
2572 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2573 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2574
2575 /* Translate &x + CST into an invariant form suitable for
2576 further propagation. */
2577 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2578 && TREE_CODE (op0) == ADDR_EXPR
2579 && TREE_CODE (op1) == INTEGER_CST)
2580 {
2581 tree off = fold_convert (ptr_type_node, op1);
2582 return build_fold_addr_expr_loc
2583 (loc,
2584 fold_build2 (MEM_REF,
2585 TREE_TYPE (TREE_TYPE (op0)),
2586 unshare_expr (op0), off));
2587 }
2588
2589 return fold_binary_loc (loc, subcode,
2590 gimple_expr_type (stmt), op0, op1);
2591 }
2592
2593 case GIMPLE_TERNARY_RHS:
2594 {
2595 /* Handle ternary operators that can appear in GIMPLE form. */
2596 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2597 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2598 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2599
2600 return fold_ternary_loc (loc, subcode,
2601 gimple_expr_type (stmt), op0, op1, op2);
2602 }
2603
2604 default:
2605 gcc_unreachable ();
2606 }
2607 }
2608
2609 case GIMPLE_CALL:
2610 {
2611 tree fn;
2612
2613 if (gimple_call_internal_p (stmt))
2614 /* No folding yet for these functions. */
2615 return NULL_TREE;
2616
2617 fn = (*valueize) (gimple_call_fn (stmt));
2618 if (TREE_CODE (fn) == ADDR_EXPR
2619 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2620 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2621 {
2622 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2623 tree call, retval;
2624 unsigned i;
2625 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2626 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2627 call = build_call_array_loc (loc,
2628 gimple_call_return_type (stmt),
2629 fn, gimple_call_num_args (stmt), args);
2630 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2631 if (retval)
2632 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2633 STRIP_NOPS (retval);
2634 return retval;
2635 }
2636 return NULL_TREE;
2637 }
2638
2639 default:
2640 return NULL_TREE;
2641 }
2642 }
2643
2644 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2645 Returns NULL_TREE if folding to a constant is not possible, otherwise
2646 returns a constant according to is_gimple_min_invariant. */
2647
2648 tree
2649 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2650 {
2651 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2652 if (res && is_gimple_min_invariant (res))
2653 return res;
2654 return NULL_TREE;
2655 }
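
/* Illustrative sketch (the helper below is hypothetical, not part of
   the original file): the simplest VALUEIZE callback performs no
   substitution and returns the SSA name it is handed unchanged, so
   only statements whose operands are already constants will fold. */
static tree ATTRIBUTE_UNUSED
example_identity_valueize (tree name)
{
  return name;
}

/* A caller would then write, for some statement STMT:
     tree cst = gimple_fold_stmt_to_constant (stmt,
                                              example_identity_valueize);
   and use CST only when it is non-NULL. */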
2656
2657
2658 /* The following set of functions are supposed to fold references using
2659 their constant initializers. */
2660
2661 static tree fold_ctor_reference (tree type, tree ctor,
2662 unsigned HOST_WIDE_INT offset,
2663 unsigned HOST_WIDE_INT size);
2664
2665 /* See if we can find a constructor defining the value of BASE.
2666 When we know the constructor is at a constant offset (such as
2667 when BASE is array[40] and we know the constructor of the array),
2668 BIT_OFFSET is adjusted accordingly.
2669
2670 As a special case, return error_mark_node when the constructor
2671 is not explicitly available but is known to be zero,
2672 such as for 'static const int a;'. */
2673 static tree
2674 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2675 tree (*valueize)(tree))
2676 {
2677 HOST_WIDE_INT bit_offset2, size, max_size;
2678 if (TREE_CODE (base) == MEM_REF)
2679 {
2680 if (!integer_zerop (TREE_OPERAND (base, 1)))
2681 {
2682 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2683 return NULL_TREE;
2684 *bit_offset += (mem_ref_offset (base).low
2685 * BITS_PER_UNIT);
2686 }
2687
2688 if (valueize
2689 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2690 base = valueize (TREE_OPERAND (base, 0));
2691 if (!base || TREE_CODE (base) != ADDR_EXPR)
2692 return NULL_TREE;
2693 base = TREE_OPERAND (base, 0);
2694 }
2695
2696 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2697 DECL_INITIAL. If BASE is a nested reference into another
2698 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2699 the inner reference. */
2700 switch (TREE_CODE (base))
2701 {
2702 case VAR_DECL:
2703 if (!const_value_known_p (base))
2704 return NULL_TREE;
2705
2706 /* Fallthru. */
2707 case CONST_DECL:
2708 if (!DECL_INITIAL (base)
2709 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2710 return error_mark_node;
2711 return DECL_INITIAL (base);
2712
2713 case ARRAY_REF:
2714 case COMPONENT_REF:
2715 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2716 if (max_size == -1 || size != max_size)
2717 return NULL_TREE;
2718 *bit_offset += bit_offset2;
2719 return get_base_constructor (base, bit_offset, valueize);
2720
2721 case STRING_CST:
2722 case CONSTRUCTOR:
2723 return base;
2724
2725 default:
2726 return NULL_TREE;
2727 }
2728 }
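
/* A concrete illustration (assumed example): given
     static const int a[2] = { 1, 2 };
   and BASE of the form MEM_REF [&a, 4], the code above adds
   4 * BITS_PER_UNIT to *BIT_OFFSET, looks through the ADDR_EXPR, and
   returns DECL_INITIAL of 'a', i.e. the { 1, 2 } CONSTRUCTOR. */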
2729
2730 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2731 to the memory at bit OFFSET.
2732
2733 We only do the simple job of folding byte accesses. */
2734
2735 static tree
2736 fold_string_cst_ctor_reference (tree type, tree ctor,
2737 unsigned HOST_WIDE_INT offset,
2738 unsigned HOST_WIDE_INT size)
2739 {
2740 if (INTEGRAL_TYPE_P (type)
2741 && (TYPE_MODE (type)
2742 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2743 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2744 == MODE_INT)
2745 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2746 && size == BITS_PER_UNIT
2747 && !(offset % BITS_PER_UNIT))
2748 {
2749 offset /= BITS_PER_UNIT;
2750 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2751 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2752 [offset]));
2753 /* Folding
2754 const char a[20]="hello";
2755 return a[10];
2756
2757 might lead to an offset greater than the string length. In this case
2758 we know the value is either initialized to 0 or out of bounds. Return
2759 0 in both cases. */
2760 return build_zero_cst (type);
2761 }
2762 return NULL_TREE;
2763 }
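
/* Two worked examples (illustrative): for
     const char a[20] = "hello";
   a one-byte read at bit OFFSET 8 (the position of a[1]) folds to the
   character constant 'e', while a read of a[10] lies beyond
   TREE_STRING_LENGTH and folds to 0, matching the zero-filled tail. */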
2764
2765 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
2766 and size SIZE to the memory at bit OFFSET. */
2767
2768 static tree
2769 fold_array_ctor_reference (tree type, tree ctor,
2770 unsigned HOST_WIDE_INT offset,
2771 unsigned HOST_WIDE_INT size)
2772 {
2773 unsigned HOST_WIDE_INT cnt;
2774 tree cfield, cval;
2775 double_int low_bound, elt_size;
2776 double_int index, max_index;
2777 double_int access_index;
2778 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2779 HOST_WIDE_INT inner_offset;
2780
2781 /* Compute low bound and elt size. */
2782 if (domain_type && TYPE_MIN_VALUE (domain_type))
2783 {
2784 /* Static constructors for variably sized objects make no sense. */
2785 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2786 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2787 }
2788 else
2789 low_bound = double_int_zero;
2790 /* Static constructors for variably sized objects make no sense. */
2791 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2792 == INTEGER_CST);
2793 elt_size =
2794 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2795
2796
2797 /* We can handle only constantly sized accesses that are known not to
2798 be larger than the size of the array element. */
2799 if (!TYPE_SIZE_UNIT (type)
2800 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2801 || double_int_cmp (elt_size,
2802 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2803 return NULL_TREE;
2804
2805 /* Compute the array index we look for. */
2806 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2807 elt_size, TRUNC_DIV_EXPR);
2808 access_index = double_int_add (access_index, low_bound);
2809
2810 /* And offset within the access. */
2811 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2812
2813 /* See if the array field is large enough to span the whole access. We
2814 do not care to fold accesses spanning multiple array indexes. */
2815 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2816 return NULL_TREE;
2817
2818 index = double_int_sub (low_bound, double_int_one);
2819 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2820 {
2821 /* An array constructor might explicitly set the index, specify a range,
2822 or leave the index NULL, meaning that it is the next index after
2823 the previous one. */
2824 if (cfield)
2825 {
2826 if (TREE_CODE (cfield) == INTEGER_CST)
2827 max_index = index = tree_to_double_int (cfield);
2828 else
2829 {
2830 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2831 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2832 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2833 }
2834 }
2835 else
2836 max_index = index = double_int_add (index, double_int_one);
2837
2838 /* Do we have a match? */
2839 if (double_int_cmp (access_index, index, 1) >= 0
2840 && double_int_cmp (access_index, max_index, 1) <= 0)
2841 return fold_ctor_reference (type, cval, inner_offset, size);
2842 }
2843 /* When the memory is not explicitly mentioned in the constructor,
2844 it is 0 (or out of range). */
2845 return build_zero_cst (type);
2846 }
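
/* Illustrative arithmetic for the index computation above (sizes
   assumed): for
     static const int a[4] = { 10, 20, 30, 40 };
   with 4-byte elements and OFFSET = 96 bits, access_index is
   (96 / BITS_PER_UNIT) / 4 = 3 and inner_offset is 0, so the walk over
   the constructor elements yields the INTEGER_CST 40. */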
2847
2848 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2849 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2850
2851 static tree
2852 fold_nonarray_ctor_reference (tree type, tree ctor,
2853 unsigned HOST_WIDE_INT offset,
2854 unsigned HOST_WIDE_INT size)
2855 {
2856 unsigned HOST_WIDE_INT cnt;
2857 tree cfield, cval;
2858
2859 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2860 cval)
2861 {
2862 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2863 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2864 tree field_size = DECL_SIZE (cfield);
2865 double_int bitoffset;
2866 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2867 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2868 double_int bitoffset_end, access_end;
2869
2870 /* Variable-sized objects in static constructors make no sense,
2871 but field_size can be NULL for flexible array members. */
2872 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2873 && TREE_CODE (byte_offset) == INTEGER_CST
2874 && (field_size != NULL_TREE
2875 ? TREE_CODE (field_size) == INTEGER_CST
2876 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2877
2878 /* Compute bit offset of the field. */
2879 bitoffset = double_int_add (tree_to_double_int (field_offset),
2880 double_int_mul (byte_offset_cst,
2881 bits_per_unit_cst));
2882 /* Compute bit offset where the field ends. */
2883 if (field_size != NULL_TREE)
2884 bitoffset_end = double_int_add (bitoffset,
2885 tree_to_double_int (field_size));
2886 else
2887 bitoffset_end = double_int_zero;
2888
2889 access_end = double_int_add (uhwi_to_double_int (offset),
2890 uhwi_to_double_int (size));
2891
2892 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2893 [BITOFFSET, BITOFFSET_END)? */
2894 if (double_int_cmp (access_end, bitoffset, 0) > 0
2895 && (field_size == NULL_TREE
2896 || double_int_cmp (uhwi_to_double_int (offset),
2897 bitoffset_end, 0) < 0))
2898 {
2899 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2900 bitoffset);
2901 /* We do have overlap. Now see if the field is large enough to
2902 cover the access. Give up on accesses spanning multiple
2903 fields. */
2904 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2905 return NULL_TREE;
2906 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2907 return NULL_TREE;
2908 return fold_ctor_reference (type, cval,
2909 double_int_to_uhwi (inner_offset), size);
2910 }
2911 }
2912 /* When the memory is not explicitly mentioned in the constructor, it is 0. */
2913 return build_zero_cst (type);
2914 }
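
/* A worked overlap check (illustrative, assuming a typical layout):
   for struct { char c; int i; }, the field 'i' usually sits at bit
   offset 32 with size 32, so an access with OFFSET = 32 and SIZE = 32
   passes both range tests above and recurses into the value of 'i'
   with inner_offset 0. */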
2915
2916 /* CTOR is a value initializing memory. Fold a reference of type TYPE
2917 and size SIZE to the memory at bit OFFSET. */
2918
2919 static tree
2920 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2921 unsigned HOST_WIDE_INT size)
2922 {
2923 tree ret;
2924
2925 /* We found the field with an exact match. */
2926 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2927 && !offset)
2928 return canonicalize_constructor_val (ctor);
2929
2930 /* We are at the end of the walk; see if we can view-convert the
2931 result. */
2932 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2933 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2934 && operand_equal_p (TYPE_SIZE (type),
2935 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2936 {
2937 ret = canonicalize_constructor_val (ctor);
2938 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2939 if (ret)
2940 STRIP_NOPS (ret);
2941 return ret;
2942 }
2943 if (TREE_CODE (ctor) == STRING_CST)
2944 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2945 if (TREE_CODE (ctor) == CONSTRUCTOR)
2946 {
2948 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2949 return fold_array_ctor_reference (type, ctor, offset, size);
2950 else
2951 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2952 }
2953
2954 return NULL_TREE;
2955 }
2956
2957 /* Return the tree representing the element referenced by T if T is an
2958 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2959 names using VALUEIZE. Return NULL_TREE otherwise. */
2960
2961 tree
2962 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2963 {
2964 tree ctor, idx, base;
2965 HOST_WIDE_INT offset, size, max_size;
2966 tree tem;
2967
2968 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2969 return get_symbol_constant_value (t);
2970
2971 tem = fold_read_from_constant_string (t);
2972 if (tem)
2973 return tem;
2974
2975 switch (TREE_CODE (t))
2976 {
2977 case ARRAY_REF:
2978 case ARRAY_RANGE_REF:
2979 /* Constant indexes are handled well by get_base_constructor.
2980 Only special case variable offsets.
2981 FIXME: This code can't handle nested references with variable indexes
2982 (they will be handled only by iteration of ccp). Perhaps we can bring
2983 get_ref_base_and_extent here and make it use a valueize callback. */
2984 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2985 && valueize
2986 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2987 && host_integerp (idx, 0))
2988 {
2989 tree low_bound, unit_size;
2990
2991 /* If the resulting bit-offset is constant, track it. */
2992 if ((low_bound = array_ref_low_bound (t),
2993 host_integerp (low_bound, 0))
2994 && (unit_size = array_ref_element_size (t),
2995 host_integerp (unit_size, 1)))
2996 {
2997 offset = TREE_INT_CST_LOW (idx);
2998 offset -= TREE_INT_CST_LOW (low_bound);
2999 offset *= TREE_INT_CST_LOW (unit_size);
3000 offset *= BITS_PER_UNIT;
3001
3002 base = TREE_OPERAND (t, 0);
3003 ctor = get_base_constructor (base, &offset, valueize);
3004 /* Empty constructor. Always fold to 0. */
3005 if (ctor == error_mark_node)
3006 return build_zero_cst (TREE_TYPE (t));
3007 /* Out-of-bounds array access. The value is undefined,
3008 but don't fold. */
3009 if (offset < 0)
3010 return NULL_TREE;
3011 /* We cannot determine the constructor. */
3012 if (!ctor)
3013 return NULL_TREE;
3014 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3015 TREE_INT_CST_LOW (unit_size)
3016 * BITS_PER_UNIT);
3017 }
3018 }
3019 /* Fallthru. */
3020
3021 case COMPONENT_REF:
3022 case BIT_FIELD_REF:
3023 case TARGET_MEM_REF:
3024 case MEM_REF:
3025 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3026 ctor = get_base_constructor (base, &offset, valueize);
3027
3028 /* Empty constructor. Always fold to 0. */
3029 if (ctor == error_mark_node)
3030 return build_zero_cst (TREE_TYPE (t));
3031 /* We do not know the precise address. */
3032 if (max_size == -1 || max_size != size)
3033 return NULL_TREE;
3034 /* We cannot determine the constructor. */
3035 if (!ctor)
3036 return NULL_TREE;
3037
3038 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3039 if (offset < 0)
3040 return NULL_TREE;
3041
3042 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3043
3044 case REALPART_EXPR:
3045 case IMAGPART_EXPR:
3046 {
3047 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3048 if (c && TREE_CODE (c) == COMPLEX_CST)
3049 return fold_build1_loc (EXPR_LOCATION (t),
3050 TREE_CODE (t), TREE_TYPE (t), c);
3051 break;
3052 }
3053
3054 default:
3055 break;
3056 }
3057
3058 return NULL_TREE;
3059 }
3060
3061 tree
3062 fold_const_aggregate_ref (tree t)
3063 {
3064 return fold_const_aggregate_ref_1 (t, NULL);
3065 }
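
/* End-to-end illustration (assumed example): given
     static const int a[3] = { 10, 20, 30 };
   a load from a[2] has get_ref_base_and_extent yield offset 64 and
   size 32 for a 4-byte int, get_base_constructor returns the
   { 10, 20, 30 } CONSTRUCTOR, and fold_ctor_reference folds the load
   to the INTEGER_CST 30. */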
3066
3067 /* Return true iff VAL is a gimple expression that is known to be
3068 non-negative. Restricted to floating-point inputs. */
3069
3070 bool
3071 gimple_val_nonnegative_real_p (tree val)
3072 {
3073 gimple def_stmt;
3074
3075 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3076
3077 /* Use existing logic for non-gimple trees. */
3078 if (tree_expr_nonnegative_p (val))
3079 return true;
3080
3081 if (TREE_CODE (val) != SSA_NAME)
3082 return false;
3083
3084 /* Currently we look only at the immediately defining statement
3085 to make this determination, since recursion on defining
3086 statements of operands can lead to quadratic behavior in the
3087 worst case. This is expected to catch almost all occurrences
3088 in practice. It would be possible to implement limited-depth
3089 recursion if important cases are lost. Alternatively, passes
3090 that need this information (such as the pow/powi lowering code
3091 in the cse_sincos pass) could be revised to provide it through
3092 dataflow propagation. */
3093
3094 def_stmt = SSA_NAME_DEF_STMT (val);
3095
3096 if (is_gimple_assign (def_stmt))
3097 {
3098 tree op0, op1;
3099
3100 /* See fold-const.c:tree_expr_nonnegative_p for additional
3101 cases that could be handled with recursion. */
3102
3103 switch (gimple_assign_rhs_code (def_stmt))
3104 {
3105 case ABS_EXPR:
3106 /* Always true for floating-point operands. */
3107 return true;
3108
3109 case MULT_EXPR:
3110 /* True if the two operands are identical (since we are
3111 restricted to floating-point inputs). */
3112 op0 = gimple_assign_rhs1 (def_stmt);
3113 op1 = gimple_assign_rhs2 (def_stmt);
3114
3115 if (op0 == op1
3116 || operand_equal_p (op0, op1, 0))
3117 return true;
3118
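/* If the operands are not known to be identical, fall through. */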
3119 default:
3120 return false;
3121 }
3122 }
3123 else if (is_gimple_call (def_stmt))
3124 {
3125 tree fndecl = gimple_call_fndecl (def_stmt);
3126 if (fndecl
3127 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3128 {
3129 tree arg1;
3130
3131 switch (DECL_FUNCTION_CODE (fndecl))
3132 {
3133 CASE_FLT_FN (BUILT_IN_ACOS):
3134 CASE_FLT_FN (BUILT_IN_ACOSH):
3135 CASE_FLT_FN (BUILT_IN_CABS):
3136 CASE_FLT_FN (BUILT_IN_COSH):
3137 CASE_FLT_FN (BUILT_IN_ERFC):
3138 CASE_FLT_FN (BUILT_IN_EXP):
3139 CASE_FLT_FN (BUILT_IN_EXP10):
3140 CASE_FLT_FN (BUILT_IN_EXP2):
3141 CASE_FLT_FN (BUILT_IN_FABS):
3142 CASE_FLT_FN (BUILT_IN_FDIM):
3143 CASE_FLT_FN (BUILT_IN_HYPOT):
3144 CASE_FLT_FN (BUILT_IN_POW10):
3145 return true;
3146
3147 CASE_FLT_FN (BUILT_IN_SQRT):
3148 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3149 negative inputs. */
3150 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3151 return true;
3152
3153 break;
3154
3155 CASE_FLT_FN (BUILT_IN_POWI):
3156 /* True if the second argument is an even integer. */
3157 arg1 = gimple_call_arg (def_stmt, 1);
3158
3159 if (TREE_CODE (arg1) == INTEGER_CST
3160 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3161 return true;
3162
3163 break;
3164
3165 CASE_FLT_FN (BUILT_IN_POW):
3166 /* True if the second argument is an even integer-valued
3167 real. */
3168 arg1 = gimple_call_arg (def_stmt, 1);
3169
3170 if (TREE_CODE (arg1) == REAL_CST)
3171 {
3172 REAL_VALUE_TYPE c;
3173 HOST_WIDE_INT n;
3174
3175 c = TREE_REAL_CST (arg1);
3176 n = real_to_integer (&c);
3177
3178 if ((n & 1) == 0)
3179 {
3180 REAL_VALUE_TYPE cint;
3181 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3182 if (real_identical (&c, &cint))
3183 return true;
3184 }
3185 }
3186
3187 break;
3188
3189 default:
3190 return false;
3191 }
3192 }
3193 }
3194
3195 return false;
3196 }