re PR tree-optimization/55253 (Revision 193298 miscompiles sqlite with -Os)
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
33
34 /* Return true when DECL can be referenced from the current unit.
35 FROM_DECL (if non-null) specifies the variable whose constructor DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
38
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to another compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to a reference
46 to a method that was partitioned elsewhere.
47 In this case we have a static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred to by external vtables that
51 we devirtualize only during the final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
55
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
58 {
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61 symtab_node snode;
62
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from the initializer of
65 an external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
71 return true;
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
76 /* Weakrefs have a somewhat confusing DECL_EXTERNAL flag set; they
77 are always safe. */
78 if (DECL_EXTERNAL (decl)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
80 return true;
81 /* We are folding a reference from an external vtable.  The vtable may refer
82 to a symbol keyed to another compilation unit.  The other compilation
83 unit may be in a separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl)
85 && DECL_EXTERNAL (decl)
86 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
87 return false;
88 /* When a function is public, we can always introduce a new reference.
89 The exception is COMDAT functions, where introducing a direct
90 reference implies the need to include the function body in the current unit. */
91 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
92 return true;
93 /* We are not at the ltrans stage, so don't worry about WHOPR.
94 Also, while we are still gimplifying, all referred comdat functions will
95 be produced.
96
97 As observed in PR20991, for comdat virtual functions that were already
98 optimized out it may be tempting not to give up, because the copy will be
99 output elsewhere when the corresponding vtable is output.
100 This is, however, not possible - the ABI specifies that COMDATs are output
101 in the units where they are used, and when the other unit was compiled with
102 LTO it is possible that the vtable was kept public while the function itself
103 was privatized. */
104 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
105 return true;
106
107 /* OK, we are seeing either a COMDAT or a static variable.  In this case we
108 must check that the definition is still around so we can refer to it. */
109 if (TREE_CODE (decl) == FUNCTION_DECL)
110 {
111 node = cgraph_get_node (decl);
112 /* Check that we still have the function body and that we haven't yet
113 decided to eliminate the offline copy of the function.
114 The second point is important when devirtualization happens during the
115 final compilation stage, where making a new reference no longer causes
116 the callee to be compiled. */
117 if (!node || !node->analyzed || node->global.inlined_to)
118 {
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
120 return false;
121 }
122 }
123 else if (TREE_CODE (decl) == VAR_DECL)
124 {
125 vnode = varpool_get_node (decl);
126 if (!vnode || !vnode->analyzed)
127 {
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
129 return false;
130 }
131 }
132 return true;
133 }
134
135 /* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to transform
136 it into a form acceptable to is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
138
139 tree
140 canonicalize_constructor_val (tree cval, tree from_decl)
141 {
142 tree orig_cval = cval;
143 STRIP_NOPS (cval);
144 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
146 {
147 tree ptr = TREE_OPERAND (cval, 0);
148 if (is_gimple_min_invariant (ptr))
149 cval = build1_loc (EXPR_LOCATION (cval),
150 ADDR_EXPR, TREE_TYPE (ptr),
151 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
152 ptr,
153 fold_convert (ptr_type_node,
154 TREE_OPERAND (cval, 1))));
155 }
156 if (TREE_CODE (cval) == ADDR_EXPR)
157 {
158 tree base = NULL_TREE;
159 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
160 {
161 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
162 if (base)
163 TREE_OPERAND (cval, 0) = base;
164 }
165 else
166 base = get_base_address (TREE_OPERAND (cval, 0));
167 if (!base)
168 return NULL_TREE;
169
170 if ((TREE_CODE (base) == VAR_DECL
171 || TREE_CODE (base) == FUNCTION_DECL)
172 && !can_refer_decl_in_current_unit_p (base, from_decl))
173 return NULL_TREE;
174 if (TREE_CODE (base) == VAR_DECL)
175 TREE_ADDRESSABLE (base) = 1;
176 else if (TREE_CODE (base) == FUNCTION_DECL)
177 {
178 /* Make sure we create a cgraph node for functions we'll reference.
179 They can be non-existent if the reference comes from an entry
180 of an external vtable for example. */
181 cgraph_get_create_node (base);
182 }
183 /* Fixup types in global initializers. */
184 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
185 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
186
187 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
188 cval = fold_convert (TREE_TYPE (orig_cval), cval);
189 return cval;
190 }
191 return orig_cval;
192 }
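
/* Illustrative example of the canonicalization above: a DECL_INITIAL
   value of the form

     &var + 4          (a POINTER_PLUS_EXPR with invariant pointer)

   is rewritten as the equivalent

     &MEM[&var + 4]    (an ADDR_EXPR of a MEM_REF)

   which is acceptable to is_gimple_min_invariant; the referenced decl
   is then vetted with can_refer_decl_in_current_unit_p.  */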
193
194 /* If SYM is a constant variable with known value, return the value.
195 NULL_TREE is returned otherwise. */
196
197 tree
198 get_symbol_constant_value (tree sym)
199 {
200 if (const_value_known_p (sym))
201 {
202 tree val = DECL_INITIAL (sym);
203 if (val)
204 {
205 val = canonicalize_constructor_val (val, sym);
206 if (val && is_gimple_min_invariant (val))
207 return val;
208 else
209 return NULL_TREE;
210 }
211 /* Variables declared 'const' without an initializer
212 have zero as the initializer if they may not be
213 overridden at link or run time. */
214 if (!val
215 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
216 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
217 return build_zero_cst (TREE_TYPE (sym));
218 }
219
220 return NULL_TREE;
221 }
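
/* For example, a load from

     static const int answer = 42;

   yields the constant 42 here, while a 'const'-qualified integral or
   floating-point variable without an initializer that cannot be
   overridden at link or run time yields build_zero_cst of its type.  */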
222
223
224
225 /* Subroutine of fold_stmt. We perform several simplifications of the
226 memory reference tree EXPR and make sure to re-gimplify them properly
227 after propagation of constant addresses. IS_LHS is true if the
228 reference is supposed to be an lvalue. */
229
230 static tree
231 maybe_fold_reference (tree expr, bool is_lhs)
232 {
233 tree *t = &expr;
234 tree result;
235
236 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
237 || TREE_CODE (expr) == REALPART_EXPR
238 || TREE_CODE (expr) == IMAGPART_EXPR)
239 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
240 return fold_unary_loc (EXPR_LOCATION (expr),
241 TREE_CODE (expr),
242 TREE_TYPE (expr),
243 TREE_OPERAND (expr, 0));
244 else if (TREE_CODE (expr) == BIT_FIELD_REF
245 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
246 return fold_ternary_loc (EXPR_LOCATION (expr),
247 TREE_CODE (expr),
248 TREE_TYPE (expr),
249 TREE_OPERAND (expr, 0),
250 TREE_OPERAND (expr, 1),
251 TREE_OPERAND (expr, 2));
252
253 while (handled_component_p (*t))
254 t = &TREE_OPERAND (*t, 0);
255
256 /* Canonicalize a MEM_REF's invariant address operand.  Do this first
257 to avoid feeding non-canonical MEM_REFs elsewhere. */
258 if (TREE_CODE (*t) == MEM_REF
259 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
260 {
261 bool volatile_p = TREE_THIS_VOLATILE (*t);
262 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
263 TREE_OPERAND (*t, 0),
264 TREE_OPERAND (*t, 1));
265 if (tem)
266 {
267 TREE_THIS_VOLATILE (tem) = volatile_p;
268 *t = tem;
269 tem = maybe_fold_reference (expr, is_lhs);
270 if (tem)
271 return tem;
272 return expr;
273 }
274 }
275
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
280
281 /* Fold back MEM_REFs to reference trees. */
282 if (TREE_CODE (*t) == MEM_REF
283 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
284 && integer_zerop (TREE_OPERAND (*t, 1))
285 && (TREE_THIS_VOLATILE (*t)
286 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
287 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
288 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
289 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
290 /* We have to be careful here not to drop a required conversion
291 from the rhs to the lhs if is_lhs, but we don't have the
292 rhs here to verify that. Thus require strict type
293 compatibility. */
294 && types_compatible_p (TREE_TYPE (*t),
295 TREE_TYPE (TREE_OPERAND
296 (TREE_OPERAND (*t, 0), 0))))
297 {
298 tree tem;
299 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
300 tem = maybe_fold_reference (expr, is_lhs);
301 if (tem)
302 return tem;
303 return expr;
304 }
305 else if (TREE_CODE (*t) == TARGET_MEM_REF)
306 {
307 tree tem = maybe_fold_tmr (*t);
308 if (tem)
309 {
310 *t = tem;
311 tem = maybe_fold_reference (expr, is_lhs);
312 if (tem)
313 return tem;
314 return expr;
315 }
316 }
317
318 return NULL_TREE;
319 }
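
/* For example, maybe_fold_reference folds

     MEM[&a + 0]   back to the plain reference   a

   when the volatility and types match as checked above, and folds
   REALPART_EXPR applied to a complex constant down to the constant's
   real part.  */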
320
321
322 /* Attempt to fold an assignment statement pointed to by SI.  Returns a
323 replacement rhs for the statement or NULL_TREE if no simplification
324 could be made. It is assumed that the operands have been previously
325 folded. */
326
327 static tree
328 fold_gimple_assign (gimple_stmt_iterator *si)
329 {
330 gimple stmt = gsi_stmt (*si);
331 enum tree_code subcode = gimple_assign_rhs_code (stmt);
332 location_t loc = gimple_location (stmt);
333
334 tree result = NULL_TREE;
335
336 switch (get_gimple_rhs_class (subcode))
337 {
338 case GIMPLE_SINGLE_RHS:
339 {
340 tree rhs = gimple_assign_rhs1 (stmt);
341
342 if (REFERENCE_CLASS_P (rhs))
343 return maybe_fold_reference (rhs, false);
344
345 else if (TREE_CODE (rhs) == ADDR_EXPR)
346 {
347 tree ref = TREE_OPERAND (rhs, 0);
348 tree tem = maybe_fold_reference (ref, true);
349 if (tem
350 && TREE_CODE (tem) == MEM_REF
351 && integer_zerop (TREE_OPERAND (tem, 1)))
352 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
353 else if (tem)
354 result = fold_convert (TREE_TYPE (rhs),
355 build_fold_addr_expr_loc (loc, tem));
356 else if (TREE_CODE (ref) == MEM_REF
357 && integer_zerop (TREE_OPERAND (ref, 1)))
358 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
359 }
360
361 else if (TREE_CODE (rhs) == CONSTRUCTOR
362 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
363 && (CONSTRUCTOR_NELTS (rhs)
364 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
365 {
366 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
367 unsigned i;
368 tree val;
369
370 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
371 if (TREE_CODE (val) != INTEGER_CST
372 && TREE_CODE (val) != REAL_CST
373 && TREE_CODE (val) != FIXED_CST)
374 return NULL_TREE;
375
376 return build_vector_from_ctor (TREE_TYPE (rhs),
377 CONSTRUCTOR_ELTS (rhs));
378 }
379
380 else if (DECL_P (rhs))
381 return unshare_expr (get_symbol_constant_value (rhs));
382
383 /* If we couldn't fold the RHS, hand over to the generic
384 fold routines. */
385 if (result == NULL_TREE)
386 result = fold (rhs);
387
388 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
389 that may have been added by fold, and "useless" type
390 conversions that might now be apparent due to propagation. */
391 STRIP_USELESS_TYPE_CONVERSION (result);
392
393 if (result != rhs && valid_gimple_rhs_p (result))
394 return result;
395
396 return NULL_TREE;
397 }
398 break;
399
400 case GIMPLE_UNARY_RHS:
401 {
402 tree rhs = gimple_assign_rhs1 (stmt);
403
404 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
405 if (result)
406 {
407 /* If the operation was a conversion do _not_ mark a
408 resulting constant with TREE_OVERFLOW if the original
409 constant was not. These conversions have implementation
410 defined behavior and retaining the TREE_OVERFLOW flag
411 here would confuse later passes such as VRP. */
412 if (CONVERT_EXPR_CODE_P (subcode)
413 && TREE_CODE (result) == INTEGER_CST
414 && TREE_CODE (rhs) == INTEGER_CST)
415 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
416
417 STRIP_USELESS_TYPE_CONVERSION (result);
418 if (valid_gimple_rhs_p (result))
419 return result;
420 }
421 }
422 break;
423
424 case GIMPLE_BINARY_RHS:
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
427 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt) == NE_EXPR)
429 {
430 tree lhs = gimple_assign_lhs (stmt);
431 tree op1 = gimple_assign_rhs1 (stmt);
432 tree op2 = gimple_assign_rhs2 (stmt);
433 tree type = TREE_TYPE (op1);
434
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type.
437 Check that the second operand is an integer constant with value
438 one or zero. */
439 if (TREE_CODE (op2) == INTEGER_CST
440 && (integer_zerop (op2) || integer_onep (op2))
441 && useless_type_conversion_p (TREE_TYPE (lhs), type))
442 {
443 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
444 bool is_logical_not = false;
445
446 /* X == 0 and X != 1 are a logical-not of X,
447 X == 1 and X != 0 are X itself. */
448 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
449 || (cmp_code == NE_EXPR && integer_onep (op2)))
450 is_logical_not = true;
451
452 if (is_logical_not == false)
453 result = op1;
454 /* Only for X of one-bit precision is the transformation
455 !X -> ~X valid. */
456 else if (TYPE_PRECISION (type) == 1)
457 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
458 type, op1);
459 /* Otherwise we use !X -> X ^ 1. */
460 else
461 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
462 type, op1, build_int_cst (type, 1));
463
464 }
465 }
466
467 if (!result)
468 result = fold_binary_loc (loc, subcode,
469 TREE_TYPE (gimple_assign_lhs (stmt)),
470 gimple_assign_rhs1 (stmt),
471 gimple_assign_rhs2 (stmt));
472
473 if (result)
474 {
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
478 }
479 break;
480
481 case GIMPLE_TERNARY_RHS:
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
484 {
485 tree op0 = gimple_assign_rhs1 (stmt);
486 tree tem;
487 bool set = false;
488 location_t cond_loc = gimple_location (stmt);
489
490 if (COMPARISON_CLASS_P (op0))
491 {
492 fold_defer_overflow_warnings ();
493 tem = fold_binary_loc (cond_loc,
494 TREE_CODE (op0), TREE_TYPE (op0),
495 TREE_OPERAND (op0, 0),
496 TREE_OPERAND (op0, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement; however, the valid_gimple_rhs_p
499 test still applies. */
500 set = (tem && is_gimple_condexpr (tem)
501 && valid_gimple_rhs_p (tem));
502 fold_undefer_overflow_warnings (set, stmt, 0);
503 }
504 else if (is_gimple_min_invariant (op0))
505 {
506 tem = op0;
507 set = true;
508 }
509 else
510 return NULL_TREE;
511
512 if (set)
513 result = fold_build3_loc (cond_loc, COND_EXPR,
514 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
515 gimple_assign_rhs2 (stmt),
516 gimple_assign_rhs3 (stmt));
517 }
518
519 if (!result)
520 result = fold_ternary_loc (loc, subcode,
521 TREE_TYPE (gimple_assign_lhs (stmt)),
522 gimple_assign_rhs1 (stmt),
523 gimple_assign_rhs2 (stmt),
524 gimple_assign_rhs3 (stmt));
525
526 if (result)
527 {
528 STRIP_USELESS_TYPE_CONVERSION (result);
529 if (valid_gimple_rhs_p (result))
530 return result;
531 }
532 break;
533
534 case GIMPLE_INVALID_RHS:
535 gcc_unreachable ();
536 }
537
538 return NULL_TREE;
539 }
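
/* As an illustration of the boolean canonicalization above, for a
   boolean-typed X:

     X == 1  and  X != 0   simplify to  X
     X == 0  and  X != 1   simplify to  ~X      (one-bit precision type)
                                  or to  X ^ 1  (wider boolean type)  */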
540
541 /* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
545
546 static bool
547 fold_gimple_cond (gimple stmt)
548 {
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
554
555 if (result)
556 {
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
559 {
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
562 }
563 }
564
565 return false;
566 }
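
/* For example, a condition such as

     if (x_1 == x_1)

   folds (for integral x_1) to a constant true value, and the
   GIMPLE_COND is rewritten accordingly through
   gimple_cond_set_condition_from_tree.  */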
567
568 /* Convert EXPR into a GIMPLE value suitable for substitution on the
569 RHS of an assignment. Insert the necessary statements before
570 iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
571 is replaced.  If the call is expected to produce a result, then it
572 is replaced by an assignment of the new RHS to the result variable.
573 If the result is to be ignored, then the call is replaced by a
574 GIMPLE_NOP. A proper VDEF chain is retained by making the first
575 VUSE and the last VDEF of the whole sequence be the same as the replaced
576 statement and using new SSA names for stores in between. */
577
578 void
579 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
580 {
581 tree lhs;
582 gimple stmt, new_stmt;
583 gimple_stmt_iterator i;
584 gimple_seq stmts = NULL;
585 struct gimplify_ctx gctx;
586 gimple laststore;
587 tree reaching_vuse;
588
589 stmt = gsi_stmt (*si_p);
590
591 gcc_assert (is_gimple_call (stmt));
592
593 push_gimplify_context (&gctx);
594 gctx.into_ssa = gimple_in_ssa_p (cfun);
595
596 lhs = gimple_call_lhs (stmt);
597 if (lhs == NULL_TREE)
598 {
599 gimplify_and_add (expr, &stmts);
600 /* We can end up folding a memcpy of an empty class assignment,
601 which gets optimized away by the C++ gimplification. */
602 if (gimple_seq_empty_p (stmts))
603 {
604 pop_gimplify_context (NULL);
605 if (gimple_in_ssa_p (cfun))
606 {
607 unlink_stmt_vdef (stmt);
608 release_defs (stmt);
609 }
610 gsi_remove (si_p, true);
611 return;
612 }
613 }
614 else
615 {
616 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
617 new_stmt = gimple_build_assign (lhs, tmp);
618 i = gsi_last (stmts);
619 gsi_insert_after_without_update (&i, new_stmt,
620 GSI_CONTINUE_LINKING);
621 }
622
623 pop_gimplify_context (NULL);
624
625 if (gimple_has_location (stmt))
626 annotate_all_with_location (stmts, gimple_location (stmt));
627
628 /* First, iterate over the replacement statements backward, assigning
629 virtual operands to their defining statements. */
630 laststore = NULL;
631 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
632 {
633 new_stmt = gsi_stmt (i);
634 if ((gimple_assign_single_p (new_stmt)
635 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
636 || (is_gimple_call (new_stmt)
637 && (gimple_call_flags (new_stmt)
638 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
639 {
640 tree vdef;
641 if (!laststore)
642 vdef = gimple_vdef (stmt);
643 else
644 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
645 gimple_set_vdef (new_stmt, vdef);
646 if (vdef && TREE_CODE (vdef) == SSA_NAME)
647 SSA_NAME_DEF_STMT (vdef) = new_stmt;
648 laststore = new_stmt;
649 }
650 }
651
652 /* Second, iterate over the statements forward, assigning virtual
653 operands to their uses. */
654 reaching_vuse = gimple_vuse (stmt);
655 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
656 {
657 new_stmt = gsi_stmt (i);
658 /* If the new statement possibly has a VUSE, update it with the exact SSA
659 name we know will reach this one. */
660 if (gimple_has_mem_ops (new_stmt))
661 gimple_set_vuse (new_stmt, reaching_vuse);
662 gimple_set_modified (new_stmt, true);
663 if (gimple_vdef (new_stmt))
664 reaching_vuse = gimple_vdef (new_stmt);
665 }
666
667 /* If the new sequence does not do a store, release the virtual
668 definition of the original statement. */
669 if (reaching_vuse
670 && reaching_vuse == gimple_vuse (stmt))
671 {
672 tree vdef = gimple_vdef (stmt);
673 if (vdef
674 && TREE_CODE (vdef) == SSA_NAME)
675 {
676 unlink_stmt_vdef (stmt);
677 release_ssa_name (vdef);
678 }
679 }
680
681 /* Finally replace the original statement with the sequence. */
682 gsi_replace_with_seq (si_p, stmts, false);
683 }
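
/* A rough sketch with illustrative names: replacing the call in

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (dst, src, 8);

   by a sequence of two stores yields

     # .MEM_4 = VDEF <.MEM_2>
     *dst = tmp1;
     # .MEM_3 = VDEF <.MEM_4>
     *(dst + 4) = tmp2;

   i.e. the first VUSE (.MEM_2) and the last VDEF (.MEM_3) of the
   sequence match those of the replaced statement, with a fresh SSA
   name (.MEM_4) for the store in between.  */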
684
685 /* Return in *LENGTH the string length, the maximum string length, or
686 the maximum value of ARG.
687 If ARG is an SSA name variable, follow its use-def chains.  Return
688 false if *LENGTH is already set and, for TYPE == 0, its value is not
689 equal to the length we determine, or if we are unable to determine
690 the length or value at all.  VISITED is a bitmap of visited variables.
691 TYPE is 0 if the string length should be returned, 1 for the maximum
692 string length and 2 for the maximum value ARG can have. */
693
694 static bool
695 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
696 {
697 tree var, val;
698 gimple def_stmt;
699
700 if (TREE_CODE (arg) != SSA_NAME)
701 {
702 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
703 if (TREE_CODE (arg) == ADDR_EXPR
704 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
705 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
706 {
707 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
708 if (TREE_CODE (aop0) == INDIRECT_REF
709 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
710 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
711 length, visited, type);
712 }
713
714 if (type == 2)
715 {
716 val = arg;
717 if (TREE_CODE (val) != INTEGER_CST
718 || tree_int_cst_sgn (val) < 0)
719 return false;
720 }
721 else
722 val = c_strlen (arg, 1);
723 if (!val)
724 return false;
725
726 if (*length)
727 {
728 if (type > 0)
729 {
730 if (TREE_CODE (*length) != INTEGER_CST
731 || TREE_CODE (val) != INTEGER_CST)
732 return false;
733
734 if (tree_int_cst_lt (*length, val))
735 *length = val;
736 return true;
737 }
738 else if (simple_cst_equal (val, *length) != 1)
739 return false;
740 }
741
742 *length = val;
743 return true;
744 }
745
746 /* If ARG is registered for SSA update we cannot look at its defining
747 statement. */
748 if (name_registered_for_update_p (arg))
749 return false;
750
751 /* If we were already here, break the infinite cycle. */
752 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
753 return true;
754
755 var = arg;
756 def_stmt = SSA_NAME_DEF_STMT (var);
757
758 switch (gimple_code (def_stmt))
759 {
760 case GIMPLE_ASSIGN:
761 /* The RHS of the statement defining VAR must either have a
762 constant length or come from another SSA_NAME with a constant
763 length. */
764 if (gimple_assign_single_p (def_stmt)
765 || gimple_assign_unary_nop_p (def_stmt))
766 {
767 tree rhs = gimple_assign_rhs1 (def_stmt);
768 return get_maxval_strlen (rhs, length, visited, type);
769 }
770 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
771 {
772 tree op2 = gimple_assign_rhs2 (def_stmt);
773 tree op3 = gimple_assign_rhs3 (def_stmt);
774 return get_maxval_strlen (op2, length, visited, type)
775 && get_maxval_strlen (op3, length, visited, type);
776 }
777 return false;
778
779 case GIMPLE_PHI:
780 {
781 /* All the arguments of the PHI node must have the same constant
782 length. */
783 unsigned i;
784
785 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
786 {
787 tree arg = gimple_phi_arg (def_stmt, i)->def;
788
789 /* If this PHI has itself as an argument, we cannot
790 determine the string length of this argument. However,
791 if we can find a constant string length for the other
792 PHI args then we can still be sure that this is a
793 constant string length. So be optimistic and just
794 continue with the next argument. */
795 if (arg == gimple_phi_result (def_stmt))
796 continue;
797
798 if (!get_maxval_strlen (arg, length, visited, type))
799 return false;
800 }
801 }
802 return true;
803
804 default:
805 return false;
806 }
807 }
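
/* For example, given

     arg_1 = PHI <"a", "abc">

   a TYPE 0 (exact string length) query returns false because the two
   lengths differ, while a TYPE 1 (maximum string length) query
   succeeds and sets *LENGTH to 3.  */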
808
809
810 /* Fold a builtin call in statement STMT.  Returns a simplified tree.
811 We may return a non-constant expression, including another call
812 to a different function and with different arguments, e.g.,
813 substituting memcpy for strcpy when the string length is known.
814 Note that some builtins expand into inline code that may not
815 be valid in GIMPLE. Callers must take care. */
816
817 tree
818 gimple_fold_builtin (gimple stmt)
819 {
820 tree result, val[3];
821 tree callee, a;
822 int arg_idx, type;
823 bitmap visited;
824 bool ignore;
825 int nargs;
826 location_t loc = gimple_location (stmt);
827
828 gcc_assert (is_gimple_call (stmt));
829
830 ignore = (gimple_call_lhs (stmt) == NULL);
831
832 /* First try the generic builtin folder. If that succeeds, return the
833 result directly. */
834 result = fold_call_stmt (stmt, ignore);
835 if (result)
836 {
837 if (ignore)
838 STRIP_NOPS (result);
839 return result;
840 }
841
842 /* Ignore MD builtins. */
843 callee = gimple_call_fndecl (stmt);
844 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
845 return NULL_TREE;
846
847 /* Give up for always_inline inline builtins until they are
848 inlined. */
849 if (avoid_folding_inline_builtin (callee))
850 return NULL_TREE;
851
852 /* If the builtin could not be folded, and it has no argument list,
853 we're done. */
854 nargs = gimple_call_num_args (stmt);
855 if (nargs == 0)
856 return NULL_TREE;
857
858 /* Limit the work to builtins we know how to simplify. */
859 switch (DECL_FUNCTION_CODE (callee))
860 {
861 case BUILT_IN_STRLEN:
862 case BUILT_IN_FPUTS:
863 case BUILT_IN_FPUTS_UNLOCKED:
864 arg_idx = 0;
865 type = 0;
866 break;
867 case BUILT_IN_STRCPY:
868 case BUILT_IN_STRNCPY:
869 arg_idx = 1;
870 type = 0;
871 break;
872 case BUILT_IN_MEMCPY_CHK:
873 case BUILT_IN_MEMPCPY_CHK:
874 case BUILT_IN_MEMMOVE_CHK:
875 case BUILT_IN_MEMSET_CHK:
876 case BUILT_IN_STRNCPY_CHK:
877 case BUILT_IN_STPNCPY_CHK:
878 arg_idx = 2;
879 type = 2;
880 break;
881 case BUILT_IN_STRCPY_CHK:
882 case BUILT_IN_STPCPY_CHK:
883 arg_idx = 1;
884 type = 1;
885 break;
886 case BUILT_IN_SNPRINTF_CHK:
887 case BUILT_IN_VSNPRINTF_CHK:
888 arg_idx = 1;
889 type = 2;
890 break;
891 default:
892 return NULL_TREE;
893 }
894
895 if (arg_idx >= nargs)
896 return NULL_TREE;
897
898 /* Try to use the dataflow information gathered by the CCP process. */
899 visited = BITMAP_ALLOC (NULL);
900 bitmap_clear (visited);
901
902 memset (val, 0, sizeof (val));
903 a = gimple_call_arg (stmt, arg_idx);
904 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
905 val[arg_idx] = NULL_TREE;
906
907 BITMAP_FREE (visited);
908
909 result = NULL_TREE;
910 switch (DECL_FUNCTION_CODE (callee))
911 {
912 case BUILT_IN_STRLEN:
913 if (val[0] && nargs == 1)
914 {
915 tree new_val =
916 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
917
918 /* If the result is not a valid gimple value, or not a cast
919 of a valid gimple value, then we cannot use the result. */
920 if (is_gimple_val (new_val)
921 || (CONVERT_EXPR_P (new_val)
922 && is_gimple_val (TREE_OPERAND (new_val, 0))))
923 return new_val;
924 }
925 break;
926
927 case BUILT_IN_STRCPY:
928 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
929 result = fold_builtin_strcpy (loc, callee,
930 gimple_call_arg (stmt, 0),
931 gimple_call_arg (stmt, 1),
932 val[1]);
933 break;
934
935 case BUILT_IN_STRNCPY:
936 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
937 result = fold_builtin_strncpy (loc, callee,
938 gimple_call_arg (stmt, 0),
939 gimple_call_arg (stmt, 1),
940 gimple_call_arg (stmt, 2),
941 val[1]);
942 break;
943
944 case BUILT_IN_FPUTS:
945 if (nargs == 2)
946 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
947 gimple_call_arg (stmt, 1),
948 ignore, false, val[0]);
949 break;
950
951 case BUILT_IN_FPUTS_UNLOCKED:
952 if (nargs == 2)
953 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
954 gimple_call_arg (stmt, 1),
955 ignore, true, val[0]);
956 break;
957
958 case BUILT_IN_MEMCPY_CHK:
959 case BUILT_IN_MEMPCPY_CHK:
960 case BUILT_IN_MEMMOVE_CHK:
961 case BUILT_IN_MEMSET_CHK:
962 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
963 result = fold_builtin_memory_chk (loc, callee,
964 gimple_call_arg (stmt, 0),
965 gimple_call_arg (stmt, 1),
966 gimple_call_arg (stmt, 2),
967 gimple_call_arg (stmt, 3),
968 val[2], ignore,
969 DECL_FUNCTION_CODE (callee));
970 break;
971
972 case BUILT_IN_STRCPY_CHK:
973 case BUILT_IN_STPCPY_CHK:
974 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
975 result = fold_builtin_stxcpy_chk (loc, callee,
976 gimple_call_arg (stmt, 0),
977 gimple_call_arg (stmt, 1),
978 gimple_call_arg (stmt, 2),
979 val[1], ignore,
980 DECL_FUNCTION_CODE (callee));
981 break;
982
983 case BUILT_IN_STRNCPY_CHK:
984 case BUILT_IN_STPNCPY_CHK:
985 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
986 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
987 gimple_call_arg (stmt, 1),
988 gimple_call_arg (stmt, 2),
989 gimple_call_arg (stmt, 3),
990 val[2], ignore,
991 DECL_FUNCTION_CODE (callee));
992 break;
993
994 case BUILT_IN_SNPRINTF_CHK:
995 case BUILT_IN_VSNPRINTF_CHK:
996 if (val[1] && is_gimple_val (val[1]))
997 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
998 DECL_FUNCTION_CODE (callee));
999 break;
1000
1001 default:
1002 gcc_unreachable ();
1003 }
1004
1005 if (result && ignore)
1006 result = fold_ignored_result (result);
1007 return result;
1008 }
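
/* For example, when the length LEN of the source string is known,
   fold_builtin_strcpy rewrites

     strcpy (dst, src)       as   memcpy (dst, src, LEN + 1)

   and fold_builtin_strncpy rewrites

     strncpy (dst, src, n)   as   memcpy (dst, src, n)

   provided the constant n does not exceed LEN + 1.  */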
1009
1010
1011 /* Return a binfo to be used for devirtualization of calls based on an object
1012 represented by a declaration (i.e. a global or automatically allocated one)
1013 or NULL if it cannot be found or is not safe.  CST is expected to be an
1014 ADDR_EXPR of such an object, otherwise the function returns NULL.  Currently it
1015 is safe to use such a binfo only if it has no base binfo (i.e. no ancestors). */
1016
1017 tree
1018 gimple_extract_devirt_binfo_from_cst (tree cst)
1019 {
1020 HOST_WIDE_INT offset, size, max_size;
1021 tree base, type, expected_type, binfo;
1022 bool last_artificial = false;
1023
1024 if (!flag_devirtualize
1025 || TREE_CODE (cst) != ADDR_EXPR
1026 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1027 return NULL_TREE;
1028
1029 cst = TREE_OPERAND (cst, 0);
1030 expected_type = TREE_TYPE (cst);
1031 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1032 type = TREE_TYPE (base);
1033 if (!DECL_P (base)
1034 || max_size == -1
1035 || max_size != size
1036 || TREE_CODE (type) != RECORD_TYPE)
1037 return NULL_TREE;
1038
1039 /* Find the sub-object the constant actually refers to and mark whether it is
1040 an artificial one (as opposed to a user-defined one). */
1041 while (true)
1042 {
1043 HOST_WIDE_INT pos, size;
1044 tree fld;
1045
1046 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1047 break;
1048 if (offset < 0)
1049 return NULL_TREE;
1050
1051 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1052 {
1053 if (TREE_CODE (fld) != FIELD_DECL)
1054 continue;
1055
1056 pos = int_bit_position (fld);
1057 size = tree_low_cst (DECL_SIZE (fld), 1);
1058 if (pos <= offset && (pos + size) > offset)
1059 break;
1060 }
1061 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1062 return NULL_TREE;
1063
1064 last_artificial = DECL_ARTIFICIAL (fld);
1065 type = TREE_TYPE (fld);
1066 offset -= pos;
1067 }
1068 /* Artificial sub-objects are ancestors; we do not want to use them for
1069 devirtualization, at least not here. */
1070 if (last_artificial)
1071 return NULL_TREE;
1072 binfo = TYPE_BINFO (type);
1073 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1074 return NULL_TREE;
1075 else
1076 return binfo;
1077 }
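
/* For example, for

     struct A { virtual int f (); };   // no base binfos
     static struct A a;

   a virtual call through &a can be devirtualized: this function
   returns TYPE_BINFO of A, which gimple_fold_call below combines
   with the OBJ_TYPE_REF token to look up A::f directly.  */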
1078
1079 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1080 The statement may be replaced by another statement, e.g., if the call
1081 simplifies to a constant value. Return true if any changes were made.
1082 It is assumed that the operands have been previously folded. */
1083
1084 static bool
1085 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1086 {
1087 gimple stmt = gsi_stmt (*gsi);
1088 tree callee;
1089 bool changed = false;
1090 unsigned i;
1091
1092 /* Fold *& in call arguments. */
1093 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1094 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1095 {
1096 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1097 if (tmp)
1098 {
1099 gimple_call_set_arg (stmt, i, tmp);
1100 changed = true;
1101 }
1102 }
1103
1104 /* Check for virtual calls that became direct calls. */
1105 callee = gimple_call_fn (stmt);
1106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1107 {
1108 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1109 {
1110 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1111 changed = true;
1112 }
1113 else
1114 {
1115 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1116 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1117 if (binfo)
1118 {
1119 HOST_WIDE_INT token
1120 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1121 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1122 if (fndecl)
1123 {
1124 gimple_call_set_fndecl (stmt, fndecl);
1125 changed = true;
1126 }
1127 }
1128 }
1129 }
1130
1131 if (inplace)
1132 return changed;
1133
1134 /* Check for builtins that CCP can handle using information not
1135 available in the generic fold routines. */
1136 callee = gimple_call_fndecl (stmt);
1137 if (callee && DECL_BUILT_IN (callee))
1138 {
1139 tree result = gimple_fold_builtin (stmt);
1140 if (result)
1141 {
1142 if (!update_call_from_tree (gsi, result))
1143 gimplify_and_update_call_from_tree (gsi, result);
1144 changed = true;
1145 }
1146 }
1147
1148 return changed;
1149 }
1150
1151 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1152 distinguishes the two cases. */
1153
1154 static bool
1155 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1156 {
1157 bool changed = false;
1158 gimple stmt = gsi_stmt (*gsi);
1159 unsigned i;
1160 gimple_stmt_iterator gsinext = *gsi;
1161 gimple next_stmt;
1162
1163 gsi_next (&gsinext);
1164 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1165
1166 /* Fold the main computation performed by the statement. */
1167 switch (gimple_code (stmt))
1168 {
1169 case GIMPLE_ASSIGN:
1170 {
1171 unsigned old_num_ops = gimple_num_ops (stmt);
1172 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1173 tree lhs = gimple_assign_lhs (stmt);
1174 tree new_rhs;
1175 /* First canonicalize operand order. This avoids building new
1176 trees if this is the only thing fold would later do. */
1177 if ((commutative_tree_code (subcode)
1178 || commutative_ternary_tree_code (subcode))
1179 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1180 gimple_assign_rhs2 (stmt), false))
1181 {
1182 tree tem = gimple_assign_rhs1 (stmt);
1183 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1184 gimple_assign_set_rhs2 (stmt, tem);
1185 changed = true;
1186 }
1187 new_rhs = fold_gimple_assign (gsi);
1188 if (new_rhs
1189 && !useless_type_conversion_p (TREE_TYPE (lhs),
1190 TREE_TYPE (new_rhs)))
1191 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1192 if (new_rhs
1193 && (!inplace
1194 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1195 {
1196 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1197 changed = true;
1198 }
1199 break;
1200 }
1201
1202 case GIMPLE_COND:
1203 changed |= fold_gimple_cond (stmt);
1204 break;
1205
1206 case GIMPLE_CALL:
1207 changed |= gimple_fold_call (gsi, inplace);
1208 break;
1209
1210 case GIMPLE_ASM:
1211 /* Fold *& in asm operands. */
1212 {
1213 size_t noutputs;
1214 const char **oconstraints;
1215 const char *constraint;
1216 bool allows_mem, allows_reg;
1217
1218 noutputs = gimple_asm_noutputs (stmt);
1219 oconstraints = XALLOCAVEC (const char *, noutputs);
1220
1221 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1222 {
1223 tree link = gimple_asm_output_op (stmt, i);
1224 tree op = TREE_VALUE (link);
1225 oconstraints[i]
1226 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1227 if (REFERENCE_CLASS_P (op)
1228 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1229 {
1230 TREE_VALUE (link) = op;
1231 changed = true;
1232 }
1233 }
1234 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1235 {
1236 tree link = gimple_asm_input_op (stmt, i);
1237 tree op = TREE_VALUE (link);
1238 constraint
1239 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1240 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1241 oconstraints, &allows_mem, &allows_reg);
1242 if (REFERENCE_CLASS_P (op)
1243 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1244 != NULL_TREE)
1245 {
1246 TREE_VALUE (link) = op;
1247 changed = true;
1248 }
1249 }
1250 }
1251 break;
1252
1253 case GIMPLE_DEBUG:
1254 if (gimple_debug_bind_p (stmt))
1255 {
1256 tree val = gimple_debug_bind_get_value (stmt);
1257 if (val
1258 && REFERENCE_CLASS_P (val))
1259 {
1260 tree tem = maybe_fold_reference (val, false);
1261 if (tem)
1262 {
1263 gimple_debug_bind_set_value (stmt, tem);
1264 changed = true;
1265 }
1266 }
1267 else if (val
1268 && TREE_CODE (val) == ADDR_EXPR)
1269 {
1270 tree ref = TREE_OPERAND (val, 0);
1271 tree tem = maybe_fold_reference (ref, false);
1272 if (tem)
1273 {
1274 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1275 gimple_debug_bind_set_value (stmt, tem);
1276 changed = true;
1277 }
1278 }
1279 }
1280 break;
1281
1282 default:;
1283 }
1284
1285 /* If stmt folds into nothing and it was the last stmt in a bb,
1286 don't call gsi_stmt. */
1287 if (gsi_end_p (*gsi))
1288 {
1289 gcc_assert (next_stmt == NULL);
1290 return changed;
1291 }
1292
1293 stmt = gsi_stmt (*gsi);
1294
1295 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1296 as we'd be changing the next stmt. */
1297 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1298 {
1299 tree lhs = gimple_get_lhs (stmt);
1300 if (lhs && REFERENCE_CLASS_P (lhs))
1301 {
1302 tree new_lhs = maybe_fold_reference (lhs, true);
1303 if (new_lhs)
1304 {
1305 gimple_set_lhs (stmt, new_lhs);
1306 changed = true;
1307 }
1308 }
1309 }
1310
1311 return changed;
1312 }
1313
1314 /* Fold the statement pointed to by GSI. In some cases, this function may
1315 replace the whole statement with a new one. Returns true iff folding
1316 makes any changes.
1317 The statement pointed to by GSI should be in valid gimple form but may
1318 be in an unfolded state resulting from, for example, constant propagation,
1319 which can produce *&x = 0. */
1320
1321 bool
1322 fold_stmt (gimple_stmt_iterator *gsi)
1323 {
1324 return fold_stmt_1 (gsi, false);
1325 }
1326
1327 /* Perform the minimal folding on statement *GSI. Only operations like
1328 *&x created by constant propagation are handled. The statement cannot
1329 be replaced with a new one. Return true if the statement was
1330 changed, false otherwise.
1331 The statement *GSI should be in valid gimple form but may
1332 be in an unfolded state resulting from, for example, constant propagation,
1333 which can produce *&x = 0. */
1334
1335 bool
1336 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1337 {
1338 gimple stmt = gsi_stmt (*gsi);
1339 bool changed = fold_stmt_1 (gsi, true);
1340 gcc_assert (gsi_stmt (*gsi) == stmt);
1341 return changed;
1342 }
1343
1344 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1345 if EXPR is null or we don't know how.
1346 If non-null, the result always has boolean type. */
1347
1348 static tree
1349 canonicalize_bool (tree expr, bool invert)
1350 {
1351 if (!expr)
1352 return NULL_TREE;
1353 else if (invert)
1354 {
1355 if (integer_nonzerop (expr))
1356 return boolean_false_node;
1357 else if (integer_zerop (expr))
1358 return boolean_true_node;
1359 else if (TREE_CODE (expr) == SSA_NAME)
1360 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1361 build_int_cst (TREE_TYPE (expr), 0));
1362 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1363 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1364 boolean_type_node,
1365 TREE_OPERAND (expr, 0),
1366 TREE_OPERAND (expr, 1));
1367 else
1368 return NULL_TREE;
1369 }
1370 else
1371 {
1372 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1373 return expr;
1374 if (integer_nonzerop (expr))
1375 return boolean_true_node;
1376 else if (integer_zerop (expr))
1377 return boolean_false_node;
1378 else if (TREE_CODE (expr) == SSA_NAME)
1379 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1380 build_int_cst (TREE_TYPE (expr), 0));
1381 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1382 return fold_build2 (TREE_CODE (expr),
1383 boolean_type_node,
1384 TREE_OPERAND (expr, 0),
1385 TREE_OPERAND (expr, 1));
1386 else
1387 return NULL_TREE;
1388 }
1389 }
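
/* For example, for an SSA name x:

     canonicalize_bool (x, false)     ->  x != 0
     canonicalize_bool (x, true)      ->  x == 0

   and for a comparison:

     canonicalize_bool (a < b, true)  ->  a >= b

   with the non-NULL results always of boolean type.  */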
1390
1391 /* Check to see if a boolean expression EXPR is logically equivalent to the
1392 comparison (OP1 CODE OP2). Check for various identities involving
1393 SSA_NAMEs. */
1394
1395 static bool
1396 same_bool_comparison_p (const_tree expr, enum tree_code code,
1397 const_tree op1, const_tree op2)
1398 {
1399 gimple s;
1400
1401 /* The obvious case. */
1402 if (TREE_CODE (expr) == code
1403 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1404 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1405 return true;
1406
1407 /* Check for matching name against (name != 0), and the case where expr
1408 is an SSA_NAME with a definition matching the comparison. */
1409 if (TREE_CODE (expr) == SSA_NAME
1410 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1411 {
1412 if (operand_equal_p (expr, op1, 0))
1413 return ((code == NE_EXPR && integer_zerop (op2))
1414 || (code == EQ_EXPR && integer_nonzerop (op2)));
1415 s = SSA_NAME_DEF_STMT (expr);
1416 if (is_gimple_assign (s)
1417 && gimple_assign_rhs_code (s) == code
1418 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1419 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1420 return true;
1421 }
1422
1423 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1424 of name is a comparison, recurse. */
1425 if (TREE_CODE (op1) == SSA_NAME
1426 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1427 {
1428 s = SSA_NAME_DEF_STMT (op1);
1429 if (is_gimple_assign (s)
1430 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1431 {
1432 enum tree_code c = gimple_assign_rhs_code (s);
1433 if ((c == NE_EXPR && integer_zerop (op2))
1434 || (c == EQ_EXPR && integer_nonzerop (op2)))
1435 return same_bool_comparison_p (expr, c,
1436 gimple_assign_rhs1 (s),
1437 gimple_assign_rhs2 (s));
1438 if ((c == EQ_EXPR && integer_zerop (op2))
1439 || (c == NE_EXPR && integer_nonzerop (op2)))
1440 return same_bool_comparison_p (expr,
1441 invert_tree_comparison (c, false),
1442 gimple_assign_rhs1 (s),
1443 gimple_assign_rhs2 (s));
1444 }
1445 }
1446 return false;
1447 }
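
/* For example, if t_1 is defined by

     t_1 = a_2 < b_3;

   then same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) holds, as does
   same_bool_comparison_p (t_1, NE_EXPR, t_1, 0).  */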
1448
1449 /* Check to see if two boolean expressions OP1 and OP2 are logically
1450 equivalent. */
1451
1452 static bool
1453 same_bool_result_p (const_tree op1, const_tree op2)
1454 {
1455 /* Simple cases first. */
1456 if (operand_equal_p (op1, op2, 0))
1457 return true;
1458
1459 /* Check the cases where at least one of the operands is a comparison.
1460 These are a bit smarter than operand_equal_p in that they apply some
1461 identities on SSA_NAMEs. */
1462 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1463 && same_bool_comparison_p (op1, TREE_CODE (op2),
1464 TREE_OPERAND (op2, 0),
1465 TREE_OPERAND (op2, 1)))
1466 return true;
1467 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1468 && same_bool_comparison_p (op2, TREE_CODE (op1),
1469 TREE_OPERAND (op1, 0),
1470 TREE_OPERAND (op1, 1)))
1471 return true;
1472
1473 /* Default case. */
1474 return false;
1475 }
1476
1477 /* Forward declarations for some mutually recursive functions. */
1478
1479 static tree
1480 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1481 enum tree_code code2, tree op2a, tree op2b);
1482 static tree
1483 and_var_with_comparison (tree var, bool invert,
1484 enum tree_code code2, tree op2a, tree op2b);
1485 static tree
1486 and_var_with_comparison_1 (gimple stmt,
1487 enum tree_code code2, tree op2a, tree op2b);
1488 static tree
1489 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1490 enum tree_code code2, tree op2a, tree op2b);
1491 static tree
1492 or_var_with_comparison (tree var, bool invert,
1493 enum tree_code code2, tree op2a, tree op2b);
1494 static tree
1495 or_var_with_comparison_1 (gimple stmt,
1496 enum tree_code code2, tree op2a, tree op2b);
1497
1498 /* Helper function for and_comparisons_1: try to simplify the AND of the
1499 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1500 If INVERT is true, invert the value of VAR before doing the AND.
1501 Return NULL_TREE if we can't simplify this to a single expression. */
1502
1503 static tree
1504 and_var_with_comparison (tree var, bool invert,
1505 enum tree_code code2, tree op2a, tree op2b)
1506 {
1507 tree t;
1508 gimple stmt = SSA_NAME_DEF_STMT (var);
1509
1510 /* We can only deal with variables whose definitions are assignments. */
1511 if (!is_gimple_assign (stmt))
1512 return NULL_TREE;
1513
1514 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1515 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1516 Then we only have to consider the simpler non-inverted cases. */
1517 if (invert)
1518 t = or_var_with_comparison_1 (stmt,
1519 invert_tree_comparison (code2, false),
1520 op2a, op2b);
1521 else
1522 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1523 return canonicalize_bool (t, invert);
1524 }
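
/* For example, with INVERT set, simplifying

     !var AND (a < b)

   is handled through DeMorgan's law as

     !(var OR (a >= b))

   so only the non-inverted or_var_with_comparison_1 case is needed,
   and canonicalize_bool applies the final inversion.  */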
1525
1526 /* Try to simplify the AND of the ssa variable defined by the assignment
1527 STMT with the comparison specified by (OP2A CODE2 OP2B).
1528 Return NULL_TREE if we can't simplify this to a single expression. */
1529
1530 static tree
1531 and_var_with_comparison_1 (gimple stmt,
1532 enum tree_code code2, tree op2a, tree op2b)
1533 {
1534 tree var = gimple_assign_lhs (stmt);
1535 tree true_test_var = NULL_TREE;
1536 tree false_test_var = NULL_TREE;
1537 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1538
1539 /* Check for identities like (var AND (var == 0)) => false. */
1540 if (TREE_CODE (op2a) == SSA_NAME
1541 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1542 {
1543 if ((code2 == NE_EXPR && integer_zerop (op2b))
1544 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1545 {
1546 true_test_var = op2a;
1547 if (var == true_test_var)
1548 return var;
1549 }
1550 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1551 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1552 {
1553 false_test_var = op2a;
1554 if (var == false_test_var)
1555 return boolean_false_node;
1556 }
1557 }
1558
1559 /* If the definition is a comparison, recurse on it. */
1560 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1561 {
1562 tree t = and_comparisons_1 (innercode,
1563 gimple_assign_rhs1 (stmt),
1564 gimple_assign_rhs2 (stmt),
1565 code2,
1566 op2a,
1567 op2b);
1568 if (t)
1569 return t;
1570 }
1571
1572 /* If the definition is an AND or OR expression, we may be able to
1573 simplify by reassociating. */
1574 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1575 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1576 {
1577 tree inner1 = gimple_assign_rhs1 (stmt);
1578 tree inner2 = gimple_assign_rhs2 (stmt);
1579 gimple s;
1580 tree t;
1581 tree partial = NULL_TREE;
1582 bool is_and = (innercode == BIT_AND_EXPR);
1583
1584 /* Check for boolean identities that don't require recursive examination
1585 of inner1/inner2:
1586 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1587 inner1 AND (inner1 OR inner2) => inner1
1588 !inner1 AND (inner1 AND inner2) => false
1589 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1590 Likewise for similar cases involving inner2. */
1591 if (inner1 == true_test_var)
1592 return (is_and ? var : inner1);
1593 else if (inner2 == true_test_var)
1594 return (is_and ? var : inner2);
1595 else if (inner1 == false_test_var)
1596 return (is_and
1597 ? boolean_false_node
1598 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1599 else if (inner2 == false_test_var)
1600 return (is_and
1601 ? boolean_false_node
1602 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1603
1604 /* Next, redistribute/reassociate the AND across the inner tests.
1605 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1606 if (TREE_CODE (inner1) == SSA_NAME
1607 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1608 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1609 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1610 gimple_assign_rhs1 (s),
1611 gimple_assign_rhs2 (s),
1612 code2, op2a, op2b)))
1613 {
1614 /* Handle the AND case, where we are reassociating:
1615 (inner1 AND inner2) AND (op2a code2 op2b)
1616 => (t AND inner2)
1617 If the partial result t is a constant, we win. Otherwise
1618 continue on to try reassociating with the other inner test. */
1619 if (is_and)
1620 {
1621 if (integer_onep (t))
1622 return inner2;
1623 else if (integer_zerop (t))
1624 return boolean_false_node;
1625 }
1626
1627 /* Handle the OR case, where we are redistributing:
1628 (inner1 OR inner2) AND (op2a code2 op2b)
1629 => (t OR (inner2 AND (op2a code2 op2b))) */
1630 else if (integer_onep (t))
1631 return boolean_true_node;
1632
1633 /* Save partial result for later. */
1634 partial = t;
1635 }
1636
1637 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1638 if (TREE_CODE (inner2) == SSA_NAME
1639 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1640 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1641 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1642 gimple_assign_rhs1 (s),
1643 gimple_assign_rhs2 (s),
1644 code2, op2a, op2b)))
1645 {
1646 /* Handle the AND case, where we are reassociating:
1647 (inner1 AND inner2) AND (op2a code2 op2b)
1648 => (inner1 AND t) */
1649 if (is_and)
1650 {
1651 if (integer_onep (t))
1652 return inner1;
1653 else if (integer_zerop (t))
1654 return boolean_false_node;
1655 /* If both are the same, we can apply the identity
1656 (x AND x) == x. */
1657 else if (partial && same_bool_result_p (t, partial))
1658 return t;
1659 }
1660
1661 /* Handle the OR case, where we are redistributing:
1662 (inner1 OR inner2) AND (op2a code2 op2b)
1663 => (t OR (inner1 AND (op2a code2 op2b)))
1664 => (t OR partial) */
1665 else
1666 {
1667 if (integer_onep (t))
1668 return boolean_true_node;
1669 else if (partial)
1670 {
1671 /* We already got a simplification for the other
1672 operand to the redistributed OR expression. The
1673 interesting case is when at least one is false.
1674 Or, if both are the same, we can apply the identity
1675 (x OR x) == x. */
1676 if (integer_zerop (partial))
1677 return t;
1678 else if (integer_zerop (t))
1679 return partial;
1680 else if (same_bool_result_p (t, partial))
1681 return t;
1682 }
1683 }
1684 }
1685 }
1686 return NULL_TREE;
1687 }
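
/* Examples of the identities applied above:

     var AND (var == 0)       ->  false
     var AND (var != 0)       ->  var
     (a AND b) AND (a != 0)   ->  a AND b
     (a OR b) AND (a == 0)    ->  b AND (a == 0)  */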
1688
1689 /* Try to simplify the AND of two comparisons defined by
1690 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1691 If this can be done without constructing an intermediate value,
1692 return the resulting tree; otherwise NULL_TREE is returned.
1693 This function is deliberately asymmetric as it recurses on SSA_DEFs
1694 in the first comparison but not the second. */
1695
1696 static tree
1697 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1698 enum tree_code code2, tree op2a, tree op2b)
1699 {
1700 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1701
1702 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1703 if (operand_equal_p (op1a, op2a, 0)
1704 && operand_equal_p (op1b, op2b, 0))
1705 {
1706 /* Result will be either NULL_TREE, or a combined comparison. */
1707 tree t = combine_comparisons (UNKNOWN_LOCATION,
1708 TRUTH_ANDIF_EXPR, code1, code2,
1709 truth_type, op1a, op1b);
1710 if (t)
1711 return t;
1712 }
1713
1714 /* Likewise the swapped case of the above. */
1715 if (operand_equal_p (op1a, op2b, 0)
1716 && operand_equal_p (op1b, op2a, 0))
1717 {
1718 /* Result will be either NULL_TREE, or a combined comparison. */
1719 tree t = combine_comparisons (UNKNOWN_LOCATION,
1720 TRUTH_ANDIF_EXPR, code1,
1721 swap_tree_comparison (code2),
1722 truth_type, op1a, op1b);
1723 if (t)
1724 return t;
1725 }
1726
1727 /* If both comparisons are of the same value against constants, we might
1728 be able to merge them. */
1729 if (operand_equal_p (op1a, op2a, 0)
1730 && TREE_CODE (op1b) == INTEGER_CST
1731 && TREE_CODE (op2b) == INTEGER_CST)
1732 {
1733 int cmp = tree_int_cst_compare (op1b, op2b);
1734
1735 /* If we have (op1a == op1b), we should either be able to
1736 return that or FALSE, depending on whether the constant op1b
1737 also satisfies the other comparison against op2b. */
1738 if (code1 == EQ_EXPR)
1739 {
1740 bool done = true;
1741 bool val;
1742 switch (code2)
1743 {
1744 case EQ_EXPR: val = (cmp == 0); break;
1745 case NE_EXPR: val = (cmp != 0); break;
1746 case LT_EXPR: val = (cmp < 0); break;
1747 case GT_EXPR: val = (cmp > 0); break;
1748 case LE_EXPR: val = (cmp <= 0); break;
1749 case GE_EXPR: val = (cmp >= 0); break;
1750 default: done = false;
1751 }
1752 if (done)
1753 {
1754 if (val)
1755 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1756 else
1757 return boolean_false_node;
1758 }
1759 }
1760 /* Likewise if the second comparison is an == comparison. */
1761 else if (code2 == EQ_EXPR)
1762 {
1763 bool done = true;
1764 bool val;
1765 switch (code1)
1766 {
1767 case EQ_EXPR: val = (cmp == 0); break;
1768 case NE_EXPR: val = (cmp != 0); break;
1769 case LT_EXPR: val = (cmp > 0); break;
1770 case GT_EXPR: val = (cmp < 0); break;
1771 case LE_EXPR: val = (cmp >= 0); break;
1772 case GE_EXPR: val = (cmp <= 0); break;
1773 default: done = false;
1774 }
1775 if (done)
1776 {
1777 if (val)
1778 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1779 else
1780 return boolean_false_node;
1781 }
1782 }
1783
1784 /* Same business with inequality tests. */
1785 else if (code1 == NE_EXPR)
1786 {
1787 bool val;
1788 switch (code2)
1789 {
1790 case EQ_EXPR: val = (cmp != 0); break;
1791 case NE_EXPR: val = (cmp == 0); break;
1792 case LT_EXPR: val = (cmp >= 0); break;
1793 case GT_EXPR: val = (cmp <= 0); break;
1794 case LE_EXPR: val = (cmp > 0); break;
1795 case GE_EXPR: val = (cmp < 0); break;
1796 default:
1797 val = false;
1798 }
1799 if (val)
1800 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1801 }
1802 else if (code2 == NE_EXPR)
1803 {
1804 bool val;
1805 switch (code1)
1806 {
1807 case EQ_EXPR: val = (cmp == 0); break;
1808 case NE_EXPR: val = (cmp != 0); break;
1809 case LT_EXPR: val = (cmp <= 0); break;
1810 case GT_EXPR: val = (cmp >= 0); break;
1811 case LE_EXPR: val = (cmp < 0); break;
1812 case GE_EXPR: val = (cmp > 0); break;
1813 default:
1814 val = false;
1815 }
1816 if (val)
1817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1818 }
1819
1820 /* Choose the more restrictive of two < or <= comparisons. */
1821 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1822 && (code2 == LT_EXPR || code2 == LE_EXPR))
1823 {
1824 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1825 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1826 else
1827 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1828 }
1829
1830 /* Likewise choose the more restrictive of two > or >= comparisons. */
1831 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1832 && (code2 == GT_EXPR || code2 == GE_EXPR))
1833 {
1834 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1835 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1836 else
1837 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1838 }
1839
1840 /* Check for singleton ranges. */
1841 else if (cmp == 0
1842 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1843 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1844 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1845
1846 /* Check for disjoint ranges. */
1847 else if (cmp <= 0
1848 && (code1 == LT_EXPR || code1 == LE_EXPR)
1849 && (code2 == GT_EXPR || code2 == GE_EXPR))
1850 return boolean_false_node;
1851 else if (cmp >= 0
1852 && (code1 == GT_EXPR || code1 == GE_EXPR)
1853 && (code2 == LT_EXPR || code2 == LE_EXPR))
1854 return boolean_false_node;
1855 }
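/* Worked examples of the constant-merging rules above (illustrative,
   not part of the original source), for an integral SSA name x:
     (x == 3) AND (x < 5)   =>  x == 3   (3 satisfies x < 5)
     (x <= 3) AND (x >= 3)  =>  x == 3   (singleton range)
     (x < 5)  AND (x > 7)   =>  false    (disjoint ranges)  */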
1856
1857 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1858 NAME's definition is a truth value. See if there are any simplifications
1859 that can be done against NAME's definition. */
1860 if (TREE_CODE (op1a) == SSA_NAME
1861 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1862 && (integer_zerop (op1b) || integer_onep (op1b)))
1863 {
1864 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1865 || (code1 == NE_EXPR && integer_onep (op1b)));
1866 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1867 switch (gimple_code (stmt))
1868 {
1869 case GIMPLE_ASSIGN:
1870 /* Try to simplify by copy-propagating the definition. */
1871 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1872
1873 case GIMPLE_PHI:
1874 /* If every argument to the PHI produces the same result when
1875 ANDed with the second comparison, we win.
1876 Do not do this unless the type is bool since we need a bool
1877 result here anyway. */
1878 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1879 {
1880 tree result = NULL_TREE;
1881 unsigned i;
1882 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1883 {
1884 tree arg = gimple_phi_arg_def (stmt, i);
1885
1886 /* If this PHI has itself as an argument, ignore it.
1887 If all the other args produce the same result,
1888 we're still OK. */
1889 if (arg == gimple_phi_result (stmt))
1890 continue;
1891 else if (TREE_CODE (arg) == INTEGER_CST)
1892 {
1893 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1894 {
1895 if (!result)
1896 result = boolean_false_node;
1897 else if (!integer_zerop (result))
1898 return NULL_TREE;
1899 }
1900 else if (!result)
1901 result = fold_build2 (code2, boolean_type_node,
1902 op2a, op2b);
1903 else if (!same_bool_comparison_p (result,
1904 code2, op2a, op2b))
1905 return NULL_TREE;
1906 }
1907 else if (TREE_CODE (arg) == SSA_NAME
1908 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1909 {
1910 tree temp;
1911 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1912 /* In simple cases we can look through PHI nodes,
1913 but we have to be careful with loops.
1914 See PR49073. */
1915 if (! dom_info_available_p (CDI_DOMINATORS)
1916 || gimple_bb (def_stmt) == gimple_bb (stmt)
1917 || dominated_by_p (CDI_DOMINATORS,
1918 gimple_bb (def_stmt),
1919 gimple_bb (stmt)))
1920 return NULL_TREE;
1921 temp = and_var_with_comparison (arg, invert, code2,
1922 op2a, op2b);
1923 if (!temp)
1924 return NULL_TREE;
1925 else if (!result)
1926 result = temp;
1927 else if (!same_bool_result_p (result, temp))
1928 return NULL_TREE;
1929 }
1930 else
1931 return NULL_TREE;
1932 }
1933 return result;
1934 }
1935
1936 default:
1937 break;
1938 }
1939 }
1940 return NULL_TREE;
1941 }
1942
1943 /* Try to simplify the AND of two comparisons, specified by
1944 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1945 If this can be simplified to a single expression (without requiring
1946 introducing more SSA variables to hold intermediate values),
1947 return the resulting tree. Otherwise return NULL_TREE.
1948 If the result expression is non-null, it has boolean type. */
1949
1950 tree
1951 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1952 enum tree_code code2, tree op2a, tree op2b)
1953 {
1954 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1955 if (t)
1956 return t;
1957 else
1958 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1959 }
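/* A minimal caller sketch (not in the original source); `x' is assumed
   to be an SSA name of integral type built elsewhere.  */
#if 0
static tree
example_fold_and (tree x)
{
  tree five = build_int_cst (TREE_TYPE (x), 5);
  tree seven = build_int_cst (TREE_TYPE (x), 7);

  /* Disjoint ranges: (x < 5) AND (x > 7) folds to boolean_false_node;
     NULL_TREE would mean no simplification was found.  */
  return maybe_fold_and_comparisons (LT_EXPR, x, five,
                                     GT_EXPR, x, seven);
}
#endif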
1960
1961 /* Helper function for or_comparisons_1: try to simplify the OR of the
1962 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1963 If INVERT is true, invert the value of VAR before doing the OR.
1964 Return NULL_TREE if we can't simplify this to a single expression. */
1965
1966 static tree
1967 or_var_with_comparison (tree var, bool invert,
1968 enum tree_code code2, tree op2a, tree op2b)
1969 {
1970 tree t;
1971 gimple stmt = SSA_NAME_DEF_STMT (var);
1972
1973 /* We can only deal with variables whose definitions are assignments. */
1974 if (!is_gimple_assign (stmt))
1975 return NULL_TREE;
1976
1977 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1978 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1979 Then we only have to consider the simpler non-inverted cases. */
1980 if (invert)
1981 t = and_var_with_comparison_1 (stmt,
1982 invert_tree_comparison (code2, false),
1983 op2a, op2b);
1984 else
1985 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1986 return canonicalize_bool (t, invert);
1987 }
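/* For instance (illustrative): with INVERT set and a second comparison
   (b < 7), the code above computes var AND (b >= 7) and lets
   canonicalize_bool re-invert the result, using
     !var OR (b < 7)  ==  !(var AND (b >= 7)).  */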
1988
1989 /* Try to simplify the OR of the ssa variable defined by the assignment
1990 STMT with the comparison specified by (OP2A CODE2 OP2B).
1991 Return NULL_TREE if we can't simplify this to a single expression. */
1992
1993 static tree
1994 or_var_with_comparison_1 (gimple stmt,
1995 enum tree_code code2, tree op2a, tree op2b)
1996 {
1997 tree var = gimple_assign_lhs (stmt);
1998 tree true_test_var = NULL_TREE;
1999 tree false_test_var = NULL_TREE;
2000 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2001
2002 /* Check for identities like (var OR (var != 0)) => true. */
2003 if (TREE_CODE (op2a) == SSA_NAME
2004 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2005 {
2006 if ((code2 == NE_EXPR && integer_zerop (op2b))
2007 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2008 {
2009 true_test_var = op2a;
2010 if (var == true_test_var)
2011 return var;
2012 }
2013 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2014 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2015 {
2016 false_test_var = op2a;
2017 if (var == false_test_var)
2018 return boolean_true_node;
2019 }
2020 }
2021
2022 /* If the definition is a comparison, recurse on it. */
2023 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2024 {
2025 tree t = or_comparisons_1 (innercode,
2026 gimple_assign_rhs1 (stmt),
2027 gimple_assign_rhs2 (stmt),
2028 code2,
2029 op2a,
2030 op2b);
2031 if (t)
2032 return t;
2033 }
2034
2035 /* If the definition is an AND or OR expression, we may be able to
2036 simplify by reassociating. */
2037 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2038 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2039 {
2040 tree inner1 = gimple_assign_rhs1 (stmt);
2041 tree inner2 = gimple_assign_rhs2 (stmt);
2042 gimple s;
2043 tree t;
2044 tree partial = NULL_TREE;
2045 bool is_or = (innercode == BIT_IOR_EXPR);
2046
2047 /* Check for boolean identities that don't require recursive examination
2048 of inner1/inner2:
2049 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2050 inner1 OR (inner1 AND inner2) => inner1
2051 !inner1 OR (inner1 OR inner2) => true
2052 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2053 */
2054 if (inner1 == true_test_var)
2055 return (is_or ? var : inner1);
2056 else if (inner2 == true_test_var)
2057 return (is_or ? var : inner2);
2058 else if (inner1 == false_test_var)
2059 return (is_or
2060 ? boolean_true_node
2061 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2062 else if (inner2 == false_test_var)
2063 return (is_or
2064 ? boolean_true_node
2065 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
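/* Concrete instance (illustrative): when var = inner1 | inner2 and the
   second comparison is (inner1 != 0), TRUE_TEST_VAR is inner1 and the
   first identity above fires:
     inner1 OR (inner1 OR inner2)  =>  inner1 OR inner2  =>  var.  */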
2066
2067 /* Next, redistribute/reassociate the OR across the inner tests.
2068 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2069 if (TREE_CODE (inner1) == SSA_NAME
2070 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2071 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2072 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2073 gimple_assign_rhs1 (s),
2074 gimple_assign_rhs2 (s),
2075 code2, op2a, op2b)))
2076 {
2077 /* Handle the OR case, where we are reassociating:
2078 (inner1 OR inner2) OR (op2a code2 op2b)
2079 => (t OR inner2)
2080 If the partial result t is a constant, we win. Otherwise
2081 continue on to try reassociating with the other inner test. */
2082 if (is_or)
2083 {
2084 if (integer_onep (t))
2085 return boolean_true_node;
2086 else if (integer_zerop (t))
2087 return inner2;
2088 }
2089
2090 /* Handle the AND case, where we are redistributing:
2091 (inner1 AND inner2) OR (op2a code2 op2b)
2092 => (t AND (inner2 OR (op2a code op2b))) */
2093 else if (integer_zerop (t))
2094 return boolean_false_node;
2095
2096 /* Save partial result for later. */
2097 partial = t;
2098 }
2099
2100 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2101 if (TREE_CODE (inner2) == SSA_NAME
2102 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2103 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2104 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2105 gimple_assign_rhs1 (s),
2106 gimple_assign_rhs2 (s),
2107 code2, op2a, op2b)))
2108 {
2109 /* Handle the OR case, where we are reassociating:
2110 (inner1 OR inner2) OR (op2a code2 op2b)
2111 => (inner1 OR t)
2112 => (t OR partial) */
2113 if (is_or)
2114 {
2115 if (integer_zerop (t))
2116 return inner1;
2117 else if (integer_onep (t))
2118 return boolean_true_node;
2119 /* If both are the same, we can apply the identity
2120 (x OR x) == x. */
2121 else if (partial && same_bool_result_p (t, partial))
2122 return t;
2123 }
2124
2125 /* Handle the AND case, where we are redistributing:
2126 (inner1 AND inner2) OR (op2a code2 op2b)
2127 => (t AND (inner1 OR (op2a code2 op2b)))
2128 => (t AND partial) */
2129 else
2130 {
2131 if (integer_zerop (t))
2132 return boolean_false_node;
2133 else if (partial)
2134 {
2135 /* We already got a simplification for the other
2136 operand to the redistributed AND expression. The
2137 interesting case is when at least one is true.
2138 Or, if both are the same, we can apply the identity
2139 (x AND x) == x. */
2140 if (integer_onep (partial))
2141 return t;
2142 else if (integer_onep (t))
2143 return partial;
2144 else if (same_bool_result_p (t, partial))
2145 return t;
2146 }
2147 }
2148 }
2149 }
2150 return NULL_TREE;
2151 }
2152
2153 /* Try to simplify the OR of two comparisons defined by
2154 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2155 If this can be done without constructing an intermediate value,
2156 return the resulting tree; otherwise NULL_TREE is returned.
2157 This function is deliberately asymmetric as it recurses on SSA_DEFs
2158 in the first comparison but not the second. */
2159
2160 static tree
2161 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2162 enum tree_code code2, tree op2a, tree op2b)
2163 {
2164 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2165
2166 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2167 if (operand_equal_p (op1a, op2a, 0)
2168 && operand_equal_p (op1b, op2b, 0))
2169 {
2170 /* Result will be either NULL_TREE, or a combined comparison. */
2171 tree t = combine_comparisons (UNKNOWN_LOCATION,
2172 TRUTH_ORIF_EXPR, code1, code2,
2173 truth_type, op1a, op1b);
2174 if (t)
2175 return t;
2176 }
2177
2178 /* Likewise the swapped case of the above. */
2179 if (operand_equal_p (op1a, op2b, 0)
2180 && operand_equal_p (op1b, op2a, 0))
2181 {
2182 /* Result will be either NULL_TREE, or a combined comparison. */
2183 tree t = combine_comparisons (UNKNOWN_LOCATION,
2184 TRUTH_ORIF_EXPR, code1,
2185 swap_tree_comparison (code2),
2186 truth_type, op1a, op1b);
2187 if (t)
2188 return t;
2189 }
2190
2191 /* If both comparisons are of the same value against constants, we might
2192 be able to merge them. */
2193 if (operand_equal_p (op1a, op2a, 0)
2194 && TREE_CODE (op1b) == INTEGER_CST
2195 && TREE_CODE (op2b) == INTEGER_CST)
2196 {
2197 int cmp = tree_int_cst_compare (op1b, op2b);
2198
2199 /* If we have (op1a != op1b), we should either be able to
2200 return that or TRUE, depending on whether the constant op1b
2201 also satisfies the other comparison against op2b. */
2202 if (code1 == NE_EXPR)
2203 {
2204 bool done = true;
2205 bool val;
2206 switch (code2)
2207 {
2208 case EQ_EXPR: val = (cmp == 0); break;
2209 case NE_EXPR: val = (cmp != 0); break;
2210 case LT_EXPR: val = (cmp < 0); break;
2211 case GT_EXPR: val = (cmp > 0); break;
2212 case LE_EXPR: val = (cmp <= 0); break;
2213 case GE_EXPR: val = (cmp >= 0); break;
2214 default: done = false;
2215 }
2216 if (done)
2217 {
2218 if (val)
2219 return boolean_true_node;
2220 else
2221 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2222 }
2223 }
2224 /* Likewise if the second comparison is a != comparison. */
2225 else if (code2 == NE_EXPR)
2226 {
2227 bool done = true;
2228 bool val;
2229 switch (code1)
2230 {
2231 case EQ_EXPR: val = (cmp == 0); break;
2232 case NE_EXPR: val = (cmp != 0); break;
2233 case LT_EXPR: val = (cmp > 0); break;
2234 case GT_EXPR: val = (cmp < 0); break;
2235 case LE_EXPR: val = (cmp >= 0); break;
2236 case GE_EXPR: val = (cmp <= 0); break;
2237 default: done = false;
2238 }
2239 if (done)
2240 {
2241 if (val)
2242 return boolean_true_node;
2243 else
2244 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2245 }
2246 }
2247
2248 /* See if an equality test is redundant with the other comparison. */
2249 else if (code1 == EQ_EXPR)
2250 {
2251 bool val;
2252 switch (code2)
2253 {
2254 case EQ_EXPR: val = (cmp == 0); break;
2255 case NE_EXPR: val = (cmp != 0); break;
2256 case LT_EXPR: val = (cmp < 0); break;
2257 case GT_EXPR: val = (cmp > 0); break;
2258 case LE_EXPR: val = (cmp <= 0); break;
2259 case GE_EXPR: val = (cmp >= 0); break;
2260 default:
2261 val = false;
2262 }
2263 if (val)
2264 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2265 }
2266 else if (code2 == EQ_EXPR)
2267 {
2268 bool val;
2269 switch (code1)
2270 {
2271 case EQ_EXPR: val = (cmp == 0); break;
2272 case NE_EXPR: val = (cmp != 0); break;
2273 case LT_EXPR: val = (cmp > 0); break;
2274 case GT_EXPR: val = (cmp < 0); break;
2275 case LE_EXPR: val = (cmp >= 0); break;
2276 case GE_EXPR: val = (cmp <= 0); break;
2277 default:
2278 val = false;
2279 }
2280 if (val)
2281 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2282 }
2283
2284 /* Choose the less restrictive of two < or <= comparisons. */
2285 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2286 && (code2 == LT_EXPR || code2 == LE_EXPR))
2287 {
2288 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2289 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2290 else
2291 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2292 }
2293
2294 /* Likewise choose the less restrictive of two > or >= comparisons. */
2295 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2296 && (code2 == GT_EXPR || code2 == GE_EXPR))
2297 {
2298 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2299 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2300 else
2301 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2302 }
2303
2304 /* Check for singleton ranges. */
2305 else if (cmp == 0
2306 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2307 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2308 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2309
2310 /* Check for less/greater pairs that don't restrict the range at all. */
2311 else if (cmp >= 0
2312 && (code1 == LT_EXPR || code1 == LE_EXPR)
2313 && (code2 == GT_EXPR || code2 == GE_EXPR))
2314 return boolean_true_node;
2315 else if (cmp <= 0
2316 && (code1 == GT_EXPR || code1 == GE_EXPR)
2317 && (code2 == LT_EXPR || code2 == LE_EXPR))
2318 return boolean_true_node;
2319 }
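/* Worked examples of the constant-merging rules above (illustrative,
   not part of the original source), for an integral SSA name x:
     (x != 3) OR (x < 5)   =>  true     (3 satisfies x < 5)
     (x < 3)  OR (x > 3)   =>  x != 3   (singleton range)
     (x < 5)  OR (x > 3)   =>  true     (no restriction)  */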
2320
2321 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2322 NAME's definition is a truth value. See if there are any simplifications
2323 that can be done against NAME's definition. */
2324 if (TREE_CODE (op1a) == SSA_NAME
2325 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2326 && (integer_zerop (op1b) || integer_onep (op1b)))
2327 {
2328 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2329 || (code1 == NE_EXPR && integer_onep (op1b)));
2330 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2331 switch (gimple_code (stmt))
2332 {
2333 case GIMPLE_ASSIGN:
2334 /* Try to simplify by copy-propagating the definition. */
2335 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2336
2337 case GIMPLE_PHI:
2338 /* If every argument to the PHI produces the same result when
2339 ORed with the second comparison, we win.
2340 Do not do this unless the type is bool since we need a bool
2341 result here anyway. */
2342 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2343 {
2344 tree result = NULL_TREE;
2345 unsigned i;
2346 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2347 {
2348 tree arg = gimple_phi_arg_def (stmt, i);
2349
2350 /* If this PHI has itself as an argument, ignore it.
2351 If all the other args produce the same result,
2352 we're still OK. */
2353 if (arg == gimple_phi_result (stmt))
2354 continue;
2355 else if (TREE_CODE (arg) == INTEGER_CST)
2356 {
2357 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2358 {
2359 if (!result)
2360 result = boolean_true_node;
2361 else if (!integer_onep (result))
2362 return NULL_TREE;
2363 }
2364 else if (!result)
2365 result = fold_build2 (code2, boolean_type_node,
2366 op2a, op2b);
2367 else if (!same_bool_comparison_p (result,
2368 code2, op2a, op2b))
2369 return NULL_TREE;
2370 }
2371 else if (TREE_CODE (arg) == SSA_NAME
2372 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2373 {
2374 tree temp;
2375 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2376 /* In simple cases we can look through PHI nodes,
2377 but we have to be careful with loops.
2378 See PR49073. */
2379 if (! dom_info_available_p (CDI_DOMINATORS)
2380 || gimple_bb (def_stmt) == gimple_bb (stmt)
2381 || dominated_by_p (CDI_DOMINATORS,
2382 gimple_bb (def_stmt),
2383 gimple_bb (stmt)))
2384 return NULL_TREE;
2385 temp = or_var_with_comparison (arg, invert, code2,
2386 op2a, op2b);
2387 if (!temp)
2388 return NULL_TREE;
2389 else if (!result)
2390 result = temp;
2391 else if (!same_bool_result_p (result, temp))
2392 return NULL_TREE;
2393 }
2394 else
2395 return NULL_TREE;
2396 }
2397 return result;
2398 }
2399
2400 default:
2401 break;
2402 }
2403 }
2404 return NULL_TREE;
2405 }
2406
2407 /* Try to simplify the OR of two comparisons, specified by
2408 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2409 If this can be simplified to a single expression (without requiring
2410 introducing more SSA variables to hold intermediate values),
2411 return the resulting tree. Otherwise return NULL_TREE.
2412 If the result expression is non-null, it has boolean type. */
2413
2414 tree
2415 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2416 enum tree_code code2, tree op2a, tree op2b)
2417 {
2418 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2419 if (t)
2420 return t;
2421 else
2422 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2423 }
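/* A minimal caller sketch (not in the original source); `x' is assumed
   to be an SSA name of integral type built elsewhere.  */
#if 0
static tree
example_fold_or (tree x)
{
  tree three = build_int_cst (TREE_TYPE (x), 3);
  tree five = build_int_cst (TREE_TYPE (x), 5);

  /* (x < 5) OR (x > 3) does not restrict the range at all, so this
     returns boolean_true_node.  */
  return maybe_fold_or_comparisons (LT_EXPR, x, five,
                                    GT_EXPR, x, three);
}
#endif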
2424
2425
2426 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2427
2428 Returns either NULL_TREE, a simplified but non-constant
2429 expression, or a constant.
2430
2431 ??? This should go into a gimple-fold-inline.h file to be eventually
2432 privatized with the single valueize function used in the various TUs
2433 to avoid the indirect function call overhead. */
2434
2435 tree
2436 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2437 {
2438 location_t loc = gimple_location (stmt);
2439 switch (gimple_code (stmt))
2440 {
2441 case GIMPLE_ASSIGN:
2442 {
2443 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2444
2445 switch (get_gimple_rhs_class (subcode))
2446 {
2447 case GIMPLE_SINGLE_RHS:
2448 {
2449 tree rhs = gimple_assign_rhs1 (stmt);
2450 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2451
2452 if (TREE_CODE (rhs) == SSA_NAME)
2453 {
2454 /* If the RHS is an SSA_NAME, return its known constant value,
2455 if any. */
2456 return (*valueize) (rhs);
2457 }
2458 /* Handle propagating invariant addresses into address
2459 operations. */
2460 else if (TREE_CODE (rhs) == ADDR_EXPR
2461 && !is_gimple_min_invariant (rhs))
2462 {
2463 HOST_WIDE_INT offset = 0;
2464 tree base;
2465 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2466 &offset,
2467 valueize);
2468 if (base
2469 && (CONSTANT_CLASS_P (base)
2470 || decl_address_invariant_p (base)))
2471 return build_invariant_address (TREE_TYPE (rhs),
2472 base, offset);
2473 }
2474 else if (TREE_CODE (rhs) == CONSTRUCTOR
2475 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2476 && (CONSTRUCTOR_NELTS (rhs)
2477 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2478 {
2479 unsigned i;
2480 tree val, *vec;
2481
2482 vec = XALLOCAVEC (tree,
2483 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2485 {
2486 val = (*valueize) (val);
2487 if (TREE_CODE (val) == INTEGER_CST
2488 || TREE_CODE (val) == REAL_CST
2489 || TREE_CODE (val) == FIXED_CST)
2490 vec[i] = val;
2491 else
2492 return NULL_TREE;
2493 }
2494
2495 return build_vector (TREE_TYPE (rhs), vec);
2496 }
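/* For instance (illustrative): a vector CONSTRUCTOR {a_1, a_2, 3, 4}
   folds to a single VECTOR_CST as soon as a_1 and a_2 valueize to
   constants; a single non-constant element makes the fold give up.  */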
2497
2498 if (kind == tcc_reference)
2499 {
2500 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2501 || TREE_CODE (rhs) == REALPART_EXPR
2502 || TREE_CODE (rhs) == IMAGPART_EXPR)
2503 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2504 {
2505 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2506 return fold_unary_loc (EXPR_LOCATION (rhs),
2507 TREE_CODE (rhs),
2508 TREE_TYPE (rhs), val);
2509 }
2510 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2511 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2512 {
2513 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2514 return fold_ternary_loc (EXPR_LOCATION (rhs),
2515 TREE_CODE (rhs),
2516 TREE_TYPE (rhs), val,
2517 TREE_OPERAND (rhs, 1),
2518 TREE_OPERAND (rhs, 2));
2519 }
2520 else if (TREE_CODE (rhs) == MEM_REF
2521 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2522 {
2523 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2524 if (TREE_CODE (val) == ADDR_EXPR
2525 && is_gimple_min_invariant (val))
2526 {
2527 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2528 unshare_expr (val),
2529 TREE_OPERAND (rhs, 1));
2530 if (tem)
2531 rhs = tem;
2532 }
2533 }
2534 return fold_const_aggregate_ref_1 (rhs, valueize);
2535 }
2536 else if (kind == tcc_declaration)
2537 return get_symbol_constant_value (rhs);
2538 return rhs;
2539 }
2540
2541 case GIMPLE_UNARY_RHS:
2542 {
2543 /* Handle unary operators that can appear in GIMPLE form.
2544 Note that we know the single operand must be a constant,
2545 so this should almost always return a simplified RHS. */
2546 tree lhs = gimple_assign_lhs (stmt);
2547 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2548
2549 /* Conversions are useless for CCP purposes if they are
2550 value-preserving. Thus the restrictions that
2551 useless_type_conversion_p places for restrict qualification
2552 of pointer types should not apply here.
2553 Substitution later will only substitute to allowed places. */
2554 if (CONVERT_EXPR_CODE_P (subcode)
2555 && POINTER_TYPE_P (TREE_TYPE (lhs))
2556 && POINTER_TYPE_P (TREE_TYPE (op0))
2557 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2558 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2559 && TYPE_MODE (TREE_TYPE (lhs))
2560 == TYPE_MODE (TREE_TYPE (op0)))
2561 return op0;
2562
2563 return
2564 fold_unary_ignore_overflow_loc (loc, subcode,
2565 gimple_expr_type (stmt), op0);
2566 }
2567
2568 case GIMPLE_BINARY_RHS:
2569 {
2570 /* Handle binary operators that can appear in GIMPLE form. */
2571 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2572 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2573
2574 /* Translate &x + CST into an invariant form suitable for
2575 further propagation. */
2576 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2577 && TREE_CODE (op0) == ADDR_EXPR
2578 && TREE_CODE (op1) == INTEGER_CST)
2579 {
2580 tree off = fold_convert (ptr_type_node, op1);
2581 return build_fold_addr_expr_loc
2582 (loc,
2583 fold_build2 (MEM_REF,
2584 TREE_TYPE (TREE_TYPE (op0)),
2585 unshare_expr (op0), off));
2586 }
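/* For example (illustrative): the transform just above rewrites
   &a + 4 into &MEM_REF[&a, 4], an invariant address that later
   substitution can propagate directly.  */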
2587
2588 return fold_binary_loc (loc, subcode,
2589 gimple_expr_type (stmt), op0, op1);
2590 }
2591
2592 case GIMPLE_TERNARY_RHS:
2593 {
2594 /* Handle ternary operators that can appear in GIMPLE form. */
2595 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2596 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2597 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2598
2599 /* Fold embedded expressions in ternary codes. */
2600 if ((subcode == COND_EXPR
2601 || subcode == VEC_COND_EXPR)
2602 && COMPARISON_CLASS_P (op0))
2603 {
2604 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2605 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2606 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2607 TREE_TYPE (op0), op00, op01);
2608 if (tem)
2609 op0 = tem;
2610 }
2611
2612 return fold_ternary_loc (loc, subcode,
2613 gimple_expr_type (stmt), op0, op1, op2);
2614 }
2615
2616 default:
2617 gcc_unreachable ();
2618 }
2619 }
2620
2621 case GIMPLE_CALL:
2622 {
2623 tree fn;
2624
2625 if (gimple_call_internal_p (stmt))
2626 /* No folding yet for these functions. */
2627 return NULL_TREE;
2628
2629 fn = (*valueize) (gimple_call_fn (stmt));
2630 if (TREE_CODE (fn) == ADDR_EXPR
2631 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2632 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2633 {
2634 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2635 tree call, retval;
2636 unsigned i;
2637 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2638 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2639 call = build_call_array_loc (loc,
2640 gimple_call_return_type (stmt),
2641 fn, gimple_call_num_args (stmt), args);
2642 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2643 if (retval)
2644 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2645 STRIP_NOPS (retval);
2646 return retval;
2647 }
2648 return NULL_TREE;
2649 }
2650
2651 default:
2652 return NULL_TREE;
2653 }
2654 }
2655
2656 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2657 Returns NULL_TREE if folding to a constant is not possible, otherwise
2658 returns a constant according to is_gimple_min_invariant. */
2659
2660 tree
2661 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2662 {
2663 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2664 if (res && is_gimple_min_invariant (res))
2665 return res;
2666 return NULL_TREE;
2667 }
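/* A minimal usage sketch (not in the original source).  A valueize
   callback maps SSA names to known values; the identity callback below
   simply folds the statement with no extra knowledge.  */
#if 0
static tree
identity_valueize (tree name)
{
  return name;
}

/* Then gimple_fold_stmt_to_constant (stmt, identity_valueize) returns
   either a constant satisfying is_gimple_min_invariant or NULL_TREE.  */
#endif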
2668
2669
2670 /* The following set of functions folds references using their
2671 constant initializers. */
2672
2673 static tree fold_ctor_reference (tree type, tree ctor,
2674 unsigned HOST_WIDE_INT offset,
2675 unsigned HOST_WIDE_INT size, tree);
2676
2677 /* See if we can find a constructor defining the value of BASE.
2678 When we know the constructor at a constant offset (such as when
2679 BASE is array[40] and we know the constructor of array), then
2680 BIT_OFFSET is adjusted accordingly.
2681
2682 As a special case, return error_mark_node when the constructor
2683 is not explicitly available, but it is known to be zero,
2684 such as for 'static const int a;'. */
2685 static tree
2686 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2687 tree (*valueize)(tree))
2688 {
2689 HOST_WIDE_INT bit_offset2, size, max_size;
2690 if (TREE_CODE (base) == MEM_REF)
2691 {
2692 if (!integer_zerop (TREE_OPERAND (base, 1)))
2693 {
2694 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2695 return NULL_TREE;
2696 *bit_offset += (mem_ref_offset (base).low
2697 * BITS_PER_UNIT);
2698 }
2699
2700 if (valueize
2701 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2702 base = valueize (TREE_OPERAND (base, 0));
2703 if (!base || TREE_CODE (base) != ADDR_EXPR)
2704 return NULL_TREE;
2705 base = TREE_OPERAND (base, 0);
2706 }
2707
2708 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2709 DECL_INITIAL. If BASE is a nested reference into another
2710 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2711 the inner reference. */
2712 switch (TREE_CODE (base))
2713 {
2714 case VAR_DECL:
2715 if (!const_value_known_p (base))
2716 return NULL_TREE;
2717
2718 /* Fallthru. */
2719 case CONST_DECL:
2720 if (!DECL_INITIAL (base)
2721 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2722 return error_mark_node;
2723 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2724 as a special marker (_not_ zero ...) for its own purposes. */
2725 if (DECL_INITIAL (base) == error_mark_node)
2726 return NULL_TREE;
2727 return DECL_INITIAL (base);
2728
2729 case ARRAY_REF:
2730 case COMPONENT_REF:
2731 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2732 if (max_size == -1 || size != max_size)
2733 return NULL_TREE;
2734 *bit_offset += bit_offset2;
2735 return get_base_constructor (base, bit_offset, valueize);
2736
2737 case STRING_CST:
2738 case CONSTRUCTOR:
2739 return base;
2740
2741 default:
2742 return NULL_TREE;
2743 }
2744 }
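/* For instance (illustrative, assuming BITS_PER_UNIT == 8): for a base
   of the form MEM_REF[&arr, 8], the code above adds 64 to *BIT_OFFSET,
   strips the MEM_REF down to the VAR_DECL arr and returns
   DECL_INITIAL (arr) via the VAR_DECL case.  */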
2745
2746 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2747 to the memory at bit OFFSET.
2748
2749 We only do the simple job of folding byte accesses. */
2750
2751 static tree
2752 fold_string_cst_ctor_reference (tree type, tree ctor,
2753 unsigned HOST_WIDE_INT offset,
2754 unsigned HOST_WIDE_INT size)
2755 {
2756 if (INTEGRAL_TYPE_P (type)
2757 && (TYPE_MODE (type)
2758 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2759 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2760 == MODE_INT)
2761 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2762 && size == BITS_PER_UNIT
2763 && !(offset % BITS_PER_UNIT))
2764 {
2765 offset /= BITS_PER_UNIT;
2766 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2767 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2768 [offset]));
2769 /* Folding
2770 const char a[20]="hello";
2771 return a[10];
2772
2773 might lead to an offset greater than the string length. In this
2774 case we know the value is either initialized to 0 or the access is
2775 out of bounds. Return 0 in both cases. */
2776 return build_zero_cst (type);
2777 }
2778 return NULL_TREE;
2779 }
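/* Illustrative call (not in the original source), assuming
   BITS_PER_UNIT == 8 and ctor = "hello" of type char[6]:
     fold_string_cst_ctor_reference (char_type_node, ctor, 8, 8)
   returns the character constant 'e' (byte 1), while any in-bounds
   offset past the string length folds to 0.  */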
2780
2781 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
2782 and size SIZE to the memory at bit OFFSET. */
2783
2784 static tree
2785 fold_array_ctor_reference (tree type, tree ctor,
2786 unsigned HOST_WIDE_INT offset,
2787 unsigned HOST_WIDE_INT size,
2788 tree from_decl)
2789 {
2790 unsigned HOST_WIDE_INT cnt;
2791 tree cfield, cval;
2792 double_int low_bound, elt_size;
2793 double_int index, max_index;
2794 double_int access_index;
2795 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2796 HOST_WIDE_INT inner_offset;
2797
2798 /* Compute low bound and elt size. */
2799 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2800 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2801 if (domain_type && TYPE_MIN_VALUE (domain_type))
2802 {
2803 /* Static constructors for variably sized objects make no sense. */
2804 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2805 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2806 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2807 }
2808 else
2809 low_bound = double_int_zero;
2810 /* Static constructors for variably sized objects make no sense. */
2811 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2812 == INTEGER_CST);
2813 elt_size =
2814 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2815
2816
2817 /* We can handle only constant-sized accesses that are known not to
2818 be larger than the size of the array element. */
2819 if (!TYPE_SIZE_UNIT (type)
2820 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2821 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2822 return NULL_TREE;
2823
2824 /* Compute the array index we look for. */
2825 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2826 .udiv (elt_size, TRUNC_DIV_EXPR);
2827 access_index += low_bound;
2828 if (index_type)
2829 access_index = access_index.ext (TYPE_PRECISION (index_type),
2830 TYPE_UNSIGNED (index_type));
2831
2832 /* And offset within the access. */
2833 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2834
2835 /* See if the array field is large enough to span the whole access. We do
2836 not care to fold accesses spanning multiple array indices. */
2837 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2838 return NULL_TREE;
2839
2840 index = low_bound - double_int_one;
2841 if (index_type)
2842 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2843
2844 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2845 {
2846 /* An array constructor might explicitly set the index, specify a range,
2847 or leave the index NULL, meaning that it is the next index after the
2848 previous one. */
2849 if (cfield)
2850 {
2851 if (TREE_CODE (cfield) == INTEGER_CST)
2852 max_index = index = tree_to_double_int (cfield);
2853 else
2854 {
2855 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2856 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2857 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2858 }
2859 }
2860 else
2861 {
2862 index += double_int_one;
2863 if (index_type)
2864 index = index.ext (TYPE_PRECISION (index_type),
2865 TYPE_UNSIGNED (index_type));
2866 max_index = index;
2867 }
2868
2869 /* Do we have a match? */
2870 if (access_index.cmp (index, 1) >= 0
2871 && access_index.cmp (max_index, 1) <= 0)
2872 return fold_ctor_reference (type, cval, inner_offset, size,
2873 from_decl);
2874 }
2875 /* When memory is not explicitly mentioned in the constructor,
2876 it is 0 (or out of range). */
2877 return build_zero_cst (type);
2878 }
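/* Worked example (illustrative, assuming 32-bit int and
   BITS_PER_UNIT == 8): reading an int at bit OFFSET 64 from int a[4]
   gives elt_size = 4, access_index = (64 / 8) / 4 = 2 and
   inner_offset = 0, so the reference folds to the constructor value
   for a[2], or to zero if index 2 is not explicitly present.  */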
2879
2880 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2881 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2882
2883 static tree
2884 fold_nonarray_ctor_reference (tree type, tree ctor,
2885 unsigned HOST_WIDE_INT offset,
2886 unsigned HOST_WIDE_INT size,
2887 tree from_decl)
2888 {
2889 unsigned HOST_WIDE_INT cnt;
2890 tree cfield, cval;
2891
2892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2893 cval)
2894 {
2895 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2896 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2897 tree field_size = DECL_SIZE (cfield);
2898 double_int bitoffset;
2899 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2900 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2901 double_int bitoffset_end, access_end;
2902
2903 /* Variable-sized objects in static constructors make no sense,
2904 but field_size can be NULL for flexible array members. */
2905 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2906 && TREE_CODE (byte_offset) == INTEGER_CST
2907 && (field_size != NULL_TREE
2908 ? TREE_CODE (field_size) == INTEGER_CST
2909 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2910
2911 /* Compute bit offset of the field. */
2912 bitoffset = tree_to_double_int (field_offset)
2913 + byte_offset_cst * bits_per_unit_cst;
2914 /* Compute bit offset where the field ends. */
2915 if (field_size != NULL_TREE)
2916 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2917 else
2918 bitoffset_end = double_int_zero;
2919
2920 access_end = double_int::from_uhwi (offset)
2921 + double_int::from_uhwi (size);
2922
2923 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2924 [BITOFFSET, BITOFFSET_END)? */
2925 if (access_end.cmp (bitoffset, 0) > 0
2926 && (field_size == NULL_TREE
2927 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2928 {
2929 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2930 /* We do have overlap. Now see if the field is large enough to
2931 cover the access. Give up on accesses spanning multiple
2932 fields. */
2933 if (access_end.cmp (bitoffset_end, 0) > 0)
2934 return NULL_TREE;
2935 if (double_int::from_uhwi (offset).slt (bitoffset))
2936 return NULL_TREE;
2937 return fold_ctor_reference (type, cval,
2938 inner_offset.to_uhwi (), size,
2939 from_decl);
2940 }
2941 }
2942 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2943 return build_zero_cst (type);
2944 }
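/* Worked example (illustrative, assuming 32-bit int): for
   struct { int x; int y; }, a read of SIZE 32 at bit OFFSET 32 has
   access_end = 64; it overlaps only field y ([32, 64) in bits), so we
   recurse into y's constructor value with inner_offset 0.  */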
2945
2946 /* CTOR is a value initializing memory. Fold a reference of type TYPE and
2947 size SIZE to the memory at bit OFFSET. */
2948
2949 static tree
2950 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2951 unsigned HOST_WIDE_INT size, tree from_decl)
2952 {
2953 tree ret;
2954
2955 /* We found the field with an exact match. */
2956 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2957 && !offset)
2958 return canonicalize_constructor_val (ctor, from_decl);
2959
2960 /* We are at the end of the walk; see if we can view-convert the
2961 result. */
2962 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2963 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2964 && operand_equal_p (TYPE_SIZE (type),
2965 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2966 {
2967 ret = canonicalize_constructor_val (ctor, from_decl);
2968 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2969 if (ret)
2970 STRIP_NOPS (ret);
2971 return ret;
2972 }
2973 if (TREE_CODE (ctor) == STRING_CST)
2974 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2975 if (TREE_CODE (ctor) == CONSTRUCTOR)
2976 {
2977
2978 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2979 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2980 return fold_array_ctor_reference (type, ctor, offset, size,
2981 from_decl);
2982 else
2983 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2984 from_decl);
2985 }
2986
2987 return NULL_TREE;
2988 }
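/* For instance (illustrative): reading a float at offset 0 out of a
   scalar int ctor of equal TYPE_SIZE does not match the type exactly
   and therefore takes the VIEW_CONVERT_EXPR path above.  */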
2989
2990 /* Return the tree representing the element referenced by T if T is an
2991 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2992 names using VALUEIZE. Return NULL_TREE otherwise. */
2993
2994 tree
2995 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2996 {
2997 tree ctor, idx, base;
2998 HOST_WIDE_INT offset, size, max_size;
2999 tree tem;
3000
3001 if (TREE_THIS_VOLATILE (t))
3002 return NULL_TREE;
3003
3004 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3005 return get_symbol_constant_value (t);
3006
3007 tem = fold_read_from_constant_string (t);
3008 if (tem)
3009 return tem;
3010
3011 switch (TREE_CODE (t))
3012 {
3013 case ARRAY_REF:
3014 case ARRAY_RANGE_REF:
3015 /* Constant indexes are handled well by get_base_constructor.
3016 Only special case variable offsets.
3017 FIXME: This code can't handle nested references with variable indexes
3018 (they will be handled only by iteration of ccp). Perhaps we can bring
3019 get_ref_base_and_extent here and make it use a valueize callback. */
3020 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3021 && valueize
3022 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3023 && TREE_CODE (idx) == INTEGER_CST)
3024 {
3025 tree low_bound, unit_size;
3026 double_int doffset;
3027
3028 /* If the resulting bit-offset is constant, track it. */
3029 if ((low_bound = array_ref_low_bound (t),
3030 TREE_CODE (low_bound) == INTEGER_CST)
3031 && (unit_size = array_ref_element_size (t),
3032 host_integerp (unit_size, 1))
3033 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3034 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3035 doffset.fits_shwi ()))
3036 {
3037 offset = doffset.to_shwi ();
3038 offset *= TREE_INT_CST_LOW (unit_size);
3039 offset *= BITS_PER_UNIT;
3040
3041 base = TREE_OPERAND (t, 0);
3042 ctor = get_base_constructor (base, &offset, valueize);
3043 /* Empty constructor. Always fold to 0. */
3044 if (ctor == error_mark_node)
3045 return build_zero_cst (TREE_TYPE (t));
3046 /* Out-of-bounds array access. Value is undefined,
3047 but don't fold. */
3048 if (offset < 0)
3049 return NULL_TREE;
3050 /* We cannot determine the ctor. */
3051 if (!ctor)
3052 return NULL_TREE;
3053 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3054 TREE_INT_CST_LOW (unit_size)
3055 * BITS_PER_UNIT,
3056 base);
3057 }
3058 }
3059 /* Fallthru. */
3060
3061 case COMPONENT_REF:
3062 case BIT_FIELD_REF:
3063 case TARGET_MEM_REF:
3064 case MEM_REF:
3065 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3066 ctor = get_base_constructor (base, &offset, valueize);
3067
3068 /* Empty constructor. Always fold to 0. */
3069 if (ctor == error_mark_node)
3070 return build_zero_cst (TREE_TYPE (t));
3071 /* We do not know precise address. */
3072 if (max_size == -1 || max_size != size)
3073 return NULL_TREE;
3074 /* We cannot determine the ctor. */
3075 if (!ctor)
3076 return NULL_TREE;
3077
3078 /* Out-of-bounds array access. Value is undefined, but don't fold. */
3079 if (offset < 0)
3080 return NULL_TREE;
3081
3082 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3083 base);
3084
3085 case REALPART_EXPR:
3086 case IMAGPART_EXPR:
3087 {
3088 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3089 if (c && TREE_CODE (c) == COMPLEX_CST)
3090 return fold_build1_loc (EXPR_LOCATION (t),
3091 TREE_CODE (t), TREE_TYPE (t), c);
3092 break;
3093 }
3094
3095 default:
3096 break;
3097 }
3098
3099 return NULL_TREE;
3100 }
3101
3102 tree
3103 fold_const_aggregate_ref (tree t)
3104 {
3105 return fold_const_aggregate_ref_1 (t, NULL);
3106 }
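/* Usage sketch (illustrative): given
     static const int a[2] = { 10, 20 };
   passing the GENERIC reference a[1] to fold_const_aggregate_ref walks
   ARRAY_REF -> get_base_constructor -> fold_ctor_reference and yields
   the INTEGER_CST 20.  */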
3107
3108 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3109 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3110 KNOWN_BINFO carries the binfo describing the true type of
3111 OBJ_TYPE_REF_OBJECT(REF). */
3112
3113 tree
3114 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3115 {
3116 unsigned HOST_WIDE_INT offset, size;
3117 tree v, fn, vtable;
3118
3119 vtable = v = BINFO_VTABLE (known_binfo);
3120 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3121 if (!v)
3122 return NULL_TREE;
3123
3124 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3125 {
3126 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3127 v = TREE_OPERAND (v, 0);
3128 }
3129 else
3130 offset = 0;
3131
3132 if (TREE_CODE (v) != ADDR_EXPR)
3133 return NULL_TREE;
3134 v = TREE_OPERAND (v, 0);
3135
3136 if (TREE_CODE (v) != VAR_DECL
3137 || !DECL_VIRTUAL_P (v)
3138 || !DECL_INITIAL (v)
3139 || DECL_INITIAL (v) == error_mark_node)
3140 return NULL_TREE;
3141 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3142 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3143 offset += token * size;
3144 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3145 offset, size, vtable);
3146 if (!fn || integer_zerop (fn))
3147 return NULL_TREE;
3148 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3149 || TREE_CODE (fn) == FDESC_EXPR);
3150 fn = TREE_OPERAND (fn, 0);
3151 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3152
3153 /* When the cgraph node is missing and the function is not public, we
3154 cannot devirtualize. This can happen in WHOPR when the actual method
3155 ends up in another partition, because we found the devirtualization
3156 possibility too late. */
3157 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3158 return NULL_TREE;
3159
3160 /* Make sure we create a cgraph node for functions we'll reference.
3161 They can be non-existent if the reference comes from an entry
3162 of an external vtable for example. */
3163 cgraph_get_create_node (fn);
3164
3165 return fn;
3166 }
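/* Worked example (illustrative, for a target where vtable slots are
   64-bit pointers): SIZE is 64, so TOKEN 2 with no extra
   POINTER_PLUS_EXPR offset reads bits [128, 192) of the vtable's
   DECL_INITIAL and returns the FUNCTION_DECL stored in the third
   slot.  */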
3167
3168 /* Return true iff VAL is a gimple expression that is known to be
3169 non-negative. Restricted to floating-point inputs. */
3170
3171 bool
3172 gimple_val_nonnegative_real_p (tree val)
3173 {
3174 gimple def_stmt;
3175
3176 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3177
3178 /* Use existing logic for non-gimple trees. */
3179 if (tree_expr_nonnegative_p (val))
3180 return true;
3181
3182 if (TREE_CODE (val) != SSA_NAME)
3183 return false;
3184
3185 /* Currently we look only at the immediately defining statement
3186 to make this determination, since recursion on defining
3187 statements of operands can lead to quadratic behavior in the
3188 worst case. This is expected to catch almost all occurrences
3189 in practice. It would be possible to implement limited-depth
3190 recursion if important cases are lost. Alternatively, passes
3191 that need this information (such as the pow/powi lowering code
3192 in the cse_sincos pass) could be revised to provide it through
3193 dataflow propagation. */
3194
3195 def_stmt = SSA_NAME_DEF_STMT (val);
3196
3197 if (is_gimple_assign (def_stmt))
3198 {
3199 tree op0, op1;
3200
3201 /* See fold-const.c:tree_expr_nonnegative_p for additional
3202 cases that could be handled with recursion. */
3203
3204 switch (gimple_assign_rhs_code (def_stmt))
3205 {
3206 case ABS_EXPR:
3207 /* Always true for floating-point operands. */
3208 return true;
3209
3210 case MULT_EXPR:
3211 /* True if the two operands are identical (since we are
3212 restricted to floating-point inputs). */
3213 op0 = gimple_assign_rhs1 (def_stmt);
3214 op1 = gimple_assign_rhs2 (def_stmt);
3215
3216 if (op0 == op1
3217 || operand_equal_p (op0, op1, 0))
3218 return true;
3219
3220 default:
3221 return false;
3222 }
3223 }
3224 else if (is_gimple_call (def_stmt))
3225 {
3226 tree fndecl = gimple_call_fndecl (def_stmt);
3227 if (fndecl
3228 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3229 {
3230 tree arg1;
3231
3232 switch (DECL_FUNCTION_CODE (fndecl))
3233 {
3234 CASE_FLT_FN (BUILT_IN_ACOS):
3235 CASE_FLT_FN (BUILT_IN_ACOSH):
3236 CASE_FLT_FN (BUILT_IN_CABS):
3237 CASE_FLT_FN (BUILT_IN_COSH):
3238 CASE_FLT_FN (BUILT_IN_ERFC):
3239 CASE_FLT_FN (BUILT_IN_EXP):
3240 CASE_FLT_FN (BUILT_IN_EXP10):
3241 CASE_FLT_FN (BUILT_IN_EXP2):
3242 CASE_FLT_FN (BUILT_IN_FABS):
3243 CASE_FLT_FN (BUILT_IN_FDIM):
3244 CASE_FLT_FN (BUILT_IN_HYPOT):
3245 CASE_FLT_FN (BUILT_IN_POW10):
3246 return true;
3247
3248 CASE_FLT_FN (BUILT_IN_SQRT):
3249 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3250 negative inputs. */
3251 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3252 return true;
3253
3254 break;
3255
3256 CASE_FLT_FN (BUILT_IN_POWI):
3257 /* True if the second argument is an even integer. */
3258 arg1 = gimple_call_arg (def_stmt, 1);
3259
3260 if (TREE_CODE (arg1) == INTEGER_CST
3261 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3262 return true;
3263
3264 break;
3265
3266 CASE_FLT_FN (BUILT_IN_POW):
3267 /* True if the second argument is an even integer-valued
3268 real. */
3269 arg1 = gimple_call_arg (def_stmt, 1);
3270
3271 if (TREE_CODE (arg1) == REAL_CST)
3272 {
3273 REAL_VALUE_TYPE c;
3274 HOST_WIDE_INT n;
3275
3276 c = TREE_REAL_CST (arg1);
3277 n = real_to_integer (&c);
3278
3279 if ((n & 1) == 0)
3280 {
3281 REAL_VALUE_TYPE cint;
3282 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3283 if (real_identical (&c, &cint))
3284 return true;
3285 }
3286 }
3287
3288 break;
3289
3290 default:
3291 return false;
3292 }
3293 }
3294 }
3295
3296 return false;
3297 }
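/* Illustrative examples (not in the original source): for
     y = __builtin_pow (x, 2.0);
   the REAL_CST 2.0 round-trips through real_to_integer /
   real_from_integer as the even integer 2, so y is known nonnegative;
   pow (x, 3.0) (odd) and pow (x, 2.5) (non-integral) do not
   qualify.  */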