re PR c++/60572 (ICE deriving from class with invalid member)
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
55
56 /* Return true when DECL can be referenced from current unit.
57 FROM_DECL (if non-null) specifies the constructor of the variable DECL was taken from.
58 We can get declarations that are not possible to reference for various
59 reasons:
60
61 1) When analyzing C++ virtual tables.
62 C++ virtual tables do have known constructors even
63 when they are keyed to other compilation unit.
64 Those tables can contain pointers to methods and vars
65 in other units. Those methods have both STATIC and EXTERNAL
66 set.
67 2) In WHOPR mode devirtualization might lead to reference
68 to method that was partitioned elsewhere.
69 In this case we have static VAR_DECL or FUNCTION_DECL
70 that has no corresponding callgraph/varpool node
71 declaring the body.
72 3) COMDAT functions referred by external vtables that
73 we devirtualize only during final compilation stage.
74 At this time we already decided that we will not output
75 the function body and thus we can't reference the symbol
76 directly. */
77
78 static bool
79 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
80 {
81 varpool_node *vnode;
82 struct cgraph_node *node;
83 symtab_node *snode;
84
   /* Abstract (debug-only) declarations never have a symbol to refer to.  */
85 if (DECL_ABSTRACT (decl))
86 return false;
87
88 /* We are concerned only about static/external vars and functions. */
89 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
90 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
91 return true;
92
93 /* Static objects can be referred only if they were not optimized out yet. */
94 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
95 {
96 snode = symtab_get_node (decl);
97 if (!snode)
98 return false;
99 node = dyn_cast <cgraph_node> (snode);
    /* A function fully inlined everywhere has no offline copy to refer to.  */
100 return !node || !node->global.inlined_to;
101 }
102
103 /* We will later output the initializer, so we can refer to it.
104 So we are concerned only when DECL comes from initializer of
105 external var. */
106 if (!from_decl
107 || TREE_CODE (from_decl) != VAR_DECL
108 || !DECL_EXTERNAL (from_decl)
109 || (flag_ltrans
110 && symtab_get_node (from_decl)->in_other_partition))
111 return true;
112 /* We are folding reference from external vtable. The vtable may refer
113 to a symbol keyed to other compilation unit. The other compilation
114 unit may be in separate DSO and the symbol may be hidden. */
115 if (DECL_VISIBILITY_SPECIFIED (decl)
116 && DECL_EXTERNAL (decl)
117 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
118 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
119 return false;
120 /* When function is public, we always can introduce new reference.
121 Exceptions are the COMDAT functions where introducing a direct
122 reference implies the need to include the function body in the current unit. */
123 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
124 return true;
125 /* We are not at ltrans stage; so don't worry about WHOPR.
126 Also when still gimplifying all referred comdat functions will be
127 produced.
128
129 As observed in PR20991 for already optimized out comdat virtual functions
130 it may be tempting to not necessarily give up because the copy will be
131 output elsewhere when corresponding vtable is output.
132 This is however not possible - ABI specifies that COMDATs are output in
133 units where they are used and when the other unit was compiled with LTO
134 it is possible that vtable was kept public while the function itself
135 was privatized. */
136 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
137 return true;
138
139 /* OK we are seeing either COMDAT or static variable. In this case we must
140 check that the definition is still around so we can refer it. */
141 if (TREE_CODE (decl) == FUNCTION_DECL)
142 {
143 node = cgraph_get_node (decl);
144 /* Check that we still have function body and that we didn't take
145 the decision to eliminate offline copy of the function yet.
146 The second is important when devirtualization happens during final
147 compilation stage when making a new reference no longer makes callee
148 to be compiled. */
149 if (!node || !node->definition || node->global.inlined_to)
150 {
151 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
152 return false;
153 }
154 }
155 else if (TREE_CODE (decl) == VAR_DECL)
156 {
157 vnode = varpool_get_node (decl);
158 if (!vnode || !vnode->definition)
159 {
160 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
161 return false;
162 }
163 }
164 return true;
165 }
166
167 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
168 acceptable form for is_gimple_min_invariant.
169 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
170
171 tree
172 canonicalize_constructor_val (tree cval, tree from_decl)
173 {
174 tree orig_cval = cval;
175 STRIP_NOPS (cval);
    /* Rewrite &base + CST as &MEM[&base, CST] so the address arithmetic
       becomes part of a single ADDR_EXPR that is a gimple invariant.  */
176 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
177 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
178 {
179 tree ptr = TREE_OPERAND (cval, 0);
180 if (is_gimple_min_invariant (ptr))
181 cval = build1_loc (EXPR_LOCATION (cval),
182 ADDR_EXPR, TREE_TYPE (ptr),
183 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
184 ptr,
185 fold_convert (ptr_type_node,
186 TREE_OPERAND (cval, 1))));
187 }
188 if (TREE_CODE (cval) == ADDR_EXPR)
189 {
190 tree base = NULL_TREE;
    /* Replace a compound literal by its underlying anonymous decl.  */
191 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
192 {
193 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
194 if (base)
195 TREE_OPERAND (cval, 0) = base;
196 }
197 else
198 base = get_base_address (TREE_OPERAND (cval, 0));
199 if (!base)
200 return NULL_TREE;
201
    /* Give up if the referenced symbol cannot be referenced from here
       (e.g. optimized-away COMDAT, hidden symbol in another DSO).  */
202 if ((TREE_CODE (base) == VAR_DECL
203 || TREE_CODE (base) == FUNCTION_DECL)
204 && !can_refer_decl_in_current_unit_p (base, from_decl))
205 return NULL_TREE;
206 if (TREE_CODE (base) == VAR_DECL)
207 TREE_ADDRESSABLE (base) = 1;
208 else if (TREE_CODE (base) == FUNCTION_DECL)
209 {
210 /* Make sure we create a cgraph node for functions we'll reference.
211 They can be non-existent if the reference comes from an entry
212 of an external vtable for example. */
213 cgraph_get_create_node (base);
214 }
215 /* Fixup types in global initializers. */
216 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
217 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
218
219 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
220 cval = fold_convert (TREE_TYPE (orig_cval), cval);
221 return cval;
222 }
    /* Constants with TREE_OVERFLOW set would confuse later passes; use
       a clean copy instead.  */
223 if (TREE_OVERFLOW_P (cval))
224 return drop_tree_overflow (cval);
225 return orig_cval;
226 }
227
228 /* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
230
231 tree
232 get_symbol_constant_value (tree sym)
233 {
234 tree val = ctor_for_folding (sym);
235 if (val != error_mark_node)
236 {
237 if (val)
238 {
239 val = canonicalize_constructor_val (unshare_expr (val), sym);
240 if (val && is_gimple_min_invariant (val))
241 return val;
242 else
243 return NULL_TREE;
244 }
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
248 if (!val
249 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
250 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
251 return build_zero_cst (TREE_TYPE (sym));
252 }
253
254 return NULL_TREE;
255 }
256
257
258
259 /* Subroutine of fold_stmt. We perform several simplifications of the
260 memory reference tree EXPR and make sure to re-gimplify them properly
261 after propagation of constant addresses. IS_LHS is true if the
262 reference is supposed to be an lvalue. */
263
264 static tree
265 maybe_fold_reference (tree expr, bool is_lhs)
266 {
267 tree *t = &expr;
268 tree result;
269
    /* Wrappers around constants fold directly.  */
270 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
271 || TREE_CODE (expr) == REALPART_EXPR
272 || TREE_CODE (expr) == IMAGPART_EXPR)
273 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
274 return fold_unary_loc (EXPR_LOCATION (expr),
275 TREE_CODE (expr),
276 TREE_TYPE (expr),
277 TREE_OPERAND (expr, 0));
278 else if (TREE_CODE (expr) == BIT_FIELD_REF
279 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
280 return fold_ternary_loc (EXPR_LOCATION (expr),
281 TREE_CODE (expr),
282 TREE_TYPE (expr),
283 TREE_OPERAND (expr, 0),
284 TREE_OPERAND (expr, 1),
285 TREE_OPERAND (expr, 2));
286
    /* Walk down to the base of the reference chain.  */
287 while (handled_component_p (*t))
288 t = &TREE_OPERAND (*t, 0);
289
290 /* Canonicalize MEM_REFs invariant address operand. Do this first
291 to avoid feeding non-canonical MEM_REFs elsewhere. */
292 if (TREE_CODE (*t) == MEM_REF
293 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
294 {
    /* fold_binary may lose the volatile flag; preserve it manually.  */
295 bool volatile_p = TREE_THIS_VOLATILE (*t);
296 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
297 TREE_OPERAND (*t, 0),
298 TREE_OPERAND (*t, 1));
299 if (tem)
300 {
301 TREE_THIS_VOLATILE (tem) = volatile_p;
302 *t = tem;
    /* Retry folding on the canonicalized form.  */
303 tem = maybe_fold_reference (expr, is_lhs);
304 if (tem)
305 return tem;
306 return expr;
307 }
308 }
309
    /* Only RHS references may fold to a constant value.  */
310 if (!is_lhs
311 && (result = fold_const_aggregate_ref (expr))
312 && is_gimple_min_invariant (result))
313 return result;
314
315 /* Fold back MEM_REFs to reference trees. */
316 if (TREE_CODE (*t) == MEM_REF
317 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
318 && integer_zerop (TREE_OPERAND (*t, 1))
319 && (TREE_THIS_VOLATILE (*t)
320 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
321 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
322 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
323 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
324 /* We have to look out here to not drop a required conversion
325 from the rhs to the lhs if is_lhs, but we don't have the
326 rhs here to verify that. Thus require strict type
327 compatibility. */
328 && types_compatible_p (TREE_TYPE (*t),
329 TREE_TYPE (TREE_OPERAND
330 (TREE_OPERAND (*t, 0), 0))))
331 {
332 tree tem;
333 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
334 tem = maybe_fold_reference (expr, is_lhs);
335 if (tem)
336 return tem;
337 return expr;
338 }
339 else if (TREE_CODE (*t) == TARGET_MEM_REF)
340 {
341 tree tem = maybe_fold_tmr (*t);
342 if (tem)
343 {
344 *t = tem;
345 tem = maybe_fold_reference (expr, is_lhs);
346 if (tem)
347 return tem;
348 return expr;
349 }
350 }
351
352 return NULL_TREE;
353 }
354
355
356 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
357 replacement rhs for the statement or NULL_TREE if no simplification
358 could be made. It is assumed that the operands have been previously
359 folded. */
360
361 static tree
362 fold_gimple_assign (gimple_stmt_iterator *si)
363 {
364 gimple stmt = gsi_stmt (*si);
365 enum tree_code subcode = gimple_assign_rhs_code (stmt);
366 location_t loc = gimple_location (stmt);
367
368 tree result = NULL_TREE;
369
370 switch (get_gimple_rhs_class (subcode))
371 {
372 case GIMPLE_SINGLE_RHS:
373 {
374 tree rhs = gimple_assign_rhs1 (stmt);
375
376 if (REFERENCE_CLASS_P (rhs))
377 return maybe_fold_reference (rhs, false);
378
    /* Try to devirtualize OBJ_TYPE_REF calls when the target set
       is known to be final and has at most one candidate.  */
379 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
380 {
381 tree val = OBJ_TYPE_REF_EXPR (rhs);
382 if (is_gimple_min_invariant (val))
383 return val;
384 else if (flag_devirtualize && virtual_method_call_p (val))
385 {
386 bool final;
387 vec <cgraph_node *>targets
388 = possible_polymorphic_call_targets (val, &final);
389 if (final && targets.length () <= 1)
390 {
391 tree fndecl;
392 if (targets.length () == 1)
393 fndecl = targets[0]->decl;
394 else
    /* No possible target at all: the call is unreachable.  */
395 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
396 val = fold_convert (TREE_TYPE (val), fndecl);
397 STRIP_USELESS_TYPE_CONVERSION (val);
398 return val;
399 }
400 }
401
402 }
403 else if (TREE_CODE (rhs) == ADDR_EXPR)
404 {
405 tree ref = TREE_OPERAND (rhs, 0);
406 tree tem = maybe_fold_reference (ref, true);
407 if (tem
408 && TREE_CODE (tem) == MEM_REF
409 && integer_zerop (TREE_OPERAND (tem, 1)))
410 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
411 else if (tem)
412 result = fold_convert (TREE_TYPE (rhs),
413 build_fold_addr_expr_loc (loc, tem));
414 else if (TREE_CODE (ref) == MEM_REF
415 && integer_zerop (TREE_OPERAND (ref, 1)))
416 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
417 }
418
419 else if (TREE_CODE (rhs) == CONSTRUCTOR
420 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
421 && (CONSTRUCTOR_NELTS (rhs)
422 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
423 {
424 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
425 unsigned i;
426 tree val;
427
428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
429 if (TREE_CODE (val) != INTEGER_CST
430 && TREE_CODE (val) != REAL_CST
431 && TREE_CODE (val) != FIXED_CST)
432 return NULL_TREE;
433
434 return build_vector_from_ctor (TREE_TYPE (rhs),
435 CONSTRUCTOR_ELTS (rhs));
436 }
437
438 else if (DECL_P (rhs))
439 return get_symbol_constant_value (rhs);
440
441 /* If we couldn't fold the RHS, hand over to the generic
442 fold routines. */
443 if (result == NULL_TREE)
444 result = fold (rhs);
445
446 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
447 that may have been added by fold, and "useless" type
448 conversions that might now be apparent due to propagation. */
449 STRIP_USELESS_TYPE_CONVERSION (result);
450
451 if (result != rhs && valid_gimple_rhs_p (result))
452 return result;
453
454 return NULL_TREE;
455 }
456 break;
457
458 case GIMPLE_UNARY_RHS:
459 {
460 tree rhs = gimple_assign_rhs1 (stmt);
461
462 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
463 if (result)
464 {
465 /* If the operation was a conversion do _not_ mark a
466 resulting constant with TREE_OVERFLOW if the original
467 constant was not. These conversions have implementation
468 defined behavior and retaining the TREE_OVERFLOW flag
469 here would confuse later passes such as VRP. */
470 if (CONVERT_EXPR_CODE_P (subcode)
471 && TREE_CODE (result) == INTEGER_CST
472 && TREE_CODE (rhs) == INTEGER_CST)
473 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
474
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
478 }
479 }
480 break;
481
482 case GIMPLE_BINARY_RHS:
483 /* Try to canonicalize for boolean-typed X the comparisons
484 X == 0, X == 1, X != 0, and X != 1. */
485 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
486 || gimple_assign_rhs_code (stmt) == NE_EXPR)
487 {
488 tree lhs = gimple_assign_lhs (stmt);
489 tree op1 = gimple_assign_rhs1 (stmt);
490 tree op2 = gimple_assign_rhs2 (stmt);
491 tree type = TREE_TYPE (op1);
492
493 /* Check whether the comparison operands are of the same boolean
494 type as the result type is.
495 Check that second operand is an integer-constant with value
496 one or zero. */
497 if (TREE_CODE (op2) == INTEGER_CST
498 && (integer_zerop (op2) || integer_onep (op2))
499 && useless_type_conversion_p (TREE_TYPE (lhs), type))
500 {
501 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
502 bool is_logical_not = false;
503
504 /* X == 0 and X != 1 is a logical-not of X
505 X == 1 and X != 0 is X */
506 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
507 || (cmp_code == NE_EXPR && integer_onep (op2)))
508 is_logical_not = true;
509
510 if (is_logical_not == false)
511 result = op1;
512 /* Only for one-bit precision typed X the transformation
513 !X -> ~X is valid. */
514 else if (TYPE_PRECISION (type) == 1)
515 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
516 type, op1);
517 /* Otherwise we use !X -> X ^ 1. */
518 else
519 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
520 type, op1, build_int_cst (type, 1));
521
522 }
523 }
524
525 if (!result)
526 result = fold_binary_loc (loc, subcode,
527 TREE_TYPE (gimple_assign_lhs (stmt)),
528 gimple_assign_rhs1 (stmt),
529 gimple_assign_rhs2 (stmt));
530
531 if (result)
532 {
533 STRIP_USELESS_TYPE_CONVERSION (result);
534 if (valid_gimple_rhs_p (result))
535 return result;
536 }
537 break;
538
539 case GIMPLE_TERNARY_RHS:
540 /* Try to fold a conditional expression. */
541 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
542 {
543 tree op0 = gimple_assign_rhs1 (stmt);
544 tree tem;
545 bool set = false;
546 location_t cond_loc = gimple_location (stmt);
547
548 if (COMPARISON_CLASS_P (op0))
549 {
    /* Defer overflow warnings; only emit them if the fold result
       is actually usable as a gimple condition.  */
550 fold_defer_overflow_warnings ();
551 tem = fold_binary_loc (cond_loc,
552 TREE_CODE (op0), TREE_TYPE (op0),
553 TREE_OPERAND (op0, 0),
554 TREE_OPERAND (op0, 1));
555 /* This is actually a conditional expression, not a GIMPLE
556 conditional statement, however, the valid_gimple_rhs_p
557 test still applies. */
558 set = (tem && is_gimple_condexpr (tem)
559 && valid_gimple_rhs_p (tem));
560 fold_undefer_overflow_warnings (set, stmt, 0);
561 }
562 else if (is_gimple_min_invariant (op0))
563 {
564 tem = op0;
565 set = true;
566 }
567 else
568 return NULL_TREE;
569
570 if (set)
571 result = fold_build3_loc (cond_loc, COND_EXPR,
572 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
573 gimple_assign_rhs2 (stmt),
574 gimple_assign_rhs3 (stmt));
575 }
576
577 if (!result)
578 result = fold_ternary_loc (loc, subcode,
579 TREE_TYPE (gimple_assign_lhs (stmt)),
580 gimple_assign_rhs1 (stmt),
581 gimple_assign_rhs2 (stmt),
582 gimple_assign_rhs3 (stmt));
583
584 if (result)
585 {
586 STRIP_USELESS_TYPE_CONVERSION (result);
587 if (valid_gimple_rhs_p (result))
588 return result;
589 }
590 break;
591
592 case GIMPLE_INVALID_RHS:
593 gcc_unreachable ();
594 }
595
596 return NULL_TREE;
597 }
598
599 /* Attempt to fold a conditional statement. Return true if any changes were
600 made. We only attempt to fold the condition expression, and do not perform
601 any transformation that would require alteration of the cfg. It is
602 assumed that the operands have been previously folded. */
603
604 static bool
605 fold_gimple_cond (gimple stmt)
606 {
607 tree result = fold_binary_loc (gimple_location (stmt),
608 gimple_cond_code (stmt),
609 boolean_type_node,
610 gimple_cond_lhs (stmt),
611 gimple_cond_rhs (stmt));
612
613 if (result)
614 {
615 STRIP_USELESS_TYPE_CONVERSION (result);
616 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
617 {
618 gimple_cond_set_condition_from_tree (stmt, result);
619 return true;
620 }
621 }
622
623 return false;
624 }
625
626 /* Convert EXPR into a GIMPLE value suitable for substitution on the
627 RHS of an assignment. Insert the necessary statements before
628 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
629 is replaced. If the call is expected to produce a result, then it
630 is replaced by an assignment of the new RHS to the result variable.
631 If the result is to be ignored, then the call is replaced by a
632 GIMPLE_NOP. A proper VDEF chain is retained by making the first
633 VUSE and the last VDEF of the whole sequence be the same as the replaced
634 statement and using new SSA names for stores in between. */
635
636 void
637 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
638 {
639 tree lhs;
640 gimple stmt, new_stmt;
641 gimple_stmt_iterator i;
642 gimple_seq stmts = NULL;
643 gimple laststore;
644 tree reaching_vuse;
645
646 stmt = gsi_stmt (*si_p);
647
648 gcc_assert (is_gimple_call (stmt));
649
650 push_gimplify_context (gimple_in_ssa_p (cfun));
651
652 lhs = gimple_call_lhs (stmt);
653 if (lhs == NULL_TREE)
654 {
655 gimplify_and_add (expr, &stmts);
656 /* We can end up with folding a memcpy of an empty class assignment
657 which gets optimized away by C++ gimplification. */
658 if (gimple_seq_empty_p (stmts))
659 {
660 pop_gimplify_context (NULL);
661 if (gimple_in_ssa_p (cfun))
662 {
663 unlink_stmt_vdef (stmt);
664 release_defs (stmt);
665 }
666 gsi_replace (si_p, gimple_build_nop (), true);
667 return;
668 }
669 }
670 else
671 {
    /* The call has a result: gimplify EXPR into a temporary and
       append an assignment of the temporary to the original LHS.  */
672 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
673 new_stmt = gimple_build_assign (lhs, tmp);
674 i = gsi_last (stmts);
675 gsi_insert_after_without_update (&i, new_stmt,
676 GSI_CONTINUE_LINKING);
677 }
678
679 pop_gimplify_context (NULL);
680
681 if (gimple_has_location (stmt))
682 annotate_all_with_location (stmts, gimple_location (stmt));
683
684 /* First iterate over the replacement statements backward, assigning
685 virtual operands to their defining statements. */
686 laststore = NULL;
687 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
688 {
689 new_stmt = gsi_stmt (i);
690 if ((gimple_assign_single_p (new_stmt)
691 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
692 || (is_gimple_call (new_stmt)
693 && (gimple_call_flags (new_stmt)
694 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
695 {
696 tree vdef;
    /* The last store in the sequence (first seen walking backward)
       reuses the replaced statement's VDEF; earlier stores get
       fresh virtual SSA names.  */
697 if (!laststore)
698 vdef = gimple_vdef (stmt);
699 else
700 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
701 gimple_set_vdef (new_stmt, vdef);
702 if (vdef && TREE_CODE (vdef) == SSA_NAME)
703 SSA_NAME_DEF_STMT (vdef) = new_stmt;
704 laststore = new_stmt;
705 }
706 }
707
708 /* Second iterate over the statements forward, assigning virtual
709 operands to their uses. */
710 reaching_vuse = gimple_vuse (stmt);
711 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
712 {
713 new_stmt = gsi_stmt (i);
714 /* If the new statement possibly has a VUSE, update it with exact SSA
715 name we know will reach this one. */
716 if (gimple_has_mem_ops (new_stmt))
717 gimple_set_vuse (new_stmt, reaching_vuse);
718 gimple_set_modified (new_stmt, true);
719 if (gimple_vdef (new_stmt))
720 reaching_vuse = gimple_vdef (new_stmt);
721 }
722
723 /* If the new sequence does not do a store release the virtual
724 definition of the original statement. */
725 if (reaching_vuse
726 && reaching_vuse == gimple_vuse (stmt))
727 {
728 tree vdef = gimple_vdef (stmt);
729 if (vdef
730 && TREE_CODE (vdef) == SSA_NAME)
731 {
732 unlink_stmt_vdef (stmt);
733 release_ssa_name (vdef);
734 }
735 }
736
737 /* Finally replace the original statement with the sequence. */
738 gsi_replace_with_seq (si_p, stmts, false);
739 }
740
741 /* Return the string length, maximum string length or maximum value of
742 ARG in LENGTH.
743 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
744 is not NULL and, for TYPE == 0, its value is not equal to the length
745 we determine or if we are unable to determine the length or value,
746 return false. VISITED is a bitmap of visited variables.
747 TYPE is 0 if string length should be returned, 1 for maximum string
748 length and 2 for maximum value ARG can have. */
749
750 static bool
751 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
752 {
753 tree var, val;
754 gimple def_stmt;
755
756 if (TREE_CODE (arg) != SSA_NAME)
757 {
758 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
759 if (TREE_CODE (arg) == ADDR_EXPR
760 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
761 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
762 {
763 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
764 if (TREE_CODE (aop0) == INDIRECT_REF
765 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
766 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
767 length, visited, type);
768 }
769
    /* TYPE == 2 asks for the maximum value of ARG itself, which must be
       a non-negative integer constant here.  */
770 if (type == 2)
771 {
772 val = arg;
773 if (TREE_CODE (val) != INTEGER_CST
774 || tree_int_cst_sgn (val) < 0)
775 return false;
776 }
777 else
778 val = c_strlen (arg, 1);
779 if (!val)
780 return false;
781
782 if (*length)
783 {
    /* For maximum length/value queries keep the larger constant;
       for exact length queries the values must agree.  */
784 if (type > 0)
785 {
786 if (TREE_CODE (*length) != INTEGER_CST
787 || TREE_CODE (val) != INTEGER_CST)
788 return false;
789
790 if (tree_int_cst_lt (*length, val))
791 *length = val;
792 return true;
793 }
794 else if (simple_cst_equal (val, *length) != 1)
795 return false;
796 }
797
798 *length = val;
799 return true;
800 }
801
802 /* If ARG is registered for SSA update we cannot look at its defining
803 statement. */
804 if (name_registered_for_update_p (arg))
805 return false;
806
807 /* If we were already here, break the infinite cycle. */
808 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
809 return true;
810
811 var = arg;
812 def_stmt = SSA_NAME_DEF_STMT (var);
813
814 switch (gimple_code (def_stmt))
815 {
816 case GIMPLE_ASSIGN:
817 /* The RHS of the statement defining VAR must either have a
818 constant length or come from another SSA_NAME with a constant
819 length. */
820 if (gimple_assign_single_p (def_stmt)
821 || gimple_assign_unary_nop_p (def_stmt))
822 {
823 tree rhs = gimple_assign_rhs1 (def_stmt);
824 return get_maxval_strlen (rhs, length, visited, type);
825 }
826 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
827 {
    /* A COND_EXPR succeeds only when both arms have a usable length.  */
828 tree op2 = gimple_assign_rhs2 (def_stmt);
829 tree op3 = gimple_assign_rhs3 (def_stmt);
830 return get_maxval_strlen (op2, length, visited, type)
831 && get_maxval_strlen (op3, length, visited, type);
832 }
833 return false;
834
835 case GIMPLE_PHI:
836 {
837 /* All the arguments of the PHI node must have the same constant
838 length. */
839 unsigned i;
840
841 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
842 {
843 tree arg = gimple_phi_arg (def_stmt, i)->def;
844
845 /* If this PHI has itself as an argument, we cannot
846 determine the string length of this argument. However,
847 if we can find a constant string length for the other
848 PHI args then we can still be sure that this is a
849 constant string length. So be optimistic and just
850 continue with the next argument. */
851 if (arg == gimple_phi_result (def_stmt))
852 continue;
853
854 if (!get_maxval_strlen (arg, length, visited, type))
855 return false;
856 }
857 }
858 return true;
859
860 default:
861 return false;
862 }
863 }
864
865
866 /* Fold builtin call in statement STMT. Returns a simplified tree.
867 We may return a non-constant expression, including another call
868 to a different function and with different arguments, e.g.,
869 substituting memcpy for strcpy when the string length is known.
870 Note that some builtins expand into inline code that may not
871 be valid in GIMPLE. Callers must take care. */
872
873 tree
874 gimple_fold_builtin (gimple stmt)
875 {
    /* VAL is indexed by the argument position (ARG_IDX) whose string
       length / maximum value we compute below.  */
876 tree result, val[3];
877 tree callee, a;
878 int arg_idx, type;
879 bitmap visited;
880 bool ignore;
881 int nargs;
882 location_t loc = gimple_location (stmt);
883
884 ignore = (gimple_call_lhs (stmt) == NULL);
885
886 /* First try the generic builtin folder. If that succeeds, return the
887 result directly. */
888 result = fold_call_stmt (stmt, ignore);
889 if (result)
890 {
891 if (ignore)
892 STRIP_NOPS (result);
893 else
894 result = fold_convert (gimple_call_return_type (stmt), result);
895 return result;
896 }
897
898 /* Ignore MD builtins. */
899 callee = gimple_call_fndecl (stmt);
900 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
901 return NULL_TREE;
902
903 /* Give up for always_inline inline builtins until they are
904 inlined. */
905 if (avoid_folding_inline_builtin (callee))
906 return NULL_TREE;
907
908 /* If the builtin could not be folded, and it has no argument list,
909 we're done. */
910 nargs = gimple_call_num_args (stmt);
911 if (nargs == 0)
912 return NULL_TREE;
913
914 /* Limit the work only for builtins we know how to simplify.
    ARG_IDX selects which argument carries the string / bound,
    TYPE selects what get_maxval_strlen computes for it
    (0 = exact length, 1 = max length, 2 = max value). */
915 switch (DECL_FUNCTION_CODE (callee))
916 {
917 case BUILT_IN_STRLEN:
918 case BUILT_IN_FPUTS:
919 case BUILT_IN_FPUTS_UNLOCKED:
920 arg_idx = 0;
921 type = 0;
922 break;
923 case BUILT_IN_STRCPY:
924 case BUILT_IN_STRNCPY:
925 case BUILT_IN_STRCAT:
926 arg_idx = 1;
927 type = 0;
928 break;
929 case BUILT_IN_MEMCPY_CHK:
930 case BUILT_IN_MEMPCPY_CHK:
931 case BUILT_IN_MEMMOVE_CHK:
932 case BUILT_IN_MEMSET_CHK:
933 case BUILT_IN_STRNCPY_CHK:
934 case BUILT_IN_STPNCPY_CHK:
935 arg_idx = 2;
936 type = 2;
937 break;
938 case BUILT_IN_STRCPY_CHK:
939 case BUILT_IN_STPCPY_CHK:
940 arg_idx = 1;
941 type = 1;
942 break;
943 case BUILT_IN_SNPRINTF_CHK:
944 case BUILT_IN_VSNPRINTF_CHK:
945 arg_idx = 1;
946 type = 2;
947 break;
948 default:
949 return NULL_TREE;
950 }
951
952 if (arg_idx >= nargs)
953 return NULL_TREE;
954
955 /* Try to use the dataflow information gathered by the CCP process. */
956 visited = BITMAP_ALLOC (NULL);
957 bitmap_clear (visited);
958
959 memset (val, 0, sizeof (val));
960 a = gimple_call_arg (stmt, arg_idx);
961 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
962 val[arg_idx] = NULL_TREE;
963
964 BITMAP_FREE (visited);
965
966 result = NULL_TREE;
967 switch (DECL_FUNCTION_CODE (callee))
968 {
969 case BUILT_IN_STRLEN:
970 if (val[0] && nargs == 1)
971 {
972 tree new_val =
973 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
974
975 /* If the result is not a valid gimple value, or not a cast
976 of a valid gimple value, then we cannot use the result. */
977 if (is_gimple_val (new_val)
978 || (CONVERT_EXPR_P (new_val)
979 && is_gimple_val (TREE_OPERAND (new_val, 0))))
980 return new_val;
981 }
982 break;
983
984 case BUILT_IN_STRCPY:
985 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
986 result = fold_builtin_strcpy (loc, callee,
987 gimple_call_arg (stmt, 0),
988 gimple_call_arg (stmt, 1),
989 val[1]);
990 break;
991
992 case BUILT_IN_STRNCPY:
993 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
994 result = fold_builtin_strncpy (loc, callee,
995 gimple_call_arg (stmt, 0),
996 gimple_call_arg (stmt, 1),
997 gimple_call_arg (stmt, 2),
998 val[1]);
999 break;
1000
1001 case BUILT_IN_STRCAT:
1002 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1003 result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
1004 gimple_call_arg (stmt, 1),
1005 val[1]);
1006 break;
1007
1008 case BUILT_IN_FPUTS:
1009 if (nargs == 2)
1010 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1011 gimple_call_arg (stmt, 1),
1012 ignore, false, val[0]);
1013 break;
1014
1015 case BUILT_IN_FPUTS_UNLOCKED:
1016 if (nargs == 2)
1017 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1018 gimple_call_arg (stmt, 1),
1019 ignore, true, val[0]);
1020 break;
1021
1022 case BUILT_IN_MEMCPY_CHK:
1023 case BUILT_IN_MEMPCPY_CHK:
1024 case BUILT_IN_MEMMOVE_CHK:
1025 case BUILT_IN_MEMSET_CHK:
1026 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1027 result = fold_builtin_memory_chk (loc, callee,
1028 gimple_call_arg (stmt, 0),
1029 gimple_call_arg (stmt, 1),
1030 gimple_call_arg (stmt, 2),
1031 gimple_call_arg (stmt, 3),
1032 val[2], ignore,
1033 DECL_FUNCTION_CODE (callee));
1034 break;
1035
1036 case BUILT_IN_STRCPY_CHK:
1037 case BUILT_IN_STPCPY_CHK:
1038 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1039 result = fold_builtin_stxcpy_chk (loc, callee,
1040 gimple_call_arg (stmt, 0),
1041 gimple_call_arg (stmt, 1),
1042 gimple_call_arg (stmt, 2),
1043 val[1], ignore,
1044 DECL_FUNCTION_CODE (callee));
1045 break;
1046
1047 case BUILT_IN_STRNCPY_CHK:
1048 case BUILT_IN_STPNCPY_CHK:
1049 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1050 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1051 gimple_call_arg (stmt, 1),
1052 gimple_call_arg (stmt, 2),
1053 gimple_call_arg (stmt, 3),
1054 val[2], ignore,
1055 DECL_FUNCTION_CODE (callee));
1056 break;
1057
1058 case BUILT_IN_SNPRINTF_CHK:
1059 case BUILT_IN_VSNPRINTF_CHK:
1060 if (val[1] && is_gimple_val (val[1]))
1061 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1062 DECL_FUNCTION_CODE (callee));
1063 break;
1064
1065 default:
1066 gcc_unreachable ();
1067 }
1068
1069 if (result && ignore)
1070 result = fold_ignored_result (result);
1071 return result;
1072 }
1073
1074
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true the statement itself is never replaced; only
   in-place modifications (argument folding, fn/lhs updates) are done.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  First report (in the dump file only) a
	     resolved target that contradicts the type-inheritance
	     analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (targets.length () == 1)
		{
		  /* Exactly one possible target: devirtualize to a
		     direct call.  */
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     anonymous default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible targets: the call can never be executed,
		     so replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  /* Everything below may replace the statement, which in-place folding
     must not do.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt)
	   && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)
    {
      /* Fold the internal form of __builtin_expect via the generic
	 fold_builtin_expect helper.  */
      tree result = fold_builtin_expect (gimple_location (stmt),
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
1206
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE is true the statement is only
   modified in place and never grown or replaced.  Return true if any
   change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	/* Insert a conversion if the folded RHS type does not match
	   the LHS type.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* For in-place folding, only install the new RHS when it
	   needs no more operand slots than the existing statement.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	/* Output operands: collect their constraints (needed for
	   parsing the input constraints below) and fold each one.  */
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	/* Input operands: only treat an operand as an lvalue when its
	   constraint allows memory but not a register.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* For &ref values, fold the referenced object and rebuild
		 the address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1355
1356 /* Fold the statement pointed to by GSI. In some cases, this function may
1357 replace the whole statement with a new one. Returns true iff folding
1358 makes any changes.
1359 The statement pointed to by GSI should be in valid gimple form but may
1360 be in unfolded state as resulting from for example constant propagation
1361 which can produce *&x = 0. */
1362
1363 bool
1364 fold_stmt (gimple_stmt_iterator *gsi)
1365 {
1366 return fold_stmt_1 (gsi, false);
1367 }
1368
1369 /* Perform the minimal folding on statement *GSI. Only operations like
1370 *&x created by constant propagation are handled. The statement cannot
1371 be replaced with a new one. Return true if the statement was
1372 changed, false otherwise.
1373 The statement *GSI should be in valid gimple form but may
1374 be in unfolded state as resulting from for example constant propagation
1375 which can produce *&x = 0. */
1376
1377 bool
1378 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1379 {
1380 gimple stmt = gsi_stmt (*gsi);
1381 bool changed = fold_stmt_1 (gsi, true);
1382 gcc_assert (gsi_stmt (*gsi) == stmt);
1383 return changed;
1384 }
1385
1386 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1387 if EXPR is null or we don't know how.
1388 If non-null, the result always has boolean type. */
1389
1390 static tree
1391 canonicalize_bool (tree expr, bool invert)
1392 {
1393 if (!expr)
1394 return NULL_TREE;
1395 else if (invert)
1396 {
1397 if (integer_nonzerop (expr))
1398 return boolean_false_node;
1399 else if (integer_zerop (expr))
1400 return boolean_true_node;
1401 else if (TREE_CODE (expr) == SSA_NAME)
1402 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1403 build_int_cst (TREE_TYPE (expr), 0));
1404 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1405 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1406 boolean_type_node,
1407 TREE_OPERAND (expr, 0),
1408 TREE_OPERAND (expr, 1));
1409 else
1410 return NULL_TREE;
1411 }
1412 else
1413 {
1414 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1415 return expr;
1416 if (integer_nonzerop (expr))
1417 return boolean_true_node;
1418 else if (integer_zerop (expr))
1419 return boolean_false_node;
1420 else if (TREE_CODE (expr) == SSA_NAME)
1421 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1422 build_int_cst (TREE_TYPE (expr), 0));
1423 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1424 return fold_build2 (TREE_CODE (expr),
1425 boolean_type_node,
1426 TREE_OPERAND (expr, 0),
1427 TREE_OPERAND (expr, 1));
1428 else
1429 return NULL_TREE;
1430 }
1431 }
1432
1433 /* Check to see if a boolean expression EXPR is logically equivalent to the
1434 comparison (OP1 CODE OP2). Check for various identities involving
1435 SSA_NAMEs. */
1436
1437 static bool
1438 same_bool_comparison_p (const_tree expr, enum tree_code code,
1439 const_tree op1, const_tree op2)
1440 {
1441 gimple s;
1442
1443 /* The obvious case. */
1444 if (TREE_CODE (expr) == code
1445 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1446 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1447 return true;
1448
1449 /* Check for comparing (name, name != 0) and the case where expr
1450 is an SSA_NAME with a definition matching the comparison. */
1451 if (TREE_CODE (expr) == SSA_NAME
1452 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1453 {
1454 if (operand_equal_p (expr, op1, 0))
1455 return ((code == NE_EXPR && integer_zerop (op2))
1456 || (code == EQ_EXPR && integer_nonzerop (op2)));
1457 s = SSA_NAME_DEF_STMT (expr);
1458 if (is_gimple_assign (s)
1459 && gimple_assign_rhs_code (s) == code
1460 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1461 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1462 return true;
1463 }
1464
1465 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1466 of name is a comparison, recurse. */
1467 if (TREE_CODE (op1) == SSA_NAME
1468 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1469 {
1470 s = SSA_NAME_DEF_STMT (op1);
1471 if (is_gimple_assign (s)
1472 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1473 {
1474 enum tree_code c = gimple_assign_rhs_code (s);
1475 if ((c == NE_EXPR && integer_zerop (op2))
1476 || (c == EQ_EXPR && integer_nonzerop (op2)))
1477 return same_bool_comparison_p (expr, c,
1478 gimple_assign_rhs1 (s),
1479 gimple_assign_rhs2 (s));
1480 if ((c == EQ_EXPR && integer_zerop (op2))
1481 || (c == NE_EXPR && integer_nonzerop (op2)))
1482 return same_bool_comparison_p (expr,
1483 invert_tree_comparison (c, false),
1484 gimple_assign_rhs1 (s),
1485 gimple_assign_rhs2 (s));
1486 }
1487 }
1488 return false;
1489 }
1490
1491 /* Check to see if two boolean expressions OP1 and OP2 are logically
1492 equivalent. */
1493
1494 static bool
1495 same_bool_result_p (const_tree op1, const_tree op2)
1496 {
1497 /* Simple cases first. */
1498 if (operand_equal_p (op1, op2, 0))
1499 return true;
1500
1501 /* Check the cases where at least one of the operands is a comparison.
1502 These are a bit smarter than operand_equal_p in that they apply some
1503 identifies on SSA_NAMEs. */
1504 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1505 && same_bool_comparison_p (op1, TREE_CODE (op2),
1506 TREE_OPERAND (op2, 0),
1507 TREE_OPERAND (op2, 1)))
1508 return true;
1509 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1510 && same_bool_comparison_p (op2, TREE_CODE (op1),
1511 TREE_OPERAND (op1, 0),
1512 TREE_OPERAND (op1, 1)))
1513 return true;
1514
1515 /* Default case. */
1516 return false;
1517 }
1518
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers below call into one another while
   simplifying logical combinations of comparisons.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1539
1540 /* Helper function for and_comparisons_1: try to simplify the AND of the
1541 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1542 If INVERT is true, invert the value of the VAR before doing the AND.
1543 Return NULL_EXPR if we can't simplify this to a single expression. */
1544
1545 static tree
1546 and_var_with_comparison (tree var, bool invert,
1547 enum tree_code code2, tree op2a, tree op2b)
1548 {
1549 tree t;
1550 gimple stmt = SSA_NAME_DEF_STMT (var);
1551
1552 /* We can only deal with variables whose definitions are assignments. */
1553 if (!is_gimple_assign (stmt))
1554 return NULL_TREE;
1555
1556 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1557 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1558 Then we only have to consider the simpler non-inverted cases. */
1559 if (invert)
1560 t = or_var_with_comparison_1 (stmt,
1561 invert_tree_comparison (code2, false),
1562 op2a, op2b);
1563 else
1564 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1565 return canonicalize_bool (t, invert);
1566 }
1567
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name that the second
     comparison tests for truth / falsehood, respectively.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* PARTIAL holds a non-constant simplification of the first inner
	 operand combined with the comparison, if any.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
1730
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than, equal
	 to, or greater than OP2B; the switches below key off that.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* Here the second comparison implies the first, so the pair
	     simplifies to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for the (NAME == 0) and (NAME != 1) forms.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Fall through when OP1A is not boolean.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
1984
1985 /* Try to simplify the AND of two comparisons, specified by
1986 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1987 If this can be simplified to a single expression (without requiring
1988 introducing more SSA variables to hold intermediate values),
1989 return the resulting tree. Otherwise return NULL_TREE.
1990 If the result expression is non-null, it has boolean type. */
1991
1992 tree
1993 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1994 enum tree_code code2, tree op2a, tree op2b)
1995 {
1996 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1997 if (t)
1998 return t;
1999 else
2000 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2001 }
2002
2003 /* Helper function for or_comparisons_1: try to simplify the OR of the
2004 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2005 If INVERT is true, invert the value of VAR before doing the OR.
2006 Return NULL_EXPR if we can't simplify this to a single expression. */
2007
2008 static tree
2009 or_var_with_comparison (tree var, bool invert,
2010 enum tree_code code2, tree op2a, tree op2b)
2011 {
2012 tree t;
2013 gimple stmt = SSA_NAME_DEF_STMT (var);
2014
2015 /* We can only deal with variables whose definitions are assignments. */
2016 if (!is_gimple_assign (stmt))
2017 return NULL_TREE;
2018
2019 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2020 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2021 Then we only have to consider the simpler non-inverted cases. */
2022 if (invert)
2023 t = and_var_with_comparison_1 (stmt,
2024 invert_tree_comparison (code2, false),
2025 op2a, op2b);
2026 else
2027 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2028 return canonicalize_bool (t, invert);
2029 }
2030
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (var OR (var != 0)) and (var OR (var == 1)) simplify to var.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (var OR (var == 0)) and (var OR (var != 1)) are always true.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Partial result of folding one inner operand with the second
	 comparison, saved so the two partial results can be combined.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2194
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If op1b satisfies (op2a code2 op2b), the equality adds
	     nothing and the second comparison subsumes the OR.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges:
	 (x < c) OR (x > c) with equal constants folds to x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* The comparison tests NAME for falsehood when it is (NAME == 0)
	 or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2448
2449 /* Try to simplify the OR of two comparisons, specified by
2450 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2451 If this can be simplified to a single expression (without requiring
2452 introducing more SSA variables to hold intermediate values),
2453 return the resulting tree. Otherwise return NULL_TREE.
2454 If the result expression is non-null, it has boolean type. */
2455
2456 tree
2457 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2458 enum tree_code code2, tree op2a, tree op2b)
2459 {
2460 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2461 if (t)
2462 return t;
2463 else
2464 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2465 }
2466
2467
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per element folds to a
		 VECTOR_CST when all valueized elements are constants.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  /* Try folding the (possibly rewritten) reference through
		     a constant initializer.  */
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
	         further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	/* UBSAN overflow-check builtins fold to a constant only if the
	   arithmetic does not overflow.  */
	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
	    tree op1 = (*valueize) (gimple_call_arg (stmt, 1));

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      return NULL_TREE;
	    tree res = fold_binary_loc (loc, subcode,
					TREE_TYPE (gimple_call_arg (stmt, 0)),
					op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2738
2739 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2740 Returns NULL_TREE if folding to a constant is not possible, otherwise
2741 returns a constant according to is_gimple_min_invariant. */
2742
2743 tree
2744 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2745 {
2746 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2747 if (res && is_gimple_min_invariant (res))
2748 return res;
2749 return NULL_TREE;
2750 }
2751
2752
2753 /* The following set of functions are supposed to fold references using
2754 their constant initializers. */
2755
2756 static tree fold_ctor_reference (tree type, tree ctor,
2757 unsigned HOST_WIDE_INT offset,
2758 unsigned HOST_WIDE_INT size, tree);
2759
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 look through the (possibly valueized) address operand.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      /* Give up on variable-sized accesses; they cannot be mapped to a
	 constant offset within the constructor.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2827
2828 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2829 to the memory at bit OFFSET.
2830
2831 We do only simple job of folding byte accesses. */
2832
2833 static tree
2834 fold_string_cst_ctor_reference (tree type, tree ctor,
2835 unsigned HOST_WIDE_INT offset,
2836 unsigned HOST_WIDE_INT size)
2837 {
2838 if (INTEGRAL_TYPE_P (type)
2839 && (TYPE_MODE (type)
2840 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2841 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2842 == MODE_INT)
2843 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2844 && size == BITS_PER_UNIT
2845 && !(offset % BITS_PER_UNIT))
2846 {
2847 offset /= BITS_PER_UNIT;
2848 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2849 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2850 [offset]));
2851 /* Folding
2852 const char a[20]="hello";
2853 return a[10];
2854
2855 might lead to offset greater than string length. In this case we
2856 know value is either initialized to 0 or out of bounds. Return 0
2857 in both cases. */
2858 return build_zero_cst (type);
2859 }
2860 return NULL_TREE;
2861 }
2862
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor was taken from (passed through to fold_ctor_reference).  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
      || elt_size.is_zero ())
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  /* Wrap the index into the domain's precision/signedness so it compares
     correctly against constructor indices below.  */
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the first implicit (NULL-index)
     element lands on the low bound after the increment below.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
2962
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor was taken from
   (passed through to fold_ctor_reference).  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) gets a zero end marker; the overlap
	 test below then only checks the start of the field.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
3028
3029 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3030 to the memory at bit OFFSET. */
3031
3032 static tree
3033 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3034 unsigned HOST_WIDE_INT size, tree from_decl)
3035 {
3036 tree ret;
3037
3038 /* We found the field with exact match. */
3039 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3040 && !offset)
3041 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3042
3043 /* We are at the end of walk, see if we can view convert the
3044 result. */
3045 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3046 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3047 && operand_equal_p (TYPE_SIZE (type),
3048 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3049 {
3050 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3051 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3052 if (ret)
3053 STRIP_NOPS (ret);
3054 return ret;
3055 }
3056 if (TREE_CODE (ctor) == STRING_CST)
3057 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3058 if (TREE_CODE (ctor) == CONSTRUCTOR)
3059 {
3060
3061 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3062 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3063 return fold_array_ctor_reference (type, ctor, offset, size,
3064 from_decl);
3065 else
3066 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3067 from_decl);
3068 }
3069
3070 return NULL_TREE;
3071 }
3072
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using the VALUEIZE callback.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold a volatile access away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare declaration folds to its known constant value, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  The comma
	     expressions bind the sub-results while short-circuiting.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Scale the element index to a bit offset.  */
	      offset = doffset.to_shwi ();
	      offset *= tree_to_uhwi (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  tree_to_uhwi (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3184
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Wrapper around
   fold_const_aggregate_ref_1 that performs no SSA-name valueization.
   Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
3190
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic by default; failure paths below clear the flag.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Only LTO streaming is expected to lose the vtable here.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET plus the TOKENth slot into a bit offset into the
     vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3302
3303 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3304 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3305 KNOWN_BINFO carries the binfo describing the true type of
3306 OBJ_TYPE_REF_OBJECT(REF).
3307 Set CAN_REFER if non-NULL to false if method
3308 is not referable or if the virtual table is ill-formed (such as rewriten
3309 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
3310
3311 tree
3312 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3313 bool *can_refer)
3314 {
3315 unsigned HOST_WIDE_INT offset;
3316 tree v;
3317
3318 v = BINFO_VTABLE (known_binfo);
3319 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3320 if (!v)
3321 return NULL_TREE;
3322
3323 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3324 {
3325 if (can_refer)
3326 *can_refer = false;
3327 return NULL_TREE;
3328 }
3329 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3330 }
3331
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* FALLTHRU — a product of distinct operands proves nothing.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose range is non-negative by definition.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Exponent must both be even and be exactly
		     representable as an integer.  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
3462
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  /* Use the array's lower bound as the index when known.  */
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The imaginary part lives exactly one element past the real
	     part, so the offset must equal the element size.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    build_int_cst_wide (ptype,
						TREE_INT_CST_LOW (off),
						TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
3584
3585 /* Return true if CODE is an operation that when operating on signed
3586 integer types involves undefined behavior on overflow and the
3587 operation can be expressed with unsigned arithmetic. */
3588
3589 bool
3590 arith_code_with_undefined_signed_overflow (tree_code code)
3591 {
3592 switch (code)
3593 {
3594 case PLUS_EXPR:
3595 case MINUS_EXPR:
3596 case MULT_EXPR:
3597 case NEGATE_EXPR:
3598 case POINTER_PLUS_EXPR:
3599 return true;
3600 default:
3601 return false;
3602 }
3603 }
3604
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every RHS operand (ops 1..n-1; op 0 is the LHS) to the
     unsigned type, accumulating any statements the conversion needs.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned SSA result.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain addition in the unsigned domain.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed) LHS.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}