/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this point we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referenced only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
        return false;
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
          && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO, and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while we are still gimplifying, all referenced comdat
     functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions, it may be tempting not to give up, because the copy will
     be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case
     we must check that the definition is still around so we can refer
     to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't
         yet decided to eliminate the offline copy of the function.  The
         second check is important when devirtualization happens during
         the final compilation stage, where making a new reference no
         longer causes the callee to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  return true;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll
             reference.  They can be non-existent, for example if the
             reference comes from an entry of an external vtable.  */
          cgraph_get_create_node (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
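
/* Illustrative sketch (added commentary, not verbatim GIMPLE): given a
   constructor value of the form  (char *) &var p+ 4,  the
   POINTER_PLUS_EXPR branch above rewrites it as  &MEM[(char *)&var + 4],
   an ADDR_EXPR of a MEM_REF that is_gimple_min_invariant accepts.  The
   dump syntax shown is approximate.  */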

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
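
/* Example (a hedged sketch of the effect, not compiler output): for

     static const int forty_two = 42;

   get_symbol_constant_value applied to  forty_two  returns the
   INTEGER_CST 42, while a 'const' scalar without an initializer that
   cannot be overridden at link or run time folds to zero via
   build_zero_cst above.  */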



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs with an invariant address operand.  Do this
     first to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to be careful here not to drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
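
/* Illustrative sketch: a dereference such as  MEM[(int *)&a + 0]  whose
   address operand is &a, with matching volatility and compatible types,
   is folded back to the plain reference  a  by the MEM_REF branch above.
   The dump syntax is approximate, not verbatim.  */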


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (val))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (val, &final);
                if (final && targets.length () <= 1)
                  {
                    tree fndecl;
                    if (targets.length () == 1)
                      fndecl = targets[0]->decl;
                    else
                      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                    val = fold_convert (TREE_TYPE (val), fndecl);
                    STRIP_USELESS_TYPE_CONVERSION (val);
                    return val;
                  }
              }

          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type.
             Check that the second operand is an integer constant with
             value one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical NOT of X;
                 X == 1 and X != 0 is X itself.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* The transformation !X -> ~X is valid only for X of
                 one-bit precision.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));

            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement; however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
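
/* Illustrative sketch of the GIMPLE_BINARY_RHS canonicalization above:
   for a boolean-typed x_1, the statement  tmp_2 = x_1 == 0  becomes
   tmp_2 = ~x_1  when the type has one-bit precision and
   tmp_2 = x_1 ^ 1  otherwise.  The names are hypothetical.  */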

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
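
/* Illustrative sketch (an assumption about fold_binary_loc behavior, not
   a guarantee): an integral condition such as  if (x_1 != x_1)  can fold
   to  if (0),  which gimple_cond_set_condition_from_tree installs,
   letting later passes remove the dead arm.  The name x_1 is
   hypothetical.  */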

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
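
/* Illustrative sketch of the virtual operand bookkeeping above: if the
   replaced call had  VUSE <.MEM_3> / VDEF <.MEM_4>  and EXPR gimplifies
   to two stores, the first store gets VUSE <.MEM_3> and a fresh VDEF,
   while the last store reuses VDEF <.MEM_4>, so the surrounding virtual
   SSA chain is preserved.  The SSA names are hypothetical.  */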

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be returned, 1 for the maximum
   string length and 2 for the maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_maxval_strlen (op2, length, visited, type)
                 && get_maxval_strlen (op3, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
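
/* Illustrative sketch: for a PHI such as

     p_1 = PHI <&"foo" (bb 2), &"ab" (bb 3)>

   the GIMPLE_PHI case above visits both arguments; with TYPE == 1 it
   accumulates the maximum string length, so *LENGTH becomes 3, while
   with TYPE == 0 the two lengths differ and it returns false.  Names
   and block numbers are hypothetical.  */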


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
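
/* Illustrative sketch: for  strcpy (dst, src)  where get_maxval_strlen
   proves the length of src is the constant 3, the BUILT_IN_STRCPY case
   above lets fold_builtin_strcpy rewrite the call as
   memcpy (dst, src, 4), copying the terminating NUL as well.  The
   argument names are hypothetical.  */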


/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, cgraph_get_node (gimple_call_addr_fndecl
                                                (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, &final);
          if (final && targets.length () <= 1)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                      tree def = get_or_create_ssa_default_def (cfun, var);
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      update_call_from_tree (gsi, def);
                    }
                  else
                    gsi_replace (gsi, new_stmt, true);
                  return true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
      else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
        changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt)
           && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)
    {
      tree result = fold_builtin_expect (gimple_location (stmt),
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
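
/* Illustrative sketch of the devirtualization path above: when type
   analysis proves that a virtual call through OBJ_TYPE_REF has exactly
   one possible target, the statement is rewritten into a direct call to
   that target's decl; with zero possible targets it is replaced by a
   call to __builtin_unreachable ().  */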

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant
   propagation, which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant
   propagation, which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
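
/* Illustrative sketch: canonicalize_bool (x_1, true) for an SSA name x_1
   of a non-boolean type yields the boolean-typed comparison  x_1 == 0,
   while an already boolean-typed EXPR with INVERT false is returned
   unchanged.  The name x_1 is hypothetical.  */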

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
           inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
           inner1 AND (inner1 OR inner2) => inner1
           !inner1 AND (inner1 AND inner2) => false
           !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
               (inner1 AND inner2) AND (op2a code2 op2b)
               => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
               (inner1 OR inner2) AND (op2a code2 op2b)
               => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
               (inner1 AND inner2) AND (op2a code2 op2b)
               => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
               (inner1 OR inner2) AND (op2a code2 op2b)
               => (t OR (inner1 AND (op2a code2 op2b)))
               => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
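
/* Illustrative sketch of the reassociation above: for

     var_1 = (a_2 < 5) & b_3;

   ANDing var_1 with the comparison  a_2 > 7  folds the first inner test
   as  (a_2 < 5) AND (a_2 > 7) == false,  so the whole expression
   simplifies to false without examining b_3.  Names are hypothetical.  */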
1725
1726 /* Try to simplify the AND of two comparisons defined by
1727 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1728 If this can be done without constructing an intermediate value,
1729 return the resulting tree; otherwise NULL_TREE is returned.
1730 This function is deliberately asymmetric as it recurses on SSA_DEFs
1731 in the first comparison but not the second. */
1732
1733 static tree
1734 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1735 enum tree_code code2, tree op2a, tree op2b)
1736 {
1737 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1738
1739 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1740 if (operand_equal_p (op1a, op2a, 0)
1741 && operand_equal_p (op1b, op2b, 0))
1742 {
1743 /* Result will be either NULL_TREE, or a combined comparison. */
1744 tree t = combine_comparisons (UNKNOWN_LOCATION,
1745 TRUTH_ANDIF_EXPR, code1, code2,
1746 truth_type, op1a, op1b);
1747 if (t)
1748 return t;
1749 }
1750
1751 /* Likewise the swapped case of the above. */
1752 if (operand_equal_p (op1a, op2b, 0)
1753 && operand_equal_p (op1b, op2a, 0))
1754 {
1755 /* Result will be either NULL_TREE, or a combined comparison. */
1756 tree t = combine_comparisons (UNKNOWN_LOCATION,
1757 TRUTH_ANDIF_EXPR, code1,
1758 swap_tree_comparison (code2),
1759 truth_type, op1a, op1b);
1760 if (t)
1761 return t;
1762 }
1763
1764 /* If both comparisons are of the same value against constants, we might
1765 be able to merge them. */
1766 if (operand_equal_p (op1a, op2a, 0)
1767 && TREE_CODE (op1b) == INTEGER_CST
1768 && TREE_CODE (op2b) == INTEGER_CST)
1769 {
1770 int cmp = tree_int_cst_compare (op1b, op2b);
1771
1772 /* If we have (op1a == op1b), we should either be able to
1773 return that or FALSE, depending on whether the constant op1b
1774 also satisfies the other comparison against op2b. */
1775 if (code1 == EQ_EXPR)
1776 {
1777 bool done = true;
1778 bool val;
1779 switch (code2)
1780 {
1781 case EQ_EXPR: val = (cmp == 0); break;
1782 case NE_EXPR: val = (cmp != 0); break;
1783 case LT_EXPR: val = (cmp < 0); break;
1784 case GT_EXPR: val = (cmp > 0); break;
1785 case LE_EXPR: val = (cmp <= 0); break;
1786 case GE_EXPR: val = (cmp >= 0); break;
1787 default: done = false;
1788 }
1789 if (done)
1790 {
1791 if (val)
1792 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1793 else
1794 return boolean_false_node;
1795 }
1796 }
1797 /* Likewise if the second comparison is an == comparison. */
1798 else if (code2 == EQ_EXPR)
1799 {
1800 bool done = true;
1801 bool val;
1802 switch (code1)
1803 {
1804 case EQ_EXPR: val = (cmp == 0); break;
1805 case NE_EXPR: val = (cmp != 0); break;
1806 case LT_EXPR: val = (cmp > 0); break;
1807 case GT_EXPR: val = (cmp < 0); break;
1808 case LE_EXPR: val = (cmp >= 0); break;
1809 case GE_EXPR: val = (cmp <= 0); break;
1810 default: done = false;
1811 }
1812 if (done)
1813 {
1814 if (val)
1815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1816 else
1817 return boolean_false_node;
1818 }
1819 }
1820
1821 /* Same business with inequality tests. */
1822 else if (code1 == NE_EXPR)
1823 {
1824 bool val;
1825 switch (code2)
1826 {
1827 case EQ_EXPR: val = (cmp != 0); break;
1828 case NE_EXPR: val = (cmp == 0); break;
1829 case LT_EXPR: val = (cmp >= 0); break;
1830 case GT_EXPR: val = (cmp <= 0); break;
1831 case LE_EXPR: val = (cmp > 0); break;
1832 case GE_EXPR: val = (cmp < 0); break;
1833 default:
1834 val = false;
1835 }
1836 if (val)
1837 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1838 }
1839 else if (code2 == NE_EXPR)
1840 {
1841 bool val;
1842 switch (code1)
1843 {
1844 case EQ_EXPR: val = (cmp == 0); break;
1845 case NE_EXPR: val = (cmp != 0); break;
1846 case LT_EXPR: val = (cmp <= 0); break;
1847 case GT_EXPR: val = (cmp >= 0); break;
1848 case LE_EXPR: val = (cmp < 0); break;
1849 case GE_EXPR: val = (cmp > 0); break;
1850 default:
1851 val = false;
1852 }
1853 if (val)
1854 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1855 }
1856
1857 /* Choose the more restrictive of two < or <= comparisons. */
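/* E.g. (x < 3) AND (x <= 3) folds to (x < 3).  */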
1858 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1859 && (code2 == LT_EXPR || code2 == LE_EXPR))
1860 {
1861 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1862 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1863 else
1864 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1865 }
1866
1867 /* Likewise choose the more restrictive of two > or >= comparisons. */
1868 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1869 && (code2 == GT_EXPR || code2 == GE_EXPR))
1870 {
1871 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1872 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1873 else
1874 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1875 }
1876
1877 /* Check for singleton ranges. */
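/* E.g. (x <= 4) AND (x >= 4) folds to (x == 4).  */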
1878 else if (cmp == 0
1879 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1880 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1881 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1882
1883 /* Check for disjoint ranges. */
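/* E.g. (x < 2) AND (x > 5) is always false.  */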
1884 else if (cmp <= 0
1885 && (code1 == LT_EXPR || code1 == LE_EXPR)
1886 && (code2 == GT_EXPR || code2 == GE_EXPR))
1887 return boolean_false_node;
1888 else if (cmp >= 0
1889 && (code1 == GT_EXPR || code1 == GE_EXPR)
1890 && (code2 == LT_EXPR || code2 == LE_EXPR))
1891 return boolean_false_node;
1892 }
1893
1894 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1895 NAME's definition is a truth value. See if there are any simplifications
1896 that can be done against the NAME's definition. */
1897 if (TREE_CODE (op1a) == SSA_NAME
1898 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1899 && (integer_zerop (op1b) || integer_onep (op1b)))
1900 {
1901 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1902 || (code1 == NE_EXPR && integer_onep (op1b)));
1903 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1904 switch (gimple_code (stmt))
1905 {
1906 case GIMPLE_ASSIGN:
1907 /* Try to simplify by copy-propagating the definition. */
1908 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1909
1910 case GIMPLE_PHI:
1911 /* If every argument to the PHI produces the same result when
1912 ANDed with the second comparison, we win.
1913 Do not do this unless the type is bool since we need a bool
1914 result here anyway. */
1915 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1916 {
1917 tree result = NULL_TREE;
1918 unsigned i;
1919 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1920 {
1921 tree arg = gimple_phi_arg_def (stmt, i);
1922
1923 /* If this PHI has itself as an argument, ignore it.
1924 If all the other args produce the same result,
1925 we're still OK. */
1926 if (arg == gimple_phi_result (stmt))
1927 continue;
1928 else if (TREE_CODE (arg) == INTEGER_CST)
1929 {
1930 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1931 {
1932 if (!result)
1933 result = boolean_false_node;
1934 else if (!integer_zerop (result))
1935 return NULL_TREE;
1936 }
1937 else if (!result)
1938 result = fold_build2 (code2, boolean_type_node,
1939 op2a, op2b);
1940 else if (!same_bool_comparison_p (result,
1941 code2, op2a, op2b))
1942 return NULL_TREE;
1943 }
1944 else if (TREE_CODE (arg) == SSA_NAME
1945 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1946 {
1947 tree temp;
1948 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1949 /* In simple cases we can look through PHI nodes,
1950 but we have to be careful with loops.
1951 See PR49073. */
1952 if (! dom_info_available_p (CDI_DOMINATORS)
1953 || gimple_bb (def_stmt) == gimple_bb (stmt)
1954 || dominated_by_p (CDI_DOMINATORS,
1955 gimple_bb (def_stmt),
1956 gimple_bb (stmt)))
1957 return NULL_TREE;
1958 temp = and_var_with_comparison (arg, invert, code2,
1959 op2a, op2b);
1960 if (!temp)
1961 return NULL_TREE;
1962 else if (!result)
1963 result = temp;
1964 else if (!same_bool_result_p (result, temp))
1965 return NULL_TREE;
1966 }
1967 else
1968 return NULL_TREE;
1969 }
1970 return result;
1971 }
1972
1973 default:
1974 break;
1975 }
1976 }
1977 return NULL_TREE;
1978 }
1979
1980 /* Try to simplify the AND of two comparisons, specified by
1981 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1982 If this can be simplified to a single expression (without requiring
1983 introducing more SSA variables to hold intermediate values),
1984 return the resulting tree. Otherwise return NULL_TREE.
1985 If the result expression is non-null, it has boolean type. */
1986
1987 tree
1988 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1989 enum tree_code code2, tree op2a, tree op2b)
1990 {
1991 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1992 if (t)
1993 return t;
1994 else
1995 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1996 }
1997
1998 /* Helper function for or_comparisons_1: try to simplify the OR of the
1999 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2000 If INVERT is true, invert the value of VAR before doing the OR.
2001 Return NULL_TREE if we can't simplify this to a single expression. */
2002
2003 static tree
2004 or_var_with_comparison (tree var, bool invert,
2005 enum tree_code code2, tree op2a, tree op2b)
2006 {
2007 tree t;
2008 gimple stmt = SSA_NAME_DEF_STMT (var);
2009
2010 /* We can only deal with variables whose definitions are assignments. */
2011 if (!is_gimple_assign (stmt))
2012 return NULL_TREE;
2013
2014 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2015 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2016 Then we only have to consider the simpler non-inverted cases. */
2017 if (invert)
2018 t = and_var_with_comparison_1 (stmt,
2019 invert_tree_comparison (code2, false),
2020 op2a, op2b);
2021 else
2022 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2023 return canonicalize_bool (t, invert);
2024 }
2025
2026 /* Try to simplify the OR of the ssa variable defined by the assignment
2027 STMT with the comparison specified by (OP2A CODE2 OP2B).
2028 Return NULL_TREE if we can't simplify this to a single expression. */
2029
2030 static tree
2031 or_var_with_comparison_1 (gimple stmt,
2032 enum tree_code code2, tree op2a, tree op2b)
2033 {
2034 tree var = gimple_assign_lhs (stmt);
2035 tree true_test_var = NULL_TREE;
2036 tree false_test_var = NULL_TREE;
2037 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2038
2039 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
2040 if (TREE_CODE (op2a) == SSA_NAME
2041 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2042 {
2043 if ((code2 == NE_EXPR && integer_zerop (op2b))
2044 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2045 {
2046 true_test_var = op2a;
2047 if (var == true_test_var)
2048 return var;
2049 }
2050 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2051 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2052 {
2053 false_test_var = op2a;
2054 if (var == false_test_var)
2055 return boolean_true_node;
2056 }
2057 }
2058
2059 /* If the definition is a comparison, recurse on it. */
2060 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2061 {
2062 tree t = or_comparisons_1 (innercode,
2063 gimple_assign_rhs1 (stmt),
2064 gimple_assign_rhs2 (stmt),
2065 code2,
2066 op2a,
2067 op2b);
2068 if (t)
2069 return t;
2070 }
2071
2072 /* If the definition is an AND or OR expression, we may be able to
2073 simplify by reassociating. */
2074 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2075 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2076 {
2077 tree inner1 = gimple_assign_rhs1 (stmt);
2078 tree inner2 = gimple_assign_rhs2 (stmt);
2079 gimple s;
2080 tree t;
2081 tree partial = NULL_TREE;
2082 bool is_or = (innercode == BIT_IOR_EXPR);
2083
2084 /* Check for boolean identities that don't require recursive examination
2085 of inner1/inner2:
2086 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2087 inner1 OR (inner1 AND inner2) => inner1
2088 !inner1 OR (inner1 OR inner2) => true
2089 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2090 */
2091 if (inner1 == true_test_var)
2092 return (is_or ? var : inner1);
2093 else if (inner2 == true_test_var)
2094 return (is_or ? var : inner2);
2095 else if (inner1 == false_test_var)
2096 return (is_or
2097 ? boolean_true_node
2098 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2099 else if (inner2 == false_test_var)
2100 return (is_or
2101 ? boolean_true_node
2102 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2103
2104 /* Next, redistribute/reassociate the OR across the inner tests.
2105 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
2106 if (TREE_CODE (inner1) == SSA_NAME
2107 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2108 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2109 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2110 gimple_assign_rhs1 (s),
2111 gimple_assign_rhs2 (s),
2112 code2, op2a, op2b)))
2113 {
2114 /* Handle the OR case, where we are reassociating:
2115 (inner1 OR inner2) OR (op2a code2 op2b)
2116 => (t OR inner2)
2117 If the partial result t is a constant, we win. Otherwise
2118 continue on to try reassociating with the other inner test. */
2119 if (is_or)
2120 {
2121 if (integer_onep (t))
2122 return boolean_true_node;
2123 else if (integer_zerop (t))
2124 return inner2;
2125 }
2126
2127 /* Handle the AND case, where we are redistributing:
2128 (inner1 AND inner2) OR (op2a code2 op2b)
2129 => (t AND (inner2 OR (op2a code2 op2b))) */
2130 else if (integer_zerop (t))
2131 return boolean_false_node;
2132
2133 /* Save partial result for later. */
2134 partial = t;
2135 }
2136
2137 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
2138 if (TREE_CODE (inner2) == SSA_NAME
2139 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2140 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2141 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2142 gimple_assign_rhs1 (s),
2143 gimple_assign_rhs2 (s),
2144 code2, op2a, op2b)))
2145 {
2146 /* Handle the OR case, where we are reassociating:
2147 (inner1 OR inner2) OR (op2a code2 op2b)
2148 => (inner1 OR t)
2149 => (t OR partial) */
2150 if (is_or)
2151 {
2152 if (integer_zerop (t))
2153 return inner1;
2154 else if (integer_onep (t))
2155 return boolean_true_node;
2156 /* If both are the same, we can apply the identity
2157 (x OR x) == x. */
2158 else if (partial && same_bool_result_p (t, partial))
2159 return t;
2160 }
2161
2162 /* Handle the AND case, where we are redistributing:
2163 (inner1 AND inner2) OR (op2a code2 op2b)
2164 => (t AND (inner1 OR (op2a code2 op2b)))
2165 => (t AND partial) */
2166 else
2167 {
2168 if (integer_zerop (t))
2169 return boolean_false_node;
2170 else if (partial)
2171 {
2172 /* We already got a simplification for the other
2173 operand to the redistributed AND expression. The
2174 interesting case is when at least one is true.
2175 Or, if both are the same, we can apply the identity
2176 (x AND x) == x. */
2177 if (integer_onep (partial))
2178 return t;
2179 else if (integer_onep (t))
2180 return partial;
2181 else if (same_bool_result_p (t, partial))
2182 return t;
2183 }
2184 }
2185 }
2186 }
2187 return NULL_TREE;
2188 }
2189
2190 /* Try to simplify the OR of two comparisons defined by
2191 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2192 If this can be done without constructing an intermediate value,
2193 return the resulting tree; otherwise NULL_TREE is returned.
2194 This function is deliberately asymmetric as it recurses on SSA_DEFs
2195 in the first comparison but not the second. */
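/* For example, (x < 5) OR (x < 7) can be folded to (x < 7), and
   (x <= 2) OR (x >= 2) to constant true; both are illustrative
   instances of the constant-merging cases handled below.  */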
2196
2197 static tree
2198 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2199 enum tree_code code2, tree op2a, tree op2b)
2200 {
2201 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2202
2203 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2204 if (operand_equal_p (op1a, op2a, 0)
2205 && operand_equal_p (op1b, op2b, 0))
2206 {
2207 /* Result will be either NULL_TREE, or a combined comparison. */
2208 tree t = combine_comparisons (UNKNOWN_LOCATION,
2209 TRUTH_ORIF_EXPR, code1, code2,
2210 truth_type, op1a, op1b);
2211 if (t)
2212 return t;
2213 }
2214
2215 /* Likewise the swapped case of the above. */
2216 if (operand_equal_p (op1a, op2b, 0)
2217 && operand_equal_p (op1b, op2a, 0))
2218 {
2219 /* Result will be either NULL_TREE, or a combined comparison. */
2220 tree t = combine_comparisons (UNKNOWN_LOCATION,
2221 TRUTH_ORIF_EXPR, code1,
2222 swap_tree_comparison (code2),
2223 truth_type, op1a, op1b);
2224 if (t)
2225 return t;
2226 }
2227
2228 /* If both comparisons are of the same value against constants, we might
2229 be able to merge them. */
2230 if (operand_equal_p (op1a, op2a, 0)
2231 && TREE_CODE (op1b) == INTEGER_CST
2232 && TREE_CODE (op2b) == INTEGER_CST)
2233 {
2234 int cmp = tree_int_cst_compare (op1b, op2b);
2235
2236 /* If we have (op1a != op1b), we should either be able to
2237 return that or TRUE, depending on whether the constant op1b
2238 also satisfies the other comparison against op2b. */
2239 if (code1 == NE_EXPR)
2240 {
2241 bool done = true;
2242 bool val;
2243 switch (code2)
2244 {
2245 case EQ_EXPR: val = (cmp == 0); break;
2246 case NE_EXPR: val = (cmp != 0); break;
2247 case LT_EXPR: val = (cmp < 0); break;
2248 case GT_EXPR: val = (cmp > 0); break;
2249 case LE_EXPR: val = (cmp <= 0); break;
2250 case GE_EXPR: val = (cmp >= 0); break;
2251 default: done = false;
2252 }
2253 if (done)
2254 {
2255 if (val)
2256 return boolean_true_node;
2257 else
2258 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2259 }
2260 }
2261 /* Likewise if the second comparison is a != comparison. */
2262 else if (code2 == NE_EXPR)
2263 {
2264 bool done = true;
2265 bool val;
2266 switch (code1)
2267 {
2268 case EQ_EXPR: val = (cmp == 0); break;
2269 case NE_EXPR: val = (cmp != 0); break;
2270 case LT_EXPR: val = (cmp > 0); break;
2271 case GT_EXPR: val = (cmp < 0); break;
2272 case LE_EXPR: val = (cmp >= 0); break;
2273 case GE_EXPR: val = (cmp <= 0); break;
2274 default: done = false;
2275 }
2276 if (done)
2277 {
2278 if (val)
2279 return boolean_true_node;
2280 else
2281 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2282 }
2283 }
2284
2285 /* See if an equality test is redundant with the other comparison. */
2286 else if (code1 == EQ_EXPR)
2287 {
2288 bool val;
2289 switch (code2)
2290 {
2291 case EQ_EXPR: val = (cmp == 0); break;
2292 case NE_EXPR: val = (cmp != 0); break;
2293 case LT_EXPR: val = (cmp < 0); break;
2294 case GT_EXPR: val = (cmp > 0); break;
2295 case LE_EXPR: val = (cmp <= 0); break;
2296 case GE_EXPR: val = (cmp >= 0); break;
2297 default:
2298 val = false;
2299 }
2300 if (val)
2301 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2302 }
2303 else if (code2 == EQ_EXPR)
2304 {
2305 bool val;
2306 switch (code1)
2307 {
2308 case EQ_EXPR: val = (cmp == 0); break;
2309 case NE_EXPR: val = (cmp != 0); break;
2310 case LT_EXPR: val = (cmp > 0); break;
2311 case GT_EXPR: val = (cmp < 0); break;
2312 case LE_EXPR: val = (cmp >= 0); break;
2313 case GE_EXPR: val = (cmp <= 0); break;
2314 default:
2315 val = false;
2316 }
2317 if (val)
2318 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2319 }
2320
2321 /* Choose the less restrictive of two < or <= comparisons. */
2322 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2323 && (code2 == LT_EXPR || code2 == LE_EXPR))
2324 {
2325 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2326 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2327 else
2328 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2329 }
2330
2331 /* Likewise choose the less restrictive of two > or >= comparisons. */
2332 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2333 && (code2 == GT_EXPR || code2 == GE_EXPR))
2334 {
2335 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2336 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2337 else
2338 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2339 }
2340
2341 /* Check for singleton ranges. */
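/* E.g. (x < 4) OR (x > 4) folds to (x != 4).  */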
2342 else if (cmp == 0
2343 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2344 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2345 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2346
2347 /* Check for less/greater pairs that don't restrict the range at all. */
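/* E.g. (x <= 7) OR (x >= 5) is always true.  */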
2348 else if (cmp >= 0
2349 && (code1 == LT_EXPR || code1 == LE_EXPR)
2350 && (code2 == GT_EXPR || code2 == GE_EXPR))
2351 return boolean_true_node;
2352 else if (cmp <= 0
2353 && (code1 == GT_EXPR || code1 == GE_EXPR)
2354 && (code2 == LT_EXPR || code2 == LE_EXPR))
2355 return boolean_true_node;
2356 }
2357
2358 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2359 NAME's definition is a truth value. See if there are any simplifications
2360 that can be done against the NAME's definition. */
2361 if (TREE_CODE (op1a) == SSA_NAME
2362 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2363 && (integer_zerop (op1b) || integer_onep (op1b)))
2364 {
2365 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2366 || (code1 == NE_EXPR && integer_onep (op1b)));
2367 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2368 switch (gimple_code (stmt))
2369 {
2370 case GIMPLE_ASSIGN:
2371 /* Try to simplify by copy-propagating the definition. */
2372 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2373
2374 case GIMPLE_PHI:
2375 /* If every argument to the PHI produces the same result when
2376 ORed with the second comparison, we win.
2377 Do not do this unless the type is bool since we need a bool
2378 result here anyway. */
2379 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2380 {
2381 tree result = NULL_TREE;
2382 unsigned i;
2383 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2384 {
2385 tree arg = gimple_phi_arg_def (stmt, i);
2386
2387 /* If this PHI has itself as an argument, ignore it.
2388 If all the other args produce the same result,
2389 we're still OK. */
2390 if (arg == gimple_phi_result (stmt))
2391 continue;
2392 else if (TREE_CODE (arg) == INTEGER_CST)
2393 {
2394 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2395 {
2396 if (!result)
2397 result = boolean_true_node;
2398 else if (!integer_onep (result))
2399 return NULL_TREE;
2400 }
2401 else if (!result)
2402 result = fold_build2 (code2, boolean_type_node,
2403 op2a, op2b);
2404 else if (!same_bool_comparison_p (result,
2405 code2, op2a, op2b))
2406 return NULL_TREE;
2407 }
2408 else if (TREE_CODE (arg) == SSA_NAME
2409 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2410 {
2411 tree temp;
2412 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2413 /* In simple cases we can look through PHI nodes,
2414 but we have to be careful with loops.
2415 See PR49073. */
2416 if (! dom_info_available_p (CDI_DOMINATORS)
2417 || gimple_bb (def_stmt) == gimple_bb (stmt)
2418 || dominated_by_p (CDI_DOMINATORS,
2419 gimple_bb (def_stmt),
2420 gimple_bb (stmt)))
2421 return NULL_TREE;
2422 temp = or_var_with_comparison (arg, invert, code2,
2423 op2a, op2b);
2424 if (!temp)
2425 return NULL_TREE;
2426 else if (!result)
2427 result = temp;
2428 else if (!same_bool_result_p (result, temp))
2429 return NULL_TREE;
2430 }
2431 else
2432 return NULL_TREE;
2433 }
2434 return result;
2435 }
2436
2437 default:
2438 break;
2439 }
2440 }
2441 return NULL_TREE;
2442 }
2443
2444 /* Try to simplify the OR of two comparisons, specified by
2445 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2446 If this can be simplified to a single expression (without requiring
2447 introducing more SSA variables to hold intermediate values),
2448 return the resulting tree. Otherwise return NULL_TREE.
2449 If the result expression is non-null, it has boolean type. */
2450
2451 tree
2452 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2453 enum tree_code code2, tree op2a, tree op2b)
2454 {
2455 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2456 if (t)
2457 return t;
2458 else
2459 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2460 }
2461
2462
2463 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2464
2465 Either NULL_TREE, a simplified but non-constant expression, or a
2466 constant is returned.
2467
2468 ??? This should go into a gimple-fold-inline.h file to be eventually
2469 privatized with the single valueize function used in the various TUs
2470 to avoid the indirect function call overhead. */
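/* As a minimal illustrative sketch (not part of this file), a client
   with no SSA value lattice could pass an identity hook, in which
   case only statements whose operands are already constants fold:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

     tree cst = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);

   The hook name here is hypothetical; real callers such as CCP supply
   a valueization function backed by their propagation lattice.  */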
2471
2472 tree
2473 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2474 {
2475 location_t loc = gimple_location (stmt);
2476 switch (gimple_code (stmt))
2477 {
2478 case GIMPLE_ASSIGN:
2479 {
2480 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2481
2482 switch (get_gimple_rhs_class (subcode))
2483 {
2484 case GIMPLE_SINGLE_RHS:
2485 {
2486 tree rhs = gimple_assign_rhs1 (stmt);
2487 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2488
2489 if (TREE_CODE (rhs) == SSA_NAME)
2490 {
2491 /* If the RHS is an SSA_NAME, return its known constant value,
2492 if any. */
2493 return (*valueize) (rhs);
2494 }
2495 /* Handle propagating invariant addresses into address
2496 operations. */
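/* E.g. if the RHS is &ptr_1->f and ptr_1 valueizes to &a, the address
   becomes the invariant &a plus the offset of f (an illustrative
   case; the names are hypothetical).  */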
2497 else if (TREE_CODE (rhs) == ADDR_EXPR
2498 && !is_gimple_min_invariant (rhs))
2499 {
2500 HOST_WIDE_INT offset = 0;
2501 tree base;
2502 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2503 &offset,
2504 valueize);
2505 if (base
2506 && (CONSTANT_CLASS_P (base)
2507 || decl_address_invariant_p (base)))
2508 return build_invariant_address (TREE_TYPE (rhs),
2509 base, offset);
2510 }
2511 else if (TREE_CODE (rhs) == CONSTRUCTOR
2512 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2513 && (CONSTRUCTOR_NELTS (rhs)
2514 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2515 {
2516 unsigned i;
2517 tree val, *vec;
2518
2519 vec = XALLOCAVEC (tree,
2520 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2521 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2522 {
2523 val = (*valueize) (val);
2524 if (TREE_CODE (val) == INTEGER_CST
2525 || TREE_CODE (val) == REAL_CST
2526 || TREE_CODE (val) == FIXED_CST)
2527 vec[i] = val;
2528 else
2529 return NULL_TREE;
2530 }
2531
2532 return build_vector (TREE_TYPE (rhs), vec);
2533 }
2534 if (subcode == OBJ_TYPE_REF)
2535 {
2536 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2537 /* If callee is constant, we can fold away the wrapper. */
2538 if (is_gimple_min_invariant (val))
2539 return val;
2540 }
2541
2542 if (kind == tcc_reference)
2543 {
2544 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2545 || TREE_CODE (rhs) == REALPART_EXPR
2546 || TREE_CODE (rhs) == IMAGPART_EXPR)
2547 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2548 {
2549 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2550 return fold_unary_loc (EXPR_LOCATION (rhs),
2551 TREE_CODE (rhs),
2552 TREE_TYPE (rhs), val);
2553 }
2554 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2555 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2556 {
2557 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2558 return fold_ternary_loc (EXPR_LOCATION (rhs),
2559 TREE_CODE (rhs),
2560 TREE_TYPE (rhs), val,
2561 TREE_OPERAND (rhs, 1),
2562 TREE_OPERAND (rhs, 2));
2563 }
2564 else if (TREE_CODE (rhs) == MEM_REF
2565 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2566 {
2567 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2568 if (TREE_CODE (val) == ADDR_EXPR
2569 && is_gimple_min_invariant (val))
2570 {
2571 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2572 unshare_expr (val),
2573 TREE_OPERAND (rhs, 1));
2574 if (tem)
2575 rhs = tem;
2576 }
2577 }
2578 return fold_const_aggregate_ref_1 (rhs, valueize);
2579 }
2580 else if (kind == tcc_declaration)
2581 return get_symbol_constant_value (rhs);
2582 return rhs;
2583 }
2584
2585 case GIMPLE_UNARY_RHS:
2586 {
2587 /* Handle unary operators that can appear in GIMPLE form.
2588 Note that we know the single operand must be a constant,
2589 so this should almost always return a simplified RHS. */
2590 tree lhs = gimple_assign_lhs (stmt);
2591 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2592
2593 /* Conversions are useless for CCP purposes if they are
2594 value-preserving. Thus the restrictions that
2595 useless_type_conversion_p places for restrict qualification
2596 of pointer types should not apply here.
2597 Substitution later will only substitute to allowed places. */
2598 if (CONVERT_EXPR_CODE_P (subcode)
2599 && POINTER_TYPE_P (TREE_TYPE (lhs))
2600 && POINTER_TYPE_P (TREE_TYPE (op0))
2601 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2602 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2603 && TYPE_MODE (TREE_TYPE (lhs))
2604 == TYPE_MODE (TREE_TYPE (op0)))
2605 return op0;
2606
2607 return
2608 fold_unary_ignore_overflow_loc (loc, subcode,
2609 gimple_expr_type (stmt), op0);
2610 }
2611
2612 case GIMPLE_BINARY_RHS:
2613 {
2614 /* Handle binary operators that can appear in GIMPLE form. */
2615 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2616 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2617
2618 /* Translate &x + CST into an invariant form suitable for
2619 further propagation. */
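/* E.g. &a + 4 becomes &MEM_REF <&a, 4>, a gimple invariant that
   later substitution can use directly.  */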
2620 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2621 && TREE_CODE (op0) == ADDR_EXPR
2622 && TREE_CODE (op1) == INTEGER_CST)
2623 {
2624 tree off = fold_convert (ptr_type_node, op1);
2625 return build_fold_addr_expr_loc
2626 (loc,
2627 fold_build2 (MEM_REF,
2628 TREE_TYPE (TREE_TYPE (op0)),
2629 unshare_expr (op0), off));
2630 }
2631
2632 return fold_binary_loc (loc, subcode,
2633 gimple_expr_type (stmt), op0, op1);
2634 }
2635
2636 case GIMPLE_TERNARY_RHS:
2637 {
2638 /* Handle ternary operators that can appear in GIMPLE form. */
2639 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2640 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2641 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2642
2643 /* Fold embedded expressions in ternary codes. */
2644 if ((subcode == COND_EXPR
2645 || subcode == VEC_COND_EXPR)
2646 && COMPARISON_CLASS_P (op0))
2647 {
2648 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2649 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2650 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2651 TREE_TYPE (op0), op00, op01);
2652 if (tem)
2653 op0 = tem;
2654 }
2655
2656 return fold_ternary_loc (loc, subcode,
2657 gimple_expr_type (stmt), op0, op1, op2);
2658 }
2659
2660 default:
2661 gcc_unreachable ();
2662 }
2663 }
2664
2665 case GIMPLE_CALL:
2666 {
2667 tree fn;
2668
2669 if (gimple_call_internal_p (stmt))
2670 {
2671 enum tree_code subcode = ERROR_MARK;
2672 switch (gimple_call_internal_fn (stmt))
2673 {
2674 case IFN_UBSAN_CHECK_ADD:
2675 subcode = PLUS_EXPR;
2676 break;
2677 case IFN_UBSAN_CHECK_SUB:
2678 subcode = MINUS_EXPR;
2679 break;
2680 case IFN_UBSAN_CHECK_MUL:
2681 subcode = MULT_EXPR;
2682 break;
2683 default:
2684 return NULL_TREE;
2685 }
2686 tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
2687 tree op1 = (*valueize) (gimple_call_arg (stmt, 1));
2688
2689 if (TREE_CODE (op0) != INTEGER_CST
2690 || TREE_CODE (op1) != INTEGER_CST)
2691 return NULL_TREE;
2692 tree res = fold_binary_loc (loc, subcode,
2693 TREE_TYPE (gimple_call_arg (stmt, 0)),
2694 op0, op1);
2695 if (res
2696 && TREE_CODE (res) == INTEGER_CST
2697 && !TREE_OVERFLOW (res))
2698 return res;
2699 return NULL_TREE;
2700 }
2701
2702 fn = (*valueize) (gimple_call_fn (stmt));
2703 if (TREE_CODE (fn) == ADDR_EXPR
2704 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2705 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2706 && gimple_builtin_call_types_compatible_p (stmt,
2707 TREE_OPERAND (fn, 0)))
2708 {
2709 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2710 tree call, retval;
2711 unsigned i;
2712 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2713 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2714 call = build_call_array_loc (loc,
2715 gimple_call_return_type (stmt),
2716 fn, gimple_call_num_args (stmt), args);
2717 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2718 if (retval)
2719 {
2720 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2721 STRIP_NOPS (retval);
2722 retval = fold_convert (gimple_call_return_type (stmt), retval);
2723 }
2724 return retval;
2725 }
2726 return NULL_TREE;
2727 }
2728
2729 default:
2730 return NULL_TREE;
2731 }
2732 }
2733
2734 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2735 Returns NULL_TREE if folding to a constant is not possible, otherwise
2736 returns a constant according to is_gimple_min_invariant. */
2737
2738 tree
2739 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2740 {
2741 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2742 if (res && is_gimple_min_invariant (res))
2743 return res;
2744 return NULL_TREE;
2745 }
2746
2747
2748 /* The following set of functions is supposed to fold references using
2749 their constant initializers. */
2750
2751 static tree fold_ctor_reference (tree type, tree ctor,
2752 unsigned HOST_WIDE_INT offset,
2753 unsigned HOST_WIDE_INT size, tree);
2754
2755 /* See if we can find the constructor defining the value of BASE.
2756 When we know the constructor with a constant offset (such as when
2757 base is array[40] and we know the constructor of array), then
2758 BIT_OFFSET is adjusted accordingly.
2759
2760 As a special case, return error_mark_node when constructor
2761 is not explicitly available, but it is known to be zero
2762 such as 'static const int a;'. */
2763 static tree
2764 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2765 tree (*valueize)(tree))
2766 {
2767 HOST_WIDE_INT bit_offset2, size, max_size;
2768 if (TREE_CODE (base) == MEM_REF)
2769 {
2770 if (!integer_zerop (TREE_OPERAND (base, 1)))
2771 {
2772 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2773 return NULL_TREE;
2774 *bit_offset += (mem_ref_offset (base).low
2775 * BITS_PER_UNIT);
2776 }
2777
2778 if (valueize
2779 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2780 base = valueize (TREE_OPERAND (base, 0));
2781 if (!base || TREE_CODE (base) != ADDR_EXPR)
2782 return NULL_TREE;
2783 base = TREE_OPERAND (base, 0);
2784 }
2785
2786 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2787 DECL_INITIAL. If BASE is a nested reference into another
2788 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2789 the inner reference. */
2790 switch (TREE_CODE (base))
2791 {
2792 case VAR_DECL:
2793 case CONST_DECL:
2794 {
2795 tree init = ctor_for_folding (base);
2796
2797 /* Our semantics are the exact opposite of ctor_for_folding's:
2798 NULL means unknown, while error_mark_node means 0. */
2799 if (init == error_mark_node)
2800 return NULL_TREE;
2801 if (!init)
2802 return error_mark_node;
2803 return init;
2804 }
2805
2806 case ARRAY_REF:
2807 case COMPONENT_REF:
2808 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2809 if (max_size == -1 || size != max_size)
2810 return NULL_TREE;
2811 *bit_offset += bit_offset2;
2812 return get_base_constructor (base, bit_offset, valueize);
2813
2814 case STRING_CST:
2815 case CONSTRUCTOR:
2816 return base;
2817
2818 default:
2819 return NULL_TREE;
2820 }
2821 }
2822
2823 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2824 to the memory at bit OFFSET.
2825
2826 We only do the simple job of folding byte accesses. */
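/* E.g. reading the byte at offset 1 of "hello" yields 'e'; a byte
   access past TREE_STRING_LENGTH folds to 0 as explained below.  */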
2827
2828 static tree
2829 fold_string_cst_ctor_reference (tree type, tree ctor,
2830 unsigned HOST_WIDE_INT offset,
2831 unsigned HOST_WIDE_INT size)
2832 {
2833 if (INTEGRAL_TYPE_P (type)
2834 && (TYPE_MODE (type)
2835 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2836 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2837 == MODE_INT)
2838 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2839 && size == BITS_PER_UNIT
2840 && !(offset % BITS_PER_UNIT))
2841 {
2842 offset /= BITS_PER_UNIT;
2843 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2844 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2845 [offset]));
2846 /* Folding
2847 const char a[20]="hello";
2848 return a[10];
2849
2850 might lead to an offset greater than the string length. In this
2851 case we know the value is either initialized to 0 or out of bounds.
2852 Return 0 in both cases. */
2853 return build_zero_cst (type);
2854 }
2855 return NULL_TREE;
2856 }
2857
2858 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
2859 SIZE to the memory at bit OFFSET. */
2860
2861 static tree
2862 fold_array_ctor_reference (tree type, tree ctor,
2863 unsigned HOST_WIDE_INT offset,
2864 unsigned HOST_WIDE_INT size,
2865 tree from_decl)
2866 {
2867 unsigned HOST_WIDE_INT cnt;
2868 tree cfield, cval;
2869 double_int low_bound, elt_size;
2870 double_int index, max_index;
2871 double_int access_index;
2872 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2873 HOST_WIDE_INT inner_offset;
2874
2875 /* Compute low bound and elt size. */
2876 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2877 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2878 if (domain_type && TYPE_MIN_VALUE (domain_type))
2879 {
2880 /* Static constructors for variably sized objects make no sense. */
2881 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2882 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2883 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2884 }
2885 else
2886 low_bound = double_int_zero;
2887 /* Static constructors for variably sized objects make no sense. */
2888 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2889 == INTEGER_CST);
2890 elt_size =
2891 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2892
2893
2894 /* We can handle only constant-sized accesses that are known not to
2895 be larger than the size of the array element. */
2896 if (!TYPE_SIZE_UNIT (type)
2897 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2898 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
2899 || elt_size.is_zero ())
2900 return NULL_TREE;
2901
2902 /* Compute the array index we look for. */
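/* E.g. with 4-byte elements and a low bound of 0, a bit OFFSET of 96
   is byte 12 and therefore access_index 3.  */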
2903 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2904 .udiv (elt_size, TRUNC_DIV_EXPR);
2905 access_index += low_bound;
2906 if (index_type)
2907 access_index = access_index.ext (TYPE_PRECISION (index_type),
2908 TYPE_UNSIGNED (index_type));
2909
2910 /* And offset within the access. */
2911 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2912
2913 /* See if the array field is large enough to span the whole access. We do not
2914 care to fold accesses spanning multiple array indexes. */
2915 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2916 return NULL_TREE;
2917
2918 index = low_bound - double_int_one;
2919 if (index_type)
2920 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2921
2922 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2923 {
2924 /* An array constructor might explicitly set the index, specify a range,
2925 or leave the index NULL, meaning that it is the next index after the
2926 previous one. */
2927 if (cfield)
2928 {
2929 if (TREE_CODE (cfield) == INTEGER_CST)
2930 max_index = index = tree_to_double_int (cfield);
2931 else
2932 {
2933 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2934 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2935 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2936 }
2937 }
2938 else
2939 {
2940 index += double_int_one;
2941 if (index_type)
2942 index = index.ext (TYPE_PRECISION (index_type),
2943 TYPE_UNSIGNED (index_type));
2944 max_index = index;
2945 }
2946
2947 /* Do we have a match? */
2948 if (access_index.cmp (index, 1) >= 0
2949 && access_index.cmp (max_index, 1) <= 0)
2950 return fold_ctor_reference (type, cval, inner_offset, size,
2951 from_decl);
2952 }
2953 /* When memory is not explicitly mentioned in the constructor,
2954 it is 0 (or out of range). */
2955 return build_zero_cst (type);
2956 }
2957
2958 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2959 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2960
2961 static tree
2962 fold_nonarray_ctor_reference (tree type, tree ctor,
2963 unsigned HOST_WIDE_INT offset,
2964 unsigned HOST_WIDE_INT size,
2965 tree from_decl)
2966 {
2967 unsigned HOST_WIDE_INT cnt;
2968 tree cfield, cval;
2969
2970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2971 cval)
2972 {
2973 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2974 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2975 tree field_size = DECL_SIZE (cfield);
2976 double_int bitoffset;
2977 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2978 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2979 double_int bitoffset_end, access_end;
2980
2981 /* Variable sized objects in static constructors make no sense,
2982 but field_size can be NULL for flexible array members. */
2983 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2984 && TREE_CODE (byte_offset) == INTEGER_CST
2985 && (field_size != NULL_TREE
2986 ? TREE_CODE (field_size) == INTEGER_CST
2987 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2988
2989 /* Compute bit offset of the field. */
2990 bitoffset = tree_to_double_int (field_offset)
2991 + byte_offset_cst * bits_per_unit_cst;
2992 /* Compute bit offset where the field ends. */
2993 if (field_size != NULL_TREE)
2994 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2995 else
2996 bitoffset_end = double_int_zero;
2997
2998 access_end = double_int::from_uhwi (offset)
2999 + double_int::from_uhwi (size);
3000
3001 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3002 [BITOFFSET, BITOFFSET_END)? */
3003 if (access_end.cmp (bitoffset, 0) > 0
3004 && (field_size == NULL_TREE
3005 || double_int::from_uhwi (offset).slt (bitoffset_end)))
3006 {
3007 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
3008 /* We do have overlap. Now see if the field is large enough to
3009 cover the access. Give up for accesses spanning multiple
3010 fields. */
3011 if (access_end.cmp (bitoffset_end, 0) > 0)
3012 return NULL_TREE;
3013 if (double_int::from_uhwi (offset).slt (bitoffset))
3014 return NULL_TREE;
3015 return fold_ctor_reference (type, cval,
3016 inner_offset.to_uhwi (), size,
3017 from_decl);
3018 }
3019 }
3020 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3021 return build_zero_cst (type);
3022 }
3023
3024 /* CTOR is a value initializing memory; fold a reference of type TYPE and size SIZE
3025 to the memory at bit OFFSET. */
3026
3027 static tree
3028 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3029 unsigned HOST_WIDE_INT size, tree from_decl)
3030 {
3031 tree ret;
3032
3033 /* We found the field with an exact match. */
3034 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3035 && !offset)
3036 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3037
3038 /* We are at the end of the walk; see if we can view-convert the
3039 result. */
3040 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3041 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3042 && operand_equal_p (TYPE_SIZE (type),
3043 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3044 {
3045 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3046 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3047 if (ret)
3048 STRIP_NOPS (ret);
3049 return ret;
3050 }
3051 if (TREE_CODE (ctor) == STRING_CST)
3052 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3053 if (TREE_CODE (ctor) == CONSTRUCTOR)
3054 {
3055
3056 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3057 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3058 return fold_array_ctor_reference (type, ctor, offset, size,
3059 from_decl);
3060 else
3061 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3062 from_decl);
3063 }
3064
3065 return NULL_TREE;
3066 }
3067
3068 /* Return the tree representing the element referenced by T if T is an
3069 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3070 names using VALUEIZE. Return NULL_TREE otherwise. */
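/* For example, given 'static const int a[3] = { 1, 2, 3 };', a read
   of a[2] folds to the constant 3 (an illustrative case; the
   constructor is located via get_base_constructor below).  */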
3071
3072 tree
3073 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3074 {
3075 tree ctor, idx, base;
3076 HOST_WIDE_INT offset, size, max_size;
3077 tree tem;
3078
3079 if (TREE_THIS_VOLATILE (t))
3080 return NULL_TREE;
3081
3082 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3083 return get_symbol_constant_value (t);
3084
3085 tem = fold_read_from_constant_string (t);
3086 if (tem)
3087 return tem;
3088
3089 switch (TREE_CODE (t))
3090 {
3091 case ARRAY_REF:
3092 case ARRAY_RANGE_REF:
3093 /* Constant indexes are handled well by get_base_constructor.
3094 Only special-case variable offsets.
3095 FIXME: This code can't handle nested references with variable indexes
3096 (they will be handled only by iteration of ccp). Perhaps we can bring
3097 get_ref_base_and_extent here and make it use a valueize callback. */
3098 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3099 && valueize
3100 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3101 && TREE_CODE (idx) == INTEGER_CST)
3102 {
3103 tree low_bound, unit_size;
3104 double_int doffset;
3105
3106 /* If the resulting bit-offset is constant, track it. */
3107 if ((low_bound = array_ref_low_bound (t),
3108 TREE_CODE (low_bound) == INTEGER_CST)
3109 && (unit_size = array_ref_element_size (t),
3110 tree_fits_uhwi_p (unit_size))
3111 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3112 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3113 doffset.fits_shwi ()))
3114 {
3115 offset = doffset.to_shwi ();
3116 offset *= tree_to_uhwi (unit_size);
3117 offset *= BITS_PER_UNIT;
3118
3119 base = TREE_OPERAND (t, 0);
3120 ctor = get_base_constructor (base, &offset, valueize);
3121 /* Empty constructor. Always fold to 0. */
3122 if (ctor == error_mark_node)
3123 return build_zero_cst (TREE_TYPE (t));
3124 /* Out-of-bounds array access. The value is undefined,
3125 but don't fold. */
3126 if (offset < 0)
3127 return NULL_TREE;
3128 /* We cannot determine the ctor. */
3129 if (!ctor)
3130 return NULL_TREE;
3131 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3132 tree_to_uhwi (unit_size)
3133 * BITS_PER_UNIT,
3134 base);
3135 }
3136 }
3137 /* Fallthru. */
3138
3139 case COMPONENT_REF:
3140 case BIT_FIELD_REF:
3141 case TARGET_MEM_REF:
3142 case MEM_REF:
3143 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3144 ctor = get_base_constructor (base, &offset, valueize);
3145
3146 /* Empty constructor. Always fold to 0. */
3147 if (ctor == error_mark_node)
3148 return build_zero_cst (TREE_TYPE (t));
3149 /* We do not know the precise address. */
3150 if (max_size == -1 || max_size != size)
3151 return NULL_TREE;
3152 /* We cannot determine the ctor. */
3153 if (!ctor)
3154 return NULL_TREE;
3155
3156 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3157 if (offset < 0)
3158 return NULL_TREE;
3159
3160 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3161 base);
3162
3163 case REALPART_EXPR:
3164 case IMAGPART_EXPR:
3165 {
3166 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3167 if (c && TREE_CODE (c) == COMPLEX_CST)
3168 return fold_build1_loc (EXPR_LOCATION (t),
3169 TREE_CODE (t), TREE_TYPE (t), c);
3170 break;
3171 }
3172
3173 default:
3174 break;
3175 }
3176
3177 return NULL_TREE;
3178 }
3179
3180 tree
3181 fold_const_aggregate_ref (tree t)
3182 {
3183 return fold_const_aggregate_ref_1 (t, NULL);
3184 }
3185
3186 /* Look up the virtual method with index TOKEN in the virtual table V
3187 at OFFSET.
3188 If CAN_REFER is non-NULL, set it to false if the method
3189 is not referable or if the virtual table is ill-formed (such as rewritten
3190 by a non-C++-produced symbol); otherwise just return NULL in that case. */
3191
3192 tree
3193 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
3194 tree v,
3195 unsigned HOST_WIDE_INT offset,
3196 bool *can_refer)
3197 {
3198 tree vtable = v, init, fn;
3199 unsigned HOST_WIDE_INT size;
3200 unsigned HOST_WIDE_INT elt_size, access_index;
3201 tree domain_type;
3202
3203 if (can_refer)
3204 *can_refer = true;
3205
3206 /* First of all, double-check that we have a virtual table. */
3207 if (TREE_CODE (v) != VAR_DECL
3208 || !DECL_VIRTUAL_P (v))
3209 {
3210 gcc_assert (in_lto_p);
3211 /* Pass down that we lost track of the target. */
3212 if (can_refer)
3213 *can_refer = false;
3214 return NULL_TREE;
3215 }
3216
3217 init = ctor_for_folding (v);
3218
3219 /* The virtual tables should always be born with constructors
3220 and we should always assume that they are available for
3221 folding. At the moment we do not stream them in all cases,
3222 but it should never happen that the ctor seems unreachable. */
3223 gcc_assert (init);
3224 if (init == error_mark_node)
3225 {
3226 gcc_assert (in_lto_p);
3227 /* Pass down that we lost track of the target. */
3228 if (can_refer)
3229 *can_refer = false;
3230 return NULL_TREE;
3231 }
3232 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3233 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
3234 offset *= BITS_PER_UNIT;
3235 offset += token * size;
3236
3237 /* Look up the value in the constructor, which is assumed to be an array.
3238 This is equivalent to
3239 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3240 offset, size, NULL);
3241 but in constant time. We expect that the frontend produced a simple
3242 array without indexed initializers. */
3243
3244 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
3245 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
3246 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
3247 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
3248
3249 access_index = offset / BITS_PER_UNIT / elt_size;
3250 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
3251
3252 /* This code assumes that there are no
3253 indexed fields produced by the C++ FE, so we can directly index the array. */
3254 if (access_index < CONSTRUCTOR_NELTS (init))
3255 {
3256 fn = CONSTRUCTOR_ELT (init, access_index)->value;
3257 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
3258 STRIP_NOPS (fn);
3259 }
3260 else
3261 fn = NULL;
3262
3263 /* For a type-inconsistent program we may end up looking up a virtual method
3264 in a virtual table that does not contain TOKEN entries. We may overrun
3265 the virtual table and pick up a constant or RTTI info pointer.
3266 In any case the call is undefined. */
3267 if (!fn
3268 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
3269 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3270 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3271 else
3272 {
3273 fn = TREE_OPERAND (fn, 0);
3274
3275 /* When the cgraph node is missing and the function is not public, we cannot
3276 devirtualize. This can happen in WHOPR when the actual method
3277 ends up in another partition, because we found the devirtualization
3278 possibility too late. */
3279 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3280 {
3281 if (can_refer)
3282 {
3283 *can_refer = false;
3284 return fn;
3285 }
3286 return NULL_TREE;
3287 }
3288 }
3289
3290 /* Make sure we create a cgraph node for functions we'll reference.
3291 They can be non-existent if the reference comes from an entry
3292 of an external vtable for example. */
3293 cgraph_get_create_node (fn);
3294
3295 return fn;
3296 }
3297
3298 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3299 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3300 KNOWN_BINFO carries the binfo describing the true type of
3301 OBJ_TYPE_REF_OBJECT(REF).
3302 If CAN_REFER is non-NULL, set it to false if the method
3303 is not referable or if the virtual table is ill-formed (such as rewritten
3304 by a non-C++-produced symbol); otherwise just return NULL in that case. */
3305
3306 tree
3307 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3308 bool *can_refer)
3309 {
3310 unsigned HOST_WIDE_INT offset;
3311 tree v;
3312
3313 v = BINFO_VTABLE (known_binfo);
3314 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3315 if (!v)
3316 return NULL_TREE;
3317
3318 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3319 {
3320 if (can_refer)
3321 *can_refer = false;
3322 return NULL_TREE;
3323 }
3324 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3325 }
3326
3327 /* Return true iff VAL is a gimple expression that is known to be
3328 non-negative. Restricted to floating-point inputs. */
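/* For example, 'x = y * y' and 'x = __builtin_fabs (y)' are both
   recognized as non-negative by the cases below.  */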
3329
3330 bool
3331 gimple_val_nonnegative_real_p (tree val)
3332 {
3333 gimple def_stmt;
3334
3335 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3336
3337 /* Use existing logic for non-gimple trees. */
3338 if (tree_expr_nonnegative_p (val))
3339 return true;
3340
3341 if (TREE_CODE (val) != SSA_NAME)
3342 return false;
3343
3344 /* Currently we look only at the immediately defining statement
3345 to make this determination, since recursion on defining
3346 statements of operands can lead to quadratic behavior in the
3347 worst case. This is expected to catch almost all occurrences
3348 in practice. It would be possible to implement limited-depth
3349 recursion if important cases are lost. Alternatively, passes
3350 that need this information (such as the pow/powi lowering code
3351 in the cse_sincos pass) could be revised to provide it through
3352 dataflow propagation. */
3353
3354 def_stmt = SSA_NAME_DEF_STMT (val);
3355
3356 if (is_gimple_assign (def_stmt))
3357 {
3358 tree op0, op1;
3359
3360 /* See fold-const.c:tree_expr_nonnegative_p for additional
3361 cases that could be handled with recursion. */
3362
3363 switch (gimple_assign_rhs_code (def_stmt))
3364 {
3365 case ABS_EXPR:
3366 /* Always true for floating-point operands. */
3367 return true;
3368
3369 case MULT_EXPR:
3370 /* True if the two operands are identical (since we are
3371 restricted to floating-point inputs). */
3372 op0 = gimple_assign_rhs1 (def_stmt);
3373 op1 = gimple_assign_rhs2 (def_stmt);
3374
3375 if (op0 == op1
3376 || operand_equal_p (op0, op1, 0))
3377 return true;
3378
3379 default:
3380 return false;
3381 }
3382 }
3383 else if (is_gimple_call (def_stmt))
3384 {
3385 tree fndecl = gimple_call_fndecl (def_stmt);
3386 if (fndecl
3387 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3388 {
3389 tree arg1;
3390
3391 switch (DECL_FUNCTION_CODE (fndecl))
3392 {
3393 CASE_FLT_FN (BUILT_IN_ACOS):
3394 CASE_FLT_FN (BUILT_IN_ACOSH):
3395 CASE_FLT_FN (BUILT_IN_CABS):
3396 CASE_FLT_FN (BUILT_IN_COSH):
3397 CASE_FLT_FN (BUILT_IN_ERFC):
3398 CASE_FLT_FN (BUILT_IN_EXP):
3399 CASE_FLT_FN (BUILT_IN_EXP10):
3400 CASE_FLT_FN (BUILT_IN_EXP2):
3401 CASE_FLT_FN (BUILT_IN_FABS):
3402 CASE_FLT_FN (BUILT_IN_FDIM):
3403 CASE_FLT_FN (BUILT_IN_HYPOT):
3404 CASE_FLT_FN (BUILT_IN_POW10):
3405 return true;
3406
3407 CASE_FLT_FN (BUILT_IN_SQRT):
3408 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3409 negative inputs. */
3410 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3411 return true;
3412
3413 break;
3414
3415 CASE_FLT_FN (BUILT_IN_POWI):
3416 /* True if the second argument is an even integer. */
3417 arg1 = gimple_call_arg (def_stmt, 1);
3418
3419 if (TREE_CODE (arg1) == INTEGER_CST
3420 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3421 return true;
3422
3423 break;
3424
3425 CASE_FLT_FN (BUILT_IN_POW):
3426 /* True if the second argument is an even integer-valued
3427 real. */
3428 arg1 = gimple_call_arg (def_stmt, 1);
3429
3430 if (TREE_CODE (arg1) == REAL_CST)
3431 {
3432 REAL_VALUE_TYPE c;
3433 HOST_WIDE_INT n;
3434
3435 c = TREE_REAL_CST (arg1);
3436 n = real_to_integer (&c);
3437
3438 if ((n & 1) == 0)
3439 {
3440 REAL_VALUE_TYPE cint;
3441 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3442 if (real_identical (&c, &cint))
3443 return true;
3444 }
3445 }
3446
3447 break;
3448
3449 default:
3450 return false;
3451 }
3452 }
3453 }
3454
3455 return false;
3456 }
3457
3458 /* Given a pointer value T, return a simplified version of an
3459 indirection through T, or NULL_TREE if no simplification is
3460 possible. Note that the resulting type may be different from
3461 the type pointed to in the sense that it is still compatible
3462 from the langhooks point of view. */
3463
3464 tree
3465 gimple_fold_indirect_ref (tree t)
3466 {
3467 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3468 tree sub = t;
3469 tree subtype;
3470
3471 STRIP_NOPS (sub);
3472 subtype = TREE_TYPE (sub);
3473 if (!POINTER_TYPE_P (subtype))
3474 return NULL_TREE;
3475
3476 if (TREE_CODE (sub) == ADDR_EXPR)
3477 {
3478 tree op = TREE_OPERAND (sub, 0);
3479 tree optype = TREE_TYPE (op);
3480 /* *&p => p */
3481 if (useless_type_conversion_p (type, optype))
3482 return op;
3483
3484 /* *(foo *)&fooarray => fooarray[0] */
3485 if (TREE_CODE (optype) == ARRAY_TYPE
3486 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3487 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3488 {
3489 tree type_domain = TYPE_DOMAIN (optype);
3490 tree min_val = size_zero_node;
3491 if (type_domain && TYPE_MIN_VALUE (type_domain))
3492 min_val = TYPE_MIN_VALUE (type_domain);
3493 if (TREE_CODE (min_val) == INTEGER_CST)
3494 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3495 }
3496 /* *(foo *)&complexfoo => __real__ complexfoo */
3497 else if (TREE_CODE (optype) == COMPLEX_TYPE
3498 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3499 return fold_build1 (REALPART_EXPR, type, op);
3500 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3501 else if (TREE_CODE (optype) == VECTOR_TYPE
3502 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3503 {
3504 tree part_width = TYPE_SIZE (type);
3505 tree index = bitsize_int (0);
3506 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3507 }
3508 }
3509
3510 /* *(p + CST) -> ... */
3511 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3512 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3513 {
3514 tree addr = TREE_OPERAND (sub, 0);
3515 tree off = TREE_OPERAND (sub, 1);
3516 tree addrtype;
3517
3518 STRIP_NOPS (addr);
3519 addrtype = TREE_TYPE (addr);
3520
3521 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3522 if (TREE_CODE (addr) == ADDR_EXPR
3523 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3524 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3525 && tree_fits_uhwi_p (off))
3526 {
3527 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3528 tree part_width = TYPE_SIZE (type);
3529 unsigned HOST_WIDE_INT part_widthi
3530 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3531 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3532 tree index = bitsize_int (indexi);
3533 if (offset / part_widthi
3534 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3535 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3536 part_width, index);
3537 }
3538
3539 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3540 if (TREE_CODE (addr) == ADDR_EXPR
3541 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3542 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3543 {
3544 tree size = TYPE_SIZE_UNIT (type);
3545 if (tree_int_cst_equal (size, off))
3546 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3547 }
3548
3549 /* *(p + CST) -> MEM_REF <p, CST>. */
3550 if (TREE_CODE (addr) != ADDR_EXPR
3551 || DECL_P (TREE_OPERAND (addr, 0)))
3552 return fold_build2 (MEM_REF, type,
3553 addr,
3554 build_int_cst_wide (ptype,
3555 TREE_INT_CST_LOW (off),
3556 TREE_INT_CST_HIGH (off)));
3557 }
3558
3559 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3560 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3562 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3563 {
3564 tree type_domain;
3565 tree min_val = size_zero_node;
3566 tree osub = sub;
3567 sub = gimple_fold_indirect_ref (sub);
3568 if (! sub)
3569 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3570 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3571 if (type_domain && TYPE_MIN_VALUE (type_domain))
3572 min_val = TYPE_MIN_VALUE (type_domain);
3573 if (TREE_CODE (min_val) == INTEGER_CST)
3574 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3575 }
3576
3577 return NULL_TREE;
3578 }
3579
3580 /* Return true if CODE is an operation that, when operating on signed
3581 integer types, involves undefined behavior on overflow and the
3582 operation can be expressed with unsigned arithmetic. */
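/* E.g. PLUS_EXPR qualifies: 'a + b' overflows with undefined
   behavior on signed int, while the same addition carried out in
   unsigned int wraps and is well defined.  */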
3583
3584 bool
3585 arith_code_with_undefined_signed_overflow (tree_code code)
3586 {
3587 switch (code)
3588 {
3589 case PLUS_EXPR:
3590 case MINUS_EXPR:
3591 case MULT_EXPR:
3592 case NEGATE_EXPR:
3593 case POINTER_PLUS_EXPR:
3594 return true;
3595 default:
3596 return false;
3597 }
3598 }
3599
3600 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3601 operation that can be transformed to unsigned arithmetic by converting
3602 its operands, carrying out the operation in the corresponding unsigned
3603 type and converting the result back to the original type.
3604
3605 Returns a sequence of statements that replace STMT and also contain
3606 a modified form of STMT itself. */
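/* E.g. for 'c = a + b' on int, the emitted sequence is roughly

     a.0_1 = (unsigned int) a_2;
     b.0_3 = (unsigned int) b_4;
     c.0_5 = a.0_1 + b.0_3;
     c_6 = (int) c.0_5;

   with illustrative SSA names.  */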
3607
3608 gimple_seq
3609 rewrite_to_defined_overflow (gimple stmt)
3610 {
3611 if (dump_file && (dump_flags & TDF_DETAILS))
3612 {
3613 fprintf (dump_file, "rewriting stmt with undefined signed "
3614 "overflow ");
3615 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3616 }
3617
3618 tree lhs = gimple_assign_lhs (stmt);
3619 tree type = unsigned_type_for (TREE_TYPE (lhs));
3620 gimple_seq stmts = NULL;
3621 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
3622 {
3623 gimple_seq stmts2 = NULL;
3624 gimple_set_op (stmt, i,
3625 force_gimple_operand (fold_convert (type,
3626 gimple_op (stmt, i)),
3627 &stmts2, true, NULL_TREE));
3628 gimple_seq_add_seq (&stmts, stmts2);
3629 }
3630 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
3631 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3632 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
3633 gimple_seq_add_stmt (&stmts, stmt);
3634 gimple cvt = gimple_build_assign_with_ops
3635 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
3636 gimple_seq_add_stmt (&stmts, cvt);
3637
3638 return stmts;
3639 }