gcc/gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "bitmap.h"
30 #include "gimple.h"
31 #include "gimple-ssa.h"
32 #include "tree-ssanames.h"
33 #include "tree-into-ssa.h"
34 #include "tree-dfa.h"
35 #include "tree-ssa.h"
36 #include "tree-ssa-propagate.h"
37 #include "target.h"
38 #include "ipa-utils.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-ssa-address.h"
41
 42 /* Return true when DECL can be referenced from the current unit.
 43    FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
44 We can get declarations that are not possible to reference for various
45 reasons:
46
47 1) When analyzing C++ virtual tables.
48 C++ virtual tables do have known constructors even
 49        when they are keyed to another compilation unit.
50 Those tables can contain pointers to methods and vars
51 in other units. Those methods have both STATIC and EXTERNAL
52 set.
 53     2) In WHOPR mode devirtualization might lead to a reference
 54        to a method that was partitioned elsewhere.
55 In this case we have static VAR_DECL or FUNCTION_DECL
56 that has no corresponding callgraph/varpool node
57 declaring the body.
 58     3) COMDAT functions referred to by external vtables that
 59        we devirtualize only during the final compilation stage.
 60        At this time we have already decided that we will not output
 61        the function body and thus we can't reference the symbol
62 directly. */
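 
 /* A minimal C++ sketch of case 1 above (illustrative only): if another unit
    defines
 
      struct A { virtual int f () { return 0; } };
 
    and this unit only sees an extern declaration of A's vtable, the vtable
    initializer still points at A::f.  That FUNCTION_DECL then has both
    TREE_STATIC and DECL_EXTERNAL set, and whether we may emit a direct
    reference to it is exactly what this predicate decides.  */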
63
64 static bool
65 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
66 {
67 struct varpool_node *vnode;
68 struct cgraph_node *node;
69 symtab_node *snode;
70
71 if (DECL_ABSTRACT (decl))
72 return false;
73
74 /* We are concerned only about static/external vars and functions. */
75 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
76 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
77 return true;
78
 79   /* Static objects can be referred to only if they were not optimized out yet. */
80 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
81 {
82 snode = symtab_get_node (decl);
83 if (!snode)
84 return false;
85 node = dyn_cast <cgraph_node> (snode);
86 return !node || !node->global.inlined_to;
87 }
88
89 /* We will later output the initializer, so we can refer to it.
 90      So we are concerned only when DECL comes from the initializer of
 91      an external var. */
92 if (!from_decl
93 || TREE_CODE (from_decl) != VAR_DECL
94 || !DECL_EXTERNAL (from_decl)
95 || (flag_ltrans
96 && symtab_get_node (from_decl)->in_other_partition))
97 return true;
 98   /* We are folding a reference from an external vtable.  The vtable may refer
 99      to a symbol keyed to another compilation unit.  The other compilation
 100      unit may be in a separate DSO and the symbol may be hidden. */
101 if (DECL_VISIBILITY_SPECIFIED (decl)
102 && DECL_EXTERNAL (decl)
103 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
104 return false;
 105   /* When a function is public, we can always introduce a new reference.
 106      The exception is COMDAT functions, where introducing a direct
 107      reference implies the need to include the function body in the current unit. */
108 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
109 return true;
 110   /* We are not at the ltrans stage, so don't worry about WHOPR.
 111      Also, when still gimplifying, all referred-to comdat functions will be
112 produced.
113
 114      As observed in PR20991 for already optimized out comdat virtual functions
 115      it may be tempting not to give up, because the copy will be
 116      output elsewhere when the corresponding vtable is output.
 117      This is however not possible - the ABI specifies that COMDATs are output
 118      in the units where they are used, and when the other unit was compiled with
 119      LTO it is possible that the vtable was kept public while the function itself
 120      was privatized. */
121 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
122 return true;
123
 124   /* OK, we are seeing either a COMDAT or a static variable.  In this case we
 125      must check that the definition is still around so we can refer to it. */
126 if (TREE_CODE (decl) == FUNCTION_DECL)
127 {
128 node = cgraph_get_node (decl);
 129       /* Check that we still have a function body and that we haven't yet taken
 130          the decision to eliminate the offline copy of the function.
 131          The second is important when devirtualization happens during the final
 132          compilation stage, when making a new reference no longer causes the
 133          callee to be compiled. */
134 if (!node || !node->definition || node->global.inlined_to)
135 {
136 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
137 return false;
138 }
139 }
140 else if (TREE_CODE (decl) == VAR_DECL)
141 {
142 vnode = varpool_get_node (decl);
143 if (!vnode || !vnode->definition)
144 {
145 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
146 return false;
147 }
148 }
149 return true;
150 }
151
 152 /* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to transform
 153    it into an acceptable form for is_gimple_min_invariant.
 154    FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
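 
 /* An illustrative sketch of the POINTER_PLUS_EXPR case below: a constructor
    value such as
 
      &arr + 4
 
    is not acceptable to is_gimple_min_invariant and is rewritten into the
    equivalent invariant form
 
      &MEM_REF [&arr, 4]
 
    before the ADDR_EXPR handling validates the referenced decl.  */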
155
156 tree
157 canonicalize_constructor_val (tree cval, tree from_decl)
158 {
159 tree orig_cval = cval;
160 STRIP_NOPS (cval);
161 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
162 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
163 {
164 tree ptr = TREE_OPERAND (cval, 0);
165 if (is_gimple_min_invariant (ptr))
166 cval = build1_loc (EXPR_LOCATION (cval),
167 ADDR_EXPR, TREE_TYPE (ptr),
168 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
169 ptr,
170 fold_convert (ptr_type_node,
171 TREE_OPERAND (cval, 1))));
172 }
173 if (TREE_CODE (cval) == ADDR_EXPR)
174 {
175 tree base = NULL_TREE;
176 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
177 {
178 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
179 if (base)
180 TREE_OPERAND (cval, 0) = base;
181 }
182 else
183 base = get_base_address (TREE_OPERAND (cval, 0));
184 if (!base)
185 return NULL_TREE;
186
187 if ((TREE_CODE (base) == VAR_DECL
188 || TREE_CODE (base) == FUNCTION_DECL)
189 && !can_refer_decl_in_current_unit_p (base, from_decl))
190 return NULL_TREE;
191 if (TREE_CODE (base) == VAR_DECL)
192 TREE_ADDRESSABLE (base) = 1;
193 else if (TREE_CODE (base) == FUNCTION_DECL)
194 {
195 /* Make sure we create a cgraph node for functions we'll reference.
196 They can be non-existent if the reference comes from an entry
197 of an external vtable for example. */
198 cgraph_get_create_real_symbol_node (base);
199 }
200 /* Fixup types in global initializers. */
201 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
202 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
203
204 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
205 cval = fold_convert (TREE_TYPE (orig_cval), cval);
206 return cval;
207 }
208 return orig_cval;
209 }
210
211 /* If SYM is a constant variable with known value, return the value.
212 NULL_TREE is returned otherwise. */
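 
 /* As a minimal sketch of the two cases handled here: given
 
      static const int x = 42;
 
    a read of x folds to 42 via the canonicalized initializer, while a 'const'
    integral or float variable with no initializer at all folds to zero,
    provided its definition cannot be overridden at link or run time.  */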
213
214 tree
215 get_symbol_constant_value (tree sym)
216 {
217 tree val = ctor_for_folding (sym);
218 if (val != error_mark_node)
219 {
220 if (val)
221 {
222 val = canonicalize_constructor_val (unshare_expr (val), sym);
223 if (val && is_gimple_min_invariant (val))
224 return val;
225 else
226 return NULL_TREE;
227 }
228 /* Variables declared 'const' without an initializer
229 have zero as the initializer if they may not be
230 overridden at link or run time. */
231 if (!val
232 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
233 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
234 return build_zero_cst (TREE_TYPE (sym));
235 }
236
237 return NULL_TREE;
238 }
239
240
241
242 /* Subroutine of fold_stmt. We perform several simplifications of the
243 memory reference tree EXPR and make sure to re-gimplify them properly
244 after propagation of constant addresses. IS_LHS is true if the
245 reference is supposed to be an lvalue. */
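 
 /* A small sketch of the kind of cleanup done here: constant propagation can
    leave a load such as
 
      tmp_1 = MEM[&a, 0];
 
    and, when the types are strictly compatible, this is folded back to the
    plain reference
 
      tmp_1 = a;
 
    MEM_REFs with non-canonical invariant addresses are canonicalized first so
    the other folders never see them.  */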
246
247 static tree
248 maybe_fold_reference (tree expr, bool is_lhs)
249 {
250 tree *t = &expr;
251 tree result;
252
253 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
254 || TREE_CODE (expr) == REALPART_EXPR
255 || TREE_CODE (expr) == IMAGPART_EXPR)
256 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
257 return fold_unary_loc (EXPR_LOCATION (expr),
258 TREE_CODE (expr),
259 TREE_TYPE (expr),
260 TREE_OPERAND (expr, 0));
261 else if (TREE_CODE (expr) == BIT_FIELD_REF
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
263 return fold_ternary_loc (EXPR_LOCATION (expr),
264 TREE_CODE (expr),
265 TREE_TYPE (expr),
266 TREE_OPERAND (expr, 0),
267 TREE_OPERAND (expr, 1),
268 TREE_OPERAND (expr, 2));
269
270 while (handled_component_p (*t))
271 t = &TREE_OPERAND (*t, 0);
272
 273   /* Canonicalize a MEM_REF's invariant address operand.  Do this first
274 to avoid feeding non-canonical MEM_REFs elsewhere. */
275 if (TREE_CODE (*t) == MEM_REF
276 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
277 {
278 bool volatile_p = TREE_THIS_VOLATILE (*t);
279 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
280 TREE_OPERAND (*t, 0),
281 TREE_OPERAND (*t, 1));
282 if (tem)
283 {
284 TREE_THIS_VOLATILE (tem) = volatile_p;
285 *t = tem;
286 tem = maybe_fold_reference (expr, is_lhs);
287 if (tem)
288 return tem;
289 return expr;
290 }
291 }
292
293 if (!is_lhs
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
296 return result;
297
298 /* Fold back MEM_REFs to reference trees. */
299 if (TREE_CODE (*t) == MEM_REF
300 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
301 && integer_zerop (TREE_OPERAND (*t, 1))
302 && (TREE_THIS_VOLATILE (*t)
303 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
304 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
305 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
306 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
307 /* We have to look out here to not drop a required conversion
308 from the rhs to the lhs if is_lhs, but we don't have the
309 rhs here to verify that. Thus require strict type
310 compatibility. */
311 && types_compatible_p (TREE_TYPE (*t),
312 TREE_TYPE (TREE_OPERAND
313 (TREE_OPERAND (*t, 0), 0))))
314 {
315 tree tem;
316 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
317 tem = maybe_fold_reference (expr, is_lhs);
318 if (tem)
319 return tem;
320 return expr;
321 }
322 else if (TREE_CODE (*t) == TARGET_MEM_REF)
323 {
324 tree tem = maybe_fold_tmr (*t);
325 if (tem)
326 {
327 *t = tem;
328 tem = maybe_fold_reference (expr, is_lhs);
329 if (tem)
330 return tem;
331 return expr;
332 }
333 }
334
335 return NULL_TREE;
336 }
337
338
339 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
340 replacement rhs for the statement or NULL_TREE if no simplification
341 could be made. It is assumed that the operands have been previously
342 folded. */
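 
 /* One concrete canonicalization performed below, as a sketch: for a
    boolean-typed X, the comparison X == 0 becomes ~X when X has one-bit
    precision and X ^ 1 otherwise, while X != 0 simplifies to X itself.  */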
343
344 static tree
345 fold_gimple_assign (gimple_stmt_iterator *si)
346 {
347 gimple stmt = gsi_stmt (*si);
348 enum tree_code subcode = gimple_assign_rhs_code (stmt);
349 location_t loc = gimple_location (stmt);
350
351 tree result = NULL_TREE;
352
353 switch (get_gimple_rhs_class (subcode))
354 {
355 case GIMPLE_SINGLE_RHS:
356 {
357 tree rhs = gimple_assign_rhs1 (stmt);
358
359 if (REFERENCE_CLASS_P (rhs))
360 return maybe_fold_reference (rhs, false);
361
362 else if (TREE_CODE (rhs) == ADDR_EXPR)
363 {
364 tree ref = TREE_OPERAND (rhs, 0);
365 tree tem = maybe_fold_reference (ref, true);
366 if (tem
367 && TREE_CODE (tem) == MEM_REF
368 && integer_zerop (TREE_OPERAND (tem, 1)))
369 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
370 else if (tem)
371 result = fold_convert (TREE_TYPE (rhs),
372 build_fold_addr_expr_loc (loc, tem));
373 else if (TREE_CODE (ref) == MEM_REF
374 && integer_zerop (TREE_OPERAND (ref, 1)))
375 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
376 }
377
378 else if (TREE_CODE (rhs) == CONSTRUCTOR
379 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
380 && (CONSTRUCTOR_NELTS (rhs)
381 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
382 {
383 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
384 unsigned i;
385 tree val;
386
387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
388 if (TREE_CODE (val) != INTEGER_CST
389 && TREE_CODE (val) != REAL_CST
390 && TREE_CODE (val) != FIXED_CST)
391 return NULL_TREE;
392
393 return build_vector_from_ctor (TREE_TYPE (rhs),
394 CONSTRUCTOR_ELTS (rhs));
395 }
396
397 else if (DECL_P (rhs))
398 return get_symbol_constant_value (rhs);
399
400 /* If we couldn't fold the RHS, hand over to the generic
401 fold routines. */
402 if (result == NULL_TREE)
403 result = fold (rhs);
404
405 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
406 that may have been added by fold, and "useless" type
407 conversions that might now be apparent due to propagation. */
408 STRIP_USELESS_TYPE_CONVERSION (result);
409
410 if (result != rhs && valid_gimple_rhs_p (result))
411 return result;
412
413 return NULL_TREE;
414 }
415 break;
416
417 case GIMPLE_UNARY_RHS:
418 {
419 tree rhs = gimple_assign_rhs1 (stmt);
420
421 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
422 if (result)
423 {
424 /* If the operation was a conversion do _not_ mark a
425 resulting constant with TREE_OVERFLOW if the original
426 constant was not. These conversions have implementation
427 defined behavior and retaining the TREE_OVERFLOW flag
428 here would confuse later passes such as VRP. */
429 if (CONVERT_EXPR_CODE_P (subcode)
430 && TREE_CODE (result) == INTEGER_CST
431 && TREE_CODE (rhs) == INTEGER_CST)
432 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
433
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
436 return result;
437 }
438 }
439 break;
440
441 case GIMPLE_BINARY_RHS:
442 /* Try to canonicalize for boolean-typed X the comparisons
443 X == 0, X == 1, X != 0, and X != 1. */
444 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
445 || gimple_assign_rhs_code (stmt) == NE_EXPR)
446 {
447 tree lhs = gimple_assign_lhs (stmt);
448 tree op1 = gimple_assign_rhs1 (stmt);
449 tree op2 = gimple_assign_rhs2 (stmt);
450 tree type = TREE_TYPE (op1);
451
 452           /* Check whether the comparison operands are of the same boolean
 453              type as the result type.
 454              Check that the second operand is an integer constant with value
 455              one or zero.  */
456 if (TREE_CODE (op2) == INTEGER_CST
457 && (integer_zerop (op2) || integer_onep (op2))
458 && useless_type_conversion_p (TREE_TYPE (lhs), type))
459 {
460 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
461 bool is_logical_not = false;
462
 463               /* X == 0 and X != 1 is a logical-not of X,
 464                  X == 1 and X != 0 is X itself.  */
465 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
466 || (cmp_code == NE_EXPR && integer_onep (op2)))
467 is_logical_not = true;
468
469 if (is_logical_not == false)
470 result = op1;
 471               /* Only for one-bit precision typed X is the transformation
 472                  !X -> ~X valid.  */
473 else if (TYPE_PRECISION (type) == 1)
474 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
475 type, op1);
476 /* Otherwise we use !X -> X ^ 1. */
477 else
478 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
479 type, op1, build_int_cst (type, 1));
480
481 }
482 }
483
484 if (!result)
485 result = fold_binary_loc (loc, subcode,
486 TREE_TYPE (gimple_assign_lhs (stmt)),
487 gimple_assign_rhs1 (stmt),
488 gimple_assign_rhs2 (stmt));
489
490 if (result)
491 {
492 STRIP_USELESS_TYPE_CONVERSION (result);
493 if (valid_gimple_rhs_p (result))
494 return result;
495 }
496 break;
497
498 case GIMPLE_TERNARY_RHS:
499 /* Try to fold a conditional expression. */
500 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
501 {
502 tree op0 = gimple_assign_rhs1 (stmt);
503 tree tem;
504 bool set = false;
505 location_t cond_loc = gimple_location (stmt);
506
507 if (COMPARISON_CLASS_P (op0))
508 {
509 fold_defer_overflow_warnings ();
510 tem = fold_binary_loc (cond_loc,
511 TREE_CODE (op0), TREE_TYPE (op0),
512 TREE_OPERAND (op0, 0),
513 TREE_OPERAND (op0, 1));
514 /* This is actually a conditional expression, not a GIMPLE
 515                conditional statement; however, the valid_gimple_rhs_p
516 test still applies. */
517 set = (tem && is_gimple_condexpr (tem)
518 && valid_gimple_rhs_p (tem));
519 fold_undefer_overflow_warnings (set, stmt, 0);
520 }
521 else if (is_gimple_min_invariant (op0))
522 {
523 tem = op0;
524 set = true;
525 }
526 else
527 return NULL_TREE;
528
529 if (set)
530 result = fold_build3_loc (cond_loc, COND_EXPR,
531 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
532 gimple_assign_rhs2 (stmt),
533 gimple_assign_rhs3 (stmt));
534 }
535
536 if (!result)
537 result = fold_ternary_loc (loc, subcode,
538 TREE_TYPE (gimple_assign_lhs (stmt)),
539 gimple_assign_rhs1 (stmt),
540 gimple_assign_rhs2 (stmt),
541 gimple_assign_rhs3 (stmt));
542
543 if (result)
544 {
545 STRIP_USELESS_TYPE_CONVERSION (result);
546 if (valid_gimple_rhs_p (result))
547 return result;
548 }
549 break;
550
551 case GIMPLE_INVALID_RHS:
552 gcc_unreachable ();
553 }
554
555 return NULL_TREE;
556 }
557
558 /* Attempt to fold a conditional statement. Return true if any changes were
559 made. We only attempt to fold the condition expression, and do not perform
560 any transformation that would require alteration of the cfg. It is
561 assumed that the operands have been previously folded. */
562
563 static bool
564 fold_gimple_cond (gimple stmt)
565 {
566 tree result = fold_binary_loc (gimple_location (stmt),
567 gimple_cond_code (stmt),
568 boolean_type_node,
569 gimple_cond_lhs (stmt),
570 gimple_cond_rhs (stmt));
571
572 if (result)
573 {
574 STRIP_USELESS_TYPE_CONVERSION (result);
575 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
576 {
577 gimple_cond_set_condition_from_tree (stmt, result);
578 return true;
579 }
580 }
581
582 return false;
583 }
584
585 /* Convert EXPR into a GIMPLE value suitable for substitution on the
586 RHS of an assignment. Insert the necessary statements before
 587    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
 588    is replaced.  If the call is expected to produce a result, then it
589 is replaced by an assignment of the new RHS to the result variable.
590 If the result is to be ignored, then the call is replaced by a
591 GIMPLE_NOP. A proper VDEF chain is retained by making the first
592 VUSE and the last VDEF of the whole sequence be the same as the replaced
593 statement and using new SSA names for stores in between. */
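 
 /* A sketch of the virtual operand rewiring done below: if the replaced call
    had VUSE .MEM_1 and VDEF .MEM_2, the first store of the gimplified
    sequence receives VUSE .MEM_1, intermediate stores get fresh virtual SSA
    names, and the last store reuses .MEM_2 as its VDEF, so downstream uses
    of .MEM_2 stay valid without a full SSA update.  */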
594
595 void
596 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
597 {
598 tree lhs;
599 gimple stmt, new_stmt;
600 gimple_stmt_iterator i;
601 gimple_seq stmts = NULL;
602 struct gimplify_ctx gctx;
603 gimple laststore;
604 tree reaching_vuse;
605
606 stmt = gsi_stmt (*si_p);
607
608 gcc_assert (is_gimple_call (stmt));
609
610 push_gimplify_context (&gctx);
611 gctx.into_ssa = gimple_in_ssa_p (cfun);
612
613 lhs = gimple_call_lhs (stmt);
614 if (lhs == NULL_TREE)
615 {
616 gimplify_and_add (expr, &stmts);
617 /* We can end up with folding a memcpy of an empty class assignment
618 which gets optimized away by C++ gimplification. */
619 if (gimple_seq_empty_p (stmts))
620 {
621 pop_gimplify_context (NULL);
622 if (gimple_in_ssa_p (cfun))
623 {
624 unlink_stmt_vdef (stmt);
625 release_defs (stmt);
626 }
627 gsi_replace (si_p, gimple_build_nop (), true);
628 return;
629 }
630 }
631 else
632 {
633 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
634 new_stmt = gimple_build_assign (lhs, tmp);
635 i = gsi_last (stmts);
636 gsi_insert_after_without_update (&i, new_stmt,
637 GSI_CONTINUE_LINKING);
638 }
639
640 pop_gimplify_context (NULL);
641
642 if (gimple_has_location (stmt))
643 annotate_all_with_location (stmts, gimple_location (stmt));
644
645 /* First iterate over the replacement statements backward, assigning
646 virtual operands to their defining statements. */
647 laststore = NULL;
648 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
649 {
650 new_stmt = gsi_stmt (i);
651 if ((gimple_assign_single_p (new_stmt)
652 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
653 || (is_gimple_call (new_stmt)
654 && (gimple_call_flags (new_stmt)
655 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
656 {
657 tree vdef;
658 if (!laststore)
659 vdef = gimple_vdef (stmt);
660 else
661 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
662 gimple_set_vdef (new_stmt, vdef);
663 if (vdef && TREE_CODE (vdef) == SSA_NAME)
664 SSA_NAME_DEF_STMT (vdef) = new_stmt;
665 laststore = new_stmt;
666 }
667 }
668
669 /* Second iterate over the statements forward, assigning virtual
670 operands to their uses. */
671 reaching_vuse = gimple_vuse (stmt);
672 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
673 {
674 new_stmt = gsi_stmt (i);
 675       /* If the new statement possibly has a VUSE, update it with the exact SSA
 676          name we know will reach this one.  */
677 if (gimple_has_mem_ops (new_stmt))
678 gimple_set_vuse (new_stmt, reaching_vuse);
679 gimple_set_modified (new_stmt, true);
680 if (gimple_vdef (new_stmt))
681 reaching_vuse = gimple_vdef (new_stmt);
682 }
683
 684   /* If the new sequence does not do a store, release the virtual
685 definition of the original statement. */
686 if (reaching_vuse
687 && reaching_vuse == gimple_vuse (stmt))
688 {
689 tree vdef = gimple_vdef (stmt);
690 if (vdef
691 && TREE_CODE (vdef) == SSA_NAME)
692 {
693 unlink_stmt_vdef (stmt);
694 release_ssa_name (vdef);
695 }
696 }
697
698 /* Finally replace the original statement with the sequence. */
699 gsi_replace_with_seq (si_p, stmts, false);
700 }
701
702 /* Return the string length, maximum string length or maximum value of
703 ARG in LENGTH.
704 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
705 is not NULL and, for TYPE == 0, its value is not equal to the length
706 we determine or if we are unable to determine the length or value,
707 return false. VISITED is a bitmap of visited variables.
708 TYPE is 0 if string length should be returned, 1 for maximum string
709 length and 2 for maximum value ARG can have. */
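 
 /* A worked sketch: if ARG is a PHI merging the string literals "foo" and
    "ab", then TYPE == 0 fails because the two lengths 3 and 2 differ, while
    TYPE == 1 succeeds and records 3, the maximum of the per-argument
    lengths.  */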
710
711 static bool
712 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
713 {
714 tree var, val;
715 gimple def_stmt;
716
717 if (TREE_CODE (arg) != SSA_NAME)
718 {
719 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
720 if (TREE_CODE (arg) == ADDR_EXPR
721 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
722 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
723 {
724 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
725 if (TREE_CODE (aop0) == INDIRECT_REF
726 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
727 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
728 length, visited, type);
729 }
730
731 if (type == 2)
732 {
733 val = arg;
734 if (TREE_CODE (val) != INTEGER_CST
735 || tree_int_cst_sgn (val) < 0)
736 return false;
737 }
738 else
739 val = c_strlen (arg, 1);
740 if (!val)
741 return false;
742
743 if (*length)
744 {
745 if (type > 0)
746 {
747 if (TREE_CODE (*length) != INTEGER_CST
748 || TREE_CODE (val) != INTEGER_CST)
749 return false;
750
751 if (tree_int_cst_lt (*length, val))
752 *length = val;
753 return true;
754 }
755 else if (simple_cst_equal (val, *length) != 1)
756 return false;
757 }
758
759 *length = val;
760 return true;
761 }
762
763 /* If ARG is registered for SSA update we cannot look at its defining
764 statement. */
765 if (name_registered_for_update_p (arg))
766 return false;
767
768 /* If we were already here, break the infinite cycle. */
769 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
770 return true;
771
772 var = arg;
773 def_stmt = SSA_NAME_DEF_STMT (var);
774
775 switch (gimple_code (def_stmt))
776 {
777 case GIMPLE_ASSIGN:
778 /* The RHS of the statement defining VAR must either have a
779 constant length or come from another SSA_NAME with a constant
780 length. */
781 if (gimple_assign_single_p (def_stmt)
782 || gimple_assign_unary_nop_p (def_stmt))
783 {
784 tree rhs = gimple_assign_rhs1 (def_stmt);
785 return get_maxval_strlen (rhs, length, visited, type);
786 }
787 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
788 {
789 tree op2 = gimple_assign_rhs2 (def_stmt);
790 tree op3 = gimple_assign_rhs3 (def_stmt);
791 return get_maxval_strlen (op2, length, visited, type)
792 && get_maxval_strlen (op3, length, visited, type);
793 }
794 return false;
795
796 case GIMPLE_PHI:
797 {
798 /* All the arguments of the PHI node must have the same constant
799 length. */
800 unsigned i;
801
802 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
803 {
804 tree arg = gimple_phi_arg (def_stmt, i)->def;
805
806 /* If this PHI has itself as an argument, we cannot
807 determine the string length of this argument. However,
808 if we can find a constant string length for the other
809 PHI args then we can still be sure that this is a
810 constant string length. So be optimistic and just
811 continue with the next argument. */
812 if (arg == gimple_phi_result (def_stmt))
813 continue;
814
815 if (!get_maxval_strlen (arg, length, visited, type))
816 return false;
817 }
818 }
819 return true;
820
821 default:
822 return false;
823 }
824 }
825
826
827 /* Fold builtin call in statement STMT. Returns a simplified tree.
828 We may return a non-constant expression, including another call
829 to a different function and with different arguments, e.g.,
830 substituting memcpy for strcpy when the string length is known.
831 Note that some builtins expand into inline code that may not
832 be valid in GIMPLE. Callers must take care. */
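 
 /* For example, as a sketch of the strcpy case: when get_maxval_strlen
    proves that the source of strcpy (dst, src) has length 3, the call can
    be rewritten as memcpy (dst, src, 4), copying the terminating NUL as
    well.  */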
833
834 tree
835 gimple_fold_builtin (gimple stmt)
836 {
837 tree result, val[3];
838 tree callee, a;
839 int arg_idx, type;
840 bitmap visited;
841 bool ignore;
842 int nargs;
843 location_t loc = gimple_location (stmt);
844
845 gcc_assert (is_gimple_call (stmt));
846
847 ignore = (gimple_call_lhs (stmt) == NULL);
848
849 /* First try the generic builtin folder. If that succeeds, return the
850 result directly. */
851 result = fold_call_stmt (stmt, ignore);
852 if (result)
853 {
854 if (ignore)
855 STRIP_NOPS (result);
856 return result;
857 }
858
859 /* Ignore MD builtins. */
860 callee = gimple_call_fndecl (stmt);
861 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
862 return NULL_TREE;
863
864 /* Give up for always_inline inline builtins until they are
865 inlined. */
866 if (avoid_folding_inline_builtin (callee))
867 return NULL_TREE;
868
869 /* If the builtin could not be folded, and it has no argument list,
870 we're done. */
871 nargs = gimple_call_num_args (stmt);
872 if (nargs == 0)
873 return NULL_TREE;
874
 875   /* Limit the work to the builtins we know how to simplify.  */
876 switch (DECL_FUNCTION_CODE (callee))
877 {
878 case BUILT_IN_STRLEN:
879 case BUILT_IN_FPUTS:
880 case BUILT_IN_FPUTS_UNLOCKED:
881 arg_idx = 0;
882 type = 0;
883 break;
884 case BUILT_IN_STRCPY:
885 case BUILT_IN_STRNCPY:
886 arg_idx = 1;
887 type = 0;
888 break;
889 case BUILT_IN_MEMCPY_CHK:
890 case BUILT_IN_MEMPCPY_CHK:
891 case BUILT_IN_MEMMOVE_CHK:
892 case BUILT_IN_MEMSET_CHK:
893 case BUILT_IN_STRNCPY_CHK:
894 case BUILT_IN_STPNCPY_CHK:
895 arg_idx = 2;
896 type = 2;
897 break;
898 case BUILT_IN_STRCPY_CHK:
899 case BUILT_IN_STPCPY_CHK:
900 arg_idx = 1;
901 type = 1;
902 break;
903 case BUILT_IN_SNPRINTF_CHK:
904 case BUILT_IN_VSNPRINTF_CHK:
905 arg_idx = 1;
906 type = 2;
907 break;
908 default:
909 return NULL_TREE;
910 }
911
912 if (arg_idx >= nargs)
913 return NULL_TREE;
914
915 /* Try to use the dataflow information gathered by the CCP process. */
916 visited = BITMAP_ALLOC (NULL);
917 bitmap_clear (visited);
918
919 memset (val, 0, sizeof (val));
920 a = gimple_call_arg (stmt, arg_idx);
921 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
922 val[arg_idx] = NULL_TREE;
923
924 BITMAP_FREE (visited);
925
926 result = NULL_TREE;
927 switch (DECL_FUNCTION_CODE (callee))
928 {
929 case BUILT_IN_STRLEN:
930 if (val[0] && nargs == 1)
931 {
932 tree new_val =
933 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
934
935 /* If the result is not a valid gimple value, or not a cast
936 of a valid gimple value, then we cannot use the result. */
937 if (is_gimple_val (new_val)
938 || (CONVERT_EXPR_P (new_val)
939 && is_gimple_val (TREE_OPERAND (new_val, 0))))
940 return new_val;
941 }
942 break;
943
944 case BUILT_IN_STRCPY:
945 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
946 result = fold_builtin_strcpy (loc, callee,
947 gimple_call_arg (stmt, 0),
948 gimple_call_arg (stmt, 1),
949 val[1]);
950 break;
951
952 case BUILT_IN_STRNCPY:
953 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
954 result = fold_builtin_strncpy (loc, callee,
955 gimple_call_arg (stmt, 0),
956 gimple_call_arg (stmt, 1),
957 gimple_call_arg (stmt, 2),
958 val[1]);
959 break;
960
961 case BUILT_IN_FPUTS:
962 if (nargs == 2)
963 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
964 gimple_call_arg (stmt, 1),
965 ignore, false, val[0]);
966 break;
967
968 case BUILT_IN_FPUTS_UNLOCKED:
969 if (nargs == 2)
970 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
971 gimple_call_arg (stmt, 1),
972 ignore, true, val[0]);
973 break;
974
975 case BUILT_IN_MEMCPY_CHK:
976 case BUILT_IN_MEMPCPY_CHK:
977 case BUILT_IN_MEMMOVE_CHK:
978 case BUILT_IN_MEMSET_CHK:
979 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
980 result = fold_builtin_memory_chk (loc, callee,
981 gimple_call_arg (stmt, 0),
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
985 val[2], ignore,
986 DECL_FUNCTION_CODE (callee));
987 break;
988
989 case BUILT_IN_STRCPY_CHK:
990 case BUILT_IN_STPCPY_CHK:
991 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
992 result = fold_builtin_stxcpy_chk (loc, callee,
993 gimple_call_arg (stmt, 0),
994 gimple_call_arg (stmt, 1),
995 gimple_call_arg (stmt, 2),
996 val[1], ignore,
997 DECL_FUNCTION_CODE (callee));
998 break;
999
1000 case BUILT_IN_STRNCPY_CHK:
1001 case BUILT_IN_STPNCPY_CHK:
1002 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1003 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1004 gimple_call_arg (stmt, 1),
1005 gimple_call_arg (stmt, 2),
1006 gimple_call_arg (stmt, 3),
1007 val[2], ignore,
1008 DECL_FUNCTION_CODE (callee));
1009 break;
1010
1011 case BUILT_IN_SNPRINTF_CHK:
1012 case BUILT_IN_VSNPRINTF_CHK:
1013 if (val[1] && is_gimple_val (val[1]))
1014 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1015 DECL_FUNCTION_CODE (callee));
1016 break;
1017
1018 default:
1019 gcc_unreachable ();
1020 }
1021
1022 if (result && ignore)
1023 result = fold_ignored_result (result);
1024 return result;
1025 }
1026
1027
1028 /* Return a binfo to be used for devirtualization of calls based on an object
1029 represented by a declaration (i.e. a global or automatically allocated one)
1030 or NULL if it cannot be found or is not safe. CST is expected to be an
 1031    ADDR_EXPR of such an object or the function will return NULL.  Currently it is
 1032    safe to use such a binfo only if it has no base binfo (i.e. no ancestors).
 1033    EXPECTED_TYPE is the type of the class the virtual call belongs to. */
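 
 /* A minimal C++ sketch: given
 
      struct A { virtual int f (); };
      A a;
 
    passing &a as CST with EXPECTED_TYPE A yields the binfo of A, since A has
    no base binfos; a virtual call through &a can then be devirtualized to
    A::f.  */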
1034
1035 tree
1036 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1037 {
1038 HOST_WIDE_INT offset, size, max_size;
1039 tree base, type, binfo;
1040 bool last_artificial = false;
1041
1042 if (!flag_devirtualize
1043 || TREE_CODE (cst) != ADDR_EXPR
1044 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1045 return NULL_TREE;
1046
1047 cst = TREE_OPERAND (cst, 0);
1048 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1049 type = TREE_TYPE (base);
1050 if (!DECL_P (base)
1051 || max_size == -1
1052 || max_size != size
1053 || TREE_CODE (type) != RECORD_TYPE)
1054 return NULL_TREE;
1055
1056 /* Find the sub-object the constant actually refers to and mark whether it is
1057 an artificial one (as opposed to a user-defined one). */
1058 while (true)
1059 {
1060 HOST_WIDE_INT pos, size;
1061 tree fld;
1062
1063 if (types_same_for_odr (type, expected_type))
1064 break;
1065 if (offset < 0)
1066 return NULL_TREE;
1067
1068 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1069 {
1070 if (TREE_CODE (fld) != FIELD_DECL)
1071 continue;
1072
1073 pos = int_bit_position (fld);
1074 size = tree_low_cst (DECL_SIZE (fld), 1);
1075 if (pos <= offset && (pos + size) > offset)
1076 break;
1077 }
1078 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1079 return NULL_TREE;
1080
1081 last_artificial = DECL_ARTIFICIAL (fld);
1082 type = TREE_TYPE (fld);
1083 offset -= pos;
1084 }
1085 /* Artificial sub-objects are ancestors, we do not want to use them for
1086 devirtualization, at least not here. */
1087 if (last_artificial)
1088 return NULL_TREE;
1089 binfo = TYPE_BINFO (type);
1090 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1091 return NULL_TREE;
1092 else
1093 return binfo;
1094 }
1095
1096 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1097 The statement may be replaced by another statement, e.g., if the call
1098 simplifies to a constant value. Return true if any changes were made.
1099 It is assumed that the operands have been previously folded. */
1100
1101 static bool
1102 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1103 {
1104 gimple stmt = gsi_stmt (*gsi);
1105 tree callee;
1106 bool changed = false;
1107 unsigned i;
1108
1109 /* Fold *& in call arguments. */
1110 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1111 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1112 {
1113 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1114 if (tmp)
1115 {
1116 gimple_call_set_arg (stmt, i, tmp);
1117 changed = true;
1118 }
1119 }
1120
1121 /* Check for virtual calls that became direct calls. */
1122 callee = gimple_call_fn (stmt);
1123 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1124 {
1125 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1126 {
1127 if (dump_file && virtual_method_call_p (callee)
1128 && !possible_polymorphic_call_target_p
1129 (callee, cgraph_get_node (gimple_call_addr_fndecl
1130 (OBJ_TYPE_REF_EXPR (callee)))))
1131 {
1132 fprintf (dump_file,
1133 "Type inheritnace inconsistent devirtualization of ");
1134 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1135 fprintf (dump_file, " to ");
1136 print_generic_expr (dump_file, callee, TDF_SLIM);
1137 fprintf (dump_file, "\n");
1138 }
1139
1140 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1141 changed = true;
1142 }
1143 else if (virtual_method_call_p (callee))
1144 {
1145 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1146 tree binfo = gimple_extract_devirt_binfo_from_cst
1147 (obj, obj_type_ref_class (callee));
1148 if (binfo)
1149 {
1150 HOST_WIDE_INT token
1151 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1152 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1153 if (fndecl)
1154 {
1155 #ifdef ENABLE_CHECKING
1156 gcc_assert (possible_polymorphic_call_target_p
1157 (callee, cgraph_get_node (fndecl)));
1158
1159 #endif
1160 gimple_call_set_fndecl (stmt, fndecl);
1161 changed = true;
1162 }
1163 }
1164 }
1165 }
1166
1167 if (inplace)
1168 return changed;
1169
1170 /* Check for builtins that CCP can handle using information not
1171 available in the generic fold routines. */
1172 callee = gimple_call_fndecl (stmt);
1173 if (callee && DECL_BUILT_IN (callee))
1174 {
1175 tree result = gimple_fold_builtin (stmt);
1176 if (result)
1177 {
1178 if (!update_call_from_tree (gsi, result))
1179 gimplify_and_update_call_from_tree (gsi, result);
1180 changed = true;
1181 }
1182 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1183 changed |= targetm.gimple_fold_builtin (gsi);
1184 }
1185
1186 return changed;
1187 }
1188
1189 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1190 distinguishes both cases. */
1191
1192 static bool
1193 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1194 {
1195 bool changed = false;
1196 gimple stmt = gsi_stmt (*gsi);
1197 unsigned i;
1198
1199 /* Fold the main computation performed by the statement. */
1200 switch (gimple_code (stmt))
1201 {
1202 case GIMPLE_ASSIGN:
1203 {
1204 unsigned old_num_ops = gimple_num_ops (stmt);
1205 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1206 tree lhs = gimple_assign_lhs (stmt);
1207 tree new_rhs;
1208 /* First canonicalize operand order. This avoids building new
1209 trees if this is the only thing fold would later do. */
1210 if ((commutative_tree_code (subcode)
1211 || commutative_ternary_tree_code (subcode))
1212 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1213 gimple_assign_rhs2 (stmt), false))
1214 {
1215 tree tem = gimple_assign_rhs1 (stmt);
1216 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1217 gimple_assign_set_rhs2 (stmt, tem);
1218 changed = true;
1219 }
1220 new_rhs = fold_gimple_assign (gsi);
1221 if (new_rhs
1222 && !useless_type_conversion_p (TREE_TYPE (lhs),
1223 TREE_TYPE (new_rhs)))
1224 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1225 if (new_rhs
1226 && (!inplace
1227 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1228 {
1229 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1230 changed = true;
1231 }
1232 break;
1233 }
1234
1235 case GIMPLE_COND:
1236 changed |= fold_gimple_cond (stmt);
1237 break;
1238
1239 case GIMPLE_CALL:
1240 changed |= gimple_fold_call (gsi, inplace);
1241 break;
1242
1243 case GIMPLE_ASM:
1244 /* Fold *& in asm operands. */
1245 {
1246 size_t noutputs;
1247 const char **oconstraints;
1248 const char *constraint;
1249 bool allows_mem, allows_reg;
1250
1251 noutputs = gimple_asm_noutputs (stmt);
1252 oconstraints = XALLOCAVEC (const char *, noutputs);
1253
1254 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1255 {
1256 tree link = gimple_asm_output_op (stmt, i);
1257 tree op = TREE_VALUE (link);
1258 oconstraints[i]
1259 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1260 if (REFERENCE_CLASS_P (op)
1261 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1262 {
1263 TREE_VALUE (link) = op;
1264 changed = true;
1265 }
1266 }
1267 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1268 {
1269 tree link = gimple_asm_input_op (stmt, i);
1270 tree op = TREE_VALUE (link);
1271 constraint
1272 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1273 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1274 oconstraints, &allows_mem, &allows_reg);
1275 if (REFERENCE_CLASS_P (op)
1276 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1277 != NULL_TREE)
1278 {
1279 TREE_VALUE (link) = op;
1280 changed = true;
1281 }
1282 }
1283 }
1284 break;
1285
1286 case GIMPLE_DEBUG:
1287 if (gimple_debug_bind_p (stmt))
1288 {
1289 tree val = gimple_debug_bind_get_value (stmt);
1290 if (val
1291 && REFERENCE_CLASS_P (val))
1292 {
1293 tree tem = maybe_fold_reference (val, false);
1294 if (tem)
1295 {
1296 gimple_debug_bind_set_value (stmt, tem);
1297 changed = true;
1298 }
1299 }
1300 else if (val
1301 && TREE_CODE (val) == ADDR_EXPR)
1302 {
1303 tree ref = TREE_OPERAND (val, 0);
1304 tree tem = maybe_fold_reference (ref, false);
1305 if (tem)
1306 {
1307 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1308 gimple_debug_bind_set_value (stmt, tem);
1309 changed = true;
1310 }
1311 }
1312 }
1313 break;
1314
1315 default:;
1316 }
1317
1318 stmt = gsi_stmt (*gsi);
1319
1320 /* Fold *& on the lhs. */
1321 if (gimple_has_lhs (stmt))
1322 {
1323 tree lhs = gimple_get_lhs (stmt);
1324 if (lhs && REFERENCE_CLASS_P (lhs))
1325 {
1326 tree new_lhs = maybe_fold_reference (lhs, true);
1327 if (new_lhs)
1328 {
1329 gimple_set_lhs (stmt, new_lhs);
1330 changed = true;
1331 }
1332 }
1333 }
1334
1335 return changed;
1336 }
1337
1338 /* Fold the statement pointed to by GSI. In some cases, this function may
1339 replace the whole statement with a new one. Returns true iff folding
1340 makes any changes.
1341 The statement pointed to by GSI should be in valid gimple form but may
 1342    be in an unfolded state resulting from, for example, constant propagation,
1343 which can produce *&x = 0. */
1344
1345 bool
1346 fold_stmt (gimple_stmt_iterator *gsi)
1347 {
1348 return fold_stmt_1 (gsi, false);
1349 }
1350
1351 /* Perform the minimal folding on statement *GSI. Only operations like
1352 *&x created by constant propagation are handled. The statement cannot
1353 be replaced with a new one. Return true if the statement was
1354 changed, false otherwise.
1355 The statement *GSI should be in valid gimple form but may
 1356    be in an unfolded state resulting from, for example, constant propagation,
1357 which can produce *&x = 0. */
1358
1359 bool
1360 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1361 {
1362 gimple stmt = gsi_stmt (*gsi);
1363 bool changed = fold_stmt_1 (gsi, true);
1364 gcc_assert (gsi_stmt (*gsi) == stmt);
1365 return changed;
1366 }
1367
1368 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1369 if EXPR is null or we don't know how.
1370 If non-null, the result always has boolean type. */
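 
 /* A few illustrative results (a sketch): with INVERT true, the comparison
    a < b becomes a >= b, an SSA name x becomes x == 0, and constant operands
    flip between boolean_true_node and boolean_false_node.  */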
1371
1372 static tree
1373 canonicalize_bool (tree expr, bool invert)
1374 {
1375 if (!expr)
1376 return NULL_TREE;
1377 else if (invert)
1378 {
1379 if (integer_nonzerop (expr))
1380 return boolean_false_node;
1381 else if (integer_zerop (expr))
1382 return boolean_true_node;
1383 else if (TREE_CODE (expr) == SSA_NAME)
1384 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1385 build_int_cst (TREE_TYPE (expr), 0));
1386 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1387 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1388 boolean_type_node,
1389 TREE_OPERAND (expr, 0),
1390 TREE_OPERAND (expr, 1));
1391 else
1392 return NULL_TREE;
1393 }
1394 else
1395 {
1396 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1397 return expr;
1398 if (integer_nonzerop (expr))
1399 return boolean_true_node;
1400 else if (integer_zerop (expr))
1401 return boolean_false_node;
1402 else if (TREE_CODE (expr) == SSA_NAME)
1403 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1404 build_int_cst (TREE_TYPE (expr), 0));
1405 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1406 return fold_build2 (TREE_CODE (expr),
1407 boolean_type_node,
1408 TREE_OPERAND (expr, 0),
1409 TREE_OPERAND (expr, 1));
1410 else
1411 return NULL_TREE;
1412 }
1413 }
1414
1415 /* Check to see if a boolean expression EXPR is logically equivalent to the
1416 comparison (OP1 CODE OP2). Check for various identities involving
1417 SSA_NAMEs. */
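 
 /* For instance (a sketch): if boolean t_1 is defined by t_1 = a < b, then
    EXPR == t_1 is recognized as equivalent to the comparison (a < b), and
    t_1 is likewise equivalent to (t_1 != 0).  */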
1418
1419 static bool
1420 same_bool_comparison_p (const_tree expr, enum tree_code code,
1421 const_tree op1, const_tree op2)
1422 {
1423 gimple s;
1424
1425 /* The obvious case. */
1426 if (TREE_CODE (expr) == code
1427 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1428 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1429 return true;
1430
1431 /* Check for comparing (name, name != 0) and the case where expr
1432 is an SSA_NAME with a definition matching the comparison. */
1433 if (TREE_CODE (expr) == SSA_NAME
1434 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1435 {
1436 if (operand_equal_p (expr, op1, 0))
1437 return ((code == NE_EXPR && integer_zerop (op2))
1438 || (code == EQ_EXPR && integer_nonzerop (op2)));
1439 s = SSA_NAME_DEF_STMT (expr);
1440 if (is_gimple_assign (s)
1441 && gimple_assign_rhs_code (s) == code
1442 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1443 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1444 return true;
1445 }
1446
1447 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1448 of name is a comparison, recurse. */
1449 if (TREE_CODE (op1) == SSA_NAME
1450 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1451 {
1452 s = SSA_NAME_DEF_STMT (op1);
1453 if (is_gimple_assign (s)
1454 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1455 {
1456 enum tree_code c = gimple_assign_rhs_code (s);
1457 if ((c == NE_EXPR && integer_zerop (op2))
1458 || (c == EQ_EXPR && integer_nonzerop (op2)))
1459 return same_bool_comparison_p (expr, c,
1460 gimple_assign_rhs1 (s),
1461 gimple_assign_rhs2 (s));
1462 if ((c == EQ_EXPR && integer_zerop (op2))
1463 || (c == NE_EXPR && integer_nonzerop (op2)))
1464 return same_bool_comparison_p (expr,
1465 invert_tree_comparison (c, false),
1466 gimple_assign_rhs1 (s),
1467 gimple_assign_rhs2 (s));
1468 }
1469 }
1470 return false;
1471 }
1472
1473 /* Check to see if two boolean expressions OP1 and OP2 are logically
1474 equivalent. */
1475
1476 static bool
1477 same_bool_result_p (const_tree op1, const_tree op2)
1478 {
1479 /* Simple cases first. */
1480 if (operand_equal_p (op1, op2, 0))
1481 return true;
1482
1483 /* Check the cases where at least one of the operands is a comparison.
1484 These are a bit smarter than operand_equal_p in that they apply some
 1485      identities on SSA_NAMEs.  */
1486 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1487 && same_bool_comparison_p (op1, TREE_CODE (op2),
1488 TREE_OPERAND (op2, 0),
1489 TREE_OPERAND (op2, 1)))
1490 return true;
1491 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1492 && same_bool_comparison_p (op2, TREE_CODE (op1),
1493 TREE_OPERAND (op1, 0),
1494 TREE_OPERAND (op1, 1)))
1495 return true;
1496
1497 /* Default case. */
1498 return false;
1499 }
1500
1501 /* Forward declarations for some mutually recursive functions. */
1502
1503 static tree
1504 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1505 enum tree_code code2, tree op2a, tree op2b);
1506 static tree
1507 and_var_with_comparison (tree var, bool invert,
1508 enum tree_code code2, tree op2a, tree op2b);
1509 static tree
1510 and_var_with_comparison_1 (gimple stmt,
1511 enum tree_code code2, tree op2a, tree op2b);
1512 static tree
1513 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1514 enum tree_code code2, tree op2a, tree op2b);
1515 static tree
1516 or_var_with_comparison (tree var, bool invert,
1517 enum tree_code code2, tree op2a, tree op2b);
1518 static tree
1519 or_var_with_comparison_1 (gimple stmt,
1520 enum tree_code code2, tree op2a, tree op2b);
1521
1522 /* Helper function for and_comparisons_1: try to simplify the AND of the
1523 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
 1524    If INVERT is true, invert the value of VAR before doing the AND.
 1525    Return NULL_TREE if we can't simplify this to a single expression.  */
1526
1527 static tree
1528 and_var_with_comparison (tree var, bool invert,
1529 enum tree_code code2, tree op2a, tree op2b)
1530 {
1531 tree t;
1532 gimple stmt = SSA_NAME_DEF_STMT (var);
1533
1534 /* We can only deal with variables whose definitions are assignments. */
1535 if (!is_gimple_assign (stmt))
1536 return NULL_TREE;
1537
1538 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1539 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1540 Then we only have to consider the simpler non-inverted cases. */
1541 if (invert)
1542 t = or_var_with_comparison_1 (stmt,
1543 invert_tree_comparison (code2, false),
1544 op2a, op2b);
1545 else
1546 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1547 return canonicalize_bool (t, invert);
1548 }
1549
1550 /* Try to simplify the AND of the ssa variable defined by the assignment
1551 STMT with the comparison specified by (OP2A CODE2 OP2B).
 1552    Return NULL_TREE if we can't simplify this to a single expression.  */
1553
1554 static tree
1555 and_var_with_comparison_1 (gimple stmt,
1556 enum tree_code code2, tree op2a, tree op2b)
1557 {
1558 tree var = gimple_assign_lhs (stmt);
1559 tree true_test_var = NULL_TREE;
1560 tree false_test_var = NULL_TREE;
1561 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1562
1563 /* Check for identities like (var AND (var == 0)) => false. */
1564 if (TREE_CODE (op2a) == SSA_NAME
1565 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1566 {
1567 if ((code2 == NE_EXPR && integer_zerop (op2b))
1568 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1569 {
1570 true_test_var = op2a;
1571 if (var == true_test_var)
1572 return var;
1573 }
1574 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1575 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1576 {
1577 false_test_var = op2a;
1578 if (var == false_test_var)
1579 return boolean_false_node;
1580 }
1581 }
1582
1583 /* If the definition is a comparison, recurse on it. */
1584 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1585 {
1586 tree t = and_comparisons_1 (innercode,
1587 gimple_assign_rhs1 (stmt),
1588 gimple_assign_rhs2 (stmt),
1589 code2,
1590 op2a,
1591 op2b);
1592 if (t)
1593 return t;
1594 }
1595
1596 /* If the definition is an AND or OR expression, we may be able to
1597 simplify by reassociating. */
1598 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1599 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1600 {
1601 tree inner1 = gimple_assign_rhs1 (stmt);
1602 tree inner2 = gimple_assign_rhs2 (stmt);
1603 gimple s;
1604 tree t;
1605 tree partial = NULL_TREE;
1606 bool is_and = (innercode == BIT_AND_EXPR);
1607
1608 /* Check for boolean identities that don't require recursive examination
1609 of inner1/inner2:
1610 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1611 inner1 AND (inner1 OR inner2) => inner1
1612 !inner1 AND (inner1 AND inner2) => false
1613 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1614 Likewise for similar cases involving inner2. */
1615 if (inner1 == true_test_var)
1616 return (is_and ? var : inner1);
1617 else if (inner2 == true_test_var)
1618 return (is_and ? var : inner2);
1619 else if (inner1 == false_test_var)
1620 return (is_and
1621 ? boolean_false_node
1622 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1623 else if (inner2 == false_test_var)
1624 return (is_and
1625 ? boolean_false_node
1626 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1627
1628 /* Next, redistribute/reassociate the AND across the inner tests.
1629 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1630 if (TREE_CODE (inner1) == SSA_NAME
1631 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1632 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1633 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1634 gimple_assign_rhs1 (s),
1635 gimple_assign_rhs2 (s),
1636 code2, op2a, op2b)))
1637 {
1638 /* Handle the AND case, where we are reassociating:
1639 (inner1 AND inner2) AND (op2a code2 op2b)
1640 => (t AND inner2)
1641 If the partial result t is a constant, we win. Otherwise
1642 continue on to try reassociating with the other inner test. */
1643 if (is_and)
1644 {
1645 if (integer_onep (t))
1646 return inner2;
1647 else if (integer_zerop (t))
1648 return boolean_false_node;
1649 }
1650
1651 /* Handle the OR case, where we are redistributing:
1652 (inner1 OR inner2) AND (op2a code2 op2b)
1653 => (t OR (inner2 AND (op2a code2 op2b))) */
1654 else if (integer_onep (t))
1655 return boolean_true_node;
1656
1657 /* Save partial result for later. */
1658 partial = t;
1659 }
1660
1661 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1662 if (TREE_CODE (inner2) == SSA_NAME
1663 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1664 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1665 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1666 gimple_assign_rhs1 (s),
1667 gimple_assign_rhs2 (s),
1668 code2, op2a, op2b)))
1669 {
1670 /* Handle the AND case, where we are reassociating:
1671 (inner1 AND inner2) AND (op2a code2 op2b)
1672 => (inner1 AND t) */
1673 if (is_and)
1674 {
1675 if (integer_onep (t))
1676 return inner1;
1677 else if (integer_zerop (t))
1678 return boolean_false_node;
1679 /* If both are the same, we can apply the identity
1680 (x AND x) == x. */
1681 else if (partial && same_bool_result_p (t, partial))
1682 return t;
1683 }
1684
 1685 	  /* Handle the OR case, where we are redistributing:
1686 (inner1 OR inner2) AND (op2a code2 op2b)
1687 => (t OR (inner1 AND (op2a code2 op2b)))
1688 => (t OR partial) */
1689 else
1690 {
1691 if (integer_onep (t))
1692 return boolean_true_node;
1693 else if (partial)
1694 {
1695 /* We already got a simplification for the other
1696 operand to the redistributed OR expression. The
1697 interesting case is when at least one is false.
1698 Or, if both are the same, we can apply the identity
1699 (x OR x) == x. */
1700 if (integer_zerop (partial))
1701 return t;
1702 else if (integer_zerop (t))
1703 return partial;
1704 else if (same_bool_result_p (t, partial))
1705 return t;
1706 }
1707 }
1708 }
1709 }
1710 return NULL_TREE;
1711 }
1712
1713 /* Try to simplify the AND of two comparisons defined by
1714 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1715 If this can be done without constructing an intermediate value,
1716 return the resulting tree; otherwise NULL_TREE is returned.
1717 This function is deliberately asymmetric as it recurses on SSA_DEFs
1718 in the first comparison but not the second. */
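 
 /* Some worked results of the merging below (a sketch, assuming an integer
    x):
 
      (x < 3)  AND (x < 5)   =>  x < 3        (more restrictive bound)
      (x <= 5) AND (x >= 5)  =>  x == 5       (singleton range)
      (x < 2)  AND (x > 7)   =>  false        (disjoint ranges)  */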
1719
1720 static tree
1721 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1722 enum tree_code code2, tree op2a, tree op2b)
1723 {
1724 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1725
1726 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1727 if (operand_equal_p (op1a, op2a, 0)
1728 && operand_equal_p (op1b, op2b, 0))
1729 {
1730 /* Result will be either NULL_TREE, or a combined comparison. */
1731 tree t = combine_comparisons (UNKNOWN_LOCATION,
1732 TRUTH_ANDIF_EXPR, code1, code2,
1733 truth_type, op1a, op1b);
1734 if (t)
1735 return t;
1736 }
1737
1738 /* Likewise the swapped case of the above. */
1739 if (operand_equal_p (op1a, op2b, 0)
1740 && operand_equal_p (op1b, op2a, 0))
1741 {
1742 /* Result will be either NULL_TREE, or a combined comparison. */
1743 tree t = combine_comparisons (UNKNOWN_LOCATION,
1744 TRUTH_ANDIF_EXPR, code1,
1745 swap_tree_comparison (code2),
1746 truth_type, op1a, op1b);
1747 if (t)
1748 return t;
1749 }
1750
1751 /* If both comparisons are of the same value against constants, we might
1752 be able to merge them. */
1753 if (operand_equal_p (op1a, op2a, 0)
1754 && TREE_CODE (op1b) == INTEGER_CST
1755 && TREE_CODE (op2b) == INTEGER_CST)
1756 {
1757 int cmp = tree_int_cst_compare (op1b, op2b);
1758
1759 /* If we have (op1a == op1b), we should either be able to
1760 return that or FALSE, depending on whether the constant op1b
1761 also satisfies the other comparison against op2b. */
1762 if (code1 == EQ_EXPR)
1763 {
1764 bool done = true;
1765 bool val;
1766 switch (code2)
1767 {
1768 case EQ_EXPR: val = (cmp == 0); break;
1769 case NE_EXPR: val = (cmp != 0); break;
1770 case LT_EXPR: val = (cmp < 0); break;
1771 case GT_EXPR: val = (cmp > 0); break;
1772 case LE_EXPR: val = (cmp <= 0); break;
1773 case GE_EXPR: val = (cmp >= 0); break;
1774 default: done = false;
1775 }
1776 if (done)
1777 {
1778 if (val)
1779 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1780 else
1781 return boolean_false_node;
1782 }
1783 }
1784 /* Likewise if the second comparison is an == comparison. */
1785 else if (code2 == EQ_EXPR)
1786 {
1787 bool done = true;
1788 bool val;
1789 switch (code1)
1790 {
1791 case EQ_EXPR: val = (cmp == 0); break;
1792 case NE_EXPR: val = (cmp != 0); break;
1793 case LT_EXPR: val = (cmp > 0); break;
1794 case GT_EXPR: val = (cmp < 0); break;
1795 case LE_EXPR: val = (cmp >= 0); break;
1796 case GE_EXPR: val = (cmp <= 0); break;
1797 default: done = false;
1798 }
1799 if (done)
1800 {
1801 if (val)
1802 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1803 else
1804 return boolean_false_node;
1805 }
1806 }
1807
1808 /* Same business with inequality tests. */
1809 else if (code1 == NE_EXPR)
1810 {
1811 bool val;
1812 switch (code2)
1813 {
1814 case EQ_EXPR: val = (cmp != 0); break;
1815 case NE_EXPR: val = (cmp == 0); break;
1816 case LT_EXPR: val = (cmp >= 0); break;
1817 case GT_EXPR: val = (cmp <= 0); break;
1818 case LE_EXPR: val = (cmp > 0); break;
1819 case GE_EXPR: val = (cmp < 0); break;
1820 default:
1821 val = false;
1822 }
1823 if (val)
1824 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1825 }
1826 else if (code2 == NE_EXPR)
1827 {
1828 bool val;
1829 switch (code1)
1830 {
1831 case EQ_EXPR: val = (cmp == 0); break;
1832 case NE_EXPR: val = (cmp != 0); break;
1833 case LT_EXPR: val = (cmp <= 0); break;
1834 case GT_EXPR: val = (cmp >= 0); break;
1835 case LE_EXPR: val = (cmp < 0); break;
1836 case GE_EXPR: val = (cmp > 0); break;
1837 default:
1838 val = false;
1839 }
1840 if (val)
1841 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1842 }
1843
1844 /* Choose the more restrictive of two < or <= comparisons. */
1845 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1846 && (code2 == LT_EXPR || code2 == LE_EXPR))
1847 {
1848 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1849 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1850 else
1851 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1852 }
1853
1854 /* Likewise choose the more restrictive of two > or >= comparisons. */
1855 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1856 && (code2 == GT_EXPR || code2 == GE_EXPR))
1857 {
1858 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1859 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1860 else
1861 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1862 }
1863
1864 /* Check for singleton ranges. */
1865 else if (cmp == 0
1866 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1867 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1868 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1869
1870 /* Check for disjoint ranges. */
1871 else if (cmp <= 0
1872 && (code1 == LT_EXPR || code1 == LE_EXPR)
1873 && (code2 == GT_EXPR || code2 == GE_EXPR))
1874 return boolean_false_node;
1875 else if (cmp >= 0
1876 && (code1 == GT_EXPR || code1 == GE_EXPR)
1877 && (code2 == LT_EXPR || code2 == LE_EXPR))
1878 return boolean_false_node;
1879 }
1880
1881 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1882 NAME's definition is a truth value. See if there are any simplifications
1883 that can be done against the NAME's definition. */
1884 if (TREE_CODE (op1a) == SSA_NAME
1885 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1886 && (integer_zerop (op1b) || integer_onep (op1b)))
1887 {
1888 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1889 || (code1 == NE_EXPR && integer_onep (op1b)));
1890 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1891 switch (gimple_code (stmt))
1892 {
1893 case GIMPLE_ASSIGN:
1894 /* Try to simplify by copy-propagating the definition. */
1895 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1896
1897 case GIMPLE_PHI:
1898 /* If every argument to the PHI produces the same result when
1899 ANDed with the second comparison, we win.
1900 Do not do this unless the type is bool since we need a bool
1901 result here anyway. */
1902 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1903 {
1904 tree result = NULL_TREE;
1905 unsigned i;
1906 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1907 {
1908 tree arg = gimple_phi_arg_def (stmt, i);
1909
1910 /* If this PHI has itself as an argument, ignore it.
1911 If all the other args produce the same result,
1912 we're still OK. */
1913 if (arg == gimple_phi_result (stmt))
1914 continue;
1915 else if (TREE_CODE (arg) == INTEGER_CST)
1916 {
1917 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1918 {
1919 if (!result)
1920 result = boolean_false_node;
1921 else if (!integer_zerop (result))
1922 return NULL_TREE;
1923 }
1924 else if (!result)
1925 result = fold_build2 (code2, boolean_type_node,
1926 op2a, op2b);
1927 else if (!same_bool_comparison_p (result,
1928 code2, op2a, op2b))
1929 return NULL_TREE;
1930 }
1931 else if (TREE_CODE (arg) == SSA_NAME
1932 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1933 {
1934 tree temp;
1935 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1936 /* In simple cases we can look through PHI nodes,
1937 but we have to be careful with loops.
1938 See PR49073. */
1939 if (! dom_info_available_p (CDI_DOMINATORS)
1940 || gimple_bb (def_stmt) == gimple_bb (stmt)
1941 || dominated_by_p (CDI_DOMINATORS,
1942 gimple_bb (def_stmt),
1943 gimple_bb (stmt)))
1944 return NULL_TREE;
1945 temp = and_var_with_comparison (arg, invert, code2,
1946 op2a, op2b);
1947 if (!temp)
1948 return NULL_TREE;
1949 else if (!result)
1950 result = temp;
1951 else if (!same_bool_result_p (result, temp))
1952 return NULL_TREE;
1953 }
1954 else
1955 return NULL_TREE;
1956 }
1957 return result;
1958 }
1959
1960 default:
1961 break;
1962 }
1963 }
1964 return NULL_TREE;
1965 }
1966
1967 /* Try to simplify the AND of two comparisons, specified by
1968 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1969 If this can be simplified to a single expression (without requiring
1970 introducing more SSA variables to hold intermediate values),
1971 return the resulting tree. Otherwise return NULL_TREE.
1972 If the result expression is non-null, it has boolean type. */
1973
1974 tree
1975 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1976 enum tree_code code2, tree op2a, tree op2b)
1977 {
1978 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1979 if (t)
1980 return t;
1981 else
1982 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1983 }
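
/* A minimal usage sketch (hypothetical caller, not part of this file),
   assuming X is an SSA name of integer type:

     tree two = build_int_cst (TREE_TYPE (x), 2);
     tree five = build_int_cst (TREE_TYPE (x), 5);
     tree t = maybe_fold_and_comparisons (GT_EXPR, x, two,
                                          GT_EXPR, x, five);
     // t is the single comparison (x > 5); the AND of (x > 2) and
     // (x > 5) collapses to the more restrictive bound.

   Likewise (x < 2) && (x > 5) yields boolean_false_node through the
   disjoint-range check in and_comparisons_1.  */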
1984
1985 /* Helper function for or_comparisons_1: try to simplify the OR of the
1986 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1987 If INVERT is true, invert the value of VAR before doing the OR.
1988 Return NULL_TREE if we can't simplify this to a single expression. */
1989
1990 static tree
1991 or_var_with_comparison (tree var, bool invert,
1992 enum tree_code code2, tree op2a, tree op2b)
1993 {
1994 tree t;
1995 gimple stmt = SSA_NAME_DEF_STMT (var);
1996
1997 /* We can only deal with variables whose definitions are assignments. */
1998 if (!is_gimple_assign (stmt))
1999 return NULL_TREE;
2000
2001 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2002 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2003 Then we only have to consider the simpler non-inverted cases. */
2004 if (invert)
2005 t = and_var_with_comparison_1 (stmt,
2006 invert_tree_comparison (code2, false),
2007 op2a, op2b);
2008 else
2009 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2010 return canonicalize_bool (t, invert);
2011 }
2012
2013 /* Try to simplify the OR of the ssa variable defined by the assignment
2014 STMT with the comparison specified by (OP2A CODE2 OP2B).
2015 Return NULL_TREE if we can't simplify this to a single expression. */
2016
2017 static tree
2018 or_var_with_comparison_1 (gimple stmt,
2019 enum tree_code code2, tree op2a, tree op2b)
2020 {
2021 tree var = gimple_assign_lhs (stmt);
2022 tree true_test_var = NULL_TREE;
2023 tree false_test_var = NULL_TREE;
2024 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2025
2026 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
2027 if (TREE_CODE (op2a) == SSA_NAME
2028 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2029 {
2030 if ((code2 == NE_EXPR && integer_zerop (op2b))
2031 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2032 {
2033 true_test_var = op2a;
2034 if (var == true_test_var)
2035 return var;
2036 }
2037 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2038 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2039 {
2040 false_test_var = op2a;
2041 if (var == false_test_var)
2042 return boolean_true_node;
2043 }
2044 }
2045
2046 /* If the definition is a comparison, recurse on it. */
2047 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2048 {
2049 tree t = or_comparisons_1 (innercode,
2050 gimple_assign_rhs1 (stmt),
2051 gimple_assign_rhs2 (stmt),
2052 code2,
2053 op2a,
2054 op2b);
2055 if (t)
2056 return t;
2057 }
2058
2059 /* If the definition is an AND or OR expression, we may be able to
2060 simplify by reassociating. */
2061 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2062 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2063 {
2064 tree inner1 = gimple_assign_rhs1 (stmt);
2065 tree inner2 = gimple_assign_rhs2 (stmt);
2066 gimple s;
2067 tree t;
2068 tree partial = NULL_TREE;
2069 bool is_or = (innercode == BIT_IOR_EXPR);
2070
2071 /* Check for boolean identities that don't require recursive examination
2072 of inner1/inner2:
2073 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2074 inner1 OR (inner1 AND inner2) => inner1
2075 !inner1 OR (inner1 OR inner2) => true
2076 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2077 */
2078 if (inner1 == true_test_var)
2079 return (is_or ? var : inner1);
2080 else if (inner2 == true_test_var)
2081 return (is_or ? var : inner2);
2082 else if (inner1 == false_test_var)
2083 return (is_or
2084 ? boolean_true_node
2085 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2086 else if (inner2 == false_test_var)
2087 return (is_or
2088 ? boolean_true_node
2089 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2090
2091 /* Next, redistribute/reassociate the OR across the inner tests.
2092 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2093 if (TREE_CODE (inner1) == SSA_NAME
2094 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2095 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2096 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2097 gimple_assign_rhs1 (s),
2098 gimple_assign_rhs2 (s),
2099 code2, op2a, op2b)))
2100 {
2101 /* Handle the OR case, where we are reassociating:
2102 (inner1 OR inner2) OR (op2a code2 op2b)
2103 => (t OR inner2)
2104 If the partial result t is a constant, we win. Otherwise
2105 continue on to try reassociating with the other inner test. */
2106 if (is_or)
2107 {
2108 if (integer_onep (t))
2109 return boolean_true_node;
2110 else if (integer_zerop (t))
2111 return inner2;
2112 }
2113
2114 /* Handle the AND case, where we are redistributing:
2115 (inner1 AND inner2) OR (op2a code2 op2b)
2116 => (t AND (inner2 OR (op2a code op2b))) */
2117 else if (integer_zerop (t))
2118 return boolean_false_node;
2119
2120 /* Save partial result for later. */
2121 partial = t;
2122 }
2123
2124 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2125 if (TREE_CODE (inner2) == SSA_NAME
2126 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2127 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2128 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2129 gimple_assign_rhs1 (s),
2130 gimple_assign_rhs2 (s),
2131 code2, op2a, op2b)))
2132 {
2133 /* Handle the OR case, where we are reassociating:
2134 (inner1 OR inner2) OR (op2a code2 op2b)
2135 => (inner1 OR t)
2136 => (t OR partial) */
2137 if (is_or)
2138 {
2139 if (integer_zerop (t))
2140 return inner1;
2141 else if (integer_onep (t))
2142 return boolean_true_node;
2143 /* If both are the same, we can apply the identity
2144 (x OR x) == x. */
2145 else if (partial && same_bool_result_p (t, partial))
2146 return t;
2147 }
2148
2149 /* Handle the AND case, where we are redistributing:
2150 (inner1 AND inner2) OR (op2a code2 op2b)
2151 => (t AND (inner1 OR (op2a code2 op2b)))
2152 => (t AND partial) */
2153 else
2154 {
2155 if (integer_zerop (t))
2156 return boolean_false_node;
2157 else if (partial)
2158 {
2159 /* We already got a simplification for the other
2160 operand to the redistributed AND expression. The
2161 interesting case is when at least one is true.
2162 Or, if both are the same, we can apply the identity
2163 (x AND x) == x. */
2164 if (integer_onep (partial))
2165 return t;
2166 else if (integer_onep (t))
2167 return partial;
2168 else if (same_bool_result_p (t, partial))
2169 return t;
2170 }
2171 }
2172 }
2173 }
2174 return NULL_TREE;
2175 }
2176
2177 /* Try to simplify the OR of two comparisons defined by
2178 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2179 If this can be done without constructing an intermediate value,
2180 return the resulting tree; otherwise NULL_TREE is returned.
2181 This function is deliberately asymmetric as it recurses on SSA_DEFs
2182 in the first comparison but not the second. */
2183
2184 static tree
2185 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2186 enum tree_code code2, tree op2a, tree op2b)
2187 {
2188 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2189
2190 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2191 if (operand_equal_p (op1a, op2a, 0)
2192 && operand_equal_p (op1b, op2b, 0))
2193 {
2194 /* Result will be either NULL_TREE, or a combined comparison. */
2195 tree t = combine_comparisons (UNKNOWN_LOCATION,
2196 TRUTH_ORIF_EXPR, code1, code2,
2197 truth_type, op1a, op1b);
2198 if (t)
2199 return t;
2200 }
2201
2202 /* Likewise the swapped case of the above. */
2203 if (operand_equal_p (op1a, op2b, 0)
2204 && operand_equal_p (op1b, op2a, 0))
2205 {
2206 /* Result will be either NULL_TREE, or a combined comparison. */
2207 tree t = combine_comparisons (UNKNOWN_LOCATION,
2208 TRUTH_ORIF_EXPR, code1,
2209 swap_tree_comparison (code2),
2210 truth_type, op1a, op1b);
2211 if (t)
2212 return t;
2213 }
2214
2215 /* If both comparisons are of the same value against constants, we might
2216 be able to merge them. */
2217 if (operand_equal_p (op1a, op2a, 0)
2218 && TREE_CODE (op1b) == INTEGER_CST
2219 && TREE_CODE (op2b) == INTEGER_CST)
2220 {
2221 int cmp = tree_int_cst_compare (op1b, op2b);
2222
2223 /* If we have (op1a != op1b), we should either be able to
2224 return that or TRUE, depending on whether the constant op1b
2225 also satisfies the other comparison against op2b. */
2226 if (code1 == NE_EXPR)
2227 {
2228 bool done = true;
2229 bool val;
2230 switch (code2)
2231 {
2232 case EQ_EXPR: val = (cmp == 0); break;
2233 case NE_EXPR: val = (cmp != 0); break;
2234 case LT_EXPR: val = (cmp < 0); break;
2235 case GT_EXPR: val = (cmp > 0); break;
2236 case LE_EXPR: val = (cmp <= 0); break;
2237 case GE_EXPR: val = (cmp >= 0); break;
2238 default: done = false;
2239 }
2240 if (done)
2241 {
2242 if (val)
2243 return boolean_true_node;
2244 else
2245 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2246 }
2247 }
2248 /* Likewise if the second comparison is a != comparison. */
2249 else if (code2 == NE_EXPR)
2250 {
2251 bool done = true;
2252 bool val;
2253 switch (code1)
2254 {
2255 case EQ_EXPR: val = (cmp == 0); break;
2256 case NE_EXPR: val = (cmp != 0); break;
2257 case LT_EXPR: val = (cmp > 0); break;
2258 case GT_EXPR: val = (cmp < 0); break;
2259 case LE_EXPR: val = (cmp >= 0); break;
2260 case GE_EXPR: val = (cmp <= 0); break;
2261 default: done = false;
2262 }
2263 if (done)
2264 {
2265 if (val)
2266 return boolean_true_node;
2267 else
2268 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2269 }
2270 }
2271
2272 /* See if an equality test is redundant with the other comparison. */
2273 else if (code1 == EQ_EXPR)
2274 {
2275 bool val;
2276 switch (code2)
2277 {
2278 case EQ_EXPR: val = (cmp == 0); break;
2279 case NE_EXPR: val = (cmp != 0); break;
2280 case LT_EXPR: val = (cmp < 0); break;
2281 case GT_EXPR: val = (cmp > 0); break;
2282 case LE_EXPR: val = (cmp <= 0); break;
2283 case GE_EXPR: val = (cmp >= 0); break;
2284 default:
2285 val = false;
2286 }
2287 if (val)
2288 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2289 }
2290 else if (code2 == EQ_EXPR)
2291 {
2292 bool val;
2293 switch (code1)
2294 {
2295 case EQ_EXPR: val = (cmp == 0); break;
2296 case NE_EXPR: val = (cmp != 0); break;
2297 case LT_EXPR: val = (cmp > 0); break;
2298 case GT_EXPR: val = (cmp < 0); break;
2299 case LE_EXPR: val = (cmp >= 0); break;
2300 case GE_EXPR: val = (cmp <= 0); break;
2301 default:
2302 val = false;
2303 }
2304 if (val)
2305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2306 }
2307
2308 /* Choose the less restrictive of two < or <= comparisons. */
2309 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2310 && (code2 == LT_EXPR || code2 == LE_EXPR))
2311 {
2312 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2313 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2314 else
2315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2316 }
2317
2318 /* Likewise choose the less restrictive of two > or >= comparisons. */
2319 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2320 && (code2 == GT_EXPR || code2 == GE_EXPR))
2321 {
2322 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2323 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2324 else
2325 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2326 }
2327
2328 /* Check for singleton ranges. */
2329 else if (cmp == 0
2330 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2331 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2332 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2333
2334 /* Check for less/greater pairs that don't restrict the range at all. */
2335 else if (cmp >= 0
2336 && (code1 == LT_EXPR || code1 == LE_EXPR)
2337 && (code2 == GT_EXPR || code2 == GE_EXPR))
2338 return boolean_true_node;
2339 else if (cmp <= 0
2340 && (code1 == GT_EXPR || code1 == GE_EXPR)
2341 && (code2 == LT_EXPR || code2 == LE_EXPR))
2342 return boolean_true_node;
2343 }
2344
2345 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2346 NAME's definition is a truth value. See if there are any simplifications
2347 that can be done against the NAME's definition. */
2348 if (TREE_CODE (op1a) == SSA_NAME
2349 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2350 && (integer_zerop (op1b) || integer_onep (op1b)))
2351 {
2352 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2353 || (code1 == NE_EXPR && integer_onep (op1b)));
2354 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2355 switch (gimple_code (stmt))
2356 {
2357 case GIMPLE_ASSIGN:
2358 /* Try to simplify by copy-propagating the definition. */
2359 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2360
2361 case GIMPLE_PHI:
2362 /* If every argument to the PHI produces the same result when
2363 ORed with the second comparison, we win.
2364 Do not do this unless the type is bool since we need a bool
2365 result here anyway. */
2366 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2367 {
2368 tree result = NULL_TREE;
2369 unsigned i;
2370 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2371 {
2372 tree arg = gimple_phi_arg_def (stmt, i);
2373
2374 /* If this PHI has itself as an argument, ignore it.
2375 If all the other args produce the same result,
2376 we're still OK. */
2377 if (arg == gimple_phi_result (stmt))
2378 continue;
2379 else if (TREE_CODE (arg) == INTEGER_CST)
2380 {
2381 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2382 {
2383 if (!result)
2384 result = boolean_true_node;
2385 else if (!integer_onep (result))
2386 return NULL_TREE;
2387 }
2388 else if (!result)
2389 result = fold_build2 (code2, boolean_type_node,
2390 op2a, op2b);
2391 else if (!same_bool_comparison_p (result,
2392 code2, op2a, op2b))
2393 return NULL_TREE;
2394 }
2395 else if (TREE_CODE (arg) == SSA_NAME
2396 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2397 {
2398 tree temp;
2399 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2400 /* In simple cases we can look through PHI nodes,
2401 but we have to be careful with loops.
2402 See PR49073. */
2403 if (! dom_info_available_p (CDI_DOMINATORS)
2404 || gimple_bb (def_stmt) == gimple_bb (stmt)
2405 || dominated_by_p (CDI_DOMINATORS,
2406 gimple_bb (def_stmt),
2407 gimple_bb (stmt)))
2408 return NULL_TREE;
2409 temp = or_var_with_comparison (arg, invert, code2,
2410 op2a, op2b);
2411 if (!temp)
2412 return NULL_TREE;
2413 else if (!result)
2414 result = temp;
2415 else if (!same_bool_result_p (result, temp))
2416 return NULL_TREE;
2417 }
2418 else
2419 return NULL_TREE;
2420 }
2421 return result;
2422 }
2423
2424 default:
2425 break;
2426 }
2427 }
2428 return NULL_TREE;
2429 }
2430
2431 /* Try to simplify the OR of two comparisons, specified by
2432 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2433 If this can be simplified to a single expression (without requiring
2434 introducing more SSA variables to hold intermediate values),
2435 return the resulting tree. Otherwise return NULL_TREE.
2436 If the result expression is non-null, it has boolean type. */
2437
2438 tree
2439 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2440 enum tree_code code2, tree op2a, tree op2b)
2441 {
2442 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2443 if (t)
2444 return t;
2445 else
2446 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2447 }
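
/* A matching sketch for the OR case (hypothetical caller): with X an
   integer SSA name,

     tree t = maybe_fold_or_comparisons (LT_EXPR, x,
                                         build_int_cst (TREE_TYPE (x), 2),
                                         LT_EXPR, x,
                                         build_int_cst (TREE_TYPE (x), 5));
     // t is (x < 5), the less restrictive bound.

   Similarly (x <= 2) || (x >= 2) folds to boolean_true_node because
   the two ranges together cover every value.  */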
2448
2449
2450 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2451
2452 Either NULL_TREE, a simplified but non-constant or a constant
2453 is returned.
2454
2455 ??? This should go into a gimple-fold-inline.h file to be eventually
2456 privatized with the single valueize function used in the various TUs
2457 to avoid the indirect function call overhead. */
2458
2459 tree
2460 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2461 {
2462 location_t loc = gimple_location (stmt);
2463 switch (gimple_code (stmt))
2464 {
2465 case GIMPLE_ASSIGN:
2466 {
2467 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2468
2469 switch (get_gimple_rhs_class (subcode))
2470 {
2471 case GIMPLE_SINGLE_RHS:
2472 {
2473 tree rhs = gimple_assign_rhs1 (stmt);
2474 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2475
2476 if (TREE_CODE (rhs) == SSA_NAME)
2477 {
2478 /* If the RHS is an SSA_NAME, return its known constant value,
2479 if any. */
2480 return (*valueize) (rhs);
2481 }
2482 /* Handle propagating invariant addresses into address
2483 operations. */
2484 else if (TREE_CODE (rhs) == ADDR_EXPR
2485 && !is_gimple_min_invariant (rhs))
2486 {
2487 HOST_WIDE_INT offset = 0;
2488 tree base;
2489 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2490 &offset,
2491 valueize);
2492 if (base
2493 && (CONSTANT_CLASS_P (base)
2494 || decl_address_invariant_p (base)))
2495 return build_invariant_address (TREE_TYPE (rhs),
2496 base, offset);
2497 }
2498 else if (TREE_CODE (rhs) == CONSTRUCTOR
2499 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2500 && (CONSTRUCTOR_NELTS (rhs)
2501 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2502 {
2503 unsigned i;
2504 tree val, *vec;
2505
2506 vec = XALLOCAVEC (tree,
2507 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2508 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2509 {
2510 val = (*valueize) (val);
2511 if (TREE_CODE (val) == INTEGER_CST
2512 || TREE_CODE (val) == REAL_CST
2513 || TREE_CODE (val) == FIXED_CST)
2514 vec[i] = val;
2515 else
2516 return NULL_TREE;
2517 }
2518
2519 return build_vector (TREE_TYPE (rhs), vec);
2520 }
2521
2522 if (kind == tcc_reference)
2523 {
2524 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2525 || TREE_CODE (rhs) == REALPART_EXPR
2526 || TREE_CODE (rhs) == IMAGPART_EXPR)
2527 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2528 {
2529 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2530 return fold_unary_loc (EXPR_LOCATION (rhs),
2531 TREE_CODE (rhs),
2532 TREE_TYPE (rhs), val);
2533 }
2534 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2535 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2536 {
2537 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2538 return fold_ternary_loc (EXPR_LOCATION (rhs),
2539 TREE_CODE (rhs),
2540 TREE_TYPE (rhs), val,
2541 TREE_OPERAND (rhs, 1),
2542 TREE_OPERAND (rhs, 2));
2543 }
2544 else if (TREE_CODE (rhs) == MEM_REF
2545 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2546 {
2547 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2548 if (TREE_CODE (val) == ADDR_EXPR
2549 && is_gimple_min_invariant (val))
2550 {
2551 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2552 unshare_expr (val),
2553 TREE_OPERAND (rhs, 1));
2554 if (tem)
2555 rhs = tem;
2556 }
2557 }
2558 return fold_const_aggregate_ref_1 (rhs, valueize);
2559 }
2560 else if (kind == tcc_declaration)
2561 return get_symbol_constant_value (rhs);
2562 return rhs;
2563 }
2564
2565 case GIMPLE_UNARY_RHS:
2566 {
2567 /* Handle unary operators that can appear in GIMPLE form.
2568 Note that we know the single operand must be a constant,
2569 so this should almost always return a simplified RHS. */
2570 tree lhs = gimple_assign_lhs (stmt);
2571 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2572
2573 /* Conversions are useless for CCP purposes if they are
2574 value-preserving. Thus the restrictions that
2575 useless_type_conversion_p places for restrict qualification
2576 of pointer types should not apply here.
2577 Substitution later will only substitute to allowed places. */
2578 if (CONVERT_EXPR_CODE_P (subcode)
2579 && POINTER_TYPE_P (TREE_TYPE (lhs))
2580 && POINTER_TYPE_P (TREE_TYPE (op0))
2581 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2582 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2583 && TYPE_MODE (TREE_TYPE (lhs))
2584 == TYPE_MODE (TREE_TYPE (op0)))
2585 return op0;
2586
2587 return
2588 fold_unary_ignore_overflow_loc (loc, subcode,
2589 gimple_expr_type (stmt), op0);
2590 }
2591
2592 case GIMPLE_BINARY_RHS:
2593 {
2594 /* Handle binary operators that can appear in GIMPLE form. */
2595 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2596 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2597
2598 /* Translate &x + CST into an invariant form suitable for
2599 further propagation. */
2600 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2601 && TREE_CODE (op0) == ADDR_EXPR
2602 && TREE_CODE (op1) == INTEGER_CST)
2603 {
2604 tree off = fold_convert (ptr_type_node, op1);
2605 return build_fold_addr_expr_loc
2606 (loc,
2607 fold_build2 (MEM_REF,
2608 TREE_TYPE (TREE_TYPE (op0)),
2609 unshare_expr (op0), off));
2610 }
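
/* For example, with OP0 == &a and OP1 == 4 this produces the ADDR_EXPR
   of a MEM_REF based on &a with byte offset 4 (printed as
   &MEM[(T *)&a + 4B]), which satisfies is_gimple_min_invariant and can
   therefore keep propagating.  */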
2611
2612 return fold_binary_loc (loc, subcode,
2613 gimple_expr_type (stmt), op0, op1);
2614 }
2615
2616 case GIMPLE_TERNARY_RHS:
2617 {
2618 /* Handle ternary operators that can appear in GIMPLE form. */
2619 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2620 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2621 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2622
2623 /* Fold embedded expressions in ternary codes. */
2624 if ((subcode == COND_EXPR
2625 || subcode == VEC_COND_EXPR)
2626 && COMPARISON_CLASS_P (op0))
2627 {
2628 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2629 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2630 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2631 TREE_TYPE (op0), op00, op01);
2632 if (tem)
2633 op0 = tem;
2634 }
2635
2636 return fold_ternary_loc (loc, subcode,
2637 gimple_expr_type (stmt), op0, op1, op2);
2638 }
2639
2640 default:
2641 gcc_unreachable ();
2642 }
2643 }
2644
2645 case GIMPLE_CALL:
2646 {
2647 tree fn;
2648
2649 if (gimple_call_internal_p (stmt))
2650 /* No folding yet for these functions. */
2651 return NULL_TREE;
2652
2653 fn = (*valueize) (gimple_call_fn (stmt));
2654 if (TREE_CODE (fn) == ADDR_EXPR
2655 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2656 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2657 {
2658 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2659 tree call, retval;
2660 unsigned i;
2661 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2662 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2663 call = build_call_array_loc (loc,
2664 gimple_call_return_type (stmt),
2665 fn, gimple_call_num_args (stmt), args);
2666 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2667 if (retval)
2668 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2669 STRIP_NOPS (retval);
2670 return retval;
2671 }
2672 return NULL_TREE;
2673 }
2674
2675 default:
2676 return NULL_TREE;
2677 }
2678 }
2679
2680 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2681 Returns NULL_TREE if folding to a constant is not possible, otherwise
2682 returns a constant according to is_gimple_min_invariant. */
2683
2684 tree
2685 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2686 {
2687 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2688 if (res && is_gimple_min_invariant (res))
2689 return res;
2690 return NULL_TREE;
2691 }
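
/* A minimal sketch of the intended calling convention; MY_VALUEIZE is
   hypothetical, while real callers such as the CCP pass plug in their
   own lattice lookup:

     static tree
     my_valueize (tree name)
     {
       return name;  // or the constant the lattice knows for NAME
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);
     if (cst)
       ...  // CST satisfies is_gimple_min_invariant

   Returning NAME unchanged means "no better value known", making the
   folder behave like plain folding of the original operands.  */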
2692
2693
2694 /* The following set of functions are supposed to fold references using
2695 their constant initializers. */
2696
2697 static tree fold_ctor_reference (tree type, tree ctor,
2698 unsigned HOST_WIDE_INT offset,
2699 unsigned HOST_WIDE_INT size, tree);
2700
2701 /* See if we can find the constructor defining the value of BASE.
2702 When we know the constructor at a constant offset (such as when
2703 BASE is array[40] and we know the constructor of the array), then
2704 BIT_OFFSET is adjusted accordingly.
2705
2706 As a special case, return error_mark_node when the constructor
2707 is not explicitly available but is known to be zero,
2708 such as for 'static const int a;'. */
2709 static tree
2710 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2711 tree (*valueize)(tree))
2712 {
2713 HOST_WIDE_INT bit_offset2, size, max_size;
2714 if (TREE_CODE (base) == MEM_REF)
2715 {
2716 if (!integer_zerop (TREE_OPERAND (base, 1)))
2717 {
2718 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2719 return NULL_TREE;
2720 *bit_offset += (mem_ref_offset (base).low
2721 * BITS_PER_UNIT);
2722 }
2723
2724 if (valueize
2725 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2726 base = valueize (TREE_OPERAND (base, 0));
2727 if (!base || TREE_CODE (base) != ADDR_EXPR)
2728 return NULL_TREE;
2729 base = TREE_OPERAND (base, 0);
2730 }
2731
2732 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2733 DECL_INITIAL. If BASE is a nested reference into another
2734 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2735 the inner reference. */
2736 switch (TREE_CODE (base))
2737 {
2738 case VAR_DECL:
2739 case CONST_DECL:
2740 {
2741 tree init = ctor_for_folding (base);
2742
2743 /* Our semantics is the exact opposite of ctor_for_folding:
2744 NULL means unknown, while error_mark_node means 0. */
2745 if (init == error_mark_node)
2746 return NULL_TREE;
2747 if (!init)
2748 return error_mark_node;
2749 return init;
2750 }
2751
2752 case ARRAY_REF:
2753 case COMPONENT_REF:
2754 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2755 if (max_size == -1 || size != max_size)
2756 return NULL_TREE;
2757 *bit_offset += bit_offset2;
2758 return get_base_constructor (base, bit_offset, valueize);
2759
2760 case STRING_CST:
2761 case CONSTRUCTOR:
2762 return base;
2763
2764 default:
2765 return NULL_TREE;
2766 }
2767 }
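
/* For instance (illustrative only, assuming BITS_PER_UNIT == 8 and
   32-bit int), given

     static const int a[2] = { 1, 2 };

   resolving MEM_REF <&a, 4> returns the CONSTRUCTOR { 1, 2 } and bumps
   *BIT_OFFSET by 32, while

     static const int b;

   has no explicit initializer and yields error_mark_node, i.e. a known
   zero.  */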
2768
2769 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2770 to the memory at bit OFFSET.
2771
2772 We only do the simple job of folding byte accesses. */
2773
2774 static tree
2775 fold_string_cst_ctor_reference (tree type, tree ctor,
2776 unsigned HOST_WIDE_INT offset,
2777 unsigned HOST_WIDE_INT size)
2778 {
2779 if (INTEGRAL_TYPE_P (type)
2780 && (TYPE_MODE (type)
2781 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2782 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2783 == MODE_INT)
2784 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2785 && size == BITS_PER_UNIT
2786 && !(offset % BITS_PER_UNIT))
2787 {
2788 offset /= BITS_PER_UNIT;
2789 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2790 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2791 [offset]));
2792 /* Folding
2793 const char a[20]="hello";
2794 return a[10];
2795
2796 might lead to an offset greater than the string length. In this case
2797 we know the value is either initialized to 0 or out of bounds. Return 0
2798 in both cases. */
2799 return build_zero_cst (type);
2800 }
2801 return NULL_TREE;
2802 }
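
/* For example, with

     static const char s[20] = "hello";

   a 1-byte read at bit offset 8 (i.e. s[1]) folds to 'e', while a read
   of s[10] lies past TREE_STRING_LENGTH and therefore folds to 0, as
   described above.  (Illustrative; assumes BITS_PER_UNIT == 8.)  */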
2803
2804 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
2805 and size SIZE to the memory at bit OFFSET. */
2806
2807 static tree
2808 fold_array_ctor_reference (tree type, tree ctor,
2809 unsigned HOST_WIDE_INT offset,
2810 unsigned HOST_WIDE_INT size,
2811 tree from_decl)
2812 {
2813 unsigned HOST_WIDE_INT cnt;
2814 tree cfield, cval;
2815 double_int low_bound, elt_size;
2816 double_int index, max_index;
2817 double_int access_index;
2818 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2819 HOST_WIDE_INT inner_offset;
2820
2821 /* Compute low bound and elt size. */
2822 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2823 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2824 if (domain_type && TYPE_MIN_VALUE (domain_type))
2825 {
2826 /* Static constructors for variably sized objects make no sense. */
2827 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2828 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2829 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2830 }
2831 else
2832 low_bound = double_int_zero;
2833 /* Static constructors for variably sized objects make no sense. */
2834 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2835 == INTEGER_CST);
2836 elt_size =
2837 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2838
2839
2840 /* We can handle only constant-sized accesses that are known not to
2841 be larger than the size of an array element. */
2842 if (!TYPE_SIZE_UNIT (type)
2843 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2844 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2845 return NULL_TREE;
2846
2847 /* Compute the array index we look for. */
2848 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2849 .udiv (elt_size, TRUNC_DIV_EXPR);
2850 access_index += low_bound;
2851 if (index_type)
2852 access_index = access_index.ext (TYPE_PRECISION (index_type),
2853 TYPE_UNSIGNED (index_type));
2854
2855 /* And offset within the access. */
2856 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2857
2858 /* See if the array field is large enough to span the whole access. We do
2859 not care to fold accesses spanning multiple array indexes. */
2860 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2861 return NULL_TREE;
2862
2863 index = low_bound - double_int_one;
2864 if (index_type)
2865 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2866
2867 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2868 {
2869 /* An array constructor might explicitly set the index, specify a range,
2870 or leave the index NULL, meaning that it is the next index after the
2871 previous one. */
2872 if (cfield)
2873 {
2874 if (TREE_CODE (cfield) == INTEGER_CST)
2875 max_index = index = tree_to_double_int (cfield);
2876 else
2877 {
2878 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2879 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2880 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2881 }
2882 }
2883 else
2884 {
2885 index += double_int_one;
2886 if (index_type)
2887 index = index.ext (TYPE_PRECISION (index_type),
2888 TYPE_UNSIGNED (index_type));
2889 max_index = index;
2890 }
2891
2892 /* Do we have a match? */
2893 if (access_index.cmp (index, 1) >= 0
2894 && access_index.cmp (max_index, 1) <= 0)
2895 return fold_ctor_reference (type, cval, inner_offset, size,
2896 from_decl);
2897 }
2898 /* When memory is not explicitly mentioned in the constructor,
2899 it is 0 (or out of range). */
2900 return build_zero_cst (type);
2901 }
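
/* Worked example (illustrative, assuming BITS_PER_UNIT == 8 and 32-bit
   int): for

     static const int a[4] = { 10, 20, 30, 40 };

   a 32-bit read at bit OFFSET 64 gives elt_size = 4,
   access_index = (64 / 8) / 4 = 2 and inner_offset = 0, so the walk
   over the CONSTRUCTOR matches index 2 and folds to 30.  */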
2902
2903 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2904 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2905
2906 static tree
2907 fold_nonarray_ctor_reference (tree type, tree ctor,
2908 unsigned HOST_WIDE_INT offset,
2909 unsigned HOST_WIDE_INT size,
2910 tree from_decl)
2911 {
2912 unsigned HOST_WIDE_INT cnt;
2913 tree cfield, cval;
2914
2915 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2916 cval)
2917 {
2918 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2919 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2920 tree field_size = DECL_SIZE (cfield);
2921 double_int bitoffset;
2922 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2923 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2924 double_int bitoffset_end, access_end;
2925
2926 /* Variable-sized objects in static constructors make no sense,
2927 but field_size can be NULL for flexible array members. */
2928 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2929 && TREE_CODE (byte_offset) == INTEGER_CST
2930 && (field_size != NULL_TREE
2931 ? TREE_CODE (field_size) == INTEGER_CST
2932 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2933
2934 /* Compute bit offset of the field. */
2935 bitoffset = tree_to_double_int (field_offset)
2936 + byte_offset_cst * bits_per_unit_cst;
2937 /* Compute bit offset where the field ends. */
2938 if (field_size != NULL_TREE)
2939 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2940 else
2941 bitoffset_end = double_int_zero;
2942
2943 access_end = double_int::from_uhwi (offset)
2944 + double_int::from_uhwi (size);
2945
2946 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2947 [BITOFFSET, BITOFFSET_END)? */
2948 if (access_end.cmp (bitoffset, 0) > 0
2949 && (field_size == NULL_TREE
2950 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2951 {
2952 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2953 /* We do have overlap. Now see if field is large enough to
2954 cover the access. Give up for accesses spanning multiple
2955 fields. */
2956 if (access_end.cmp (bitoffset_end, 0) > 0)
2957 return NULL_TREE;
2958 if (double_int::from_uhwi (offset).slt (bitoffset))
2959 return NULL_TREE;
2960 return fold_ctor_reference (type, cval,
2961 inner_offset.to_uhwi (), size,
2962 from_decl);
2963 }
2964 }
2965 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2966 return build_zero_cst (type);
2967 }
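
/* Worked example (illustrative, assuming 32-bit int fields): for

     static const struct { int x; int y; } p = { 1, 2 };

   a 32-bit read at bit OFFSET 32 overlaps field Y, whose occupied bits
   are [32, 64); the access fits entirely within the field, so the
   recursive fold_ctor_reference call above returns 2.  */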
2968
2969 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2970 to the memory at bit OFFSET. */
2971
2972 static tree
2973 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2974 unsigned HOST_WIDE_INT size, tree from_decl)
2975 {
2976 tree ret;
2977
2978 /* We found the field with exact match. */
2979 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2980 && !offset)
2981 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2982
2983 /* We are at the end of the walk, see if we can view-convert the
2984 result. */
2985 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2986 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2987 && operand_equal_p (TYPE_SIZE (type),
2988 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2989 {
2990 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2991 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2992 if (ret)
2993 STRIP_NOPS (ret);
2994 return ret;
2995 }
2996 if (TREE_CODE (ctor) == STRING_CST)
2997 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2998 if (TREE_CODE (ctor) == CONSTRUCTOR)
2999 {
3000
3001 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3002 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3003 return fold_array_ctor_reference (type, ctor, offset, size,
3004 from_decl);
3005 else
3006 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3007 from_decl);
3008 }
3009
3010 return NULL_TREE;
3011 }
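
/* For example (assuming 32-bit int and IEEE single precision floats),
   reading a 'float' from

     static const int i = 0x3f800000;

   takes the view-convert path above because the sizes match, and folds
   to 1.0f; with mismatched sizes the walk falls through and ends up
   returning NULL_TREE.  */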
3012
3013 /* Return the tree representing the element referenced by T if T is an
3014 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3015 names using VALUEIZE. Return NULL_TREE otherwise. */
3016
3017 tree
3018 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3019 {
3020 tree ctor, idx, base;
3021 HOST_WIDE_INT offset, size, max_size;
3022 tree tem;
3023
3024 if (TREE_THIS_VOLATILE (t))
3025 return NULL_TREE;
3026
3027 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3028 return get_symbol_constant_value (t);
3029
3030 tem = fold_read_from_constant_string (t);
3031 if (tem)
3032 return tem;
3033
3034 switch (TREE_CODE (t))
3035 {
3036 case ARRAY_REF:
3037 case ARRAY_RANGE_REF:
3038 /* Constant indexes are handled well by get_base_constructor.
3039 We only special-case variable offsets here.
3040 FIXME: This code can't handle nested references with variable indexes
3041 (they will be handled only by iteration of ccp). Perhaps we can bring
3042 get_ref_base_and_extent here and make it use a valueize callback. */
3043 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3044 && valueize
3045 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3046 && TREE_CODE (idx) == INTEGER_CST)
3047 {
3048 tree low_bound, unit_size;
3049 double_int doffset;
3050
3051 /* If the resulting bit-offset is constant, track it. */
3052 if ((low_bound = array_ref_low_bound (t),
3053 TREE_CODE (low_bound) == INTEGER_CST)
3054 && (unit_size = array_ref_element_size (t),
3055 host_integerp (unit_size, 1))
3056 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3057 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3058 doffset.fits_shwi ()))
3059 {
3060 offset = doffset.to_shwi ();
3061 offset *= TREE_INT_CST_LOW (unit_size);
3062 offset *= BITS_PER_UNIT;
3063
3064 base = TREE_OPERAND (t, 0);
3065 ctor = get_base_constructor (base, &offset, valueize);
3066 /* Empty constructor. Always fold to 0. */
3067 if (ctor == error_mark_node)
3068 return build_zero_cst (TREE_TYPE (t));
3069 /* Out of bound array access. Value is undefined,
3070 but don't fold. */
3071 if (offset < 0)
3072 return NULL_TREE;
3073 /* We cannot determine the ctor. */
3074 if (!ctor)
3075 return NULL_TREE;
3076 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3077 TREE_INT_CST_LOW (unit_size)
3078 * BITS_PER_UNIT,
3079 base);
3080 }
3081 }
3082 /* Fallthru. */
3083
3084 case COMPONENT_REF:
3085 case BIT_FIELD_REF:
3086 case TARGET_MEM_REF:
3087 case MEM_REF:
3088 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3089 ctor = get_base_constructor (base, &offset, valueize);
3090
3091 /* Empty constructor. Always fold to 0. */
3092 if (ctor == error_mark_node)
3093 return build_zero_cst (TREE_TYPE (t));
3094 /* We do not know the precise address. */
3095 if (max_size == -1 || max_size != size)
3096 return NULL_TREE;
3097 /* We cannot determine the ctor. */
3098 if (!ctor)
3099 return NULL_TREE;
3100
3101 /* Out of bound array access. Value is undefined, but don't fold. */
3102 if (offset < 0)
3103 return NULL_TREE;
3104
3105 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3106 base);
3107
3108 case REALPART_EXPR:
3109 case IMAGPART_EXPR:
3110 {
3111 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3112 if (c && TREE_CODE (c) == COMPLEX_CST)
3113 return fold_build1_loc (EXPR_LOCATION (t),
3114 TREE_CODE (t), TREE_TYPE (t), c);
3115 break;
3116 }
3117
3118 default:
3119 break;
3120 }
3121
3122 return NULL_TREE;
3123 }
3124
3125 tree
3126 fold_const_aggregate_ref (tree t)
3127 {
3128 return fold_const_aggregate_ref_1 (t, NULL);
3129 }
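
/* A short usage sketch (hypothetical trees): with

     static const int a[2] = { 1, 2 };

   calling fold_const_aggregate_ref on the ARRAY_REF a[1] returns the
   INTEGER_CST 2, while volatile or variably indexed references simply
   come back as NULL_TREE.  */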
3130
3131 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3132 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3133 KNOWN_BINFO carries the binfo describing the true type of
3134 OBJ_TYPE_REF_OBJECT(REF). */
3135
3136 tree
3137 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3138 {
3139 unsigned HOST_WIDE_INT offset, size;
3140 tree v, fn, vtable, init;
3141
3142 vtable = v = BINFO_VTABLE (known_binfo);
3143 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3144 if (!v)
3145 return NULL_TREE;
3146
3147 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3148 {
3149 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3150 v = TREE_OPERAND (v, 0);
3151 }
3152 else
3153 offset = 0;
3154
3155 if (TREE_CODE (v) != ADDR_EXPR)
3156 return NULL_TREE;
3157 v = TREE_OPERAND (v, 0);
3158
3159 if (TREE_CODE (v) != VAR_DECL
3160 || !DECL_VIRTUAL_P (v))
3161 return NULL_TREE;
3162 init = ctor_for_folding (v);
3163
3164 /* Virtual tables should always be born with constructors,
3165 and we should always assume that they are available for
3166 folding. At the moment we do not stream them in all cases,
3167 but it should never happen that the ctor seems unreachable. */
3168 gcc_assert (init);
3169 if (init == error_mark_node)
3170 {
3171 gcc_assert (in_lto_p);
3172 return NULL_TREE;
3173 }
3174 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3175 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3176 offset += token * size;
3177 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3178 offset, size, v);
3179 if (!fn || integer_zerop (fn))
3180 return NULL_TREE;
3181 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3182 || TREE_CODE (fn) == FDESC_EXPR);
3183 fn = TREE_OPERAND (fn, 0);
3184 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3185
3186 /* When the cgraph node is missing and the function is not public, we
3187 cannot devirtualize. This can happen in WHOPR when the actual method
3188 ends up in another partition, because we found the devirtualization
3189 possibility too late. */
3190 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3191 return NULL_TREE;
3192
3193 /* Make sure we create a cgraph node for functions we'll reference.
3194 They can be non-existent if the reference comes from an entry
3195 of an external vtable for example. */
3196 cgraph_get_create_node (fn);
3197
3198 return fn;
3199 }
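
/* For instance (illustrative only, assuming 64-bit vtable slots), TOKEN 2
   makes OFFSET advance by 2 * 64 = 128 bits past whatever constant offset
   BINFO_VTABLE already encodes; fold_ctor_reference then reads the
   ADDR_EXPR stored in that slot of the vtable's initializer, and its
   operand is the FUNCTION_DECL of the devirtualized target.  */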
3200
3201 /* Return true iff VAL is a gimple expression that is known to be
3202 non-negative. Restricted to floating-point inputs. */
3203
3204 bool
3205 gimple_val_nonnegative_real_p (tree val)
3206 {
3207 gimple def_stmt;
3208
3209 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3210
3211 /* Use existing logic for non-gimple trees. */
3212 if (tree_expr_nonnegative_p (val))
3213 return true;
3214
3215 if (TREE_CODE (val) != SSA_NAME)
3216 return false;
3217
3218 /* Currently we look only at the immediately defining statement
3219 to make this determination, since recursion on defining
3220 statements of operands can lead to quadratic behavior in the
3221 worst case. This is expected to catch almost all occurrences
3222 in practice. It would be possible to implement limited-depth
3223 recursion if important cases are lost. Alternatively, passes
3224 that need this information (such as the pow/powi lowering code
3225 in the cse_sincos pass) could be revised to provide it through
3226 dataflow propagation. */
3227
3228 def_stmt = SSA_NAME_DEF_STMT (val);
3229
3230 if (is_gimple_assign (def_stmt))
3231 {
3232 tree op0, op1;
3233
3234 /* See fold-const.c:tree_expr_nonnegative_p for additional
3235 cases that could be handled with recursion. */
3236
3237 switch (gimple_assign_rhs_code (def_stmt))
3238 {
3239 case ABS_EXPR:
3240 /* Always true for floating-point operands. */
3241 return true;
3242
3243 case MULT_EXPR:
3244 /* True if the two operands are identical (since we are
3245 restricted to floating-point inputs). */
3246 op0 = gimple_assign_rhs1 (def_stmt);
3247 op1 = gimple_assign_rhs2 (def_stmt);
3248
3249 if (op0 == op1
3250 || operand_equal_p (op0, op1, 0))
3251 return true;
3252
3253 default:
3254 return false;
3255 }
3256 }
3257 else if (is_gimple_call (def_stmt))
3258 {
3259 tree fndecl = gimple_call_fndecl (def_stmt);
3260 if (fndecl
3261 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3262 {
3263 tree arg1;
3264
3265 switch (DECL_FUNCTION_CODE (fndecl))
3266 {
3267 CASE_FLT_FN (BUILT_IN_ACOS):
3268 CASE_FLT_FN (BUILT_IN_ACOSH):
3269 CASE_FLT_FN (BUILT_IN_CABS):
3270 CASE_FLT_FN (BUILT_IN_COSH):
3271 CASE_FLT_FN (BUILT_IN_ERFC):
3272 CASE_FLT_FN (BUILT_IN_EXP):
3273 CASE_FLT_FN (BUILT_IN_EXP10):
3274 CASE_FLT_FN (BUILT_IN_EXP2):
3275 CASE_FLT_FN (BUILT_IN_FABS):
3276 CASE_FLT_FN (BUILT_IN_FDIM):
3277 CASE_FLT_FN (BUILT_IN_HYPOT):
3278 CASE_FLT_FN (BUILT_IN_POW10):
3279 return true;
3280
3281 CASE_FLT_FN (BUILT_IN_SQRT):
3282 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3283 negative inputs. */
3284 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3285 return true;
3286
3287 break;
3288
3289 CASE_FLT_FN (BUILT_IN_POWI):
3290 /* True if the second argument is an even integer. */
3291 arg1 = gimple_call_arg (def_stmt, 1);
3292
3293 if (TREE_CODE (arg1) == INTEGER_CST
3294 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3295 return true;
3296
3297 break;
3298
3299 CASE_FLT_FN (BUILT_IN_POW):
3300 /* True if the second argument is an even integer-valued
3301 real. */
3302 arg1 = gimple_call_arg (def_stmt, 1);
3303
3304 if (TREE_CODE (arg1) == REAL_CST)
3305 {
3306 REAL_VALUE_TYPE c;
3307 HOST_WIDE_INT n;
3308
3309 c = TREE_REAL_CST (arg1);
3310 n = real_to_integer (&c);
3311
3312 if ((n & 1) == 0)
3313 {
3314 REAL_VALUE_TYPE cint;
3315 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3316 if (real_identical (&c, &cint))
3317 return true;
3318 }
3319 }
3320
3321 break;
3322
3323 default:
3324 return false;
3325 }
3326 }
3327 }
3328
3329 return false;
3330 }
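
/* Two kinds of statements this predicate recognizes (sketch, SSA names
   hypothetical):

     y_1 = x_2 * x_2;                 // MULT_EXPR with identical operands
     z_3 = __builtin_pow (w_4, 2.0);  // even integer-valued exponent

   Both return true without recursing into the operands' definitions.  */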
3331
3332 /* Given a pointer value OP0, return a simplified version of an
3333 indirection through OP0, or NULL_TREE if no simplification is
3334 possible. Note that the resulting type may be different from
3335 the type pointed to in the sense that it is still compatible
3336 from the langhooks point of view. */
3337
3338 tree
3339 gimple_fold_indirect_ref (tree t)
3340 {
3341 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3342 tree sub = t;
3343 tree subtype;
3344
3345 STRIP_NOPS (sub);
3346 subtype = TREE_TYPE (sub);
3347 if (!POINTER_TYPE_P (subtype))
3348 return NULL_TREE;
3349
3350 if (TREE_CODE (sub) == ADDR_EXPR)
3351 {
3352 tree op = TREE_OPERAND (sub, 0);
3353 tree optype = TREE_TYPE (op);
3354 /* *&p => p */
3355 if (useless_type_conversion_p (type, optype))
3356 return op;
3357
3358 /* *(foo *)&fooarray => fooarray[0] */
3359 if (TREE_CODE (optype) == ARRAY_TYPE
3360 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3361 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3362 {
3363 tree type_domain = TYPE_DOMAIN (optype);
3364 tree min_val = size_zero_node;
3365 if (type_domain && TYPE_MIN_VALUE (type_domain))
3366 min_val = TYPE_MIN_VALUE (type_domain);
3367 if (TREE_CODE (min_val) == INTEGER_CST)
3368 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3369 }
3370 /* *(foo *)&complexfoo => __real__ complexfoo */
3371 else if (TREE_CODE (optype) == COMPLEX_TYPE
3372 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3373 return fold_build1 (REALPART_EXPR, type, op);
3374 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3375 else if (TREE_CODE (optype) == VECTOR_TYPE
3376 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3377 {
3378 tree part_width = TYPE_SIZE (type);
3379 tree index = bitsize_int (0);
3380 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3381 }
3382 }
3383
3384 /* *(p + CST) -> ... */
3385 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3386 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3387 {
3388 tree addr = TREE_OPERAND (sub, 0);
3389 tree off = TREE_OPERAND (sub, 1);
3390 tree addrtype;
3391
3392 STRIP_NOPS (addr);
3393 addrtype = TREE_TYPE (addr);
3394
3395 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3396 if (TREE_CODE (addr) == ADDR_EXPR
3397 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3398 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3399 && host_integerp (off, 1))
3400 {
3401 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
3402 tree part_width = TYPE_SIZE (type);
3403 unsigned HOST_WIDE_INT part_widthi
3404 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
3405 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3406 tree index = bitsize_int (indexi);
3407 if (offset / part_widthi
3408 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3409 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3410 part_width, index);
3411 }
3412
3413 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3414 if (TREE_CODE (addr) == ADDR_EXPR
3415 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3416 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3417 {
3418 tree size = TYPE_SIZE_UNIT (type);
3419 if (tree_int_cst_equal (size, off))
3420 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3421 }
3422
3423 /* *(p + CST) -> MEM_REF <p, CST>. */
3424 if (TREE_CODE (addr) != ADDR_EXPR
3425 || DECL_P (TREE_OPERAND (addr, 0)))
3426 return fold_build2 (MEM_REF, type,
3427 addr,
3428 build_int_cst_wide (ptype,
3429 TREE_INT_CST_LOW (off),
3430 TREE_INT_CST_HIGH (off)));
3431 }
3432
3433 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3434 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3435 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3436 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3437 {
3438 tree type_domain;
3439 tree min_val = size_zero_node;
3440 tree osub = sub;
3441 sub = gimple_fold_indirect_ref (sub);
3442 if (! sub)
3443 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3444 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3445 if (type_domain && TYPE_MIN_VALUE (type_domain))
3446 min_val = TYPE_MIN_VALUE (type_domain);
3447 if (TREE_CODE (min_val) == INTEGER_CST)
3448 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3449 }
3450
3451 return NULL_TREE;
3452 }
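
/* A few foldings this routine performs, mirroring the comments above
   (illustrative source-level forms):

     *&p                  => p
     *(foo *)&fooarray    => fooarray[0]
     *(foo *)&complexfoo  => __real__ complexfoo
     *(p + 4)             => MEM_REF <p, 4>  (when p is not an ADDR_EXPR)

   As the function comment notes, the resulting type is only guaranteed
   to be compatible from the langhooks point of view.  */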