gcc/gimple-fold.c
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "output.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not yet
     optimized out.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
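      /* A function body that was inlined into its caller exists only
         inside that caller; there is no standalone copy left that we
         could reference.  */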
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions it may be tempting to not necessarily give up because the
     copy will be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
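  /* Canonicalize &X + CST into the &MEM_REF[&X, CST] form that
     is_gimple_min_invariant accepts for constant addresses.  */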
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }

          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type.
             Check that the second operand is an integer constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X;
                 X == 1 and X != 0 is X.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for X of one-bit precision is the transformation
                 !X -> ~X valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));

            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)),
                                      tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has an lhs, the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
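      /* Only statements that may define memory need a VDEF: assignments
         to non-registers, and calls that are not const, pure, novops
         or noreturn.  */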
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
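          /* Walking backwards, the first store we meet is the last one
             in the sequence and takes over the original statement's VDEF;
             every earlier store gets a fresh SSA name.  */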
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
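  /* The replacement does not store, so any virtual definition the call
     carried must be unlinked and released.  */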
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Return false
   if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), true);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
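          /* Only power-of-two sizes can be loaded and stored in a single
             integer-mode access.  */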
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                 dest_align)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem),
                                                     NULL);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, true);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
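              /* get_ref_base_and_extent returns offsets in bits; the
                 overlap test below works in bytes.  */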
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts from malloc's up-to-VOID return value.
         Perhaps we ought to inherit the type from a non-VOID argument
         here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following, try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source will
         possibly no longer have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          new_stmt = gimple_build_assign (NULL_TREE, srcvar);
          if (gimple_in_ssa_p (cfun))
            srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
          else
            srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
          gimple_assign_set_lhs (new_stmt, srcvar);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, true);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
                           ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
                                   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}

/* Fold a call to the memset or bzero builtin at *GSI, setting LEN bytes
   of the destination memory to the value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
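      /* Replicate into the upper half with two shifts; a single shift
         by 32 would be undefined if HOST_WIDE_INT is only 32 bits.  */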
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_maxval_strlen (op2, length, visited, type)
                 && get_maxval_strlen (op3, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}


/* Fold a call to the strcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
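  /* Copy LEN + 1 bytes so the terminating nul is transferred as well.  */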
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

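  /* SLEN counts the terminating nul from here on.  */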
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise transform
   the call in place, possibly into calls to other builtins that compute
   the same value more efficiently, and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable, don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple repl = gimple_build_call (fn_fputc, 2,
                                             build_int_cst
                                               (integer_type_node, p[0]),
                                             arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size, keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
                                         size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin being checked.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          tree temp = fold_build_pointer_plus_loc (loc, dest, len);
          temp = force_gimple_operand_gsi (gsi, temp,
                                           false, NULL_TREE, true,
                                           GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
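  /* MAXLEN, if known, is an upper bound on the value LEN can take.  */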
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 4, dest, src,
                                                   len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1865
1866 /* Fold a call to the __st{r,p}cpy_chk builtin.
1867 DEST, SRC, and SIZE are the arguments to the call.
1868 FCODE is the BUILT_IN_* code of the builtin. Whether the return
1869 value can be ignored and the maximum length of the SRC string are
1870 determined from the call statement itself. */
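 /* A sketch with made-up operands: __strcpy_chk (d, "abc", 8) becomes
    strcpy (d, "abc") since strlen ("abc") == 3 fits in the 8-byte
    object; when c_strlen returns a non-constant expression LEN, the
    call becomes __memcpy_chk (d, s, LEN + 1, 8) instead.  */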
1871
1872 static bool
1873 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1874 tree dest,
1875 tree src, tree size,
1876 enum built_in_function fcode)
1877 {
1878 gimple stmt = gsi_stmt (*gsi);
1879 location_t loc = gimple_location (stmt);
1880 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1881 tree len, fn;
1882
1883 /* If SRC and DEST are the same (and not volatile), return DEST. */
1884 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1885 {
1886 replace_call_with_value (gsi, dest);
1887 return true;
1888 }
1889
1890 if (! tree_fits_uhwi_p (size))
1891 return false;
1892
1893 tree maxlen = get_maxval_strlen (src, 1);
1894 if (! integer_all_onesp (size))
1895 {
1896 len = c_strlen (src, 1);
1897 if (! len || ! tree_fits_uhwi_p (len))
1898 {
1899 /* If LEN is not constant, try MAXLEN too.
1900 For MAXLEN only allow optimizing into the non-checking function
1901 if SIZE is >= MAXLEN; never fold into the runtime failure path. */
1902 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1903 {
1904 if (fcode == BUILT_IN_STPCPY_CHK)
1905 {
1906 if (! ignore)
1907 return false;
1908
1909 /* If return value of __stpcpy_chk is ignored,
1910 optimize into __strcpy_chk. */
1911 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1912 if (!fn)
1913 return false;
1914
1915 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1916 replace_call_with_call_and_fold (gsi, repl);
1917 return true;
1918 }
1919
1920 if (! len || TREE_SIDE_EFFECTS (len))
1921 return false;
1922
1923 /* If c_strlen returned something, but not a constant,
1924 transform __strcpy_chk into __memcpy_chk. */
1925 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1926 if (!fn)
1927 return false;
1928
1929 len = fold_convert_loc (loc, size_type_node, len);
1930 len = size_binop_loc (loc, PLUS_EXPR, len,
1931 build_int_cst (size_type_node, 1));
1932 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1933 true, GSI_SAME_STMT);
1934 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1935 replace_call_with_call_and_fold (gsi, repl);
1936 return true;
1937 }
1938 }
1939 else
1940 maxlen = len;
1941
1942 if (! tree_int_cst_lt (maxlen, size))
1943 return false;
1944 }
1945
1946 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1947 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1948 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1949 if (!fn)
1950 return false;
1951
1952 gimple repl = gimple_build_call (fn, 2, dest, src);
1953 replace_call_with_call_and_fold (gsi, repl);
1954 return true;
1955 }
1956
1957 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1958 are the arguments to the call. FCODE is the BUILT_IN_* code of the
1959 builtin. Whether the return value can be ignored and an upper bound
1960 for LEN are determined from the call statement itself. */
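 /* E.g. (hypothetical values) __strncpy_chk (d, s, 4, 16) becomes
    strncpy (d, s, 4) once the length 4 is known not to exceed the
    checked size 16.  */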
1961
1962 static bool
1963 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1964 tree dest, tree src,
1965 tree len, tree size,
1966 enum built_in_function fcode)
1967 {
1968 gimple stmt = gsi_stmt (*gsi);
1969 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1970 tree fn;
1971
1972 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1973 {
1974 /* If return value of __stpncpy_chk is ignored,
1975 optimize into __strncpy_chk. */
1976 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1977 if (fn)
1978 {
1979 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1980 replace_call_with_call_and_fold (gsi, repl);
1981 return true;
1982 }
1983 }
1984
1985 if (! tree_fits_uhwi_p (size))
1986 return false;
1987
1988 tree maxlen = get_maxval_strlen (len, 2);
1989 if (! integer_all_onesp (size))
1990 {
1991 if (! tree_fits_uhwi_p (len))
1992 {
1993 /* If LEN is not constant, try MAXLEN too.
1994 For MAXLEN only allow optimizing into the non-checking function
1995 if SIZE is >= MAXLEN; never fold into the runtime failure path. */
1996 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1997 return false;
1998 }
1999 else
2000 maxlen = len;
2001
2002 if (tree_int_cst_lt (size, maxlen))
2003 return false;
2004 }
2005
2006 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2007 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2008 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2009 if (!fn)
2010 return false;
2011
2012 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2013 replace_call_with_call_and_fold (gsi, repl);
2014 return true;
2015 }
2016
2017 /* Fold a call to __{,v}snprintf_chk at *GSI. Return true if the call
2018 was simplified into a plain {,v}snprintf call, or false if a normal
2019 call should be emitted instead. FCODE is either BUILT_IN_SNPRINTF_CHK
2020 or BUILT_IN_VSNPRINTF_CHK. The maximum output length is derived
2021 from the LEN argument of the call. */
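 /* Illustrative folding with hypothetical operands:
      __snprintf_chk (buf, 8, 0, 32, "%d", i)
    -> snprintf (buf, 8, "%d", i)
    since the length bound 8 does not exceed the checked size 32.  */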
2022
2023 static bool
2024 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2025 enum built_in_function fcode)
2026 {
2027 gimple stmt = gsi_stmt (*gsi);
2028 tree dest, size, len, fn, fmt, flag;
2029 const char *fmt_str;
2030
2031 /* Verify the required arguments in the original call. */
2032 if (gimple_call_num_args (stmt) < 5)
2033 return false;
2034
2035 dest = gimple_call_arg (stmt, 0);
2036 len = gimple_call_arg (stmt, 1);
2037 flag = gimple_call_arg (stmt, 2);
2038 size = gimple_call_arg (stmt, 3);
2039 fmt = gimple_call_arg (stmt, 4);
2040
2041 if (! tree_fits_uhwi_p (size))
2042 return false;
2043
2044 if (! integer_all_onesp (size))
2045 {
2046 tree maxlen = get_maxval_strlen (len, 2);
2047 if (! tree_fits_uhwi_p (len))
2048 {
2049 /* If LEN is not constant, try MAXLEN too.
2050 For MAXLEN only allow optimizing into the non-checking function
2051 if SIZE is >= MAXLEN; never fold into the runtime failure path. */
2052 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2053 return false;
2054 }
2055 else
2056 maxlen = len;
2057
2058 if (tree_int_cst_lt (size, maxlen))
2059 return false;
2060 }
2061
2062 if (!init_target_chars ())
2063 return false;
2064
2065 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2066 or if format doesn't contain % chars or is "%s". */
2067 if (! integer_zerop (flag))
2068 {
2069 fmt_str = c_getstr (fmt);
2070 if (fmt_str == NULL)
2071 return false;
2072 if (strchr (fmt_str, target_percent) != NULL
2073 && strcmp (fmt_str, target_percent_s))
2074 return false;
2075 }
2076
2077 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2078 available. */
2079 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2080 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2081 if (!fn)
2082 return false;
2083
2084 /* Replace the called function and the first 5 arguments by 3, retaining
2085 trailing varargs. */
2086 gimple_call_set_fndecl (stmt, fn);
2087 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2088 gimple_call_set_arg (stmt, 0, dest);
2089 gimple_call_set_arg (stmt, 1, len);
2090 gimple_call_set_arg (stmt, 2, fmt);
2091 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2092 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2093 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2094 fold_stmt (gsi);
2095 return true;
2096 }
2097
2098 /* Fold a call to __{,v}sprintf_chk at *GSI. Return true if the call
2099 was simplified into a plain {,v}sprintf call, or false if a normal
2100 call should be emitted instead. FCODE is either BUILT_IN_SPRINTF_CHK
2101 or BUILT_IN_VSPRINTF_CHK. */
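 /* E.g. (hypothetical operands) __sprintf_chk (buf, 0, 32, "hi") folds
    to sprintf (buf, "hi"): the format contains no % directives, so the
    output length 2 is known to be below the checked size 32.  */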
2102
2103 static bool
2104 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2105 enum built_in_function fcode)
2106 {
2107 gimple stmt = gsi_stmt (*gsi);
2108 tree dest, size, len, fn, fmt, flag;
2109 const char *fmt_str;
2110 unsigned nargs = gimple_call_num_args (stmt);
2111
2112 /* Verify the required arguments in the original call. */
2113 if (nargs < 4)
2114 return false;
2115 dest = gimple_call_arg (stmt, 0);
2116 flag = gimple_call_arg (stmt, 1);
2117 size = gimple_call_arg (stmt, 2);
2118 fmt = gimple_call_arg (stmt, 3);
2119
2120 if (! tree_fits_uhwi_p (size))
2121 return false;
2122
2123 len = NULL_TREE;
2124
2125 if (!init_target_chars ())
2126 return false;
2127
2128 /* Check whether the format is a literal string constant. */
2129 fmt_str = c_getstr (fmt);
2130 if (fmt_str != NULL)
2131 {
2132 /* If the format doesn't contain % args or %%, we know the size. */
2133 if (strchr (fmt_str, target_percent) == 0)
2134 {
2135 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2136 len = build_int_cstu (size_type_node, strlen (fmt_str));
2137 }
2138 /* If the format is "%s" and first ... argument is a string literal,
2139 we know the size too. */
2140 else if (fcode == BUILT_IN_SPRINTF_CHK
2141 && strcmp (fmt_str, target_percent_s) == 0)
2142 {
2143 tree arg;
2144
2145 if (nargs == 5)
2146 {
2147 arg = gimple_call_arg (stmt, 4);
2148 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2149 {
2150 len = c_strlen (arg, 1);
2151 if (! len || ! tree_fits_uhwi_p (len))
2152 len = NULL_TREE;
2153 }
2154 }
2155 }
2156 }
2157
2158 if (! integer_all_onesp (size))
2159 {
2160 if (! len || ! tree_int_cst_lt (len, size))
2161 return false;
2162 }
2163
2164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2165 or if format doesn't contain % chars or is "%s". */
2166 if (! integer_zerop (flag))
2167 {
2168 if (fmt_str == NULL)
2169 return false;
2170 if (strchr (fmt_str, target_percent) != NULL
2171 && strcmp (fmt_str, target_percent_s))
2172 return false;
2173 }
2174
2175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2176 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2177 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2178 if (!fn)
2179 return false;
2180
2181 /* Replace the called function and the first 4 arguments by 2, retaining
2182 trailing varargs. */
2183 gimple_call_set_fndecl (stmt, fn);
2184 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2185 gimple_call_set_arg (stmt, 0, dest);
2186 gimple_call_set_arg (stmt, 1, fmt);
2187 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2188 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2189 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2190 fold_stmt (gsi);
2191 return true;
2192 }
2193
2194 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2195 ORIG may be null if this is a 2-argument call. We don't attempt to
2196 simplify calls with more than 3 arguments.
2197
2198 Return true if the call was simplified, false otherwise. When the
2199 result of the call is used, its value is replaced by the known
2200 length of the output. */
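 /* Two sketches of the folding below, with hypothetical operands:
      sprintf (buf, "hello")   -> strcpy (buf, "hello"), result 5
      sprintf (buf, "%s", str) -> strcpy (buf, str), result strlen of STR
    where the second form is only used with a live result if the length
    of STR is known.  */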
2201
2202 static bool
2203 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2204 {
2205 gimple stmt = gsi_stmt (*gsi);
2206 tree dest = gimple_call_arg (stmt, 0);
2207 tree fmt = gimple_call_arg (stmt, 1);
2208 tree orig = NULL_TREE;
2209 const char *fmt_str = NULL;
2210
2211 /* Verify the required arguments in the original call. We deal with two
2212 types of sprintf() calls: 'sprintf (str, fmt)' and
2213 'sprintf (dest, "%s", orig)'. */
2214 if (gimple_call_num_args (stmt) > 3)
2215 return false;
2216
2217 if (gimple_call_num_args (stmt) == 3)
2218 orig = gimple_call_arg (stmt, 2);
2219
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str = c_getstr (fmt);
2222 if (fmt_str == NULL)
2223 return false;
2224
2225 if (!init_target_chars ())
2226 return false;
2227
2228 /* If the format doesn't contain % args or %%, use strcpy. */
2229 if (strchr (fmt_str, target_percent) == NULL)
2230 {
2231 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2232
2233 if (!fn)
2234 return false;
2235
2236 /* Don't optimize sprintf (buf, "abc", ptr++). */
2237 if (orig)
2238 return false;
2239
2240 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2241 'format' is known to contain no % formats. */
2242 gimple_seq stmts = NULL;
2243 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2244 gimple_seq_add_stmt_without_update (&stmts, repl);
2245 if (gimple_call_lhs (stmt))
2246 {
2247 repl = gimple_build_assign (gimple_call_lhs (stmt),
2248 build_int_cst (integer_type_node,
2249 strlen (fmt_str)));
2250 gimple_seq_add_stmt_without_update (&stmts, repl);
2251 gsi_replace_with_seq_vops (gsi, stmts);
2252 /* gsi now points at the assignment to the lhs, get a
2253 stmt iterator to the strcpy call.
2254 ??? We can't use gsi_for_stmt as that doesn't work when the
2255 CFG isn't built yet. */
2256 gimple_stmt_iterator gsi2 = *gsi;
2257 gsi_prev (&gsi2);
2258 fold_stmt (&gsi2);
2259 }
2260 else
2261 {
2262 gsi_replace_with_seq_vops (gsi, stmts);
2263 fold_stmt (gsi);
2264 }
2265 return true;
2266 }
2267
2268 /* If the format is "%s", use strcpy; when the result is used, the length of ORIG must be known. */
2269 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2270 {
2271 tree fn;
2272 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2273
2274 if (!fn)
2275 return false;
2276
2277 /* Don't crash on sprintf (str1, "%s"). */
2278 if (!orig)
2279 return false;
2280
2281 tree orig_len = NULL_TREE;
2282 if (gimple_call_lhs (stmt))
2283 {
2284 orig_len = get_maxval_strlen (orig, 0);
2285 if (!orig_len)
2286 return false;
2287 }
2288
2289 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2290 gimple_seq stmts = NULL;
2291 gimple repl = gimple_build_call (fn, 2, dest, orig);
2292 gimple_seq_add_stmt_without_update (&stmts, repl);
2293 if (gimple_call_lhs (stmt))
2294 {
2295 if (!useless_type_conversion_p (integer_type_node,
2296 TREE_TYPE (orig_len)))
2297 orig_len = fold_convert (integer_type_node, orig_len);
2298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2299 gimple_seq_add_stmt_without_update (&stmts, repl);
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 /* gsi now points at the assignment to the lhs, get a
2302 stmt iterator to the strcpy call.
2303 ??? We can't use gsi_for_stmt as that doesn't work when the
2304 CFG isn't built yet. */
2305 gimple_stmt_iterator gsi2 = *gsi;
2306 gsi_prev (&gsi2);
2307 fold_stmt (&gsi2);
2308 }
2309 else
2310 {
2311 gsi_replace_with_seq_vops (gsi, stmts);
2312 fold_stmt (gsi);
2313 }
2314 return true;
2315 }
2316 return false;
2317 }
2318
2319 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2320 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2321 attempt to simplify calls with more than 4 arguments.
2322
2323 Return true if the call was simplified, false otherwise. The
2324 simplification requires DESTSIZE to be constant and the known
2325 output length to be strictly below it. */
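 /* For instance, with hypothetical operands,
    snprintf (buf, 16, "%s", "abc") becomes strcpy (buf, "abc") with any
    result replaced by 3, because the known length 3 is strictly below
    the bound 16; otherwise the call is left alone.  */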
2326
2327 static bool
2328 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2329 {
2330 gimple stmt = gsi_stmt (*gsi);
2331 tree dest = gimple_call_arg (stmt, 0);
2332 tree destsize = gimple_call_arg (stmt, 1);
2333 tree fmt = gimple_call_arg (stmt, 2);
2334 tree orig = NULL_TREE;
2335 const char *fmt_str = NULL;
2336
2337 if (gimple_call_num_args (stmt) > 4)
2338 return false;
2339
2340 if (gimple_call_num_args (stmt) == 4)
2341 orig = gimple_call_arg (stmt, 3);
2342
2343 if (!tree_fits_uhwi_p (destsize))
2344 return false;
2345 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2346
2347 /* Check whether the format is a literal string constant. */
2348 fmt_str = c_getstr (fmt);
2349 if (fmt_str == NULL)
2350 return false;
2351
2352 if (!init_target_chars ())
2353 return false;
2354
2355 /* If the format doesn't contain % args or %%, use strcpy. */
2356 if (strchr (fmt_str, target_percent) == NULL)
2357 {
2358 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2359 if (!fn)
2360 return false;
2361
2362 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2363 if (orig)
2364 return false;
2365
2366 /* We could expand this as
2367 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2368 or to
2369 memcpy (str, fmt_with_nul_at_cstm1, cst);
2370 but in the former case that might increase code size
2371 and in the latter case grow .rodata section too much.
2372 So punt for now. */
2373 size_t len = strlen (fmt_str);
2374 if (len >= destlen)
2375 return false;
2376
2377 gimple_seq stmts = NULL;
2378 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380 if (gimple_call_lhs (stmt))
2381 {
2382 repl = gimple_build_assign (gimple_call_lhs (stmt),
2383 build_int_cst (integer_type_node, len));
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 gsi_replace_with_seq_vops (gsi, stmts);
2386 /* gsi now points at the assignment to the lhs, get a
2387 stmt iterator to the strcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2 = *gsi;
2391 gsi_prev (&gsi2);
2392 fold_stmt (&gsi2);
2393 }
2394 else
2395 {
2396 gsi_replace_with_seq_vops (gsi, stmts);
2397 fold_stmt (gsi);
2398 }
2399 return true;
2400 }
2401
2402 /* If the format is "%s" and the length of the string argument is known to fit, use strcpy. */
2403 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2404 {
2405 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2406 if (!fn)
2407 return false;
2408
2409 /* Don't crash on snprintf (str1, cst, "%s"). */
2410 if (!orig)
2411 return false;
2412
2413 tree orig_len = get_maxval_strlen (orig, 0);
2414 if (!orig_len)
2415 return false;
2416
2417 /* We could expand this as
2418 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2419 or to
2420 memcpy (str1, str2_with_nul_at_cstm1, cst);
2421 but in the former case that might increase code size
2422 and in the latter case grow .rodata section too much.
2423 So punt for now. */
2424 if (compare_tree_int (orig_len, destlen) >= 0)
2425 return false;
2426
2427 /* Convert snprintf (str1, cst, "%s", str2) into
2428 strcpy (str1, str2) if strlen (str2) < cst. */
2429 gimple_seq stmts = NULL;
2430 gimple repl = gimple_build_call (fn, 2, dest, orig);
2431 gimple_seq_add_stmt_without_update (&stmts, repl);
2432 if (gimple_call_lhs (stmt))
2433 {
2434 if (!useless_type_conversion_p (integer_type_node,
2435 TREE_TYPE (orig_len)))
2436 orig_len = fold_convert (integer_type_node, orig_len);
2437 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2438 gimple_seq_add_stmt_without_update (&stmts, repl);
2439 gsi_replace_with_seq_vops (gsi, stmts);
2440 /* gsi now points at the assignment to the lhs, get a
2441 stmt iterator to the strcpy call.
2442 ??? We can't use gsi_for_stmt as that doesn't work when the
2443 CFG isn't built yet. */
2444 gimple_stmt_iterator gsi2 = *gsi;
2445 gsi_prev (&gsi2);
2446 fold_stmt (&gsi2);
2447 }
2448 else
2449 {
2450 gsi_replace_with_seq_vops (gsi, stmts);
2451 fold_stmt (gsi);
2452 }
2453 return true;
2454 }
2455 return false;
2456 }
2457
2458
2459 /* Fold a call to __builtin_strlen when the length of its argument is known. */
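 /* E.g. with P known to point to the (hypothetical) literal "abc",
    strlen (p) is replaced by the constant 3.  */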
2460
2461 static bool
2462 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2463 {
2464 gimple stmt = gsi_stmt (*gsi);
2465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2466 if (!len)
2467 return false;
2468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2469 replace_call_with_value (gsi, len);
2470 return true;
2471 }
2472
2473
2474 /* Fold the non-target builtin at *GSI and return whether any simplification
2475 was made. */
2476
2477 static bool
2478 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2479 {
2480 gimple stmt = gsi_stmt (*gsi);
2481 tree callee = gimple_call_fndecl (stmt);
2482
2483 /* Give up for always_inline inline builtins until they are
2484 inlined. */
2485 if (avoid_folding_inline_builtin (callee))
2486 return false;
2487
2488 switch (DECL_FUNCTION_CODE (callee))
2489 {
2490 case BUILT_IN_BZERO:
2491 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2492 gimple_call_arg (stmt, 1));
2493 case BUILT_IN_MEMSET:
2494 return gimple_fold_builtin_memset (gsi,
2495 gimple_call_arg (stmt, 1),
2496 gimple_call_arg (stmt, 2));
2497 case BUILT_IN_BCOPY:
2498 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2499 gimple_call_arg (stmt, 0), 3);
2500 case BUILT_IN_MEMCPY:
2501 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2502 gimple_call_arg (stmt, 1), 0);
2503 case BUILT_IN_MEMPCPY:
2504 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2505 gimple_call_arg (stmt, 1), 1);
2506 case BUILT_IN_MEMMOVE:
2507 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2508 gimple_call_arg (stmt, 1), 3);
2509 case BUILT_IN_SPRINTF_CHK:
2510 case BUILT_IN_VSPRINTF_CHK:
2511 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2512 case BUILT_IN_STRCAT_CHK:
2513 return gimple_fold_builtin_strcat_chk (gsi);
2514 case BUILT_IN_STRNCAT_CHK:
2515 return gimple_fold_builtin_strncat_chk (gsi);
2516 case BUILT_IN_STRLEN:
2517 return gimple_fold_builtin_strlen (gsi);
2518 case BUILT_IN_STRCPY:
2519 return gimple_fold_builtin_strcpy (gsi,
2520 gimple_call_arg (stmt, 0),
2521 gimple_call_arg (stmt, 1));
2522 case BUILT_IN_STRNCPY:
2523 return gimple_fold_builtin_strncpy (gsi,
2524 gimple_call_arg (stmt, 0),
2525 gimple_call_arg (stmt, 1),
2526 gimple_call_arg (stmt, 2));
2527 case BUILT_IN_STRCAT:
2528 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2529 gimple_call_arg (stmt, 1));
2530 case BUILT_IN_FPUTS:
2531 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2532 gimple_call_arg (stmt, 1), false);
2533 case BUILT_IN_FPUTS_UNLOCKED:
2534 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1), true);
2536 case BUILT_IN_MEMCPY_CHK:
2537 case BUILT_IN_MEMPCPY_CHK:
2538 case BUILT_IN_MEMMOVE_CHK:
2539 case BUILT_IN_MEMSET_CHK:
2540 return gimple_fold_builtin_memory_chk (gsi,
2541 gimple_call_arg (stmt, 0),
2542 gimple_call_arg (stmt, 1),
2543 gimple_call_arg (stmt, 2),
2544 gimple_call_arg (stmt, 3),
2545 DECL_FUNCTION_CODE (callee));
2546 case BUILT_IN_STRCPY_CHK:
2547 case BUILT_IN_STPCPY_CHK:
2548 return gimple_fold_builtin_stxcpy_chk (gsi,
2549 gimple_call_arg (stmt, 0),
2550 gimple_call_arg (stmt, 1),
2551 gimple_call_arg (stmt, 2),
2552 DECL_FUNCTION_CODE (callee));
2553 case BUILT_IN_STRNCPY_CHK:
2554 case BUILT_IN_STPNCPY_CHK:
2555 return gimple_fold_builtin_stxncpy_chk (gsi,
2556 gimple_call_arg (stmt, 0),
2557 gimple_call_arg (stmt, 1),
2558 gimple_call_arg (stmt, 2),
2559 gimple_call_arg (stmt, 3),
2560 DECL_FUNCTION_CODE (callee));
2561 case BUILT_IN_SNPRINTF_CHK:
2562 case BUILT_IN_VSNPRINTF_CHK:
2563 return gimple_fold_builtin_snprintf_chk (gsi,
2564 DECL_FUNCTION_CODE (callee));
2565 case BUILT_IN_SNPRINTF:
2566 return gimple_fold_builtin_snprintf (gsi);
2567 case BUILT_IN_SPRINTF:
2568 return gimple_fold_builtin_sprintf (gsi);
2569 default:;
2570 }
2571
2572 /* Try the generic builtin folder. */
2573 bool ignore = (gimple_call_lhs (stmt) == NULL);
2574 tree result = fold_call_stmt (stmt, ignore);
2575 if (result)
2576 {
2577 if (ignore)
2578 STRIP_NOPS (result);
2579 else
2580 result = fold_convert (gimple_call_return_type (stmt), result);
2581 if (!update_call_from_tree (gsi, result))
2582 gimplify_and_update_call_from_tree (gsi, result);
2583 return true;
2584 }
2585
2586 return false;
2587 }
2588
2589 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
2590 signed precision, does not fit into TYPE. The overflow test is done
2591 regardless of -fwrapv, and even for unsigned types. */
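 /* Sketch: the operation is evaluated in a double-width intermediate, so
    e.g. for a 32-bit unsigned TYPE, 0xffffffff + 1 yields 0x100000000,
    which needs 33 bits and thus overflows; likewise 0 - 1 is negative
    and overflows any unsigned TYPE.  */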
2592
2593 bool
2594 arith_overflowed_p (enum tree_code code, const_tree type,
2595 const_tree arg0, const_tree arg1)
2596 {
2597 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2598 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2599 widest2_int_cst;
2600 widest2_int warg0 = widest2_int_cst (arg0);
2601 widest2_int warg1 = widest2_int_cst (arg1);
2602 widest2_int wres;
2603 switch (code)
2604 {
2605 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2606 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2607 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2608 default: gcc_unreachable ();
2609 }
2610 signop sign = TYPE_SIGN (type);
2611 if (sign == UNSIGNED && wi::neg_p (wres))
2612 return true;
2613 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2614 }
2615
2616 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2617 The statement may be replaced by another statement, e.g., if the call
2618 simplifies to a constant value. Return true if any changes were made.
2619 It is assumed that the operands have been previously folded. */
2620
2621 static bool
2622 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2623 {
2624 gimple stmt = gsi_stmt (*gsi);
2625 tree callee;
2626 bool changed = false;
2627 unsigned i;
2628
2629 /* Fold *& in call arguments. */
2630 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2631 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2632 {
2633 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2634 if (tmp)
2635 {
2636 gimple_call_set_arg (stmt, i, tmp);
2637 changed = true;
2638 }
2639 }
2640
2641 /* Check for virtual calls that became direct calls. */
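 /* For example (hypothetical IL), a call through
    OBJ_TYPE_REF (foo; ptr->0) (ptr) whose OBJ_TYPE_REF_EXPR already
    names a function decl is rewritten into a direct call to that decl;
    further below, with -fdevirtualize, a polymorphic call with at most
    one possible target is folded to that target, or to
    __builtin_unreachable when there is none.  */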
2642 callee = gimple_call_fn (stmt);
2643 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2644 {
2645 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2646 {
2647 if (dump_file && virtual_method_call_p (callee)
2648 && !possible_polymorphic_call_target_p
2649 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2650 (OBJ_TYPE_REF_EXPR (callee)))))
2651 {
2652 fprintf (dump_file,
2653 "Type inheritance inconsistent devirtualization of ");
2654 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2655 fprintf (dump_file, " to ");
2656 print_generic_expr (dump_file, callee, TDF_SLIM);
2657 fprintf (dump_file, "\n");
2658 }
2659
2660 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2661 changed = true;
2662 }
2663 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2664 {
2665 bool final;
2666 vec <cgraph_node *>targets
2667 = possible_polymorphic_call_targets (callee, stmt, &final);
2668 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2669 {
2670 tree lhs = gimple_call_lhs (stmt);
2671 if (dump_enabled_p ())
2672 {
2673 location_t loc = gimple_location_safe (stmt);
2674 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2675 "folding virtual function call to %s\n",
2676 targets.length () == 1
2677 ? targets[0]->name ()
2678 : "__builtin_unreachable");
2679 }
2680 if (targets.length () == 1)
2681 {
2682 gimple_call_set_fndecl (stmt, targets[0]->decl);
2683 changed = true;
2684 /* If the call becomes noreturn, remove the lhs. */
2685 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2686 {
2687 if (TREE_CODE (lhs) == SSA_NAME)
2688 {
2689 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2690 tree def = get_or_create_ssa_default_def (cfun, var);
2691 gimple new_stmt = gimple_build_assign (lhs, def);
2692 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2693 }
2694 gimple_call_set_lhs (stmt, NULL_TREE);
2695 }
2696 }
2697 else
2698 {
2699 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2700 gimple new_stmt = gimple_build_call (fndecl, 0);
2701 gimple_set_location (new_stmt, gimple_location (stmt));
2702 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2703 {
2704 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2705 tree def = get_or_create_ssa_default_def (cfun, var);
2706
2707 /* To satisfy the condition for
2708 cgraph_update_edges_for_call_stmt_node,
2709 we need to preserve the GIMPLE_CALL statement
2710 at the position of the GSI iterator. */
2711 update_call_from_tree (gsi, def);
2712 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2713 }
2714 else
2715 {
2716 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2717 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2718 gsi_replace (gsi, new_stmt, false);
2719 }
2720 return true;
2721 }
2722 }
2723 }
2724 }
2725
2726 if (inplace)
2727 return changed;
2728
2729 /* Check for builtins that CCP can handle using information not
2730 available in the generic fold routines. */
2731 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2732 {
2733 if (gimple_fold_builtin (gsi))
2734 changed = true;
2735 }
2736 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2737 {
2738 changed |= targetm.gimple_fold_builtin (gsi);
2739 }
2740 else if (gimple_call_internal_p (stmt))
2741 {
2742 enum tree_code subcode = ERROR_MARK;
2743 tree result = NULL_TREE;
2744 bool cplx_result = false;
2745 tree overflow = NULL_TREE;
2746 switch (gimple_call_internal_fn (stmt))
2747 {
2748 case IFN_BUILTIN_EXPECT:
2749 result = fold_builtin_expect (gimple_location (stmt),
2750 gimple_call_arg (stmt, 0),
2751 gimple_call_arg (stmt, 1),
2752 gimple_call_arg (stmt, 2));
2753 break;
2754 case IFN_UBSAN_OBJECT_SIZE:
2755 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2756 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2757 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2758 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2759 gimple_call_arg (stmt, 2))))
2760 {
2761 gsi_replace (gsi, gimple_build_nop (), true);
2762 unlink_stmt_vdef (stmt);
2763 release_defs (stmt);
2764 return true;
2765 }
2766 break;
2767 case IFN_UBSAN_CHECK_ADD:
2768 subcode = PLUS_EXPR;
2769 break;
2770 case IFN_UBSAN_CHECK_SUB:
2771 subcode = MINUS_EXPR;
2772 break;
2773 case IFN_UBSAN_CHECK_MUL:
2774 subcode = MULT_EXPR;
2775 break;
2776 case IFN_ADD_OVERFLOW:
2777 subcode = PLUS_EXPR;
2778 cplx_result = true;
2779 break;
2780 case IFN_SUB_OVERFLOW:
2781 subcode = MINUS_EXPR;
2782 cplx_result = true;
2783 break;
2784 case IFN_MUL_OVERFLOW:
2785 subcode = MULT_EXPR;
2786 cplx_result = true;
2787 break;
2788 default:
2789 break;
2790 }
2791 if (subcode != ERROR_MARK)
2792 {
2793 tree arg0 = gimple_call_arg (stmt, 0);
2794 tree arg1 = gimple_call_arg (stmt, 1);
2795 tree type = TREE_TYPE (arg0);
2796 if (cplx_result)
2797 {
2798 tree lhs = gimple_call_lhs (stmt);
2799 if (lhs == NULL_TREE)
2800 type = NULL_TREE;
2801 else
2802 type = TREE_TYPE (TREE_TYPE (lhs));
2803 }
2804 if (type == NULL_TREE)
2805 ;
2806 /* x = y + 0; x = y - 0; x = y * 0; */
2807 else if (integer_zerop (arg1))
2808 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
2809 /* x = 0 + y; x = 0 * y; */
2810 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2811 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
2812 /* x = y - y; */
2813 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2814 result = integer_zero_node;
2815 /* x = y * 1; x = 1 * y; */
2816 else if (subcode == MULT_EXPR && integer_onep (arg1))
2817 result = arg0;
2818 else if (subcode == MULT_EXPR && integer_onep (arg0))
2819 result = arg1;
2820 else if (TREE_CODE (arg0) == INTEGER_CST
2821 && TREE_CODE (arg1) == INTEGER_CST)
2822 {
2823 if (cplx_result)
2824 result = int_const_binop (subcode, fold_convert (type, arg0),
2825 fold_convert (type, arg1));
2826 else
2827 result = int_const_binop (subcode, arg0, arg1);
2828 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
2829 {
2830 if (cplx_result)
2831 overflow = build_one_cst (type);
2832 else
2833 result = NULL_TREE;
2834 }
2835 }
2836 if (result)
2837 {
2838 if (result == integer_zero_node)
2839 result = build_zero_cst (type);
2840 else if (cplx_result && TREE_TYPE (result) != type)
2841 {
2842 if (TREE_CODE (result) == INTEGER_CST)
2843 {
2844 if (arith_overflowed_p (PLUS_EXPR, type, result,
2845 integer_zero_node))
2846 overflow = build_one_cst (type);
2847 }
2848 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
2849 && TYPE_UNSIGNED (type))
2850 || (TYPE_PRECISION (type)
2851 < (TYPE_PRECISION (TREE_TYPE (result))
2852 + (TYPE_UNSIGNED (TREE_TYPE (result))
2853 && !TYPE_UNSIGNED (type)))))
2854 result = NULL_TREE;
2855 if (result)
2856 result = fold_convert (type, result);
2857 }
2858 }
2859 }
2860
2861 if (result)
2862 {
2863 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
2864 result = drop_tree_overflow (result);
2865 if (cplx_result)
2866 {
2867 if (overflow == NULL_TREE)
2868 overflow = build_zero_cst (TREE_TYPE (result));
2869 tree ctype = build_complex_type (TREE_TYPE (result));
2870 if (TREE_CODE (result) == INTEGER_CST
2871 && TREE_CODE (overflow) == INTEGER_CST)
2872 result = build_complex (ctype, result, overflow);
2873 else
2874 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
2875 ctype, result, overflow);
2876 }
2877 if (!update_call_from_tree (gsi, result))
2878 gimplify_and_update_call_from_tree (gsi, result);
2879 changed = true;
2880 }
2881 }
2882
2883 return changed;
2884 }
2885
2886
2887 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
2888 gimple_simplify.
2889
2890 Replaces *GSI with the simplification result in RCODE and OPS
2891 and the associated statements in *SEQ. Does the replacement
2892 according to INPLACE and returns true if the operation succeeded. */
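 /* For example, when gimple_simplify reduces the condition of a
    GIMPLE_COND to the integer constant 0, the statement is rewritten
    with gimple_cond_make_false below; a simplification to a comparison
    such as a < b (operands hypothetical) is installed directly as the
    new condition.  */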
2893
2894 static bool
2895 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2896 code_helper rcode, tree *ops,
2897 gimple_seq *seq, bool inplace)
2898 {
2899 gimple stmt = gsi_stmt (*gsi);
2900
2901 /* Play safe and do not allow abnormals to be mentioned in
2902 newly created statements. See also maybe_push_res_to_seq. */
2903 if ((TREE_CODE (ops[0]) == SSA_NAME
2904 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2905 || (ops[1]
2906 && TREE_CODE (ops[1]) == SSA_NAME
2907 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2908 || (ops[2]
2909 && TREE_CODE (ops[2]) == SSA_NAME
2910 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2911 return false;
2912
2913 if (gimple_code (stmt) == GIMPLE_COND)
2914 {
2915 gcc_assert (rcode.is_tree_code ());
2916 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2917 /* A GIMPLE_COND's condition may not throw. */
2918 && (!flag_exceptions
2919 || !cfun->can_throw_non_call_exceptions
2920 || !operation_could_trap_p (rcode,
2921 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2922 false, NULL_TREE)))
2923 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
2924 else if (rcode == SSA_NAME)
2925 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2926 build_zero_cst (TREE_TYPE (ops[0])));
2927 else if (rcode == INTEGER_CST)
2928 {
2929 if (integer_zerop (ops[0]))
2930 gimple_cond_make_false (stmt);
2931 else
2932 gimple_cond_make_true (stmt);
2933 }
2934 else if (!inplace)
2935 {
2936 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2937 ops, seq);
2938 if (!res)
2939 return false;
2940 gimple_cond_set_condition (stmt, NE_EXPR, res,
2941 build_zero_cst (TREE_TYPE (res)));
2942 }
2943 else
2944 return false;
2945 if (dump_file && (dump_flags & TDF_DETAILS))
2946 {
2947 fprintf (dump_file, "gimple_simplified to ");
2948 if (!gimple_seq_empty_p (*seq))
2949 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2950 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2951 0, TDF_SLIM);
2952 }
2953 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2954 return true;
2955 }
2956 else if (is_gimple_assign (stmt)
2957 && rcode.is_tree_code ())
2958 {
2959 if (!inplace
2960 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
2961 {
2962 maybe_build_generic_op (rcode,
2963 TREE_TYPE (gimple_assign_lhs (stmt)),
2964 &ops[0], ops[1], ops[2]);
2965 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2966 ops[0], ops[1], ops[2]);
2967 if (dump_file && (dump_flags & TDF_DETAILS))
2968 {
2969 fprintf (dump_file, "gimple_simplified to ");
2970 if (!gimple_seq_empty_p (*seq))
2971 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2972 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2973 0, TDF_SLIM);
2974 }
2975 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2976 return true;
2977 }
2978 }
2979 else if (!inplace)
2980 {
2981 if (gimple_has_lhs (stmt))
2982 {
2983 tree lhs = gimple_get_lhs (stmt);
2984 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
2985 ops, seq, lhs);
2986 if (dump_file && (dump_flags & TDF_DETAILS))
2987 {
2988 fprintf (dump_file, "gimple_simplified to ");
2989 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2990 }
2991 gsi_replace_with_seq_vops (gsi, *seq);
2992 return true;
2993 }
2994 else
2995 gcc_unreachable ();
2996 }
2997
2998 return false;
2999 }
3000
3001 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
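 /* Illustrative cases, with hypothetical trees: MEM[&a.b, 8] becomes
    MEM[&a, C + 8] where C is the constant byte offset of member B, and
    MEM[&decl, 0] is rewritten back to plain DECL when the volatility,
    TBAA behavior, alignment and type of the access match the decl.  */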
3002
3003 static bool
3004 maybe_canonicalize_mem_ref_addr (tree *t)
3005 {
3006 bool res = false;
3007
3008 if (TREE_CODE (*t) == ADDR_EXPR)
3009 t = &TREE_OPERAND (*t, 0);
3010
3011 while (handled_component_p (*t))
3012 t = &TREE_OPERAND (*t, 0);
3013
3014 /* Canonicalize MEM [&foo.bar, 0], which appears after an invariant
3015 address has been propagated into an SSA-name MEM_REF address. */
3016 if (TREE_CODE (*t) == MEM_REF
3017 || TREE_CODE (*t) == TARGET_MEM_REF)
3018 {
3019 tree addr = TREE_OPERAND (*t, 0);
3020 if (TREE_CODE (addr) == ADDR_EXPR
3021 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3022 || handled_component_p (TREE_OPERAND (addr, 0))))
3023 {
3024 tree base;
3025 HOST_WIDE_INT coffset;
3026 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3027 &coffset);
3028 if (!base)
3029 gcc_unreachable ();
3030
3031 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3032 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3033 TREE_OPERAND (*t, 1),
3034 size_int (coffset));
3035 res = true;
3036 }
3037 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3038 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3039 }
3040
3041 /* Canonicalize back MEM_REFs to plain reference trees if the object
3042 accessed is a decl that has the same access semantics as the MEM_REF. */
3043 if (TREE_CODE (*t) == MEM_REF
3044 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3045 && integer_zerop (TREE_OPERAND (*t, 1)))
3046 {
3047 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3048 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3049 if (/* Same volatile qualification. */
3050 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3051 /* Same TBAA behavior with -fstrict-aliasing. */
3052 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3053 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3054 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3055 /* Same alignment. */
3056 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3057 /* Take care here not to drop a required conversion
3058 from the rhs to the lhs if *t appears on the lhs, or vice versa
3059 if it appears on the rhs. Thus require strict type
3060 compatibility. */
3061 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3062 {
3063 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3064 res = true;
3065 }
3066 }
3067
3068 /* Canonicalize TARGET_MEM_REF in particular with respect to
3069 the indexes becoming constant. */
3070 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3071 {
3072 tree tem = maybe_fold_tmr (*t);
3073 if (tem)
3074 {
3075 *t = tem;
3076 res = true;
3077 }
3078 }
3079
3080 return res;
3081 }
3082
3083 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3084 distinguishes the two cases. */
3085
3086 static bool
3087 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3088 {
3089 bool changed = false;
3090 gimple stmt = gsi_stmt (*gsi);
3091 unsigned i;
3092
3093 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3094 after propagation.
3095 ??? This shouldn't be done in generic folding but in the
3096 propagation helpers which also know whether an address was
3097 propagated. */
3098 switch (gimple_code (stmt))
3099 {
3100 case GIMPLE_ASSIGN:
3101 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3102 {
3103 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3104 if ((REFERENCE_CLASS_P (*rhs)
3105 || TREE_CODE (*rhs) == ADDR_EXPR)
3106 && maybe_canonicalize_mem_ref_addr (rhs))
3107 changed = true;
3108 tree *lhs = gimple_assign_lhs_ptr (stmt);
3109 if (REFERENCE_CLASS_P (*lhs)
3110 && maybe_canonicalize_mem_ref_addr (lhs))
3111 changed = true;
3112 }
3113 break;
3114 case GIMPLE_CALL:
3115 {
3116 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3117 {
3118 tree *arg = gimple_call_arg_ptr (stmt, i);
3119 if (REFERENCE_CLASS_P (*arg)
3120 && maybe_canonicalize_mem_ref_addr (arg))
3121 changed = true;
3122 }
3123 tree *lhs = gimple_call_lhs_ptr (stmt);
3124 if (*lhs
3125 && REFERENCE_CLASS_P (*lhs)
3126 && maybe_canonicalize_mem_ref_addr (lhs))
3127 changed = true;
3128 break;
3129 }
3130 case GIMPLE_ASM:
3131 {
3132 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3133 {
3134 tree link = gimple_asm_output_op (stmt, i);
3135 tree op = TREE_VALUE (link);
3136 if (REFERENCE_CLASS_P (op)
3137 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3138 changed = true;
3139 }
3140 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3141 {
3142 tree link = gimple_asm_input_op (stmt, i);
3143 tree op = TREE_VALUE (link);
3144 if ((REFERENCE_CLASS_P (op)
3145 || TREE_CODE (op) == ADDR_EXPR)
3146 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3147 changed = true;
3148 }
3149 }
3150 break;
3151 case GIMPLE_DEBUG:
3152 if (gimple_debug_bind_p (stmt))
3153 {
3154 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3155 if (*val
3156 && (REFERENCE_CLASS_P (*val)
3157 || TREE_CODE (*val) == ADDR_EXPR)
3158 && maybe_canonicalize_mem_ref_addr (val))
3159 changed = true;
3160 }
3161 break;
3162 default:;
3163 }
3164
3165 /* Dispatch to pattern-based folding. */
3166 if (!inplace
3167 || is_gimple_assign (stmt)
3168 || gimple_code (stmt) == GIMPLE_COND)
3169 {
3170 gimple_seq seq = NULL;
3171 code_helper rcode;
3172 tree ops[3] = {};
3173 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3174 {
3175 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3176 changed = true;
3177 else
3178 gimple_seq_discard (seq);
3179 }
3180 }
3181
3182 stmt = gsi_stmt (*gsi);
3183
3184 /* Fold the main computation performed by the statement. */
3185 switch (gimple_code (stmt))
3186 {
3187 case GIMPLE_ASSIGN:
3188 {
3189 unsigned old_num_ops = gimple_num_ops (stmt);
3190 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3191 tree lhs = gimple_assign_lhs (stmt);
3192 tree new_rhs;
3193 /* First canonicalize operand order. This avoids building new
3194 trees if this is the only thing fold would later do. */
3195 if ((commutative_tree_code (subcode)
3196 || commutative_ternary_tree_code (subcode))
3197 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3198 gimple_assign_rhs2 (stmt), false))
3199 {
3200 tree tem = gimple_assign_rhs1 (stmt);
3201 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3202 gimple_assign_set_rhs2 (stmt, tem);
3203 changed = true;
3204 }
3205 new_rhs = fold_gimple_assign (gsi);
3206 if (new_rhs
3207 && !useless_type_conversion_p (TREE_TYPE (lhs),
3208 TREE_TYPE (new_rhs)))
3209 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3210 if (new_rhs
3211 && (!inplace
3212 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3213 {
3214 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3215 changed = true;
3216 }
3217 break;
3218 }
3219
3220 case GIMPLE_COND:
3221 changed |= fold_gimple_cond (stmt);
3222 break;
3223
3224 case GIMPLE_CALL:
3225 changed |= gimple_fold_call (gsi, inplace);
3226 break;
3227
3228 case GIMPLE_ASM:
3229 /* Fold *& in asm operands. */
3230 {
3231 size_t noutputs;
3232 const char **oconstraints;
3233 const char *constraint;
3234 bool allows_mem, allows_reg;
3235
3236 noutputs = gimple_asm_noutputs (stmt);
3237 oconstraints = XALLOCAVEC (const char *, noutputs);
3238
3239 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3240 {
3241 tree link = gimple_asm_output_op (stmt, i);
3242 tree op = TREE_VALUE (link);
3243 oconstraints[i]
3244 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3245 if (REFERENCE_CLASS_P (op)
3246 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3247 {
3248 TREE_VALUE (link) = op;
3249 changed = true;
3250 }
3251 }
3252 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3253 {
3254 tree link = gimple_asm_input_op (stmt, i);
3255 tree op = TREE_VALUE (link);
3256 constraint
3257 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3258 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3259 oconstraints, &allows_mem, &allows_reg);
3260 if (REFERENCE_CLASS_P (op)
3261 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3262 != NULL_TREE)
3263 {
3264 TREE_VALUE (link) = op;
3265 changed = true;
3266 }
3267 }
3268 }
3269 break;
3270
3271 case GIMPLE_DEBUG:
3272 if (gimple_debug_bind_p (stmt))
3273 {
3274 tree val = gimple_debug_bind_get_value (stmt);
3275 if (val
3276 && REFERENCE_CLASS_P (val))
3277 {
3278 tree tem = maybe_fold_reference (val, false);
3279 if (tem)
3280 {
3281 gimple_debug_bind_set_value (stmt, tem);
3282 changed = true;
3283 }
3284 }
3285 else if (val
3286 && TREE_CODE (val) == ADDR_EXPR)
3287 {
3288 tree ref = TREE_OPERAND (val, 0);
3289 tree tem = maybe_fold_reference (ref, false);
3290 if (tem)
3291 {
3292 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3293 gimple_debug_bind_set_value (stmt, tem);
3294 changed = true;
3295 }
3296 }
3297 }
3298 break;
3299
3300 default:;
3301 }
3302
3303 stmt = gsi_stmt (*gsi);
3304
3305 /* Fold *& on the lhs. */
3306 if (gimple_has_lhs (stmt))
3307 {
3308 tree lhs = gimple_get_lhs (stmt);
3309 if (lhs && REFERENCE_CLASS_P (lhs))
3310 {
3311 tree new_lhs = maybe_fold_reference (lhs, true);
3312 if (new_lhs)
3313 {
3314 gimple_set_lhs (stmt, new_lhs);
3315 changed = true;
3316 }
3317 }
3318 }
3319
3320 return changed;
3321 }
3322
3323 /* Valueization callback that ends up not following SSA edges. */
3324
3325 tree
3326 no_follow_ssa_edges (tree)
3327 {
3328 return NULL_TREE;
3329 }
3330
3331 /* Valueization callback that ends up following single-use SSA edges only. */
3332
3333 tree
3334 follow_single_use_edges (tree val)
3335 {
3336 if (TREE_CODE (val) == SSA_NAME
3337 && !has_single_use (val))
3338 return NULL_TREE;
3339 return val;
3340 }
3341
3342 /* Fold the statement pointed to by GSI. In some cases, this function may
3343 replace the whole statement with a new one. Returns true iff folding
3344 makes any changes.
3345 The statement pointed to by GSI should be in valid gimple form but may
3346 be in unfolded state as resulting from for example constant propagation
3347 which can produce *&x = 0. */
3348
3349 bool
3350 fold_stmt (gimple_stmt_iterator *gsi)
3351 {
3352 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3353 }
3354
3355 bool
3356 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3357 {
3358 return fold_stmt_1 (gsi, false, valueize);
3359 }
3360
3361 /* Perform the minimal folding on statement *GSI. Only operations like
3362 *&x created by constant propagation are handled. The statement cannot
3363 be replaced with a new one. Return true if the statement was
3364 changed, false otherwise.
3365 The statement *GSI should be in valid gimple form but may
3366 be in unfolded state as resulting from for example constant propagation
3367 which can produce *&x = 0. */
3368
3369 bool
3370 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3371 {
3372 gimple stmt = gsi_stmt (*gsi);
3373 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3374 gcc_assert (gsi_stmt (*gsi) == stmt);
3375 return changed;
3376 }
3377
3378 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3379 if EXPR is null or we don't know how.
3380 If non-null, the result always has boolean type. */
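 /* Sample mappings, with X a hypothetical SSA name of non-boolean type:
      canonicalize_bool (0, false)     -> boolean_false_node
      canonicalize_bool (x, false)     -> x != 0
      canonicalize_bool (a < b, true)  -> a >= b  */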
3381
3382 static tree
3383 canonicalize_bool (tree expr, bool invert)
3384 {
3385 if (!expr)
3386 return NULL_TREE;
3387 else if (invert)
3388 {
3389 if (integer_nonzerop (expr))
3390 return boolean_false_node;
3391 else if (integer_zerop (expr))
3392 return boolean_true_node;
3393 else if (TREE_CODE (expr) == SSA_NAME)
3394 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3395 build_int_cst (TREE_TYPE (expr), 0));
3396 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3397 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3398 boolean_type_node,
3399 TREE_OPERAND (expr, 0),
3400 TREE_OPERAND (expr, 1));
3401 else
3402 return NULL_TREE;
3403 }
3404 else
3405 {
3406 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3407 return expr;
3408 if (integer_nonzerop (expr))
3409 return boolean_true_node;
3410 else if (integer_zerop (expr))
3411 return boolean_false_node;
3412 else if (TREE_CODE (expr) == SSA_NAME)
3413 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3414 build_int_cst (TREE_TYPE (expr), 0));
3415 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3416 return fold_build2 (TREE_CODE (expr),
3417 boolean_type_node,
3418 TREE_OPERAND (expr, 0),
3419 TREE_OPERAND (expr, 1));
3420 else
3421 return NULL_TREE;
3422 }
3423 }
3424
3425 /* Check to see if a boolean expression EXPR is logically equivalent to the
3426 comparison (OP1 CODE OP2). Check for various identities involving
3427 SSA_NAMEs. */
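 /* E.g. (names hypothetical) an SSA_NAME T defined by t = a < b is
    treated as equivalent to the comparison a < b, and comparing T
    against zero with != reduces to the same test.  */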
3428
3429 static bool
3430 same_bool_comparison_p (const_tree expr, enum tree_code code,
3431 const_tree op1, const_tree op2)
3432 {
3433 gimple s;
3434
3435 /* The obvious case. */
3436 if (TREE_CODE (expr) == code
3437 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3438 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3439 return true;
3440
3441 /* Check for comparing (name, name != 0) and the case where expr
3442 is an SSA_NAME with a definition matching the comparison. */
3443 if (TREE_CODE (expr) == SSA_NAME
3444 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3445 {
3446 if (operand_equal_p (expr, op1, 0))
3447 return ((code == NE_EXPR && integer_zerop (op2))
3448 || (code == EQ_EXPR && integer_nonzerop (op2)));
3449 s = SSA_NAME_DEF_STMT (expr);
3450 if (is_gimple_assign (s)
3451 && gimple_assign_rhs_code (s) == code
3452 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3453 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3454 return true;
3455 }
3456
3457 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3458 of name is a comparison, recurse. */
3459 if (TREE_CODE (op1) == SSA_NAME
3460 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3461 {
3462 s = SSA_NAME_DEF_STMT (op1);
3463 if (is_gimple_assign (s)
3464 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3465 {
3466 enum tree_code c = gimple_assign_rhs_code (s);
3467 if ((c == NE_EXPR && integer_zerop (op2))
3468 || (c == EQ_EXPR && integer_nonzerop (op2)))
3469 return same_bool_comparison_p (expr, c,
3470 gimple_assign_rhs1 (s),
3471 gimple_assign_rhs2 (s));
3472 if ((c == EQ_EXPR && integer_zerop (op2))
3473 || (c == NE_EXPR && integer_nonzerop (op2)))
3474 return same_bool_comparison_p (expr,
3475 invert_tree_comparison (c, false),
3476 gimple_assign_rhs1 (s),
3477 gimple_assign_rhs2 (s));
3478 }
3479 }
3480 return false;
3481 }
3482
3483 /* Check to see if two boolean expressions OP1 and OP2 are logically
3484 equivalent. */
3485
3486 static bool
3487 same_bool_result_p (const_tree op1, const_tree op2)
3488 {
3489 /* Simple cases first. */
3490 if (operand_equal_p (op1, op2, 0))
3491 return true;
3492
3493 /* Check the cases where at least one of the operands is a comparison.
3494 These are a bit smarter than operand_equal_p in that they apply some
3495 identities on SSA_NAMEs. */
3496 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3497 && same_bool_comparison_p (op1, TREE_CODE (op2),
3498 TREE_OPERAND (op2, 0),
3499 TREE_OPERAND (op2, 1)))
3500 return true;
3501 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3502 && same_bool_comparison_p (op2, TREE_CODE (op1),
3503 TREE_OPERAND (op1, 0),
3504 TREE_OPERAND (op1, 1)))
3505 return true;
3506
3507 /* Default case. */
3508 return false;
3509 }
3510
3511 /* Forward declarations for some mutually recursive functions. */
3512
3513 static tree
3514 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3515 enum tree_code code2, tree op2a, tree op2b);
3516 static tree
3517 and_var_with_comparison (tree var, bool invert,
3518 enum tree_code code2, tree op2a, tree op2b);
3519 static tree
3520 and_var_with_comparison_1 (gimple stmt,
3521 enum tree_code code2, tree op2a, tree op2b);
3522 static tree
3523 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3524 enum tree_code code2, tree op2a, tree op2b);
3525 static tree
3526 or_var_with_comparison (tree var, bool invert,
3527 enum tree_code code2, tree op2a, tree op2b);
3528 static tree
3529 or_var_with_comparison_1 (gimple stmt,
3530 enum tree_code code2, tree op2a, tree op2b);
3531
3532 /* Helper function for and_comparisons_1: try to simplify the AND of the
3533 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3534 If INVERT is true, invert the value of VAR before doing the AND.
3535 Return NULL_TREE if we can't simplify this to a single expression. */
3536
3537 static tree
3538 and_var_with_comparison (tree var, bool invert,
3539 enum tree_code code2, tree op2a, tree op2b)
3540 {
3541 tree t;
3542 gimple stmt = SSA_NAME_DEF_STMT (var);
3543
3544 /* We can only deal with variables whose definitions are assignments. */
3545 if (!is_gimple_assign (stmt))
3546 return NULL_TREE;
3547
3548 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3549 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3550 Then we only have to consider the simpler non-inverted cases. */
3551 if (invert)
3552 t = or_var_with_comparison_1 (stmt,
3553 invert_tree_comparison (code2, false),
3554 op2a, op2b);
3555 else
3556 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3557 return canonicalize_bool (t, invert);
3558 }
3559
3560 /* Try to simplify the AND of the ssa variable defined by the assignment
3561 STMT with the comparison specified by (OP2A CODE2 OP2B).
3562 Return NULL_TREE if we can't simplify this to a single expression. */
3563
3564 static tree
3565 and_var_with_comparison_1 (gimple stmt,
3566 enum tree_code code2, tree op2a, tree op2b)
3567 {
3568 tree var = gimple_assign_lhs (stmt);
3569 tree true_test_var = NULL_TREE;
3570 tree false_test_var = NULL_TREE;
3571 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3572
3573 /* Check for identities like (var AND (var == 0)) => false. */
3574 if (TREE_CODE (op2a) == SSA_NAME
3575 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3576 {
3577 if ((code2 == NE_EXPR && integer_zerop (op2b))
3578 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3579 {
3580 true_test_var = op2a;
3581 if (var == true_test_var)
3582 return var;
3583 }
3584 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3585 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3586 {
3587 false_test_var = op2a;
3588 if (var == false_test_var)
3589 return boolean_false_node;
3590 }
3591 }
3592
3593 /* If the definition is a comparison, recurse on it. */
3594 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3595 {
3596 tree t = and_comparisons_1 (innercode,
3597 gimple_assign_rhs1 (stmt),
3598 gimple_assign_rhs2 (stmt),
3599 code2,
3600 op2a,
3601 op2b);
3602 if (t)
3603 return t;
3604 }
3605
3606 /* If the definition is an AND or OR expression, we may be able to
3607 simplify by reassociating. */
3608 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3609 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3610 {
3611 tree inner1 = gimple_assign_rhs1 (stmt);
3612 tree inner2 = gimple_assign_rhs2 (stmt);
3613 gimple s;
3614 tree t;
3615 tree partial = NULL_TREE;
3616 bool is_and = (innercode == BIT_AND_EXPR);
3617
3618 /* Check for boolean identities that don't require recursive examination
3619 of inner1/inner2:
3620 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3621 inner1 AND (inner1 OR inner2) => inner1
3622 !inner1 AND (inner1 AND inner2) => false
3623 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3624 Likewise for similar cases involving inner2. */
3625 if (inner1 == true_test_var)
3626 return (is_and ? var : inner1);
3627 else if (inner2 == true_test_var)
3628 return (is_and ? var : inner2);
3629 else if (inner1 == false_test_var)
3630 return (is_and
3631 ? boolean_false_node
3632 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3633 else if (inner2 == false_test_var)
3634 return (is_and
3635 ? boolean_false_node
3636 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3637
3638 /* Next, redistribute/reassociate the AND across the inner tests.
3639 Compute the first partial result, (inner1 AND (op2a code2 op2b)) */
3640 if (TREE_CODE (inner1) == SSA_NAME
3641 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3642 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3643 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3644 gimple_assign_rhs1 (s),
3645 gimple_assign_rhs2 (s),
3646 code2, op2a, op2b)))
3647 {
3648 /* Handle the AND case, where we are reassociating:
3649 (inner1 AND inner2) AND (op2a code2 op2b)
3650 => (t AND inner2)
3651 If the partial result t is a constant, we win. Otherwise
3652 continue on to try reassociating with the other inner test. */
3653 if (is_and)
3654 {
3655 if (integer_onep (t))
3656 return inner2;
3657 else if (integer_zerop (t))
3658 return boolean_false_node;
3659 }
3660
3661 /* Handle the OR case, where we are redistributing:
3662 (inner1 OR inner2) AND (op2a code2 op2b)
3663 => (t OR (inner2 AND (op2a code2 op2b))) */
3664 else if (integer_onep (t))
3665 return boolean_true_node;
3666
3667 /* Save partial result for later. */
3668 partial = t;
3669 }
3670
3671 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)) */
3672 if (TREE_CODE (inner2) == SSA_NAME
3673 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3674 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3675 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3676 gimple_assign_rhs1 (s),
3677 gimple_assign_rhs2 (s),
3678 code2, op2a, op2b)))
3679 {
3680 /* Handle the AND case, where we are reassociating:
3681 (inner1 AND inner2) AND (op2a code2 op2b)
3682 => (inner1 AND t) */
3683 if (is_and)
3684 {
3685 if (integer_onep (t))
3686 return inner1;
3687 else if (integer_zerop (t))
3688 return boolean_false_node;
3689 /* If both are the same, we can apply the identity
3690 (x AND x) == x. */
3691 else if (partial && same_bool_result_p (t, partial))
3692 return t;
3693 }
3694
3695 /* Handle the OR case, where we are redistributing:
3696 (inner1 OR inner2) AND (op2a code2 op2b)
3697 => (t OR (inner1 AND (op2a code2 op2b)))
3698 => (t OR partial) */
3699 else
3700 {
3701 if (integer_onep (t))
3702 return boolean_true_node;
3703 else if (partial)
3704 {
3705 /* We already got a simplification for the other
3706 operand to the redistributed OR expression. The
3707 interesting case is when at least one is false.
3708 Or, if both are the same, we can apply the identity
3709 (x OR x) == x. */
3710 if (integer_zerop (partial))
3711 return t;
3712 else if (integer_zerop (t))
3713 return partial;
3714 else if (same_bool_result_p (t, partial))
3715 return t;
3716 }
3717 }
3718 }
3719 }
3720 return NULL_TREE;
3721 }
3722
3723 /* Try to simplify the AND of two comparisons defined by
3724 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3725 If this can be done without constructing an intermediate value,
3726 return the resulting tree; otherwise NULL_TREE is returned.
3727 This function is deliberately asymmetric as it recurses on SSA_DEFs
3728 in the first comparison but not the second. */
3729
3730 static tree
3731 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3732 enum tree_code code2, tree op2a, tree op2b)
3733 {
3734 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3735
3736 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3737 if (operand_equal_p (op1a, op2a, 0)
3738 && operand_equal_p (op1b, op2b, 0))
3739 {
3740 /* Result will be either NULL_TREE, or a combined comparison. */
3741 tree t = combine_comparisons (UNKNOWN_LOCATION,
3742 TRUTH_ANDIF_EXPR, code1, code2,
3743 truth_type, op1a, op1b);
3744 if (t)
3745 return t;
3746 }
3747
3748 /* Likewise the swapped case of the above. */
3749 if (operand_equal_p (op1a, op2b, 0)
3750 && operand_equal_p (op1b, op2a, 0))
3751 {
3752 /* Result will be either NULL_TREE, or a combined comparison. */
3753 tree t = combine_comparisons (UNKNOWN_LOCATION,
3754 TRUTH_ANDIF_EXPR, code1,
3755 swap_tree_comparison (code2),
3756 truth_type, op1a, op1b);
3757 if (t)
3758 return t;
3759 }
3760
3761 /* If both comparisons are of the same value against constants, we might
3762 be able to merge them. */
3763 if (operand_equal_p (op1a, op2a, 0)
3764 && TREE_CODE (op1b) == INTEGER_CST
3765 && TREE_CODE (op2b) == INTEGER_CST)
3766 {
3767 int cmp = tree_int_cst_compare (op1b, op2b);
3768
3769 /* If we have (op1a == op1b), we should either be able to
3770 return that or FALSE, depending on whether the constant op1b
3771 also satisfies the other comparison against op2b. */
3772 if (code1 == EQ_EXPR)
3773 {
3774 bool done = true;
3775 bool val;
3776 switch (code2)
3777 {
3778 case EQ_EXPR: val = (cmp == 0); break;
3779 case NE_EXPR: val = (cmp != 0); break;
3780 case LT_EXPR: val = (cmp < 0); break;
3781 case GT_EXPR: val = (cmp > 0); break;
3782 case LE_EXPR: val = (cmp <= 0); break;
3783 case GE_EXPR: val = (cmp >= 0); break;
3784 default: done = false;
3785 }
3786 if (done)
3787 {
3788 if (val)
3789 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3790 else
3791 return boolean_false_node;
3792 }
3793 }
3794 /* Likewise if the second comparison is an == comparison. */
3795 else if (code2 == EQ_EXPR)
3796 {
3797 bool done = true;
3798 bool val;
3799 switch (code1)
3800 {
3801 case EQ_EXPR: val = (cmp == 0); break;
3802 case NE_EXPR: val = (cmp != 0); break;
3803 case LT_EXPR: val = (cmp > 0); break;
3804 case GT_EXPR: val = (cmp < 0); break;
3805 case LE_EXPR: val = (cmp >= 0); break;
3806 case GE_EXPR: val = (cmp <= 0); break;
3807 default: done = false;
3808 }
3809 if (done)
3810 {
3811 if (val)
3812 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3813 else
3814 return boolean_false_node;
3815 }
3816 }
3817
3818 /* Same business with inequality tests. */
3819 else if (code1 == NE_EXPR)
3820 {
3821 bool val;
3822 switch (code2)
3823 {
3824 case EQ_EXPR: val = (cmp != 0); break;
3825 case NE_EXPR: val = (cmp == 0); break;
3826 case LT_EXPR: val = (cmp >= 0); break;
3827 case GT_EXPR: val = (cmp <= 0); break;
3828 case LE_EXPR: val = (cmp > 0); break;
3829 case GE_EXPR: val = (cmp < 0); break;
3830 default:
3831 val = false;
3832 }
3833 if (val)
3834 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3835 }
3836 else if (code2 == NE_EXPR)
3837 {
3838 bool val;
3839 switch (code1)
3840 {
3841 case EQ_EXPR: val = (cmp == 0); break;
3842 case NE_EXPR: val = (cmp != 0); break;
3843 case LT_EXPR: val = (cmp <= 0); break;
3844 case GT_EXPR: val = (cmp >= 0); break;
3845 case LE_EXPR: val = (cmp < 0); break;
3846 case GE_EXPR: val = (cmp > 0); break;
3847 default:
3848 val = false;
3849 }
3850 if (val)
3851 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3852 }
3853
3854 /* Choose the more restrictive of two < or <= comparisons. */
3855 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3856 && (code2 == LT_EXPR || code2 == LE_EXPR))
3857 {
3858 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3859 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3860 else
3861 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3862 }
3863
3864 /* Likewise choose the more restrictive of two > or >= comparisons. */
3865 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3866 && (code2 == GT_EXPR || code2 == GE_EXPR))
3867 {
3868 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3869 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3870 else
3871 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3872 }
3873
3874 /* Check for singleton ranges. */
3875 else if (cmp == 0
3876 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3877 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3878 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3879
3880 /* Check for disjoint ranges. */
3881 else if (cmp <= 0
3882 && (code1 == LT_EXPR || code1 == LE_EXPR)
3883 && (code2 == GT_EXPR || code2 == GE_EXPR))
3884 return boolean_false_node;
3885 else if (cmp >= 0
3886 && (code1 == GT_EXPR || code1 == GE_EXPR)
3887 && (code2 == LT_EXPR || code2 == LE_EXPR))
3888 return boolean_false_node;
3889 }
3890
3891 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3892 NAME's definition is a truth value. See if there are any simplifications
3893 that can be done against the NAME's definition. */
3894 if (TREE_CODE (op1a) == SSA_NAME
3895 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3896 && (integer_zerop (op1b) || integer_onep (op1b)))
3897 {
3898 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3899 || (code1 == NE_EXPR && integer_onep (op1b)));
3900 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3901 switch (gimple_code (stmt))
3902 {
3903 case GIMPLE_ASSIGN:
3904 /* Try to simplify by copy-propagating the definition. */
3905 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3906
3907 case GIMPLE_PHI:
3908 /* If every argument to the PHI produces the same result when
3909 ANDed with the second comparison, we win.
3910 Do not do this unless the type is bool since we need a bool
3911 result here anyway. */
3912 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3913 {
3914 tree result = NULL_TREE;
3915 unsigned i;
3916 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3917 {
3918 tree arg = gimple_phi_arg_def (stmt, i);
3919
3920 /* If this PHI has itself as an argument, ignore it.
3921 If all the other args produce the same result,
3922 we're still OK. */
3923 if (arg == gimple_phi_result (stmt))
3924 continue;
3925 else if (TREE_CODE (arg) == INTEGER_CST)
3926 {
3927 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3928 {
3929 if (!result)
3930 result = boolean_false_node;
3931 else if (!integer_zerop (result))
3932 return NULL_TREE;
3933 }
3934 else if (!result)
3935 result = fold_build2 (code2, boolean_type_node,
3936 op2a, op2b);
3937 else if (!same_bool_comparison_p (result,
3938 code2, op2a, op2b))
3939 return NULL_TREE;
3940 }
3941 else if (TREE_CODE (arg) == SSA_NAME
3942 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3943 {
3944 tree temp;
3945 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3946 /* In simple cases we can look through PHI nodes,
3947 but we have to be careful with loops.
3948 See PR49073. */
3949 if (! dom_info_available_p (CDI_DOMINATORS)
3950 || gimple_bb (def_stmt) == gimple_bb (stmt)
3951 || dominated_by_p (CDI_DOMINATORS,
3952 gimple_bb (def_stmt),
3953 gimple_bb (stmt)))
3954 return NULL_TREE;
3955 temp = and_var_with_comparison (arg, invert, code2,
3956 op2a, op2b);
3957 if (!temp)
3958 return NULL_TREE;
3959 else if (!result)
3960 result = temp;
3961 else if (!same_bool_result_p (result, temp))
3962 return NULL_TREE;
3963 }
3964 else
3965 return NULL_TREE;
3966 }
3967 return result;
3968 }
3969
3970 default:
3971 break;
3972 }
3973 }
3974 return NULL_TREE;
3975 }
3976
3977 /* Try to simplify the AND of two comparisons, specified by
3978 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3979 If this can be simplified to a single expression (without requiring
3980 introducing more SSA variables to hold intermediate values),
3981 return the resulting tree. Otherwise return NULL_TREE.
3982 If the result expression is non-null, it has boolean type. */
3983
3984 tree
3985 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3986 enum tree_code code2, tree op2a, tree op2b)
3987 {
3988 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3989 if (t)
3990 return t;
3991 else
3992 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3993 }
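
/* Editorial usage sketch (hypothetical trees): for two tests of the
   same SSA name against INTEGER_CSTs "five" and "ten",
     maybe_fold_and_comparisons (GT_EXPR, x, five, GT_EXPR, x, ten)
   returns the tree for x > 10, the more restrictive bound of
   (x > 5) AND (x > 10), while disjoint ranges such as
   (x < 5) AND (x > 10) fold to boolean_false_node.  The second call
   above makes the folding symmetric, since and_comparisons_1 itself
   only follows the SSA definition of the first comparison.  */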
3994
3995 /* Helper function for or_comparisons_1: try to simplify the OR of the
3996 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3997 If INVERT is true, invert the value of VAR before doing the OR.
3998 Return NULL_TREE if we can't simplify this to a single expression. */
3999
4000 static tree
4001 or_var_with_comparison (tree var, bool invert,
4002 enum tree_code code2, tree op2a, tree op2b)
4003 {
4004 tree t;
4005 gimple stmt = SSA_NAME_DEF_STMT (var);
4006
4007 /* We can only deal with variables whose definitions are assignments. */
4008 if (!is_gimple_assign (stmt))
4009 return NULL_TREE;
4010
4011 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4012 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4013 Then we only have to consider the simpler non-inverted cases. */
4014 if (invert)
4015 t = and_var_with_comparison_1 (stmt,
4016 invert_tree_comparison (code2, false),
4017 op2a, op2b);
4018 else
4019 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4020 return canonicalize_bool (t, invert);
4021 }
4022
4023 /* Try to simplify the OR of the ssa variable defined by the assignment
4024 STMT with the comparison specified by (OP2A CODE2 OP2B).
4025 Return NULL_TREE if we can't simplify this to a single expression. */
4026
4027 static tree
4028 or_var_with_comparison_1 (gimple stmt,
4029 enum tree_code code2, tree op2a, tree op2b)
4030 {
4031 tree var = gimple_assign_lhs (stmt);
4032 tree true_test_var = NULL_TREE;
4033 tree false_test_var = NULL_TREE;
4034 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4035
4036 /* Check for identities like (var OR (var != 0)) => true. */
4037 if (TREE_CODE (op2a) == SSA_NAME
4038 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4039 {
4040 if ((code2 == NE_EXPR && integer_zerop (op2b))
4041 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4042 {
4043 true_test_var = op2a;
4044 if (var == true_test_var)
4045 return var;
4046 }
4047 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4048 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4049 {
4050 false_test_var = op2a;
4051 if (var == false_test_var)
4052 return boolean_true_node;
4053 }
4054 }
4055
4056 /* If the definition is a comparison, recurse on it. */
4057 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4058 {
4059 tree t = or_comparisons_1 (innercode,
4060 gimple_assign_rhs1 (stmt),
4061 gimple_assign_rhs2 (stmt),
4062 code2,
4063 op2a,
4064 op2b);
4065 if (t)
4066 return t;
4067 }
4068
4069 /* If the definition is an AND or OR expression, we may be able to
4070 simplify by reassociating. */
4071 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4072 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4073 {
4074 tree inner1 = gimple_assign_rhs1 (stmt);
4075 tree inner2 = gimple_assign_rhs2 (stmt);
4076 gimple s;
4077 tree t;
4078 tree partial = NULL_TREE;
4079 bool is_or = (innercode == BIT_IOR_EXPR);
4080
4081 /* Check for boolean identities that don't require recursive examination
4082 of inner1/inner2:
4083 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4084 inner1 OR (inner1 AND inner2) => inner1
4085 !inner1 OR (inner1 OR inner2) => true
4086 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4087 Likewise for similar cases involving inner2. */
4088 if (inner1 == true_test_var)
4089 return (is_or ? var : inner1);
4090 else if (inner2 == true_test_var)
4091 return (is_or ? var : inner2);
4092 else if (inner1 == false_test_var)
4093 return (is_or
4094 ? boolean_true_node
4095 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4096 else if (inner2 == false_test_var)
4097 return (is_or
4098 ? boolean_true_node
4099 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4100
4101 /* Next, redistribute/reassociate the OR across the inner tests.
4102 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
4103 if (TREE_CODE (inner1) == SSA_NAME
4104 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4105 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4106 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4107 gimple_assign_rhs1 (s),
4108 gimple_assign_rhs2 (s),
4109 code2, op2a, op2b)))
4110 {
4111 /* Handle the OR case, where we are reassociating:
4112 (inner1 OR inner2) OR (op2a code2 op2b)
4113 => (t OR inner2)
4114 If the partial result t is a constant, we win. Otherwise
4115 continue on to try reassociating with the other inner test. */
4116 if (is_or)
4117 {
4118 if (integer_onep (t))
4119 return boolean_true_node;
4120 else if (integer_zerop (t))
4121 return inner2;
4122 }
4123
4124 /* Handle the AND case, where we are redistributing:
4125 (inner1 AND inner2) OR (op2a code2 op2b)
4126 => (t AND (inner2 OR (op2a code2 op2b))) */
4127 else if (integer_zerop (t))
4128 return boolean_false_node;
4129
4130 /* Save partial result for later. */
4131 partial = t;
4132 }
4133
4134 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
4135 if (TREE_CODE (inner2) == SSA_NAME
4136 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4137 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4138 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4139 gimple_assign_rhs1 (s),
4140 gimple_assign_rhs2 (s),
4141 code2, op2a, op2b)))
4142 {
4143 /* Handle the OR case, where we are reassociating:
4144 (inner1 OR inner2) OR (op2a code2 op2b)
4145 => (inner1 OR t)
4146 => (t OR partial) */
4147 if (is_or)
4148 {
4149 if (integer_zerop (t))
4150 return inner1;
4151 else if (integer_onep (t))
4152 return boolean_true_node;
4153 /* If both are the same, we can apply the identity
4154 (x OR x) == x. */
4155 else if (partial && same_bool_result_p (t, partial))
4156 return t;
4157 }
4158
4159 /* Handle the AND case, where we are redistributing:
4160 (inner1 AND inner2) OR (op2a code2 op2b)
4161 => (t AND (inner1 OR (op2a code2 op2b)))
4162 => (t AND partial) */
4163 else
4164 {
4165 if (integer_zerop (t))
4166 return boolean_false_node;
4167 else if (partial)
4168 {
4169 /* We already got a simplification for the other
4170 operand to the redistributed AND expression. The
4171 interesting case is when at least one is true.
4172 Or, if both are the same, we can apply the identity
4173 (x AND x) == x. */
4174 if (integer_onep (partial))
4175 return t;
4176 else if (integer_onep (t))
4177 return partial;
4178 else if (same_bool_result_p (t, partial))
4179 return t;
4180 }
4181 }
4182 }
4183 }
4184 return NULL_TREE;
4185 }
4186
4187 /* Try to simplify the OR of two comparisons defined by
4188 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4189 If this can be done without constructing an intermediate value,
4190 return the resulting tree; otherwise NULL_TREE is returned.
4191 This function is deliberately asymmetric as it recurses on SSA_DEFs
4192 in the first comparison but not the second. */
4193
4194 static tree
4195 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4196 enum tree_code code2, tree op2a, tree op2b)
4197 {
4198 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4199
4200 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4201 if (operand_equal_p (op1a, op2a, 0)
4202 && operand_equal_p (op1b, op2b, 0))
4203 {
4204 /* Result will be either NULL_TREE, or a combined comparison. */
4205 tree t = combine_comparisons (UNKNOWN_LOCATION,
4206 TRUTH_ORIF_EXPR, code1, code2,
4207 truth_type, op1a, op1b);
4208 if (t)
4209 return t;
4210 }
4211
4212 /* Likewise the swapped case of the above. */
4213 if (operand_equal_p (op1a, op2b, 0)
4214 && operand_equal_p (op1b, op2a, 0))
4215 {
4216 /* Result will be either NULL_TREE, or a combined comparison. */
4217 tree t = combine_comparisons (UNKNOWN_LOCATION,
4218 TRUTH_ORIF_EXPR, code1,
4219 swap_tree_comparison (code2),
4220 truth_type, op1a, op1b);
4221 if (t)
4222 return t;
4223 }
4224
4225 /* If both comparisons are of the same value against constants, we might
4226 be able to merge them. */
4227 if (operand_equal_p (op1a, op2a, 0)
4228 && TREE_CODE (op1b) == INTEGER_CST
4229 && TREE_CODE (op2b) == INTEGER_CST)
4230 {
4231 int cmp = tree_int_cst_compare (op1b, op2b);
4232
4233 /* If we have (op1a != op1b), we should either be able to
4234 return that or TRUE, depending on whether the constant op1b
4235 also satisfies the other comparison against op2b. */
4236 if (code1 == NE_EXPR)
4237 {
4238 bool done = true;
4239 bool val;
4240 switch (code2)
4241 {
4242 case EQ_EXPR: val = (cmp == 0); break;
4243 case NE_EXPR: val = (cmp != 0); break;
4244 case LT_EXPR: val = (cmp < 0); break;
4245 case GT_EXPR: val = (cmp > 0); break;
4246 case LE_EXPR: val = (cmp <= 0); break;
4247 case GE_EXPR: val = (cmp >= 0); break;
4248 default: done = false;
4249 }
4250 if (done)
4251 {
4252 if (val)
4253 return boolean_true_node;
4254 else
4255 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4256 }
4257 }
4258 /* Likewise if the second comparison is a != comparison. */
4259 else if (code2 == NE_EXPR)
4260 {
4261 bool done = true;
4262 bool val;
4263 switch (code1)
4264 {
4265 case EQ_EXPR: val = (cmp == 0); break;
4266 case NE_EXPR: val = (cmp != 0); break;
4267 case LT_EXPR: val = (cmp > 0); break;
4268 case GT_EXPR: val = (cmp < 0); break;
4269 case LE_EXPR: val = (cmp >= 0); break;
4270 case GE_EXPR: val = (cmp <= 0); break;
4271 default: done = false;
4272 }
4273 if (done)
4274 {
4275 if (val)
4276 return boolean_true_node;
4277 else
4278 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4279 }
4280 }
4281
4282 /* See if an equality test is redundant with the other comparison. */
4283 else if (code1 == EQ_EXPR)
4284 {
4285 bool val;
4286 switch (code2)
4287 {
4288 case EQ_EXPR: val = (cmp == 0); break;
4289 case NE_EXPR: val = (cmp != 0); break;
4290 case LT_EXPR: val = (cmp < 0); break;
4291 case GT_EXPR: val = (cmp > 0); break;
4292 case LE_EXPR: val = (cmp <= 0); break;
4293 case GE_EXPR: val = (cmp >= 0); break;
4294 default:
4295 val = false;
4296 }
4297 if (val)
4298 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4299 }
4300 else if (code2 == EQ_EXPR)
4301 {
4302 bool val;
4303 switch (code1)
4304 {
4305 case EQ_EXPR: val = (cmp == 0); break;
4306 case NE_EXPR: val = (cmp != 0); break;
4307 case LT_EXPR: val = (cmp > 0); break;
4308 case GT_EXPR: val = (cmp < 0); break;
4309 case LE_EXPR: val = (cmp >= 0); break;
4310 case GE_EXPR: val = (cmp <= 0); break;
4311 default:
4312 val = false;
4313 }
4314 if (val)
4315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4316 }
4317
4318 /* Choose the less restrictive of two < or <= comparisons. */
4319 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4320 && (code2 == LT_EXPR || code2 == LE_EXPR))
4321 {
4322 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4323 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4324 else
4325 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4326 }
4327
4328 /* Likewise choose the less restrictive of two > or >= comparisons. */
4329 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4330 && (code2 == GT_EXPR || code2 == GE_EXPR))
4331 {
4332 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4333 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4334 else
4335 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4336 }
4337
4338 /* Check for singleton ranges. */
4339 else if (cmp == 0
4340 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4341 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4342 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4343
4344 /* Check for less/greater pairs that don't restrict the range at all. */
4345 else if (cmp >= 0
4346 && (code1 == LT_EXPR || code1 == LE_EXPR)
4347 && (code2 == GT_EXPR || code2 == GE_EXPR))
4348 return boolean_true_node;
4349 else if (cmp <= 0
4350 && (code1 == GT_EXPR || code1 == GE_EXPR)
4351 && (code2 == LT_EXPR || code2 == LE_EXPR))
4352 return boolean_true_node;
4353 }
4354
4355 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4356 NAME's definition is a truth value. See if there are any simplifications
4357 that can be done against the NAME's definition. */
4358 if (TREE_CODE (op1a) == SSA_NAME
4359 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4360 && (integer_zerop (op1b) || integer_onep (op1b)))
4361 {
4362 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4363 || (code1 == NE_EXPR && integer_onep (op1b)));
4364 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4365 switch (gimple_code (stmt))
4366 {
4367 case GIMPLE_ASSIGN:
4368 /* Try to simplify by copy-propagating the definition. */
4369 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4370
4371 case GIMPLE_PHI:
4372 /* If every argument to the PHI produces the same result when
4373 ORed with the second comparison, we win.
4374 Do not do this unless the type is bool since we need a bool
4375 result here anyway. */
4376 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4377 {
4378 tree result = NULL_TREE;
4379 unsigned i;
4380 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4381 {
4382 tree arg = gimple_phi_arg_def (stmt, i);
4383
4384 /* If this PHI has itself as an argument, ignore it.
4385 If all the other args produce the same result,
4386 we're still OK. */
4387 if (arg == gimple_phi_result (stmt))
4388 continue;
4389 else if (TREE_CODE (arg) == INTEGER_CST)
4390 {
4391 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4392 {
4393 if (!result)
4394 result = boolean_true_node;
4395 else if (!integer_onep (result))
4396 return NULL_TREE;
4397 }
4398 else if (!result)
4399 result = fold_build2 (code2, boolean_type_node,
4400 op2a, op2b);
4401 else if (!same_bool_comparison_p (result,
4402 code2, op2a, op2b))
4403 return NULL_TREE;
4404 }
4405 else if (TREE_CODE (arg) == SSA_NAME
4406 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4407 {
4408 tree temp;
4409 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4410 /* In simple cases we can look through PHI nodes,
4411 but we have to be careful with loops.
4412 See PR49073. */
4413 if (! dom_info_available_p (CDI_DOMINATORS)
4414 || gimple_bb (def_stmt) == gimple_bb (stmt)
4415 || dominated_by_p (CDI_DOMINATORS,
4416 gimple_bb (def_stmt),
4417 gimple_bb (stmt)))
4418 return NULL_TREE;
4419 temp = or_var_with_comparison (arg, invert, code2,
4420 op2a, op2b);
4421 if (!temp)
4422 return NULL_TREE;
4423 else if (!result)
4424 result = temp;
4425 else if (!same_bool_result_p (result, temp))
4426 return NULL_TREE;
4427 }
4428 else
4429 return NULL_TREE;
4430 }
4431 return result;
4432 }
4433
4434 default:
4435 break;
4436 }
4437 }
4438 return NULL_TREE;
4439 }
4440
4441 /* Try to simplify the OR of two comparisons, specified by
4442 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4443 If this can be simplified to a single expression (without requiring
4444 introducing more SSA variables to hold intermediate values),
4445 return the resulting tree. Otherwise return NULL_TREE.
4446 If the result expression is non-null, it has boolean type. */
4447
4448 tree
4449 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4450 enum tree_code code2, tree op2a, tree op2b)
4451 {
4452 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4453 if (t)
4454 return t;
4455 else
4456 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4457 }
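
/* Editorial usage sketch (hypothetical trees): with both tests against
   the same INTEGER_CST "five",
     maybe_fold_or_comparisons (LT_EXPR, x, five, GT_EXPR, x, five)
   folds (x < 5) OR (x > 5) to the tree for x != 5 via the singleton
   range check, (x < 5) OR (x < 10) folds to the less restrictive
   x < 10, and (x < 5) OR (x >= 5) covers the whole range and folds to
   boolean_true_node.  */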
4458
4459
4460 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4461
4462 Either NULL_TREE, a simplified but non-constant expression, or a
4463 constant is returned.
4464
4465 ??? This should go into a gimple-fold-inline.h file to be eventually
4466 privatized with the single valueize function used in the various TUs
4467 to avoid the indirect function call overhead. */
4468
4469 tree
4470 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4471 tree (*gvalueize) (tree))
4472 {
4473 code_helper rcode;
4474 tree ops[3] = {};
4475 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4476 edges if there are intermediate VARYING defs. For this reason
4477 do not follow SSA edges here even though SCCVN can technically
4478 deal with that just fine. */
4479 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
4480 && rcode.is_tree_code ()
4481 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4482 || ((tree_code) rcode) == ADDR_EXPR)
4483 && is_gimple_val (ops[0]))
4484 {
4485 tree res = ops[0];
4486 if (dump_file && dump_flags & TDF_DETAILS)
4487 {
4488 fprintf (dump_file, "Match-and-simplified ");
4489 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4490 fprintf (dump_file, " to ");
4491 print_generic_expr (dump_file, res, 0);
4492 fprintf (dump_file, "\n");
4493 }
4494 return res;
4495 }
4496
4497 location_t loc = gimple_location (stmt);
4498 switch (gimple_code (stmt))
4499 {
4500 case GIMPLE_ASSIGN:
4501 {
4502 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4503
4504 switch (get_gimple_rhs_class (subcode))
4505 {
4506 case GIMPLE_SINGLE_RHS:
4507 {
4508 tree rhs = gimple_assign_rhs1 (stmt);
4509 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4510
4511 if (TREE_CODE (rhs) == SSA_NAME)
4512 {
4513 /* If the RHS is an SSA_NAME, return its known constant value,
4514 if any. */
4515 return (*valueize) (rhs);
4516 }
4517 /* Handle propagating invariant addresses into address
4518 operations. */
4519 else if (TREE_CODE (rhs) == ADDR_EXPR
4520 && !is_gimple_min_invariant (rhs))
4521 {
4522 HOST_WIDE_INT offset = 0;
4523 tree base;
4524 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4525 &offset,
4526 valueize);
4527 if (base
4528 && (CONSTANT_CLASS_P (base)
4529 || decl_address_invariant_p (base)))
4530 return build_invariant_address (TREE_TYPE (rhs),
4531 base, offset);
4532 }
4533 else if (TREE_CODE (rhs) == CONSTRUCTOR
4534 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4535 && (CONSTRUCTOR_NELTS (rhs)
4536 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4537 {
4538 unsigned i;
4539 tree val, *vec;
4540
4541 vec = XALLOCAVEC (tree,
4542 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4543 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4544 {
4545 val = (*valueize) (val);
4546 if (TREE_CODE (val) == INTEGER_CST
4547 || TREE_CODE (val) == REAL_CST
4548 || TREE_CODE (val) == FIXED_CST)
4549 vec[i] = val;
4550 else
4551 return NULL_TREE;
4552 }
4553
4554 return build_vector (TREE_TYPE (rhs), vec);
4555 }
4556 if (subcode == OBJ_TYPE_REF)
4557 {
4558 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4559 /* If callee is constant, we can fold away the wrapper. */
4560 if (is_gimple_min_invariant (val))
4561 return val;
4562 }
4563
4564 if (kind == tcc_reference)
4565 {
4566 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4567 || TREE_CODE (rhs) == REALPART_EXPR
4568 || TREE_CODE (rhs) == IMAGPART_EXPR)
4569 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4570 {
4571 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4572 return fold_unary_loc (EXPR_LOCATION (rhs),
4573 TREE_CODE (rhs),
4574 TREE_TYPE (rhs), val);
4575 }
4576 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4577 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4578 {
4579 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4580 return fold_ternary_loc (EXPR_LOCATION (rhs),
4581 TREE_CODE (rhs),
4582 TREE_TYPE (rhs), val,
4583 TREE_OPERAND (rhs, 1),
4584 TREE_OPERAND (rhs, 2));
4585 }
4586 else if (TREE_CODE (rhs) == MEM_REF
4587 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4588 {
4589 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4590 if (TREE_CODE (val) == ADDR_EXPR
4591 && is_gimple_min_invariant (val))
4592 {
4593 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4594 unshare_expr (val),
4595 TREE_OPERAND (rhs, 1));
4596 if (tem)
4597 rhs = tem;
4598 }
4599 }
4600 return fold_const_aggregate_ref_1 (rhs, valueize);
4601 }
4602 else if (kind == tcc_declaration)
4603 return get_symbol_constant_value (rhs);
4604 return rhs;
4605 }
4606
4607 case GIMPLE_UNARY_RHS:
4608 return NULL_TREE;
4609
4610 case GIMPLE_BINARY_RHS:
4611 {
4612 /* Handle binary operators that can appear in GIMPLE form. */
4613 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4614 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4615
4616 /* Translate &x + CST into an invariant form suitable for
4617 further propagation. */
4618 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4619 && TREE_CODE (op0) == ADDR_EXPR
4620 && TREE_CODE (op1) == INTEGER_CST)
4621 {
4622 tree off = fold_convert (ptr_type_node, op1);
4623 return build_fold_addr_expr_loc
4624 (loc,
4625 fold_build2 (MEM_REF,
4626 TREE_TYPE (TREE_TYPE (op0)),
4627 unshare_expr (op0), off));
4628 }
4629
4630 return fold_binary_loc (loc, subcode,
4631 gimple_expr_type (stmt), op0, op1);
4632 }
4633
4634 case GIMPLE_TERNARY_RHS:
4635 {
4636 /* Handle ternary operators that can appear in GIMPLE form. */
4637 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4638 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4639 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4640
4641 /* Fold embedded expressions in ternary codes. */
4642 if ((subcode == COND_EXPR
4643 || subcode == VEC_COND_EXPR)
4644 && COMPARISON_CLASS_P (op0))
4645 {
4646 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4647 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4648 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4649 TREE_TYPE (op0), op00, op01);
4650 if (tem)
4651 op0 = tem;
4652 }
4653
4654 return fold_ternary_loc (loc, subcode,
4655 gimple_expr_type (stmt), op0, op1, op2);
4656 }
4657
4658 default:
4659 gcc_unreachable ();
4660 }
4661 }
4662
4663 case GIMPLE_CALL:
4664 {
4665 tree fn;
4666
4667 if (gimple_call_internal_p (stmt))
4668 {
4669 enum tree_code subcode = ERROR_MARK;
4670 switch (gimple_call_internal_fn (stmt))
4671 {
4672 case IFN_UBSAN_CHECK_ADD:
4673 subcode = PLUS_EXPR;
4674 break;
4675 case IFN_UBSAN_CHECK_SUB:
4676 subcode = MINUS_EXPR;
4677 break;
4678 case IFN_UBSAN_CHECK_MUL:
4679 subcode = MULT_EXPR;
4680 break;
4681 default:
4682 return NULL_TREE;
4683 }
4684 tree arg0 = gimple_call_arg (stmt, 0);
4685 tree arg1 = gimple_call_arg (stmt, 1);
4686 tree op0 = (*valueize) (arg0);
4687 tree op1 = (*valueize) (arg1);
4688
4689 if (TREE_CODE (op0) != INTEGER_CST
4690 || TREE_CODE (op1) != INTEGER_CST)
4691 {
4692 switch (subcode)
4693 {
4694 case MULT_EXPR:
4695 /* x * 0 = 0 * x = 0 without overflow. */
4696 if (integer_zerop (op0) || integer_zerop (op1))
4697 return build_zero_cst (TREE_TYPE (arg0));
4698 break;
4699 case MINUS_EXPR:
4700 /* y - y = 0 without overflow. */
4701 if (operand_equal_p (op0, op1, 0))
4702 return build_zero_cst (TREE_TYPE (arg0));
4703 break;
4704 default:
4705 break;
4706 }
4707 }
4708 tree res
4709 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4710 if (res
4711 && TREE_CODE (res) == INTEGER_CST
4712 && !TREE_OVERFLOW (res))
4713 return res;
4714 return NULL_TREE;
4715 }
4716
4717 fn = (*valueize) (gimple_call_fn (stmt));
4718 if (TREE_CODE (fn) == ADDR_EXPR
4719 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4720 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4721 && gimple_builtin_call_types_compatible_p (stmt,
4722 TREE_OPERAND (fn, 0)))
4723 {
4724 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4725 tree call, retval;
4726 unsigned i;
4727 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4728 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4729 call = build_call_array_loc (loc,
4730 gimple_call_return_type (stmt),
4731 fn, gimple_call_num_args (stmt), args);
4732 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4733 if (retval)
4734 {
4735 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4736 STRIP_NOPS (retval);
4737 retval = fold_convert (gimple_call_return_type (stmt), retval);
4738 }
4739 return retval;
4740 }
4741 return NULL_TREE;
4742 }
4743
4744 default:
4745 return NULL_TREE;
4746 }
4747 }
4748
4749 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4750 Returns NULL_TREE if folding to a constant is not possible, otherwise
4751 returns a constant according to is_gimple_min_invariant. */
4752
4753 tree
4754 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4755 {
4756 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4757 if (res && is_gimple_min_invariant (res))
4758 return res;
4759 return NULL_TREE;
4760 }
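
/* Editorial usage sketch: the VALUEIZE callback supplies whatever is
   known about an SSA name; the hypothetical helper below, which simply
   claims no knowledge, shows the expected shape.  Passes like CCP
   instead return the constant recorded in their lattice.  */

#if 0 /* Illustration only, not part of the original file.  */
static tree
identity_valueize (tree name)
{
  /* No substitution: report no known value for NAME.  */
  return name;
}

/* A statement can then be folded with
     tree val = gimple_fold_stmt_to_constant (stmt, identity_valueize);
   which yields NULL_TREE unless STMT folds on its own.  */
#endif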
4761
4762
4763 /* The following set of functions folds references using
4764 their constant initializers. */
4765
4766 static tree fold_ctor_reference (tree type, tree ctor,
4767 unsigned HOST_WIDE_INT offset,
4768 unsigned HOST_WIDE_INT size, tree);
4769
4770 /* See if we can find a constructor defining the value of BASE.
4771 When we know the constructor at a constant offset (such as when
4772 BASE is array[40] and we know the constructor of array), then
4773 BIT_OFFSET is adjusted accordingly.
4774
4775 As a special case, return error_mark_node when the constructor
4776 is not explicitly available but is known to be zero,
4777 as in 'static const int a;'. */
4778 static tree
4779 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4780 tree (*valueize)(tree))
4781 {
4782 HOST_WIDE_INT bit_offset2, size, max_size;
4783 if (TREE_CODE (base) == MEM_REF)
4784 {
4785 if (!integer_zerop (TREE_OPERAND (base, 1)))
4786 {
4787 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4788 return NULL_TREE;
4789 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4790 * BITS_PER_UNIT);
4791 }
4792
4793 if (valueize
4794 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4795 base = valueize (TREE_OPERAND (base, 0));
4796 if (!base || TREE_CODE (base) != ADDR_EXPR)
4797 return NULL_TREE;
4798 base = TREE_OPERAND (base, 0);
4799 }
4800
4801 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4802 DECL_INITIAL. If BASE is a nested reference into another
4803 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4804 the inner reference. */
4805 switch (TREE_CODE (base))
4806 {
4807 case VAR_DECL:
4808 case CONST_DECL:
4809 {
4810 tree init = ctor_for_folding (base);
4811
4812 /* Our semantics are the exact opposite of ctor_for_folding's:
4813 for us, NULL means unknown, while error_mark_node means 0. */
4814 if (init == error_mark_node)
4815 return NULL_TREE;
4816 if (!init)
4817 return error_mark_node;
4818 return init;
4819 }
4820
4821 case ARRAY_REF:
4822 case COMPONENT_REF:
4823 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4824 if (max_size == -1 || size != max_size)
4825 return NULL_TREE;
4826 *bit_offset += bit_offset2;
4827 return get_base_constructor (base, bit_offset, valueize);
4828
4829 case STRING_CST:
4830 case CONSTRUCTOR:
4831 return base;
4832
4833 default:
4834 return NULL_TREE;
4835 }
4836 }
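
/* Editorial sketch (hypothetical declarations): for
     static const int a[4] = { 1, 2, 3, 4 };
   a reference through MEM_REF [&a, 8] first adds 8 * BITS_PER_UNIT to
   *BIT_OFFSET, strips the ADDR_EXPR to reach the VAR_DECL for "a", and
   returns its CONSTRUCTOR from ctor_for_folding; for
     static const int b;
   it returns error_mark_node, i.e. a known zero initializer.  */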
4837
4838 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
4839 and size SIZE to the memory at bit OFFSET. */
4840
4841 static tree
4842 fold_array_ctor_reference (tree type, tree ctor,
4843 unsigned HOST_WIDE_INT offset,
4844 unsigned HOST_WIDE_INT size,
4845 tree from_decl)
4846 {
4847 unsigned HOST_WIDE_INT cnt;
4848 tree cfield, cval;
4849 offset_int low_bound;
4850 offset_int elt_size;
4851 offset_int index, max_index;
4852 offset_int access_index;
4853 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4854 HOST_WIDE_INT inner_offset;
4855
4856 /* Compute low bound and elt size. */
4857 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4858 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4859 if (domain_type && TYPE_MIN_VALUE (domain_type))
4860 {
4861 /* Static constructors for variably sized objects make no sense. */
4862 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4863 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4864 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4865 }
4866 else
4867 low_bound = 0;
4868 /* Static constructors for variably sized objects make no sense. */
4869 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4870 == INTEGER_CST);
4871 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4872
4873 /* We can handle only constantly sized accesses that are known not to
4874 be larger than the size of an array element. */
4875 if (!TYPE_SIZE_UNIT (type)
4876 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4877 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4878 || elt_size == 0)
4879 return NULL_TREE;
4880
4881 /* Compute the array index we look for. */
4882 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4883 elt_size);
4884 access_index += low_bound;
4885 if (index_type)
4886 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4887 TYPE_SIGN (index_type));
4888
4889 /* And offset within the access. */
4890 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4891
4892 /* See if the array element is large enough to span the whole access. We
4893 do not care to fold accesses spanning multiple array indexes. */
4894 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4895 return NULL_TREE;
4896
4897 index = low_bound - 1;
4898 if (index_type)
4899 index = wi::ext (index, TYPE_PRECISION (index_type),
4900 TYPE_SIGN (index_type));
4901
4902 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4903 {
4904 /* An array constructor might explicitly set the index, specify a range,
4905 or leave the index NULL, meaning that it is the next index after the
4906 previous one. */
4907 if (cfield)
4908 {
4909 if (TREE_CODE (cfield) == INTEGER_CST)
4910 max_index = index = wi::to_offset (cfield);
4911 else
4912 {
4913 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4914 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4915 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4916 }
4917 }
4918 else
4919 {
4920 index += 1;
4921 if (index_type)
4922 index = wi::ext (index, TYPE_PRECISION (index_type),
4923 TYPE_SIGN (index_type));
4924 max_index = index;
4925 }
4926
4927 /* Do we have a match? */
4928 if (wi::cmpu (access_index, index) >= 0
4929 && wi::cmpu (access_index, max_index) <= 0)
4930 return fold_ctor_reference (type, cval, inner_offset, size,
4931 from_decl);
4932 }
4933 /* When memory is not explicitly mentioned in the constructor,
4934 it is 0 (or out of range). */
4935 return build_zero_cst (type);
4936 }
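
/* Editorial arithmetic sketch, assuming BITS_PER_UNIT == 8: with
   4-byte elements (elt_size == 4) and low_bound == 0, an access at bit
   OFFSET == 96 gives
     access_index = (96 / BITS_PER_UNIT) / 4 == 3
     inner_offset = 96 % (4 * BITS_PER_UNIT) == 0
   so a 32-bit read folds to the constructor element for index 3, while
   a 64-bit read has inner_offset + size > 32 and is rejected as
   spanning two elements.  */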
4937
4938 /* CTOR is a CONSTRUCTOR of an aggregate or vector. Fold a reference
4939 of type TYPE and size SIZE to the memory at bit OFFSET. */
4940
4941 static tree
4942 fold_nonarray_ctor_reference (tree type, tree ctor,
4943 unsigned HOST_WIDE_INT offset,
4944 unsigned HOST_WIDE_INT size,
4945 tree from_decl)
4946 {
4947 unsigned HOST_WIDE_INT cnt;
4948 tree cfield, cval;
4949
4950 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4951 cval)
4952 {
4953 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4954 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4955 tree field_size = DECL_SIZE (cfield);
4956 offset_int bitoffset;
4957 offset_int bitoffset_end, access_end;
4958
4959 /* Variably sized objects in static constructors make no sense,
4960 but field_size can be NULL for flexible array members. */
4961 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4962 && TREE_CODE (byte_offset) == INTEGER_CST
4963 && (field_size != NULL_TREE
4964 ? TREE_CODE (field_size) == INTEGER_CST
4965 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4966
4967 /* Compute bit offset of the field. */
4968 bitoffset = (wi::to_offset (field_offset)
4969 + wi::lshift (wi::to_offset (byte_offset),
4970 LOG2_BITS_PER_UNIT));
4971 /* Compute bit offset where the field ends. */
4972 if (field_size != NULL_TREE)
4973 bitoffset_end = bitoffset + wi::to_offset (field_size);
4974 else
4975 bitoffset_end = 0;
4976
4977 access_end = offset_int (offset) + size;
4978
4979 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4980 [BITOFFSET, BITOFFSET_END)? */
4981 if (wi::cmps (access_end, bitoffset) > 0
4982 && (field_size == NULL_TREE
4983 || wi::lts_p (offset, bitoffset_end)))
4984 {
4985 offset_int inner_offset = offset_int (offset) - bitoffset;
4986 /* We do have overlap. Now see if the field is large enough to
4987 cover the access. Give up for accesses spanning multiple
4988 fields. */
4989 if (wi::cmps (access_end, bitoffset_end) > 0)
4990 return NULL_TREE;
4991 if (wi::lts_p (offset, bitoffset))
4992 return NULL_TREE;
4993 return fold_ctor_reference (type, cval,
4994 inner_offset.to_uhwi (), size,
4995 from_decl);
4996 }
4997 }
4998 /* When memory is not explicitly mentioned in the constructor, it is 0. */
4999 return build_zero_cst (type);
5000 }
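
/* Editorial arithmetic sketch, assuming BITS_PER_UNIT == 8: a field
   with DECL_FIELD_OFFSET == 4 (bytes) and DECL_FIELD_BIT_OFFSET == 0
   starts at
     bitoffset = 0 + (4 << LOG2_BITS_PER_UNIT) == 32
   so an access with OFFSET == 32 and SIZE == 32 falls entirely inside
   a 32-bit field ([32, 64) against [32, 64)) and recurses into that
   field's value, while OFFSET == 48 with SIZE == 32 would overlap the
   field's end and return NULL_TREE.  */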
5001
5002 /* CTOR is a value initializing memory. Fold a reference of type TYPE
5003 and size SIZE to the memory at bit OFFSET. */
5004
5005 static tree
5006 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5007 unsigned HOST_WIDE_INT size, tree from_decl)
5008 {
5009 tree ret;
5010
5011 /* We found the field with an exact match. */
5012 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5013 && !offset)
5014 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5015
5016 /* We are at the end of the walk; see if we can view-convert the
5017 result. */
5018 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5019 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5020 && !compare_tree_int (TYPE_SIZE (type), size)
5021 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5022 {
5023 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5024 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5025 if (ret)
5026 STRIP_NOPS (ret);
5027 return ret;
5028 }
5029 /* For constants and byte-aligned/sized reads try to go through
5030 native_encode/interpret. */
5031 if (CONSTANT_CLASS_P (ctor)
5032 && BITS_PER_UNIT == 8
5033 && offset % BITS_PER_UNIT == 0
5034 && size % BITS_PER_UNIT == 0
5035 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5036 {
5037 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5038 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5039 offset / BITS_PER_UNIT) > 0)
5040 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5041 }
5042 if (TREE_CODE (ctor) == CONSTRUCTOR)
5043 {
5045 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5046 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5047 return fold_array_ctor_reference (type, ctor, offset, size,
5048 from_decl);
5049 else
5050 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5051 from_decl);
5052 }
5053
5054 return NULL_TREE;
5055 }
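
/* Editorial sketch: for byte-aligned reads of a CONSTANT_CLASS_P
   initializer the native_encode/native_interpret path above acts as a
   constant-folded bit cast; e.g. reading the four bytes of the
   REAL_CST for 1.0f back as a 32-bit integer yields an INTEGER_CST
   holding the float's bit pattern (0x3f800000 on IEEE targets).  */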
5056
5057 /* Return the tree representing the element referenced by T if T is an
5058 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5059 names using VALUEIZE. Return NULL_TREE otherwise. */
5060
5061 tree
5062 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5063 {
5064 tree ctor, idx, base;
5065 HOST_WIDE_INT offset, size, max_size;
5066 tree tem;
5067
5068 if (TREE_THIS_VOLATILE (t))
5069 return NULL_TREE;
5070
5071 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5072 return get_symbol_constant_value (t);
5073
5074 tem = fold_read_from_constant_string (t);
5075 if (tem)
5076 return tem;
5077
5078 switch (TREE_CODE (t))
5079 {
5080 case ARRAY_REF:
5081 case ARRAY_RANGE_REF:
5082 /* Constant indexes are handled well by get_base_constructor.
5083 Only special-case variable offsets.
5084 FIXME: This code can't handle nested references with variable indexes
5085 (they will be handled only by iteration of ccp). Perhaps we can bring
5086 get_ref_base_and_extent here and make it use a valueize callback. */
5087 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5088 && valueize
5089 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5090 && TREE_CODE (idx) == INTEGER_CST)
5091 {
5092 tree low_bound, unit_size;
5093
5094 /* If the resulting bit-offset is constant, track it. */
5095 if ((low_bound = array_ref_low_bound (t),
5096 TREE_CODE (low_bound) == INTEGER_CST)
5097 && (unit_size = array_ref_element_size (t),
5098 tree_fits_uhwi_p (unit_size)))
5099 {
5100 offset_int woffset
5101 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5102 TYPE_PRECISION (TREE_TYPE (idx)));
5103
5104 if (wi::fits_shwi_p (woffset))
5105 {
5106 offset = woffset.to_shwi ();
5107 /* TODO: This code seems wrong; it should multiply first and
5108 then check whether the result fits. */
5109 offset *= tree_to_uhwi (unit_size);
5110 offset *= BITS_PER_UNIT;
5111
5112 base = TREE_OPERAND (t, 0);
5113 ctor = get_base_constructor (base, &offset, valueize);
5114 /* Empty constructor. Always fold to 0. */
5115 if (ctor == error_mark_node)
5116 return build_zero_cst (TREE_TYPE (t));
5117 /* Out-of-bounds array access. The value is undefined,
5118 but don't fold. */
5119 if (offset < 0)
5120 return NULL_TREE;
5121 /* We cannot determine the ctor. */
5122 if (!ctor)
5123 return NULL_TREE;
5124 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5125 tree_to_uhwi (unit_size)
5126 * BITS_PER_UNIT,
5127 base);
5128 }
5129 }
5130 }
5131 /* Fallthru. */
5132
5133 case COMPONENT_REF:
5134 case BIT_FIELD_REF:
5135 case TARGET_MEM_REF:
5136 case MEM_REF:
5137 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5138 ctor = get_base_constructor (base, &offset, valueize);
5139
5140 /* Empty constructor. Always fold to 0. */
5141 if (ctor == error_mark_node)
5142 return build_zero_cst (TREE_TYPE (t));
5143 /* We do not know the precise address. */
5144 if (max_size == -1 || max_size != size)
5145 return NULL_TREE;
5146 /* We cannot determine the ctor. */
5147 if (!ctor)
5148 return NULL_TREE;
5149
5150 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5151 if (offset < 0)
5152 return NULL_TREE;
5153
5154 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5155 base);
5156
5157 case REALPART_EXPR:
5158 case IMAGPART_EXPR:
5159 {
5160 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5161 if (c && TREE_CODE (c) == COMPLEX_CST)
5162 return fold_build1_loc (EXPR_LOCATION (t),
5163 TREE_CODE (t), TREE_TYPE (t), c);
5164 break;
5165 }
5166
5167 default:
5168 break;
5169 }
5170
5171 return NULL_TREE;
5172 }
5173
5174 tree
5175 fold_const_aggregate_ref (tree t)
5176 {
5177 return fold_const_aggregate_ref_1 (t, NULL);
5178 }
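
/* Editorial usage sketch (hypothetical declarations): given
     static const int a[3] = { 10, 20, 30 };
   calling fold_const_aggregate_ref on the tree for a[2] returns the
   INTEGER_CST 30.  With a variable index i it returns NULL_TREE here,
   because no valueize callback is available to resolve i.  */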
5179
5180 /* Look up the virtual method with index TOKEN in the virtual table V
5181 at OFFSET.
5182 Set CAN_REFER (if non-NULL) to false if the method is not referable or
5183 the virtual table is ill-formed (such as one rewritten by a non-C++
5184 symbol); if CAN_REFER is NULL, just return NULL_TREE in that case. */
5185
5186 tree
5187 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5188 tree v,
5189 unsigned HOST_WIDE_INT offset,
5190 bool *can_refer)
5191 {
5192 tree vtable = v, init, fn;
5193 unsigned HOST_WIDE_INT size;
5194 unsigned HOST_WIDE_INT elt_size, access_index;
5195 tree domain_type;
5196
5197 if (can_refer)
5198 *can_refer = true;
5199
5200 /* First of all, double-check that we have a virtual table. */
5201 if (TREE_CODE (v) != VAR_DECL
5202 || !DECL_VIRTUAL_P (v))
5203 {
5204 gcc_assert (in_lto_p);
5205 /* Pass down that we lost track of the target. */
5206 if (can_refer)
5207 *can_refer = false;
5208 return NULL_TREE;
5209 }
5210
5211 init = ctor_for_folding (v);
5212
5213 /* The virtual tables should always be born with constructors
5214 and we should always assume that they are available for
5215 folding. At the moment we do not stream them in all cases,
5216 but it should never happen that the ctor seems unreachable. */
5217 gcc_assert (init);
5218 if (init == error_mark_node)
5219 {
5220 gcc_assert (in_lto_p);
5221 /* Pass down that we lost track of the target. */
5222 if (can_refer)
5223 *can_refer = false;
5224 return NULL_TREE;
5225 }
5226 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5227 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5228 offset *= BITS_PER_UNIT;
5229 offset += token * size;
5230
5231 /* Look up the value in the constructor, which is assumed to be an
5232 array. This is equivalent to
5233 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5234 offset, size, NULL);
5235 but runs in constant time. We expect that the front end produced a
5236 simple array without indexed initializers. */
5237
5238 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5239 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5240 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5241 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5242
5243 access_index = offset / BITS_PER_UNIT / elt_size;
5244 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5245
5246 /* This code assumes that the C++ FE produced no indexed fields,
5247 so we can directly index the array. */
5248 if (access_index < CONSTRUCTOR_NELTS (init))
5249 {
5250 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5251 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5252 STRIP_NOPS (fn);
5253 }
5254 else
5255 fn = NULL;
5256
5257 /* For a type-inconsistent program we may end up looking up a virtual
5258 method in a virtual table that does not contain TOKEN entries. We may
5259 overrun the virtual table and pick up a constant or RTTI info pointer.
5260 In any case the call is undefined. */
5261 if (!fn
5262 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5263 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5264 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5265 else
5266 {
5267 fn = TREE_OPERAND (fn, 0);
5268
5269 /* When the cgraph node is missing and the function is not public, we
5270 cannot devirtualize. This can happen in WHOPR when the actual method
5271 ends up in another partition because we found the devirtualization
5272 possibility too late. */
5273 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5274 {
5275 if (can_refer)
5276 {
5277 *can_refer = false;
5278 return fn;
5279 }
5280 return NULL_TREE;
5281 }
5282 }
5283
5284 /* Make sure we create a cgraph node for functions we'll reference.
5285 They can be non-existent if the reference comes from an entry
5286 of an external vtable for example. */
5287 cgraph_node::get_create (fn);
5288
5289 return fn;
5290 }
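
/* Editorial arithmetic sketch, assuming BITS_PER_UNIT == 8 and 64-bit
   vtable slots (size == 64, elt_size == 8): TOKEN == 2 with a byte
   OFFSET == 16 gives
     offset = 16 * BITS_PER_UNIT + 2 * 64 == 256 bits
     access_index = 256 / BITS_PER_UNIT / 8 == 4
   so the FUNCTION_DECL behind the ADDR_EXPR in constructor element 4
   is returned, or the BUILT_IN_UNREACHABLE decl if the lookup runs
   past the end of the table.  */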
5291
5292 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5293 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5294 KNOWN_BINFO carries the binfo describing the true type of
5295 OBJ_TYPE_REF_OBJECT(REF).
5296 Set CAN_REFER (if non-NULL) to false if the method is not referable or
5297 the virtual table is ill-formed (such as one rewritten by a non-C++
5298 symbol); if CAN_REFER is NULL, just return NULL_TREE in that case. */
5299
5300 tree
5301 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5302 bool *can_refer)
5303 {
5304 unsigned HOST_WIDE_INT offset;
5305 tree v;
5306
5307 v = BINFO_VTABLE (known_binfo);
5308 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5309 if (!v)
5310 return NULL_TREE;
5311
5312 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5313 {
5314 if (can_refer)
5315 *can_refer = false;
5316 return NULL_TREE;
5317 }
5318 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5319 }
5320
5321 /* Return true iff VAL is a gimple expression that is known to be
5322 non-negative. Restricted to floating-point inputs. */
5323
5324 bool
5325 gimple_val_nonnegative_real_p (tree val)
5326 {
5327 gimple def_stmt;
5328
5329 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5330
5331 /* Use existing logic for non-gimple trees. */
5332 if (tree_expr_nonnegative_p (val))
5333 return true;
5334
5335 if (TREE_CODE (val) != SSA_NAME)
5336 return false;
5337
5338 /* Currently we look only at the immediately defining statement
5339 to make this determination, since recursion on defining
5340 statements of operands can lead to quadratic behavior in the
5341 worst case. This is expected to catch almost all occurrences
5342 in practice. It would be possible to implement limited-depth
5343 recursion if important cases are lost. Alternatively, passes
5344 that need this information (such as the pow/powi lowering code
5345 in the cse_sincos pass) could be revised to provide it through
5346 dataflow propagation. */
5347
5348 def_stmt = SSA_NAME_DEF_STMT (val);
5349
5350 if (is_gimple_assign (def_stmt))
5351 {
5352 tree op0, op1;
5353
5354 /* See fold-const.c:tree_expr_nonnegative_p for additional
5355 cases that could be handled with recursion. */
5356
5357 switch (gimple_assign_rhs_code (def_stmt))
5358 {
5359 case ABS_EXPR:
5360 /* Always true for floating-point operands. */
5361 return true;
5362
5363 case MULT_EXPR:
5364 /* True if the two operands are identical (since we are
5365 restricted to floating-point inputs). */
5366 op0 = gimple_assign_rhs1 (def_stmt);
5367 op1 = gimple_assign_rhs2 (def_stmt);
5368
5369 if (op0 == op1
5370 || operand_equal_p (op0, op1, 0))
5371 return true;
5372
5373 default:
5374 return false;
5375 }
5376 }
5377 else if (is_gimple_call (def_stmt))
5378 {
5379 tree fndecl = gimple_call_fndecl (def_stmt);
5380 if (fndecl
5381 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5382 {
5383 tree arg1;
5384
5385 switch (DECL_FUNCTION_CODE (fndecl))
5386 {
5387 CASE_FLT_FN (BUILT_IN_ACOS):
5388 CASE_FLT_FN (BUILT_IN_ACOSH):
5389 CASE_FLT_FN (BUILT_IN_CABS):
5390 CASE_FLT_FN (BUILT_IN_COSH):
5391 CASE_FLT_FN (BUILT_IN_ERFC):
5392 CASE_FLT_FN (BUILT_IN_EXP):
5393 CASE_FLT_FN (BUILT_IN_EXP10):
5394 CASE_FLT_FN (BUILT_IN_EXP2):
5395 CASE_FLT_FN (BUILT_IN_FABS):
5396 CASE_FLT_FN (BUILT_IN_FDIM):
5397 CASE_FLT_FN (BUILT_IN_HYPOT):
5398 CASE_FLT_FN (BUILT_IN_POW10):
5399 return true;
5400
5401 CASE_FLT_FN (BUILT_IN_SQRT):
5402 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5403 negative inputs. */
5404 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5405 return true;
5406
5407 break;
5408
5409 CASE_FLT_FN (BUILT_IN_POWI):
5410 /* True if the second argument is an even integer. */
5411 arg1 = gimple_call_arg (def_stmt, 1);
5412
5413 if (TREE_CODE (arg1) == INTEGER_CST
5414 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5415 return true;
5416
5417 break;
5418
5419 CASE_FLT_FN (BUILT_IN_POW):
5420 /* True if the second argument is an even integer-valued
5421 real. */
5422 arg1 = gimple_call_arg (def_stmt, 1);
5423
5424 if (TREE_CODE (arg1) == REAL_CST)
5425 {
5426 REAL_VALUE_TYPE c;
5427 HOST_WIDE_INT n;
5428
5429 c = TREE_REAL_CST (arg1);
5430 n = real_to_integer (&c);
5431
5432 if ((n & 1) == 0)
5433 {
5434 REAL_VALUE_TYPE cint;
5435 real_from_integer (&cint, VOIDmode, n, SIGNED);
5436 if (real_identical (&c, &cint))
5437 return true;
5438 }
5439 }
5440
5441 break;
5442
5443 default:
5444 return false;
5445 }
5446 }
5447 }
5448
5449 return false;
5450 }
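
/* For illustration only (a hedged sketch with hypothetical SSA names):
   given a double value x_1, the predicate above recognizes

     t_2 = ABS_EXPR <x_1>;      -- nonnegative: abs of a float
     t_3 = x_1 * x_1;           -- nonnegative: identical operands
     t_4 = pow (x_1, 2.0e+0);   -- nonnegative: even integer-valued
                                   exponent

   but not, for instance,

     t_5 = t_2 * t_3;           -- nonnegative in fact, but recognizing
                                   it would require recursing into the
                                   definitions of t_2 and t_3.  */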
5451
5452 /* Given a pointer value OP0, return a simplified version of an
5453 indirection through OP0, or NULL_TREE if no simplification is
5454 possible. Note that the resulting type may differ from the
5455 pointed-to type, but it will still be compatible from the
5456 langhooks point of view. */
5457
5458 tree
5459 gimple_fold_indirect_ref (tree t)
5460 {
5461 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5462 tree sub = t;
5463 tree subtype;
5464
5465 STRIP_NOPS (sub);
5466 subtype = TREE_TYPE (sub);
5467 if (!POINTER_TYPE_P (subtype))
5468 return NULL_TREE;
5469
5470 if (TREE_CODE (sub) == ADDR_EXPR)
5471 {
5472 tree op = TREE_OPERAND (sub, 0);
5473 tree optype = TREE_TYPE (op);
5474 /* *&p => p */
5475 if (useless_type_conversion_p (type, optype))
5476 return op;
5477
5478 /* *(foo *)&fooarray => fooarray[0] */
5479 if (TREE_CODE (optype) == ARRAY_TYPE
5480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5481 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5482 {
5483 tree type_domain = TYPE_DOMAIN (optype);
5484 tree min_val = size_zero_node;
5485 if (type_domain && TYPE_MIN_VALUE (type_domain))
5486 min_val = TYPE_MIN_VALUE (type_domain);
5487 if (TREE_CODE (min_val) == INTEGER_CST)
5488 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5489 }
5490 /* *(foo *)&complexfoo => __real__ complexfoo */
5491 else if (TREE_CODE (optype) == COMPLEX_TYPE
5492 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5493 return fold_build1 (REALPART_EXPR, type, op);
5494 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5495 else if (TREE_CODE (optype) == VECTOR_TYPE
5496 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5497 {
5498 tree part_width = TYPE_SIZE (type);
5499 tree index = bitsize_int (0);
5500 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5501 }
5502 }
5503
5504 /* *(p + CST) -> ... */
5505 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5506 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5507 {
5508 tree addr = TREE_OPERAND (sub, 0);
5509 tree off = TREE_OPERAND (sub, 1);
5510 tree addrtype;
5511
5512 STRIP_NOPS (addr);
5513 addrtype = TREE_TYPE (addr);
5514
5515 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5516 if (TREE_CODE (addr) == ADDR_EXPR
5517 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5518 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5519 && tree_fits_uhwi_p (off))
5520 {
5521 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5522 tree part_width = TYPE_SIZE (type);
5523 unsigned HOST_WIDE_INT part_widthi
5524 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5525 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5526 tree index = bitsize_int (indexi);
5527 if (offset / part_widthi
5528 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5529 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5530 part_width, index);
5531 }
5532
5533 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5534 if (TREE_CODE (addr) == ADDR_EXPR
5535 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5536 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5537 {
5538 tree size = TYPE_SIZE_UNIT (type);
5539 if (tree_int_cst_equal (size, off))
5540 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5541 }
5542
5543 /* *(p + CST) -> MEM_REF <p, CST>. */
5544 if (TREE_CODE (addr) != ADDR_EXPR
5545 || DECL_P (TREE_OPERAND (addr, 0)))
5546 return fold_build2 (MEM_REF, type,
5547 addr,
5548 wide_int_to_tree (ptype, off));
5549 }
5550
5551 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5552 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5553 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5554 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5555 {
5556 tree type_domain;
5557 tree min_val = size_zero_node;
5558 tree osub = sub;
5559 sub = gimple_fold_indirect_ref (sub);
5560 if (! sub)
5561 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5562 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5563 if (type_domain && TYPE_MIN_VALUE (type_domain))
5564 min_val = TYPE_MIN_VALUE (type_domain);
5565 if (TREE_CODE (min_val) == INTEGER_CST)
5566 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5567 }
5568
5569 return NULL_TREE;
5570 }
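
/* For illustration only (a hedged sketch): a propagation pass that has
   just substituted a pointer RHS could try the folder above on the
   dereference; GSI and PTR are hypothetical, with GSI pointing at an
   assignment whose RHS is *PTR:

     tree res = gimple_fold_indirect_ref (ptr);
     if (res)
       {
         gimple_assign_set_rhs_from_tree (&gsi, res);
         update_stmt (gsi_stmt (gsi));
       }

   following the usual fold-and-update pattern.  */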
5571
5572 /* Return true if CODE is an operation that, when operating on signed
5573 integer types, involves undefined behavior on overflow and can be
5574 expressed with unsigned arithmetic. */
5575
5576 bool
5577 arith_code_with_undefined_signed_overflow (tree_code code)
5578 {
5579 switch (code)
5580 {
5581 case PLUS_EXPR:
5582 case MINUS_EXPR:
5583 case MULT_EXPR:
5584 case NEGATE_EXPR:
5585 case POINTER_PLUS_EXPR:
5586 return true;
5587 default:
5588 return false;
5589 }
5590 }
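
/* For illustration only (a hedged sketch): this predicate is typically
   paired with rewrite_to_defined_overflow below when a statement has to
   be moved to a place where it may execute more often, e.g. when
   hoisting it out of a conditional; STMT and GSI are hypothetical:

     if (is_gimple_assign (stmt)
         && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
         && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt)))
         && arith_code_with_undefined_signed_overflow
              (gimple_assign_rhs_code (stmt)))
       {
         gsi_remove (&gsi, false);
         gimple_seq seq = rewrite_to_defined_overflow (stmt);
         gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
       }

   STMT is unlinked before the rewrite because rewrite_to_defined_overflow
   links the statement into the sequence it returns.  */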
5591
5592 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5593 operation that can be transformed to unsigned arithmetic by converting
5594 its operands, carrying out the operation in the corresponding unsigned
5595 type and converting the result back to the original type.
5596
5597 Returns a sequence of statements that replace STMT and also contain
5598 a modified form of STMT itself. */
5599
5600 gimple_seq
5601 rewrite_to_defined_overflow (gimple stmt)
5602 {
5603 if (dump_file && (dump_flags & TDF_DETAILS))
5604 {
5605 fprintf (dump_file, "rewriting stmt with undefined signed "
5606 "overflow ");
5607 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5608 }
5609
5610 tree lhs = gimple_assign_lhs (stmt);
5611 tree type = unsigned_type_for (TREE_TYPE (lhs));
5612 gimple_seq stmts = NULL;
5613 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5614 {
5615 gimple_seq stmts2 = NULL;
5616 gimple_set_op (stmt, i,
5617 force_gimple_operand (fold_convert (type,
5618 gimple_op (stmt, i)),
5619 &stmts2, true, NULL_TREE));
5620 gimple_seq_add_seq (&stmts, stmts2);
5621 }
5622 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5623 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5624 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5625 gimple_seq_add_stmt (&stmts, stmt);
5626 gimple cvt = gimple_build_assign_with_ops
5627 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5628 gimple_seq_add_stmt (&stmts, cvt);
5629
5630 return stmts;
5631 }
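
/* For illustration only (a hedged sketch with hypothetical SSA names):
   for the statement

     a_2 = b_3 + c_4;

   with all values of type int, the returned sequence has the shape

     _5 = (unsigned int) b_3;
     _6 = (unsigned int) c_4;
     _7 = _5 + _6;
     a_2 = (int) _7;

   where the third statement is STMT itself, rewritten to compute in the
   wrapping unsigned type, and the final conversion restores the original
   LHS.  For POINTER_PLUS_EXPR the code is additionally changed to plain
   PLUS_EXPR, matching the unsigned operands.  */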
5632
5633
5634 /* Build the expression CODE OP0 of type TYPE with location LOC,
5635 simplifying it first if possible using VALUEIZE if not NULL.
5636 OP0 is expected to be valueized already. Returns the built
5637 expression value and appends statements possibly defining it
5638 to SEQ. */
5639
5640 tree
5641 gimple_build (gimple_seq *seq, location_t loc,
5642 enum tree_code code, tree type, tree op0,
5643 tree (*valueize)(tree))
5644 {
5645 tree res = gimple_simplify (code, type, op0, seq, valueize);
5646 if (!res)
5647 {
5648 if (gimple_in_ssa_p (cfun))
5649 res = make_ssa_name (type, NULL);
5650 else
5651 res = create_tmp_reg (type, NULL);
5652 gimple stmt;
5653 if (code == REALPART_EXPR
5654 || code == IMAGPART_EXPR
5655 || code == VIEW_CONVERT_EXPR)
5656 stmt = gimple_build_assign_with_ops (code, res,
5657 build1 (code, type,
5658 op0), NULL_TREE);
5659 else
5660 stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
5661 gimple_set_location (stmt, loc);
5662 gimple_seq_add_stmt_without_update (seq, stmt);
5663 }
5664 return res;
5665 }
5666
5667 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5668 simplifying it first if possible using VALUEIZE if not NULL.
5669 OP0 and OP1 are expected to be valueized already. Returns the built
5670 expression value and appends statements possibly defining it
5671 to SEQ. */
5672
5673 tree
5674 gimple_build (gimple_seq *seq, location_t loc,
5675 enum tree_code code, tree type, tree op0, tree op1,
5676 tree (*valueize)(tree))
5677 {
5678 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5679 if (!res)
5680 {
5681 if (gimple_in_ssa_p (cfun))
5682 res = make_ssa_name (type, NULL);
5683 else
5684 res = create_tmp_reg (type, NULL);
5685 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5686 gimple_set_location (stmt, loc);
5687 gimple_seq_add_stmt_without_update (seq, stmt);
5688 }
5689 return res;
5690 }
5691
5692 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5693 simplifying it first if possible using VALUEIZE if not NULL.
5694 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5695 expression value and appends statements possibly defining it
5696 to SEQ. */
5697
5698 tree
5699 gimple_build (gimple_seq *seq, location_t loc,
5700 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5701 tree (*valueize)(tree))
5702 {
5703 tree res = gimple_simplify (code, type, op0, op1, op2,
5704 seq, valueize);
5705 if (!res)
5706 {
5707 if (gimple_in_ssa_p (cfun))
5708 res = make_ssa_name (type, NULL);
5709 else
5710 res = create_tmp_reg (type, NULL);
5711 gimple stmt;
5712 if (code == BIT_FIELD_REF)
5713 stmt = gimple_build_assign_with_ops (code, res,
5714 build3 (BIT_FIELD_REF, type,
5715 op0, op1, op2),
5716 NULL_TREE);
5717 else
5718 stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
5719 gimple_set_location (stmt, loc);
5720 gimple_seq_add_stmt_without_update (seq, stmt);
5721 }
5722 return res;
5723 }
5724
5725 /* Build the call FN (ARG0) with a result of type TYPE
5726 (or no result if TYPE is void) with location LOC,
5727 simplifying it first if possible using VALUEIZE if not NULL.
5728 ARG0 is expected to be valueized already. Returns the built
5729 expression value (or NULL_TREE if TYPE is void) and appends
5730 statements possibly defining it to SEQ. */
5731
5732 tree
5733 gimple_build (gimple_seq *seq, location_t loc,
5734 enum built_in_function fn, tree type, tree arg0,
5735 tree (*valueize)(tree))
5736 {
5737 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5738 if (!res)
5739 {
5740 tree decl = builtin_decl_implicit (fn);
5741 gimple stmt = gimple_build_call (decl, 1, arg0);
5742 if (!VOID_TYPE_P (type))
5743 {
5744 if (gimple_in_ssa_p (cfun))
5745 res = make_ssa_name (type, NULL);
5746 else
5747 res = create_tmp_reg (type, NULL);
5748 gimple_call_set_lhs (stmt, res);
5749 }
5750 gimple_set_location (stmt, loc);
5751 gimple_seq_add_stmt_without_update (seq, stmt);
5752 }
5753 return res;
5754 }
5755
5756 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
5757 (or no result if TYPE is void) with location LOC,
5758 simplifying it first if possible using VALUEIZE if not NULL.
5759 ARG0 and ARG1 are expected to be valueized already. Returns the built
5760 expression value (or NULL_TREE if TYPE is void) and appends
5761 statements possibly defining it to SEQ. */
5762
5763 tree
5764 gimple_build (gimple_seq *seq, location_t loc,
5765 enum built_in_function fn, tree type, tree arg0, tree arg1,
5766 tree (*valueize)(tree))
5767 {
5768 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5769 if (!res)
5770 {
5771 tree decl = builtin_decl_implicit (fn);
5772 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5773 if (!VOID_TYPE_P (type))
5774 {
5775 if (gimple_in_ssa_p (cfun))
5776 res = make_ssa_name (type, NULL);
5777 else
5778 res = create_tmp_reg (type, NULL);
5779 gimple_call_set_lhs (stmt, res);
5780 }
5781 gimple_set_location (stmt, loc);
5782 gimple_seq_add_stmt_without_update (seq, stmt);
5783 }
5784 return res;
5785 }
5786
5787 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5788 (or no result if TYPE is void) with location LOC,
5789 simplifying it first if possible using VALUEIZE if not NULL.
5790 ARG0, ARG1 and ARG2 are expected to be valueized already. Returns the built
5791 expression value (or NULL_TREE if TYPE is void) and appends
5792 statements possibly defining it to SEQ. */
5793
5794 tree
5795 gimple_build (gimple_seq *seq, location_t loc,
5796 enum built_in_function fn, tree type,
5797 tree arg0, tree arg1, tree arg2,
5798 tree (*valueize)(tree))
5799 {
5800 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5801 if (!res)
5802 {
5803 tree decl = builtin_decl_implicit (fn);
5804 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5805 if (!VOID_TYPE_P (type))
5806 {
5807 if (gimple_in_ssa_p (cfun))
5808 res = make_ssa_name (type, NULL);
5809 else
5810 res = create_tmp_reg (type, NULL);
5811 gimple_call_set_lhs (stmt, res);
5812 }
5813 gimple_set_location (stmt, loc);
5814 gimple_seq_add_stmt_without_update (seq, stmt);
5815 }
5816 return res;
5817 }
5818
5819 /* Build the conversion (TYPE) OP with a result of type TYPE
5820 with location LOC if such a conversion is necessary in GIMPLE,
5821 simplifying it first.
5822 Returns the built expression value and appends
5823 statements possibly defining it to SEQ. */
5824
5825 tree
5826 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5827 {
5828 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5829 return op;
5830 return gimple_build (seq, loc, NOP_EXPR, type, op);
5831 }
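
/* For illustration only (a hedged sketch): a caller can chain the
   helpers above to materialize (double) sqrtf (fabsf (f_1)) in front of
   a statement iterator; F, LOC and GSI are hypothetical locals:

     gimple_seq seq = NULL;
     tree t = gimple_build (&seq, loc, ABS_EXPR, float_type_node, f);
     t = gimple_build (&seq, loc, BUILT_IN_SQRTF, float_type_node, t);
     t = gimple_convert (&seq, loc, double_type_node, t);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Because every step goes through gimple_simplify first, the resulting
   sequence can be shorter than the number of calls suggests (e.g. the
   conversion disappears entirely when it is useless).  */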