/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "output.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check whether we still have a
     definition or whether the definition is going to be output in another
     partition.  Bypass this when gimplifying; all needed functions will
     be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions, it may be tempting not to give up, because the copy will
     be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
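/* For example (an illustrative sketch, not from the original sources),
   an initializer such as

     &a + 4

   is rewritten below into the equivalent

     &MEM_REF[&a, 4]

   so the address computation becomes a single invariant address that
   is_gimple_min_invariant accepts.  */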

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
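/* For instance (an illustrative sketch, not from the original sources),
   given

     static const int answer = 42;

   a use of 'answer' folds to the INTEGER_CST 42, and a 'const' scalar
   that has no initializer and cannot be overridden folds to zero.  */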

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
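/* For example (illustrative, not from the original sources),

     REALPART_EXPR <__complex__ (1.0, 2.0)>

   folds to the REAL_CST 1.0, and a BIT_FIELD_REF of a constant folds
   to the selected bits; reads from constant aggregates are handled by
   fold_const_aggregate_ref below.  */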

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
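      /* For example (illustrative, not from the original sources),
	 for a boolean-typed X:
	   tmp = X != 0;   becomes   tmp = X;
	   tmp = X == 0;   becomes   tmp = ~X;      (1-bit precision X)
	   tmp = X == 0;   becomes   tmp = X ^ 1;   (wider boolean types)  */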
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (!is_logical_not)
		result = op1;
	      /* Only for X of one-bit precision is the transformation
		 !X -> ~X valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement; however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
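/* For instance (illustrative, not from the original sources), once
   constants have been propagated into a condition such as

     if (7 < 3)

   fold_binary_loc reduces the comparison to a constant and the
   condition is rewritten into a trivially false (or true) form that
   later CFG cleanup can use to remove the dead arm.  */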

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement statements' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
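/* As an illustration (a sketch, not from the original sources), a call

     x = foo ();

   whose result has been determined to be the expression 'a + b' is
   rewritten into the gimplified sequence

     tmp = a + b;
     x = tmp;

   with the virtual operands of the original call threaded through the
   new statements by gsi_replace_with_seq_vops above.  */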

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
   (memmove).  */
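/* For example (illustrative, not from the original sources), with a
   4-byte, suitably aligned copy

     memcpy (&d, &s, 4);

   the code below inlines the copy as a single load/store pair using a
   4-byte integer type, roughly

     tmp = MEM_REF[&s];
     MEM_REF[&d] = tmp;

   while larger or unanalyzable copies fall back to the library call.  */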

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ??? Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem),
						     NULL);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to the VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI, setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */
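/* For example (illustrative, not from the original sources), a call

     memset (&i, 0x5A, 4);

   on a suitably aligned 4-byte integer object is turned below into the
   single store

     MEM_REF[&i] = 0x5A5A5A5A;

   where the store value replicates the byte C across the whole word.  */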

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      /* Replicate the low byte of C across all bytes of CVAL.  The final
	 shift by 32 is split in two so it stays well-defined even when
	 HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  Return
   false if LENGTH is not NULL and, for TYPE == 0, its value is not
   equal to the length we determine, or if we are unable to determine
   the length or value.  VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be returned, 1 for the maximum
   string length and 2 for the maximum value ARG can have.  */
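/* For instance (illustrative, not from the original sources), with
   TYPE == 0 an argument that is the literal "abc" yields LENGTH 3,
   while with TYPE == 1 a PHI merging "ab" and "abcd" yields the
   maximum length 4, each PHI argument being followed recursively.  */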

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}


/* Fold a function call to the builtin strcpy with arguments DEST and
   SRC.  Return false if no simplification can be made.  */
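/* For example (illustrative, not from the original sources), when the
   length of SRC is known to be 3, the call

     strcpy (dest, src);

   is transformed below into

     memcpy (dest, src, 4);

   copying the terminating NUL as well.  */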

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a function call to the builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */
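/* For example (illustrative, not from the original sources), when SRC
   is known to be "abcd" (so strlen + 1 == 5), the call

     strncpy (dest, src, 3);

   copies exactly three characters and needs no zero padding, so it is
   transformed below into

     memcpy (dest, src, 3);  */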

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).  */
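/* For example (illustrative, not from the original sources), when the
   length of SRC is known, the call

     strcat (dst, src);

   is split below into

     tmp = strlen (dst);
     newdst = dst + tmp;
     memcpy (newdst, src, strlen (src) + 1);

   so the copy can subsequently be folded further.  */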

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */
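/* For example (illustrative, not from the original sources), when the
   result is unused,

     fputs ("h", f);     becomes   fputc ('h', f);
     fputs ("hello", f); becomes   fwrite ("hello", 1, 5, f);

   and a call with a known empty string is removed entirely.  */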

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */
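/* For example (illustrative, not from the original sources), when the
   destination object size is known to be large enough,

     __memcpy_chk (dest, src, len, 1024);   with constant len <= 1024

   is turned below into the unchecked

     memcpy (dest, src, len);

   since the overflow check can never fire.  */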

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1865
1866 /* Fold a call to the __st[rp]cpy_chk builtin.
1867    DEST, SRC, and SIZE are the arguments to the call.
1868    FCODE is the BUILT_IN_* code of the builtin.  The return value is
1869    treated as ignorable when the call has no lhs, and the maximum
1870    known length of SRC is used when its length is not constant.  */
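/* A sketch of the folds attempted below, assuming the size check passes:
     __builtin___strcpy_chk (d, "abc", 8) -> strcpy (d, "abc")
     __builtin___strcpy_chk (d, s, sz), with a known but non-constant
       length LEN -> __builtin___memcpy_chk (d, s, LEN + 1, sz)  */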
1871
1872 static bool
1873 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1874 tree dest,
1875 tree src, tree size,
1876 enum built_in_function fcode)
1877 {
1878 gimple stmt = gsi_stmt (*gsi);
1879 location_t loc = gimple_location (stmt);
1880 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1881 tree len, fn;
1882
1883 /* If SRC and DEST are the same (and not volatile), return DEST. */
1884 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1885 {
1886 replace_call_with_value (gsi, dest);
1887 return true;
1888 }
1889
1890 if (! tree_fits_uhwi_p (size))
1891 return false;
1892
1893 tree maxlen = get_maxval_strlen (src, 1);
1894 if (! integer_all_onesp (size))
1895 {
1896 len = c_strlen (src, 1);
1897 if (! len || ! tree_fits_uhwi_p (len))
1898 {
1899 /* If LEN is not constant, try MAXLEN too.
1900 For MAXLEN only allow optimizing into non-_ocs function
1901 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1902 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1903 {
1904 if (fcode == BUILT_IN_STPCPY_CHK)
1905 {
1906 if (! ignore)
1907 return false;
1908
1909 /* If return value of __stpcpy_chk is ignored,
1910 optimize into __strcpy_chk. */
1911 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1912 if (!fn)
1913 return false;
1914
1915 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1916 replace_call_with_call_and_fold (gsi, repl);
1917 return true;
1918 }
1919
1920 if (! len || TREE_SIDE_EFFECTS (len))
1921 return false;
1922
1923 /* If c_strlen returned something, but not a constant,
1924 transform __strcpy_chk into __memcpy_chk. */
1925 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1926 if (!fn)
1927 return false;
1928
1929 len = fold_convert_loc (loc, size_type_node, len);
1930 len = size_binop_loc (loc, PLUS_EXPR, len,
1931 build_int_cst (size_type_node, 1));
1932 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1933 true, GSI_SAME_STMT);
1934 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1935 replace_call_with_call_and_fold (gsi, repl);
1936 return true;
1937 }
1938 }
1939 else
1940 maxlen = len;
1941
1942 if (! tree_int_cst_lt (maxlen, size))
1943 return false;
1944 }
1945
1946 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1947 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1948 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1949 if (!fn)
1950 return false;
1951
1952 gimple repl = gimple_build_call (fn, 2, dest, src);
1953 replace_call_with_call_and_fold (gsi, repl);
1954 return true;
1955 }
1956
1957 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
1958    are the arguments to the call.  FCODE is the BUILT_IN_* code of the
1959    builtin.  The return value is treated as ignorable when the call has no
1960    lhs, and the maximum known value of LEN is used when LEN is not constant.  */
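/* A sketch with hypothetical values (the second fold requires
   SIZE >= LEN):
     (void) __builtin___stpncpy_chk (d, s, n, sz)
       -> __builtin___strncpy_chk (d, s, n, sz)
     __builtin___strncpy_chk (d, s, 16, 32) -> strncpy (d, s, 16)  */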
1961
1962 static bool
1963 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1964 tree dest, tree src,
1965 tree len, tree size,
1966 enum built_in_function fcode)
1967 {
1968 gimple stmt = gsi_stmt (*gsi);
1969 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1970 tree fn;
1971
1972 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1973 {
1974 /* If return value of __stpncpy_chk is ignored,
1975 optimize into __strncpy_chk. */
1976 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1977 if (fn)
1978 {
1979 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1980 replace_call_with_call_and_fold (gsi, repl);
1981 return true;
1982 }
1983 }
1984
1985 if (! tree_fits_uhwi_p (size))
1986 return false;
1987
1988 tree maxlen = get_maxval_strlen (len, 2);
1989 if (! integer_all_onesp (size))
1990 {
1991 if (! tree_fits_uhwi_p (len))
1992 {
1993 /* If LEN is not constant, try MAXLEN too.
1994 For MAXLEN only allow optimizing into non-_ocs function
1995 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1996 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1997 return false;
1998 }
1999 else
2000 maxlen = len;
2001
2002 if (tree_int_cst_lt (size, maxlen))
2003 return false;
2004 }
2005
2006 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2007 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2008 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2009 if (!fn)
2010 return false;
2011
2012 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2013 replace_call_with_call_and_fold (gsi, repl);
2014 return true;
2015 }
2016
2017 /* Fold a call to a {,v}snprintf_chk builtin at *GSI into a call to
2018    {,v}snprintf if possible.  Return true if any simplification was made,
2019    false if a normal call should be emitted instead.  FCODE is either
2020    BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The maximum known
2021    value of the length argument is used when it is not constant.  */
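/* For example, a sketch (FLAG must be zero, or the format must contain
   no '%' directives or be exactly "%s", and SIZE must cover the length):
     __builtin___snprintf_chk (d, n, 0, sz, "%s", s)
       -> snprintf (d, n, "%s", s)  */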
2022
2023 static bool
2024 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2025 enum built_in_function fcode)
2026 {
2027 gimple stmt = gsi_stmt (*gsi);
2028 tree dest, size, len, fn, fmt, flag;
2029 const char *fmt_str;
2030
2031 /* Verify the required arguments in the original call. */
2032 if (gimple_call_num_args (stmt) < 5)
2033 return false;
2034
2035 dest = gimple_call_arg (stmt, 0);
2036 len = gimple_call_arg (stmt, 1);
2037 flag = gimple_call_arg (stmt, 2);
2038 size = gimple_call_arg (stmt, 3);
2039 fmt = gimple_call_arg (stmt, 4);
2040
2041 if (! tree_fits_uhwi_p (size))
2042 return false;
2043
2044 if (! integer_all_onesp (size))
2045 {
2046 tree maxlen = get_maxval_strlen (len, 2);
2047 if (! tree_fits_uhwi_p (len))
2048 {
2049 /* If LEN is not constant, try MAXLEN too.
2050 For MAXLEN only allow optimizing into non-_ocs function
2051 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2052 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2053 return false;
2054 }
2055 else
2056 maxlen = len;
2057
2058 if (tree_int_cst_lt (size, maxlen))
2059 return false;
2060 }
2061
2062 if (!init_target_chars ())
2063 return false;
2064
2065 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2066 or if format doesn't contain % chars or is "%s". */
2067 if (! integer_zerop (flag))
2068 {
2069 fmt_str = c_getstr (fmt);
2070 if (fmt_str == NULL)
2071 return false;
2072 if (strchr (fmt_str, target_percent) != NULL
2073 && strcmp (fmt_str, target_percent_s))
2074 return false;
2075 }
2076
2077 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2078 available. */
2079 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2080 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2081 if (!fn)
2082 return false;
2083
2084   /* Replace the called function and the first 5 arguments by 3, retaining
2085      the trailing varargs.  */
2086 gimple_call_set_fndecl (stmt, fn);
2087 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2088 gimple_call_set_arg (stmt, 0, dest);
2089 gimple_call_set_arg (stmt, 1, len);
2090 gimple_call_set_arg (stmt, 2, fmt);
2091 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2092 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2093 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2094 fold_stmt (gsi);
2095 return true;
2096 }
2097
2098 /* Fold a call to a __{,v}sprintf_chk builtin at *GSI into a call to
2099    {,v}sprintf if possible.  Return true if any simplification was made,
2100    false if a normal call should be emitted instead.  FCODE is either
2101    BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
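/* A sketch with hypothetical values (the known output length must be
   smaller than SIZE unless SIZE is unlimited):
     __builtin___sprintf_chk (d, 0, 32, "abc") -> sprintf (d, "abc")
     __builtin___sprintf_chk (d, 0, 32, "%s", "abc")
       -> sprintf (d, "%s", "abc")  */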
2102
2103 static bool
2104 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2105 enum built_in_function fcode)
2106 {
2107 gimple stmt = gsi_stmt (*gsi);
2108 tree dest, size, len, fn, fmt, flag;
2109 const char *fmt_str;
2110 unsigned nargs = gimple_call_num_args (stmt);
2111
2112 /* Verify the required arguments in the original call. */
2113 if (nargs < 4)
2114 return false;
2115 dest = gimple_call_arg (stmt, 0);
2116 flag = gimple_call_arg (stmt, 1);
2117 size = gimple_call_arg (stmt, 2);
2118 fmt = gimple_call_arg (stmt, 3);
2119
2120 if (! tree_fits_uhwi_p (size))
2121 return false;
2122
2123 len = NULL_TREE;
2124
2125 if (!init_target_chars ())
2126 return false;
2127
2128 /* Check whether the format is a literal string constant. */
2129 fmt_str = c_getstr (fmt);
2130 if (fmt_str != NULL)
2131 {
2132 /* If the format doesn't contain % args or %%, we know the size. */
2133 if (strchr (fmt_str, target_percent) == 0)
2134 {
2135 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2136 len = build_int_cstu (size_type_node, strlen (fmt_str));
2137 }
2138       /* If the format is "%s" and the first ... argument is a string
2139 	 literal, we know the size too.  */
2140 else if (fcode == BUILT_IN_SPRINTF_CHK
2141 && strcmp (fmt_str, target_percent_s) == 0)
2142 {
2143 tree arg;
2144
2145 if (nargs == 5)
2146 {
2147 arg = gimple_call_arg (stmt, 4);
2148 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2149 {
2150 len = c_strlen (arg, 1);
2151 if (! len || ! tree_fits_uhwi_p (len))
2152 len = NULL_TREE;
2153 }
2154 }
2155 }
2156 }
2157
2158 if (! integer_all_onesp (size))
2159 {
2160 if (! len || ! tree_int_cst_lt (len, size))
2161 return false;
2162 }
2163
2164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2165 or if format doesn't contain % chars or is "%s". */
2166 if (! integer_zerop (flag))
2167 {
2168 if (fmt_str == NULL)
2169 return false;
2170 if (strchr (fmt_str, target_percent) != NULL
2171 && strcmp (fmt_str, target_percent_s))
2172 return false;
2173 }
2174
2175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2176 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2177 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2178 if (!fn)
2179 return false;
2180
2181   /* Replace the called function and the first 4 arguments by 2, retaining
2182      the trailing varargs.  */
2183 gimple_call_set_fndecl (stmt, fn);
2184 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2185 gimple_call_set_arg (stmt, 0, dest);
2186 gimple_call_set_arg (stmt, 1, fmt);
2187 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2188 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2189 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2190 fold_stmt (gsi);
2191 return true;
2192 }
2193
2194 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2195    ORIG may be null if this is a 2-argument call.  We don't attempt to
2196    simplify calls with more than 3 arguments.
2197 
2198    Return true if a simplification was made, otherwise false.  When the
2199    result of the call is used, the known output length is assigned to
2200    the lhs.  */
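/* A sketch of the two transformations below:
     sprintf (d, "abc")   -> strcpy (d, "abc"), lhs (if any) set to 3
     sprintf (d, "%s", s) -> strcpy (d, s), lhs set to strlen (s)
       when that length is known  */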
2201
2202 static bool
2203 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2204 {
2205 gimple stmt = gsi_stmt (*gsi);
2206 tree dest = gimple_call_arg (stmt, 0);
2207 tree fmt = gimple_call_arg (stmt, 1);
2208 tree orig = NULL_TREE;
2209 const char *fmt_str = NULL;
2210
2211 /* Verify the required arguments in the original call. We deal with two
2212 types of sprintf() calls: 'sprintf (str, fmt)' and
2213 'sprintf (dest, "%s", orig)'. */
2214 if (gimple_call_num_args (stmt) > 3)
2215 return false;
2216
2217 if (gimple_call_num_args (stmt) == 3)
2218 orig = gimple_call_arg (stmt, 2);
2219
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str = c_getstr (fmt);
2222 if (fmt_str == NULL)
2223 return false;
2224
2225 if (!init_target_chars ())
2226 return false;
2227
2228 /* If the format doesn't contain % args or %%, use strcpy. */
2229 if (strchr (fmt_str, target_percent) == NULL)
2230 {
2231 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2232
2233 if (!fn)
2234 return false;
2235
2236 /* Don't optimize sprintf (buf, "abc", ptr++). */
2237 if (orig)
2238 return false;
2239
2240 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2241 'format' is known to contain no % formats. */
2242 gimple_seq stmts = NULL;
2243 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2244 gimple_seq_add_stmt_without_update (&stmts, repl);
2245 if (gimple_call_lhs (stmt))
2246 {
2247 repl = gimple_build_assign (gimple_call_lhs (stmt),
2248 build_int_cst (integer_type_node,
2249 strlen (fmt_str)));
2250 gimple_seq_add_stmt_without_update (&stmts, repl);
2251 gsi_replace_with_seq_vops (gsi, stmts);
2252 	  /* gsi now points at the assignment to the lhs, get a
2253 	     stmt iterator to the newly inserted strcpy call.
2254 ??? We can't use gsi_for_stmt as that doesn't work when the
2255 CFG isn't built yet. */
2256 gimple_stmt_iterator gsi2 = *gsi;
2257 gsi_prev (&gsi2);
2258 fold_stmt (&gsi2);
2259 }
2260 else
2261 {
2262 gsi_replace_with_seq_vops (gsi, stmts);
2263 fold_stmt (gsi);
2264 }
2265 return true;
2266 }
2267
2268 /* If the format is "%s", use strcpy if the result isn't used. */
2269 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2270 {
2271 tree fn;
2272 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2273
2274 if (!fn)
2275 return false;
2276
2277 /* Don't crash on sprintf (str1, "%s"). */
2278 if (!orig)
2279 return false;
2280
2281 tree orig_len = NULL_TREE;
2282 if (gimple_call_lhs (stmt))
2283 {
2284 orig_len = get_maxval_strlen (orig, 0);
2285 if (!orig_len)
2286 return false;
2287 }
2288
2289 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2290 gimple_seq stmts = NULL;
2291 gimple repl = gimple_build_call (fn, 2, dest, orig);
2292 gimple_seq_add_stmt_without_update (&stmts, repl);
2293 if (gimple_call_lhs (stmt))
2294 {
2295 if (!useless_type_conversion_p (integer_type_node,
2296 TREE_TYPE (orig_len)))
2297 orig_len = fold_convert (integer_type_node, orig_len);
2298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2299 gimple_seq_add_stmt_without_update (&stmts, repl);
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 	  /* gsi now points at the assignment to the lhs, get a
2302 	     stmt iterator to the newly inserted strcpy call.
2303 ??? We can't use gsi_for_stmt as that doesn't work when the
2304 CFG isn't built yet. */
2305 gimple_stmt_iterator gsi2 = *gsi;
2306 gsi_prev (&gsi2);
2307 fold_stmt (&gsi2);
2308 }
2309 else
2310 {
2311 gsi_replace_with_seq_vops (gsi, stmts);
2312 fold_stmt (gsi);
2313 }
2314 return true;
2315 }
2316 return false;
2317 }
2318
2319 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2320    FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
2321    attempt to simplify calls with more than 4 arguments.
2322 
2323    Return true if a simplification was made, otherwise false.  When the
2324    result of the call is used, the known output length is assigned to
2325    the lhs.  */
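/* A sketch with hypothetical values (both folds require the copied
   length to be known and smaller than DESTSIZE):
     snprintf (d, 8, "abc")   -> strcpy (d, "abc")
     snprintf (d, 8, "%s", s) -> strcpy (d, s)  */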
2326
2327 static bool
2328 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2329 {
2330 gimple stmt = gsi_stmt (*gsi);
2331 tree dest = gimple_call_arg (stmt, 0);
2332 tree destsize = gimple_call_arg (stmt, 1);
2333 tree fmt = gimple_call_arg (stmt, 2);
2334 tree orig = NULL_TREE;
2335 const char *fmt_str = NULL;
2336
2337 if (gimple_call_num_args (stmt) > 4)
2338 return false;
2339
2340 if (gimple_call_num_args (stmt) == 4)
2341 orig = gimple_call_arg (stmt, 3);
2342
2343 if (!tree_fits_uhwi_p (destsize))
2344 return false;
2345 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2346
2347 /* Check whether the format is a literal string constant. */
2348 fmt_str = c_getstr (fmt);
2349 if (fmt_str == NULL)
2350 return false;
2351
2352 if (!init_target_chars ())
2353 return false;
2354
2355 /* If the format doesn't contain % args or %%, use strcpy. */
2356 if (strchr (fmt_str, target_percent) == NULL)
2357 {
2358 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2359 if (!fn)
2360 return false;
2361
2362 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2363 if (orig)
2364 return false;
2365
2366 /* We could expand this as
2367 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2368 or to
2369 memcpy (str, fmt_with_nul_at_cstm1, cst);
2370 but in the former case that might increase code size
2371 and in the latter case grow .rodata section too much.
2372 So punt for now. */
2373 size_t len = strlen (fmt_str);
2374 if (len >= destlen)
2375 return false;
2376
2377 gimple_seq stmts = NULL;
2378 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380 if (gimple_call_lhs (stmt))
2381 {
2382 repl = gimple_build_assign (gimple_call_lhs (stmt),
2383 build_int_cst (integer_type_node, len));
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 gsi_replace_with_seq_vops (gsi, stmts);
2386 	  /* gsi now points at the assignment to the lhs, get a
2387 	     stmt iterator to the newly inserted strcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2 = *gsi;
2391 gsi_prev (&gsi2);
2392 fold_stmt (&gsi2);
2393 }
2394 else
2395 {
2396 gsi_replace_with_seq_vops (gsi, stmts);
2397 fold_stmt (gsi);
2398 }
2399 return true;
2400 }
2401
2402 /* If the format is "%s", use strcpy if the result isn't used. */
2403 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2404 {
2405 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2406 if (!fn)
2407 return false;
2408
2409 /* Don't crash on snprintf (str1, cst, "%s"). */
2410 if (!orig)
2411 return false;
2412
2413 tree orig_len = get_maxval_strlen (orig, 0);
2414 if (!orig_len)
2415 return false;
2416
2417 /* We could expand this as
2418 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2419 or to
2420 memcpy (str1, str2_with_nul_at_cstm1, cst);
2421 but in the former case that might increase code size
2422 and in the latter case grow .rodata section too much.
2423 So punt for now. */
2424 if (compare_tree_int (orig_len, destlen) >= 0)
2425 return false;
2426
2427 /* Convert snprintf (str1, cst, "%s", str2) into
2428 strcpy (str1, str2) if strlen (str2) < cst. */
2429 gimple_seq stmts = NULL;
2430 gimple repl = gimple_build_call (fn, 2, dest, orig);
2431 gimple_seq_add_stmt_without_update (&stmts, repl);
2432 if (gimple_call_lhs (stmt))
2433 {
2434 if (!useless_type_conversion_p (integer_type_node,
2435 TREE_TYPE (orig_len)))
2436 orig_len = fold_convert (integer_type_node, orig_len);
2437 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2438 gimple_seq_add_stmt_without_update (&stmts, repl);
2439 gsi_replace_with_seq_vops (gsi, stmts);
2440 	  /* gsi now points at the assignment to the lhs, get a
2441 	     stmt iterator to the newly inserted strcpy call.
2442 ??? We can't use gsi_for_stmt as that doesn't work when the
2443 CFG isn't built yet. */
2444 gimple_stmt_iterator gsi2 = *gsi;
2445 gsi_prev (&gsi2);
2446 fold_stmt (&gsi2);
2447 }
2448 else
2449 {
2450 gsi_replace_with_seq_vops (gsi, stmts);
2451 fold_stmt (gsi);
2452 }
2453 return true;
2454 }
2455 return false;
2456 }
2457
2458
2459 /* Fold a call to __builtin_strlen when the length of its argument is known.  */
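/* E.g. n = strlen ("abc") becomes n = 3; more generally, any argument
   for which get_maxval_strlen can compute the length (a sketch).  */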
2460
2461 static bool
2462 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2463 {
2464 gimple stmt = gsi_stmt (*gsi);
2465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2466 if (!len)
2467 return false;
2468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2469 replace_call_with_value (gsi, len);
2470 return true;
2471 }
2472
2473
2474 /* Fold the non-target builtin at *GSI and return whether any simplification
2475 was made. */
2476
2477 static bool
2478 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2479 {
2480 gimple stmt = gsi_stmt (*gsi);
2481 tree callee = gimple_call_fndecl (stmt);
2482
2483 /* Give up for always_inline inline builtins until they are
2484 inlined. */
2485 if (avoid_folding_inline_builtin (callee))
2486 return false;
2487
2488 switch (DECL_FUNCTION_CODE (callee))
2489 {
2490 case BUILT_IN_BZERO:
2491 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2492 gimple_call_arg (stmt, 1));
2493 case BUILT_IN_MEMSET:
2494 return gimple_fold_builtin_memset (gsi,
2495 gimple_call_arg (stmt, 1),
2496 gimple_call_arg (stmt, 2));
2497 case BUILT_IN_BCOPY:
2498 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2499 gimple_call_arg (stmt, 0), 3);
2500 case BUILT_IN_MEMCPY:
2501 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2502 gimple_call_arg (stmt, 1), 0);
2503 case BUILT_IN_MEMPCPY:
2504 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2505 gimple_call_arg (stmt, 1), 1);
2506 case BUILT_IN_MEMMOVE:
2507 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2508 gimple_call_arg (stmt, 1), 3);
2509 case BUILT_IN_SPRINTF_CHK:
2510 case BUILT_IN_VSPRINTF_CHK:
2511 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2512 case BUILT_IN_STRCAT_CHK:
2513 return gimple_fold_builtin_strcat_chk (gsi);
2514 case BUILT_IN_STRNCAT_CHK:
2515 return gimple_fold_builtin_strncat_chk (gsi);
2516 case BUILT_IN_STRLEN:
2517 return gimple_fold_builtin_strlen (gsi);
2518 case BUILT_IN_STRCPY:
2519 return gimple_fold_builtin_strcpy (gsi,
2520 gimple_call_arg (stmt, 0),
2521 gimple_call_arg (stmt, 1));
2522 case BUILT_IN_STRNCPY:
2523 return gimple_fold_builtin_strncpy (gsi,
2524 gimple_call_arg (stmt, 0),
2525 gimple_call_arg (stmt, 1),
2526 gimple_call_arg (stmt, 2));
2527 case BUILT_IN_STRCAT:
2528 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2529 gimple_call_arg (stmt, 1));
2530 case BUILT_IN_FPUTS:
2531 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2532 gimple_call_arg (stmt, 1), false);
2533 case BUILT_IN_FPUTS_UNLOCKED:
2534 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1), true);
2536 case BUILT_IN_MEMCPY_CHK:
2537 case BUILT_IN_MEMPCPY_CHK:
2538 case BUILT_IN_MEMMOVE_CHK:
2539 case BUILT_IN_MEMSET_CHK:
2540 return gimple_fold_builtin_memory_chk (gsi,
2541 gimple_call_arg (stmt, 0),
2542 gimple_call_arg (stmt, 1),
2543 gimple_call_arg (stmt, 2),
2544 gimple_call_arg (stmt, 3),
2545 DECL_FUNCTION_CODE (callee));
2546 case BUILT_IN_STRCPY_CHK:
2547 case BUILT_IN_STPCPY_CHK:
2548 return gimple_fold_builtin_stxcpy_chk (gsi,
2549 gimple_call_arg (stmt, 0),
2550 gimple_call_arg (stmt, 1),
2551 gimple_call_arg (stmt, 2),
2552 DECL_FUNCTION_CODE (callee));
2553 case BUILT_IN_STRNCPY_CHK:
2554 case BUILT_IN_STPNCPY_CHK:
2555 return gimple_fold_builtin_stxncpy_chk (gsi,
2556 gimple_call_arg (stmt, 0),
2557 gimple_call_arg (stmt, 1),
2558 gimple_call_arg (stmt, 2),
2559 gimple_call_arg (stmt, 3),
2560 DECL_FUNCTION_CODE (callee));
2561 case BUILT_IN_SNPRINTF_CHK:
2562 case BUILT_IN_VSNPRINTF_CHK:
2563 return gimple_fold_builtin_snprintf_chk (gsi,
2564 DECL_FUNCTION_CODE (callee));
2565 case BUILT_IN_SNPRINTF:
2566 return gimple_fold_builtin_snprintf (gsi);
2567 case BUILT_IN_SPRINTF:
2568 return gimple_fold_builtin_sprintf (gsi);
2569 default:;
2570 }
2571
2572 /* Try the generic builtin folder. */
2573 bool ignore = (gimple_call_lhs (stmt) == NULL);
2574 tree result = fold_call_stmt (stmt, ignore);
2575 if (result)
2576 {
2577 if (ignore)
2578 STRIP_NOPS (result);
2579 else
2580 result = fold_convert (gimple_call_return_type (stmt), result);
2581 if (!update_call_from_tree (gsi, result))
2582 gimplify_and_update_call_from_tree (gsi, result);
2583 return true;
2584 }
2585
2586 return false;
2587 }
2588
2589 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
2590    signed precision, doesn't fit into TYPE.  The overflow test is made
2591    regardless of -fwrapv, and also for unsigned types.  */
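/* E.g. for an 8-bit unsigned TYPE, 200 + 100 = 300 needs nine value
   bits and is therefore reported as overflowing, and 0 - 1 is negative
   and thus also overflows.  */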
2592
2593 bool
2594 arith_overflowed_p (enum tree_code code, const_tree type,
2595 const_tree arg0, const_tree arg1)
2596 {
2597 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2598 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2599 widest2_int_cst;
2600 widest2_int warg0 = widest2_int_cst (arg0);
2601 widest2_int warg1 = widest2_int_cst (arg1);
2602 widest2_int wres;
2603 switch (code)
2604 {
2605 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2606 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2607 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2608 default: gcc_unreachable ();
2609 }
2610 signop sign = TYPE_SIGN (type);
2611 if (sign == UNSIGNED && wi::neg_p (wres))
2612 return true;
2613 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2614 }
2615
2616 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2617 The statement may be replaced by another statement, e.g., if the call
2618 simplifies to a constant value. Return true if any changes were made.
2619 It is assumed that the operands have been previously folded. */
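/* E.g. (a sketch) an argument *&a is folded to a, an OBJ_TYPE_REF call
   with a single possible target becomes a direct call, and a static
   chain the (now direct) callee does not need is dropped.  */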
2620
2621 static bool
2622 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2623 {
2624 gimple stmt = gsi_stmt (*gsi);
2625 tree callee;
2626 bool changed = false;
2627 unsigned i;
2628
2629 /* Fold *& in call arguments. */
2630 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2631 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2632 {
2633 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2634 if (tmp)
2635 {
2636 gimple_call_set_arg (stmt, i, tmp);
2637 changed = true;
2638 }
2639 }
2640
2641 /* Check for virtual calls that became direct calls. */
2642 callee = gimple_call_fn (stmt);
2643 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2644 {
2645 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2646 {
2647 if (dump_file && virtual_method_call_p (callee)
2648 && !possible_polymorphic_call_target_p
2649 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2650 (OBJ_TYPE_REF_EXPR (callee)))))
2651 {
2652 fprintf (dump_file,
2653 "Type inheritance inconsistent devirtualization of ");
2654 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2655 fprintf (dump_file, " to ");
2656 print_generic_expr (dump_file, callee, TDF_SLIM);
2657 fprintf (dump_file, "\n");
2658 }
2659
2660 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2661 changed = true;
2662 }
2663 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2664 {
2665 bool final;
2666 vec <cgraph_node *>targets
2667 = possible_polymorphic_call_targets (callee, stmt, &final);
2668 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2669 {
2670 tree lhs = gimple_call_lhs (stmt);
2671 if (dump_enabled_p ())
2672 {
2673 location_t loc = gimple_location_safe (stmt);
2674 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2675 "folding virtual function call to %s\n",
2676 targets.length () == 1
2677 ? targets[0]->name ()
2678 : "__builtin_unreachable");
2679 }
2680 if (targets.length () == 1)
2681 {
2682 gimple_call_set_fndecl (stmt, targets[0]->decl);
2683 changed = true;
2684 /* If the call becomes noreturn, remove the lhs. */
2685 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2686 {
2687 if (TREE_CODE (lhs) == SSA_NAME)
2688 {
2689 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2690 tree def = get_or_create_ssa_default_def (cfun, var);
2691 gimple new_stmt = gimple_build_assign (lhs, def);
2692 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2693 }
2694 gimple_call_set_lhs (stmt, NULL_TREE);
2695 }
2696 }
2697 else
2698 {
2699 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2700 gimple new_stmt = gimple_build_call (fndecl, 0);
2701 gimple_set_location (new_stmt, gimple_location (stmt));
2702 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2703 {
2704 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2705 tree def = get_or_create_ssa_default_def (cfun, var);
2706
2707 		      /* To satisfy the condition for
2708 			 cgraph_update_edges_for_call_stmt_node,
2709 			 we need to preserve the GIMPLE_CALL statement
2710 			 at the position of the GSI iterator.  */
2711 update_call_from_tree (gsi, def);
2712 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2713 }
2714 else
2715 {
2716 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2717 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2718 gsi_replace (gsi, new_stmt, false);
2719 }
2720 return true;
2721 }
2722 }
2723 }
2724 }
2725
2726 /* Check for indirect calls that became direct calls, and then
2727 no longer require a static chain. */
2728 if (gimple_call_chain (stmt))
2729 {
2730 tree fn = gimple_call_fndecl (stmt);
2731 if (fn && !DECL_STATIC_CHAIN (fn))
2732 {
2733 gimple_call_set_chain (stmt, NULL);
2734 changed = true;
2735 }
2736 else
2737 {
2738 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
2739 if (tmp)
2740 {
2741 gimple_call_set_chain (stmt, tmp);
2742 changed = true;
2743 }
2744 }
2745 }
2746
2747 if (inplace)
2748 return changed;
2749
2750 /* Check for builtins that CCP can handle using information not
2751 available in the generic fold routines. */
2752 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2753 {
2754 if (gimple_fold_builtin (gsi))
2755 changed = true;
2756 }
2757 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2758 {
2759 changed |= targetm.gimple_fold_builtin (gsi);
2760 }
2761 else if (gimple_call_internal_p (stmt))
2762 {
2763 enum tree_code subcode = ERROR_MARK;
2764 tree result = NULL_TREE;
2765 bool cplx_result = false;
2766 tree overflow = NULL_TREE;
2767 switch (gimple_call_internal_fn (stmt))
2768 {
2769 case IFN_BUILTIN_EXPECT:
2770 result = fold_builtin_expect (gimple_location (stmt),
2771 gimple_call_arg (stmt, 0),
2772 gimple_call_arg (stmt, 1),
2773 gimple_call_arg (stmt, 2));
2774 break;
2775 case IFN_UBSAN_OBJECT_SIZE:
2776 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2777 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2778 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2779 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2780 gimple_call_arg (stmt, 2))))
2781 {
2782 gsi_replace (gsi, gimple_build_nop (), true);
2783 unlink_stmt_vdef (stmt);
2784 release_defs (stmt);
2785 return true;
2786 }
2787 break;
2788 case IFN_UBSAN_CHECK_ADD:
2789 subcode = PLUS_EXPR;
2790 break;
2791 case IFN_UBSAN_CHECK_SUB:
2792 subcode = MINUS_EXPR;
2793 break;
2794 case IFN_UBSAN_CHECK_MUL:
2795 subcode = MULT_EXPR;
2796 break;
2797 case IFN_ADD_OVERFLOW:
2798 subcode = PLUS_EXPR;
2799 cplx_result = true;
2800 break;
2801 case IFN_SUB_OVERFLOW:
2802 subcode = MINUS_EXPR;
2803 cplx_result = true;
2804 break;
2805 case IFN_MUL_OVERFLOW:
2806 subcode = MULT_EXPR;
2807 cplx_result = true;
2808 break;
2809 default:
2810 break;
2811 }
2812 if (subcode != ERROR_MARK)
2813 {
2814 tree arg0 = gimple_call_arg (stmt, 0);
2815 tree arg1 = gimple_call_arg (stmt, 1);
2816 tree type = TREE_TYPE (arg0);
2817 if (cplx_result)
2818 {
2819 tree lhs = gimple_call_lhs (stmt);
2820 if (lhs == NULL_TREE)
2821 type = NULL_TREE;
2822 else
2823 type = TREE_TYPE (TREE_TYPE (lhs));
2824 }
2825 if (type == NULL_TREE)
2826 ;
2827 /* x = y + 0; x = y - 0; x = y * 0; */
2828 else if (integer_zerop (arg1))
2829 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
2830 /* x = 0 + y; x = 0 * y; */
2831 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2832 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
2833 /* x = y - y; */
2834 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2835 result = integer_zero_node;
2836 /* x = y * 1; x = 1 * y; */
2837 else if (subcode == MULT_EXPR && integer_onep (arg1))
2838 result = arg0;
2839 else if (subcode == MULT_EXPR && integer_onep (arg0))
2840 result = arg1;
2841 else if (TREE_CODE (arg0) == INTEGER_CST
2842 && TREE_CODE (arg1) == INTEGER_CST)
2843 {
2844 if (cplx_result)
2845 result = int_const_binop (subcode, fold_convert (type, arg0),
2846 fold_convert (type, arg1));
2847 else
2848 result = int_const_binop (subcode, arg0, arg1);
2849 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
2850 {
2851 if (cplx_result)
2852 overflow = build_one_cst (type);
2853 else
2854 result = NULL_TREE;
2855 }
2856 }
2857 if (result)
2858 {
2859 if (result == integer_zero_node)
2860 result = build_zero_cst (type);
2861 else if (cplx_result && TREE_TYPE (result) != type)
2862 {
2863 if (TREE_CODE (result) == INTEGER_CST)
2864 {
2865 if (arith_overflowed_p (PLUS_EXPR, type, result,
2866 integer_zero_node))
2867 overflow = build_one_cst (type);
2868 }
2869 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
2870 && TYPE_UNSIGNED (type))
2871 || (TYPE_PRECISION (type)
2872 < (TYPE_PRECISION (TREE_TYPE (result))
2873 + (TYPE_UNSIGNED (TREE_TYPE (result))
2874 && !TYPE_UNSIGNED (type)))))
2875 result = NULL_TREE;
2876 if (result)
2877 result = fold_convert (type, result);
2878 }
2879 }
2880 }
2881
2882 if (result)
2883 {
2884 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
2885 result = drop_tree_overflow (result);
2886 if (cplx_result)
2887 {
2888 if (overflow == NULL_TREE)
2889 overflow = build_zero_cst (TREE_TYPE (result));
2890 tree ctype = build_complex_type (TREE_TYPE (result));
2891 if (TREE_CODE (result) == INTEGER_CST
2892 && TREE_CODE (overflow) == INTEGER_CST)
2893 result = build_complex (ctype, result, overflow);
2894 else
2895 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
2896 ctype, result, overflow);
2897 }
2898 if (!update_call_from_tree (gsi, result))
2899 gimplify_and_update_call_from_tree (gsi, result);
2900 changed = true;
2901 }
2902 }
2903
2904 return changed;
2905 }
2906
2907
2908 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
2909    gimple_simplify.
2910
2911 Replaces *GSI with the simplification result in RCODE and OPS
2912 and the associated statements in *SEQ. Does the replacement
2913 according to INPLACE and returns true if the operation succeeded. */
2914
2915 static bool
2916 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2917 code_helper rcode, tree *ops,
2918 gimple_seq *seq, bool inplace)
2919 {
2920 gimple stmt = gsi_stmt (*gsi);
2921
2922 /* Play safe and do not allow abnormals to be mentioned in
2923 newly created statements. See also maybe_push_res_to_seq. */
2924 if ((TREE_CODE (ops[0]) == SSA_NAME
2925 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2926 || (ops[1]
2927 && TREE_CODE (ops[1]) == SSA_NAME
2928 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2929 || (ops[2]
2930 && TREE_CODE (ops[2]) == SSA_NAME
2931 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2932 return false;
2933
2934 if (gimple_code (stmt) == GIMPLE_COND)
2935 {
2936 gcc_assert (rcode.is_tree_code ());
2937 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2938 /* GIMPLE_CONDs condition may not throw. */
2939 && (!flag_exceptions
2940 || !cfun->can_throw_non_call_exceptions
2941 || !operation_could_trap_p (rcode,
2942 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2943 false, NULL_TREE)))
2944 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
2945 else if (rcode == SSA_NAME)
2946 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2947 build_zero_cst (TREE_TYPE (ops[0])));
2948 else if (rcode == INTEGER_CST)
2949 {
2950 if (integer_zerop (ops[0]))
2951 gimple_cond_make_false (stmt);
2952 else
2953 gimple_cond_make_true (stmt);
2954 }
2955 else if (!inplace)
2956 {
2957 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2958 ops, seq);
2959 if (!res)
2960 return false;
2961 gimple_cond_set_condition (stmt, NE_EXPR, res,
2962 build_zero_cst (TREE_TYPE (res)));
2963 }
2964 else
2965 return false;
2966 if (dump_file && (dump_flags & TDF_DETAILS))
2967 {
2968 fprintf (dump_file, "gimple_simplified to ");
2969 if (!gimple_seq_empty_p (*seq))
2970 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2971 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2972 0, TDF_SLIM);
2973 }
2974 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2975 return true;
2976 }
2977 else if (is_gimple_assign (stmt)
2978 && rcode.is_tree_code ())
2979 {
2980 if (!inplace
2981 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
2982 {
2983 maybe_build_generic_op (rcode,
2984 TREE_TYPE (gimple_assign_lhs (stmt)),
2985 &ops[0], ops[1], ops[2]);
2986 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2987 ops[0], ops[1], ops[2]);
2988 if (dump_file && (dump_flags & TDF_DETAILS))
2989 {
2990 fprintf (dump_file, "gimple_simplified to ");
2991 if (!gimple_seq_empty_p (*seq))
2992 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2993 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2994 0, TDF_SLIM);
2995 }
2996 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2997 return true;
2998 }
2999 }
3000 else if (!inplace)
3001 {
3002 if (gimple_has_lhs (stmt))
3003 {
3004 tree lhs = gimple_get_lhs (stmt);
3005 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3006 ops, seq, lhs);
3007 if (dump_file && (dump_flags & TDF_DETAILS))
3008 {
3009 fprintf (dump_file, "gimple_simplified to ");
3010 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3011 }
3012 gsi_replace_with_seq_vops (gsi, *seq);
3013 return true;
3014 }
3015 else
3016 gcc_unreachable ();
3017 }
3018
3019 return false;
3020 }
3021
3022 /* Canonicalize a MEM_REF's invariant address operand after propagation.  */
3023
3024 static bool
3025 maybe_canonicalize_mem_ref_addr (tree *t)
3026 {
3027 bool res = false;
3028
3029 if (TREE_CODE (*t) == ADDR_EXPR)
3030 t = &TREE_OPERAND (*t, 0);
3031
3032 while (handled_component_p (*t))
3033 t = &TREE_OPERAND (*t, 0);
3034
3035   /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
3036      of invariant addresses into an SSA-name MEM_REF address.  */
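/* E.g. (a sketch) MEM[(&MEM[&a, 4]), 8] becomes MEM[&a, 12]; the
   constant offset of the inner reference is folded into the outer
   MEM_REF offset.  */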
3037 if (TREE_CODE (*t) == MEM_REF
3038 || TREE_CODE (*t) == TARGET_MEM_REF)
3039 {
3040 tree addr = TREE_OPERAND (*t, 0);
3041 if (TREE_CODE (addr) == ADDR_EXPR
3042 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3043 || handled_component_p (TREE_OPERAND (addr, 0))))
3044 {
3045 tree base;
3046 HOST_WIDE_INT coffset;
3047 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3048 &coffset);
3049 if (!base)
3050 gcc_unreachable ();
3051
3052 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3053 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3054 TREE_OPERAND (*t, 1),
3055 size_int (coffset));
3056 res = true;
3057 }
3058 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3059 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3060 }
3061
3062 /* Canonicalize back MEM_REFs to plain reference trees if the object
3063 accessed is a decl that has the same access semantics as the MEM_REF. */
3064 if (TREE_CODE (*t) == MEM_REF
3065 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3066 && integer_zerop (TREE_OPERAND (*t, 1)))
3067 {
3068 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3069 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3070 if (/* Same volatile qualification. */
3071 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3072 /* Same TBAA behavior with -fstrict-aliasing. */
3073 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3074 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3075 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3076 /* Same alignment. */
3077 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3078 	/* We have to be careful here not to drop a required conversion
3079 	   from the rhs to the lhs if *t appears on the lhs, or
3080 	   vice-versa if it appears on the rhs.  Thus require strict
3081 	   type compatibility.  */
3082 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3083 {
3084 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3085 res = true;
3086 }
3087 }
3088
3089 /* Canonicalize TARGET_MEM_REF in particular with respect to
3090 the indexes becoming constant. */
3091 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3092 {
3093 tree tem = maybe_fold_tmr (*t);
3094 if (tem)
3095 {
3096 *t = tem;
3097 res = true;
3098 }
3099 }
3100
3101 return res;
3102 }
3103
3104 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3105 distinguishes both cases. */
3106
3107 static bool
3108 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3109 {
3110 bool changed = false;
3111 gimple stmt = gsi_stmt (*gsi);
3112 unsigned i;
3113
3114 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3115 after propagation.
3116 ??? This shouldn't be done in generic folding but in the
3117 propagation helpers which also know whether an address was
3118 propagated. */
3119 switch (gimple_code (stmt))
3120 {
3121 case GIMPLE_ASSIGN:
3122 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3123 {
3124 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3125 if ((REFERENCE_CLASS_P (*rhs)
3126 || TREE_CODE (*rhs) == ADDR_EXPR)
3127 && maybe_canonicalize_mem_ref_addr (rhs))
3128 changed = true;
3129 tree *lhs = gimple_assign_lhs_ptr (stmt);
3130 if (REFERENCE_CLASS_P (*lhs)
3131 && maybe_canonicalize_mem_ref_addr (lhs))
3132 changed = true;
3133 }
3134 break;
3135 case GIMPLE_CALL:
3136 {
3137 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3138 {
3139 tree *arg = gimple_call_arg_ptr (stmt, i);
3140 if (REFERENCE_CLASS_P (*arg)
3141 && maybe_canonicalize_mem_ref_addr (arg))
3142 changed = true;
3143 }
3144 tree *lhs = gimple_call_lhs_ptr (stmt);
3145 if (*lhs
3146 && REFERENCE_CLASS_P (*lhs)
3147 && maybe_canonicalize_mem_ref_addr (lhs))
3148 changed = true;
3149 break;
3150 }
3151 case GIMPLE_ASM:
3152 {
3153 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3154 {
3155 tree link = gimple_asm_output_op (stmt, i);
3156 tree op = TREE_VALUE (link);
3157 if (REFERENCE_CLASS_P (op)
3158 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3159 changed = true;
3160 }
3161 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3162 {
3163 tree link = gimple_asm_input_op (stmt, i);
3164 tree op = TREE_VALUE (link);
3165 if ((REFERENCE_CLASS_P (op)
3166 || TREE_CODE (op) == ADDR_EXPR)
3167 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3168 changed = true;
3169 }
3170 }
3171 break;
3172 case GIMPLE_DEBUG:
3173 if (gimple_debug_bind_p (stmt))
3174 {
3175 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3176 if (*val
3177 && (REFERENCE_CLASS_P (*val)
3178 || TREE_CODE (*val) == ADDR_EXPR)
3179 && maybe_canonicalize_mem_ref_addr (val))
3180 changed = true;
3181 }
3182 break;
3183 default:;
3184 }
3185
3186 /* Dispatch to pattern-based folding. */
3187 if (!inplace
3188 || is_gimple_assign (stmt)
3189 || gimple_code (stmt) == GIMPLE_COND)
3190 {
3191 gimple_seq seq = NULL;
3192 code_helper rcode;
3193 tree ops[3] = {};
3194 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3195 {
3196 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3197 changed = true;
3198 else
3199 gimple_seq_discard (seq);
3200 }
3201 }
3202
3203 stmt = gsi_stmt (*gsi);
3204
3205 /* Fold the main computation performed by the statement. */
3206 switch (gimple_code (stmt))
3207 {
3208 case GIMPLE_ASSIGN:
3209 {
3210 unsigned old_num_ops = gimple_num_ops (stmt);
3211 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3212 tree lhs = gimple_assign_lhs (stmt);
3213 tree new_rhs;
3214 /* First canonicalize operand order. This avoids building new
3215 trees if this is the only thing fold would later do. */
3216 if ((commutative_tree_code (subcode)
3217 || commutative_ternary_tree_code (subcode))
3218 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3219 gimple_assign_rhs2 (stmt), false))
3220 {
3221 tree tem = gimple_assign_rhs1 (stmt);
3222 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3223 gimple_assign_set_rhs2 (stmt, tem);
3224 changed = true;
3225 }
3226 new_rhs = fold_gimple_assign (gsi);
3227 if (new_rhs
3228 && !useless_type_conversion_p (TREE_TYPE (lhs),
3229 TREE_TYPE (new_rhs)))
3230 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3231 if (new_rhs
3232 && (!inplace
3233 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3234 {
3235 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3236 changed = true;
3237 }
3238 break;
3239 }
3240
3241 case GIMPLE_COND:
3242 changed |= fold_gimple_cond (stmt);
3243 break;
3244
3245 case GIMPLE_CALL:
3246 changed |= gimple_fold_call (gsi, inplace);
3247 break;
3248
3249 case GIMPLE_ASM:
3250 /* Fold *& in asm operands. */
3251 {
3252 size_t noutputs;
3253 const char **oconstraints;
3254 const char *constraint;
3255 bool allows_mem, allows_reg;
3256
3257 noutputs = gimple_asm_noutputs (stmt);
3258 oconstraints = XALLOCAVEC (const char *, noutputs);
3259
3260 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3261 {
3262 tree link = gimple_asm_output_op (stmt, i);
3263 tree op = TREE_VALUE (link);
3264 oconstraints[i]
3265 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3266 if (REFERENCE_CLASS_P (op)
3267 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3268 {
3269 TREE_VALUE (link) = op;
3270 changed = true;
3271 }
3272 }
3273 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3274 {
3275 tree link = gimple_asm_input_op (stmt, i);
3276 tree op = TREE_VALUE (link);
3277 constraint
3278 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3279 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3280 oconstraints, &allows_mem, &allows_reg);
3281 if (REFERENCE_CLASS_P (op)
3282 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3283 != NULL_TREE)
3284 {
3285 TREE_VALUE (link) = op;
3286 changed = true;
3287 }
3288 }
3289 }
3290 break;
3291
3292 case GIMPLE_DEBUG:
3293 if (gimple_debug_bind_p (stmt))
3294 {
3295 tree val = gimple_debug_bind_get_value (stmt);
3296 if (val
3297 && REFERENCE_CLASS_P (val))
3298 {
3299 tree tem = maybe_fold_reference (val, false);
3300 if (tem)
3301 {
3302 gimple_debug_bind_set_value (stmt, tem);
3303 changed = true;
3304 }
3305 }
3306 else if (val
3307 && TREE_CODE (val) == ADDR_EXPR)
3308 {
3309 tree ref = TREE_OPERAND (val, 0);
3310 tree tem = maybe_fold_reference (ref, false);
3311 if (tem)
3312 {
3313 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3314 gimple_debug_bind_set_value (stmt, tem);
3315 changed = true;
3316 }
3317 }
3318 }
3319 break;
3320
3321 default:;
3322 }
3323
3324 stmt = gsi_stmt (*gsi);
3325
3326 /* Fold *& on the lhs. */
3327 if (gimple_has_lhs (stmt))
3328 {
3329 tree lhs = gimple_get_lhs (stmt);
3330 if (lhs && REFERENCE_CLASS_P (lhs))
3331 {
3332 tree new_lhs = maybe_fold_reference (lhs, true);
3333 if (new_lhs)
3334 {
3335 gimple_set_lhs (stmt, new_lhs);
3336 changed = true;
3337 }
3338 }
3339 }
3340
3341 return changed;
3342 }
3343
3344 /* Valueization callback that ends up not following SSA edges.  */
3345
3346 tree
3347 no_follow_ssa_edges (tree)
3348 {
3349 return NULL_TREE;
3350 }
3351
3352 /* Valueization callback that ends up following single-use SSA edges only. */
3353
3354 tree
3355 follow_single_use_edges (tree val)
3356 {
3357 if (TREE_CODE (val) == SSA_NAME
3358 && !has_single_use (val))
3359 return NULL_TREE;
3360 return val;
3361 }
3362
3363 /* Fold the statement pointed to by GSI. In some cases, this function may
3364 replace the whole statement with a new one. Returns true iff folding
3365 makes any changes.
3366 The statement pointed to by GSI should be in valid gimple form but may
3367 be in unfolded state as resulting from for example constant propagation
3368 which can produce *&x = 0. */
3369
3370 bool
3371 fold_stmt (gimple_stmt_iterator *gsi)
3372 {
3373 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3374 }
3375
3376 bool
3377 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3378 {
3379 return fold_stmt_1 (gsi, false, valueize);
3380 }
3381
3382 /* Perform the minimal folding on statement *GSI. Only operations like
3383 *&x created by constant propagation are handled. The statement cannot
3384 be replaced with a new one. Return true if the statement was
3385 changed, false otherwise.
3386 The statement *GSI should be in valid gimple form but may
3387 be in unfolded state as resulting from for example constant propagation
3388 which can produce *&x = 0. */
3389
3390 bool
3391 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3392 {
3393 gimple stmt = gsi_stmt (*gsi);
3394 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3395 gcc_assert (gsi_stmt (*gsi) == stmt);
3396 return changed;
3397 }
3398
3399 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3400 if EXPR is null or we don't know how.
3401 If non-null, the result always has boolean type. */
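/* E.g. (a sketch) with INVERT set, an SSA_NAME b becomes b == 0 and a
   comparison a < b becomes a >= b (NaNs are not considered here).  */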
3402
3403 static tree
3404 canonicalize_bool (tree expr, bool invert)
3405 {
3406 if (!expr)
3407 return NULL_TREE;
3408 else if (invert)
3409 {
3410 if (integer_nonzerop (expr))
3411 return boolean_false_node;
3412 else if (integer_zerop (expr))
3413 return boolean_true_node;
3414 else if (TREE_CODE (expr) == SSA_NAME)
3415 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3416 build_int_cst (TREE_TYPE (expr), 0));
3417 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3418 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3419 boolean_type_node,
3420 TREE_OPERAND (expr, 0),
3421 TREE_OPERAND (expr, 1));
3422 else
3423 return NULL_TREE;
3424 }
3425 else
3426 {
3427 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3428 return expr;
3429 if (integer_nonzerop (expr))
3430 return boolean_true_node;
3431 else if (integer_zerop (expr))
3432 return boolean_false_node;
3433 else if (TREE_CODE (expr) == SSA_NAME)
3434 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3435 build_int_cst (TREE_TYPE (expr), 0));
3436 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3437 return fold_build2 (TREE_CODE (expr),
3438 boolean_type_node,
3439 TREE_OPERAND (expr, 0),
3440 TREE_OPERAND (expr, 1));
3441 else
3442 return NULL_TREE;
3443 }
3444 }
3445
3446 /* Check to see if a boolean expression EXPR is logically equivalent to the
3447 comparison (OP1 CODE OP2). Check for various identities involving
3448 SSA_NAMEs. */
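/* E.g. (a sketch) if x_1 is defined as x_1 = a < b, then both x_1
   itself and x_1 != 0 are recognized as equivalent to a < b.  */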
3449
3450 static bool
3451 same_bool_comparison_p (const_tree expr, enum tree_code code,
3452 const_tree op1, const_tree op2)
3453 {
3454 gimple s;
3455
3456 /* The obvious case. */
3457 if (TREE_CODE (expr) == code
3458 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3459 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3460 return true;
3461
3462 /* Check for comparing (name, name != 0) and the case where expr
3463 is an SSA_NAME with a definition matching the comparison. */
3464 if (TREE_CODE (expr) == SSA_NAME
3465 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3466 {
3467 if (operand_equal_p (expr, op1, 0))
3468 return ((code == NE_EXPR && integer_zerop (op2))
3469 || (code == EQ_EXPR && integer_nonzerop (op2)));
3470 s = SSA_NAME_DEF_STMT (expr);
3471 if (is_gimple_assign (s)
3472 && gimple_assign_rhs_code (s) == code
3473 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3474 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3475 return true;
3476 }
3477
3478 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3479 of name is a comparison, recurse. */
3480 if (TREE_CODE (op1) == SSA_NAME
3481 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3482 {
3483 s = SSA_NAME_DEF_STMT (op1);
3484 if (is_gimple_assign (s)
3485 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3486 {
3487 enum tree_code c = gimple_assign_rhs_code (s);
3488 if ((c == NE_EXPR && integer_zerop (op2))
3489 || (c == EQ_EXPR && integer_nonzerop (op2)))
3490 return same_bool_comparison_p (expr, c,
3491 gimple_assign_rhs1 (s),
3492 gimple_assign_rhs2 (s));
3493 if ((c == EQ_EXPR && integer_zerop (op2))
3494 || (c == NE_EXPR && integer_nonzerop (op2)))
3495 return same_bool_comparison_p (expr,
3496 invert_tree_comparison (c, false),
3497 gimple_assign_rhs1 (s),
3498 gimple_assign_rhs2 (s));
3499 }
3500 }
3501 return false;
3502 }
3503
3504 /* Check to see if two boolean expressions OP1 and OP2 are logically
3505 equivalent. */
3506
3507 static bool
3508 same_bool_result_p (const_tree op1, const_tree op2)
3509 {
3510 /* Simple cases first. */
3511 if (operand_equal_p (op1, op2, 0))
3512 return true;
3513
3514 /* Check the cases where at least one of the operands is a comparison.
3515 These are a bit smarter than operand_equal_p in that they apply some
3516    identities on SSA_NAMEs.  */
3517 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3518 && same_bool_comparison_p (op1, TREE_CODE (op2),
3519 TREE_OPERAND (op2, 0),
3520 TREE_OPERAND (op2, 1)))
3521 return true;
3522 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3523 && same_bool_comparison_p (op2, TREE_CODE (op1),
3524 TREE_OPERAND (op1, 0),
3525 TREE_OPERAND (op1, 1)))
3526 return true;
3527
3528 /* Default case. */
3529 return false;
3530 }
3531
3532 /* Forward declarations for some mutually recursive functions. */
3533
3534 static tree
3535 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3536 enum tree_code code2, tree op2a, tree op2b);
3537 static tree
3538 and_var_with_comparison (tree var, bool invert,
3539 enum tree_code code2, tree op2a, tree op2b);
3540 static tree
3541 and_var_with_comparison_1 (gimple stmt,
3542 enum tree_code code2, tree op2a, tree op2b);
3543 static tree
3544 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3545 enum tree_code code2, tree op2a, tree op2b);
3546 static tree
3547 or_var_with_comparison (tree var, bool invert,
3548 enum tree_code code2, tree op2a, tree op2b);
3549 static tree
3550 or_var_with_comparison_1 (gimple stmt,
3551 enum tree_code code2, tree op2a, tree op2b);
3552
3553 /* Helper function for and_comparisons_1: try to simplify the AND of the
3554    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3555    If INVERT is true, invert the value of VAR before doing the AND.
3556    Return NULL_TREE if we can't simplify this to a single expression.  */
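/* E.g. (a sketch) if VAR is defined as v_1 = x < y, then
   v_1 AND (x >= y) simplifies to false, while with INVERT set
   !v_1 AND (x >= y) simplifies to x >= y.  */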
3557
3558 static tree
3559 and_var_with_comparison (tree var, bool invert,
3560 enum tree_code code2, tree op2a, tree op2b)
3561 {
3562 tree t;
3563 gimple stmt = SSA_NAME_DEF_STMT (var);
3564
3565 /* We can only deal with variables whose definitions are assignments. */
3566 if (!is_gimple_assign (stmt))
3567 return NULL_TREE;
3568
3569 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3570 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3571 Then we only have to consider the simpler non-inverted cases. */
3572 if (invert)
3573 t = or_var_with_comparison_1 (stmt,
3574 invert_tree_comparison (code2, false),
3575 op2a, op2b);
3576 else
3577 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3578 return canonicalize_bool (t, invert);
3579 }
3580
3581 /* Try to simplify the AND of the ssa variable defined by the assignment
3582    STMT with the comparison specified by (OP2A CODE2 OP2B).
3583    Return NULL_TREE if we can't simplify this to a single expression.  */
3584
3585 static tree
3586 and_var_with_comparison_1 (gimple stmt,
3587 enum tree_code code2, tree op2a, tree op2b)
3588 {
3589 tree var = gimple_assign_lhs (stmt);
3590 tree true_test_var = NULL_TREE;
3591 tree false_test_var = NULL_TREE;
3592 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3593
3594 /* Check for identities like (var AND (var == 0)) => false. */
3595 if (TREE_CODE (op2a) == SSA_NAME
3596 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3597 {
3598 if ((code2 == NE_EXPR && integer_zerop (op2b))
3599 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3600 {
3601 true_test_var = op2a;
3602 if (var == true_test_var)
3603 return var;
3604 }
3605 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3606 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3607 {
3608 false_test_var = op2a;
3609 if (var == false_test_var)
3610 return boolean_false_node;
3611 }
3612 }
3613
3614 /* If the definition is a comparison, recurse on it. */
3615 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3616 {
3617 tree t = and_comparisons_1 (innercode,
3618 gimple_assign_rhs1 (stmt),
3619 gimple_assign_rhs2 (stmt),
3620 code2,
3621 op2a,
3622 op2b);
3623 if (t)
3624 return t;
3625 }
3626
3627 /* If the definition is an AND or OR expression, we may be able to
3628 simplify by reassociating. */
3629 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3630 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3631 {
3632 tree inner1 = gimple_assign_rhs1 (stmt);
3633 tree inner2 = gimple_assign_rhs2 (stmt);
3634 gimple s;
3635 tree t;
3636 tree partial = NULL_TREE;
3637 bool is_and = (innercode == BIT_AND_EXPR);
3638
3639 /* Check for boolean identities that don't require recursive examination
3640 of inner1/inner2:
3641 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3642 inner1 AND (inner1 OR inner2) => inner1
3643 !inner1 AND (inner1 AND inner2) => false
3644 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3645 Likewise for similar cases involving inner2. */
3646 if (inner1 == true_test_var)
3647 return (is_and ? var : inner1);
3648 else if (inner2 == true_test_var)
3649 return (is_and ? var : inner2);
3650 else if (inner1 == false_test_var)
3651 return (is_and
3652 ? boolean_false_node
3653 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3654 else if (inner2 == false_test_var)
3655 return (is_and
3656 ? boolean_false_node
3657 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3658
3659 /* Next, redistribute/reassociate the AND across the inner tests.
3660 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3661 if (TREE_CODE (inner1) == SSA_NAME
3662 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3663 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3664 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3665 gimple_assign_rhs1 (s),
3666 gimple_assign_rhs2 (s),
3667 code2, op2a, op2b)))
3668 {
3669 /* Handle the AND case, where we are reassociating:
3670 (inner1 AND inner2) AND (op2a code2 op2b)
3671 => (t AND inner2)
3672 If the partial result t is a constant, we win. Otherwise
3673 continue on to try reassociating with the other inner test. */
3674 if (is_and)
3675 {
3676 if (integer_onep (t))
3677 return inner2;
3678 else if (integer_zerop (t))
3679 return boolean_false_node;
3680 }
3681
3682 /* Handle the OR case, where we are redistributing:
3683 (inner1 OR inner2) AND (op2a code2 op2b)
3684 => (t OR (inner2 AND (op2a code2 op2b))) */
3685 else if (integer_onep (t))
3686 return boolean_true_node;
3687
3688 /* Save partial result for later. */
3689 partial = t;
3690 }
3691
3692 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3693 if (TREE_CODE (inner2) == SSA_NAME
3694 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3695 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3696 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3697 gimple_assign_rhs1 (s),
3698 gimple_assign_rhs2 (s),
3699 code2, op2a, op2b)))
3700 {
3701 /* Handle the AND case, where we are reassociating:
3702 (inner1 AND inner2) AND (op2a code2 op2b)
3703 => (inner1 AND t) */
3704 if (is_and)
3705 {
3706 if (integer_onep (t))
3707 return inner1;
3708 else if (integer_zerop (t))
3709 return boolean_false_node;
3710 /* If both are the same, we can apply the identity
3711 (x AND x) == x. */
3712 else if (partial && same_bool_result_p (t, partial))
3713 return t;
3714 }
3715
3716 /* Handle the OR case, where we are redistributing:
3717 (inner1 OR inner2) AND (op2a code2 op2b)
3718 => (t OR (inner1 AND (op2a code2 op2b)))
3719 => (t OR partial) */
3720 else
3721 {
3722 if (integer_onep (t))
3723 return boolean_true_node;
3724 else if (partial)
3725 {
3726 /* We already got a simplification for the other
3727 operand to the redistributed OR expression. The
3728 interesting case is when at least one is false.
3729 Or, if both are the same, we can apply the identity
3730 (x OR x) == x. */
3731 if (integer_zerop (partial))
3732 return t;
3733 else if (integer_zerop (t))
3734 return partial;
3735 else if (same_bool_result_p (t, partial))
3736 return t;
3737 }
3738 }
3739 }
3740 }
3741 return NULL_TREE;
3742 }
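
/* A concrete instance of the reassociation above, using hypothetical
   SSA names: if VAR is defined as

     var_1 = (x_2 < 0) & inner2_3;

   and the outer test is (x_2 >= 0), the first partial result
   (x_2 < 0) AND (x_2 >= 0) folds to false, so the whole AND folds to
   boolean_false_node without ever examining inner2_3.  */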
3743
3744 /* Try to simplify the AND of two comparisons defined by
3745 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3746 If this can be done without constructing an intermediate value,
3747 return the resulting tree; otherwise NULL_TREE is returned.
3748 This function is deliberately asymmetric as it recurses on SSA_DEFs
3749 in the first comparison but not the second. */
3750
3751 static tree
3752 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3753 enum tree_code code2, tree op2a, tree op2b)
3754 {
3755 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3756
3757 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3758 if (operand_equal_p (op1a, op2a, 0)
3759 && operand_equal_p (op1b, op2b, 0))
3760 {
3761 /* Result will be either NULL_TREE, or a combined comparison. */
3762 tree t = combine_comparisons (UNKNOWN_LOCATION,
3763 TRUTH_ANDIF_EXPR, code1, code2,
3764 truth_type, op1a, op1b);
3765 if (t)
3766 return t;
3767 }
3768
3769 /* Likewise the swapped case of the above. */
3770 if (operand_equal_p (op1a, op2b, 0)
3771 && operand_equal_p (op1b, op2a, 0))
3772 {
3773 /* Result will be either NULL_TREE, or a combined comparison. */
3774 tree t = combine_comparisons (UNKNOWN_LOCATION,
3775 TRUTH_ANDIF_EXPR, code1,
3776 swap_tree_comparison (code2),
3777 truth_type, op1a, op1b);
3778 if (t)
3779 return t;
3780 }
3781
3782 /* If both comparisons are of the same value against constants, we might
3783 be able to merge them. */
3784 if (operand_equal_p (op1a, op2a, 0)
3785 && TREE_CODE (op1b) == INTEGER_CST
3786 && TREE_CODE (op2b) == INTEGER_CST)
3787 {
3788 int cmp = tree_int_cst_compare (op1b, op2b);
3789
3790 /* If we have (op1a == op1b), we should either be able to
3791 return that or FALSE, depending on whether the constant op1b
3792 also satisfies the other comparison against op2b. */
3793 if (code1 == EQ_EXPR)
3794 {
3795 bool done = true;
3796 bool val;
3797 switch (code2)
3798 {
3799 case EQ_EXPR: val = (cmp == 0); break;
3800 case NE_EXPR: val = (cmp != 0); break;
3801 case LT_EXPR: val = (cmp < 0); break;
3802 case GT_EXPR: val = (cmp > 0); break;
3803 case LE_EXPR: val = (cmp <= 0); break;
3804 case GE_EXPR: val = (cmp >= 0); break;
3805 default: done = false;
3806 }
3807 if (done)
3808 {
3809 if (val)
3810 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3811 else
3812 return boolean_false_node;
3813 }
3814 }
3815 /* Likewise if the second comparison is an == comparison. */
3816 else if (code2 == EQ_EXPR)
3817 {
3818 bool done = true;
3819 bool val;
3820 switch (code1)
3821 {
3822 case EQ_EXPR: val = (cmp == 0); break;
3823 case NE_EXPR: val = (cmp != 0); break;
3824 case LT_EXPR: val = (cmp > 0); break;
3825 case GT_EXPR: val = (cmp < 0); break;
3826 case LE_EXPR: val = (cmp >= 0); break;
3827 case GE_EXPR: val = (cmp <= 0); break;
3828 default: done = false;
3829 }
3830 if (done)
3831 {
3832 if (val)
3833 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3834 else
3835 return boolean_false_node;
3836 }
3837 }
3838
3839 /* Same business with inequality tests. */
3840 else if (code1 == NE_EXPR)
3841 {
3842 bool val;
3843 switch (code2)
3844 {
3845 case EQ_EXPR: val = (cmp != 0); break;
3846 case NE_EXPR: val = (cmp == 0); break;
3847 case LT_EXPR: val = (cmp >= 0); break;
3848 case GT_EXPR: val = (cmp <= 0); break;
3849 case LE_EXPR: val = (cmp > 0); break;
3850 case GE_EXPR: val = (cmp < 0); break;
3851 default:
3852 val = false;
3853 }
3854 if (val)
3855 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3856 }
3857 else if (code2 == NE_EXPR)
3858 {
3859 bool val;
3860 switch (code1)
3861 {
3862 case EQ_EXPR: val = (cmp == 0); break;
3863 case NE_EXPR: val = (cmp != 0); break;
3864 case LT_EXPR: val = (cmp <= 0); break;
3865 case GT_EXPR: val = (cmp >= 0); break;
3866 case LE_EXPR: val = (cmp < 0); break;
3867 case GE_EXPR: val = (cmp > 0); break;
3868 default:
3869 val = false;
3870 }
3871 if (val)
3872 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3873 }
3874
3875 /* Choose the more restrictive of two < or <= comparisons. */
3876 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3877 && (code2 == LT_EXPR || code2 == LE_EXPR))
3878 {
3879 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3880 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3881 else
3882 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3883 }
3884
3885 /* Likewise choose the more restrictive of two > or >= comparisons. */
3886 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3887 && (code2 == GT_EXPR || code2 == GE_EXPR))
3888 {
3889 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3890 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3891 else
3892 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3893 }
3894
3895 /* Check for singleton ranges. */
3896 else if (cmp == 0
3897 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3898 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3899 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3900
3901 /* Check for disjoint ranges. */
3902 else if (cmp <= 0
3903 && (code1 == LT_EXPR || code1 == LE_EXPR)
3904 && (code2 == GT_EXPR || code2 == GE_EXPR))
3905 return boolean_false_node;
3906 else if (cmp >= 0
3907 && (code1 == GT_EXPR || code1 == GE_EXPR)
3908 && (code2 == LT_EXPR || code2 == LE_EXPR))
3909 return boolean_false_node;
3910 }
3911
3912 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3913 NAME's definition is a truth value. See if there are any simplifications
3914 that can be done against the NAME's definition. */
3915 if (TREE_CODE (op1a) == SSA_NAME
3916 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3917 && (integer_zerop (op1b) || integer_onep (op1b)))
3918 {
3919 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3920 || (code1 == NE_EXPR && integer_onep (op1b)));
3921 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3922 switch (gimple_code (stmt))
3923 {
3924 case GIMPLE_ASSIGN:
3925 /* Try to simplify by copy-propagating the definition. */
3926 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3927
3928 case GIMPLE_PHI:
3929 /* If every argument to the PHI produces the same result when
3930 ANDed with the second comparison, we win.
3931 Do not do this unless the type is bool since we need a bool
3932 result here anyway. */
3933 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3934 {
3935 tree result = NULL_TREE;
3936 unsigned i;
3937 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3938 {
3939 tree arg = gimple_phi_arg_def (stmt, i);
3940
3941 /* If this PHI has itself as an argument, ignore it.
3942 If all the other args produce the same result,
3943 we're still OK. */
3944 if (arg == gimple_phi_result (stmt))
3945 continue;
3946 else if (TREE_CODE (arg) == INTEGER_CST)
3947 {
3948 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3949 {
3950 if (!result)
3951 result = boolean_false_node;
3952 else if (!integer_zerop (result))
3953 return NULL_TREE;
3954 }
3955 else if (!result)
3956 result = fold_build2 (code2, boolean_type_node,
3957 op2a, op2b);
3958 else if (!same_bool_comparison_p (result,
3959 code2, op2a, op2b))
3960 return NULL_TREE;
3961 }
3962 else if (TREE_CODE (arg) == SSA_NAME
3963 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3964 {
3965 tree temp;
3966 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3967 /* In simple cases we can look through PHI nodes,
3968 but we have to be careful with loops.
3969 See PR49073. */
3970 if (! dom_info_available_p (CDI_DOMINATORS)
3971 || gimple_bb (def_stmt) == gimple_bb (stmt)
3972 || dominated_by_p (CDI_DOMINATORS,
3973 gimple_bb (def_stmt),
3974 gimple_bb (stmt)))
3975 return NULL_TREE;
3976 temp = and_var_with_comparison (arg, invert, code2,
3977 op2a, op2b);
3978 if (!temp)
3979 return NULL_TREE;
3980 else if (!result)
3981 result = temp;
3982 else if (!same_bool_result_p (result, temp))
3983 return NULL_TREE;
3984 }
3985 else
3986 return NULL_TREE;
3987 }
3988 return result;
3989 }
3990
3991 default:
3992 break;
3993 }
3994 }
3995 return NULL_TREE;
3996 }
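
/* Examples of the constant-merging logic above, writing both tests
   against a common operand X:

     (X == 5) AND (X <  10)  =>  X == 5   (5 also satisfies < 10)
     (X == 5) AND (X >  10)  =>  false    (disjoint ranges)
     (X <  5) AND (X <= 10)  =>  X < 5    (more restrictive bound)
     (X <= 5) AND (X >= 5)   =>  X == 5   (singleton range)  */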
3997
3998 /* Try to simplify the AND of two comparisons, specified by
3999 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4000 If this can be simplified to a single expression (without requiring
4001 introducing more SSA variables to hold intermediate values),
4002 return the resulting tree. Otherwise return NULL_TREE.
4003 If the result expression is non-null, it has boolean type. */
4004
4005 tree
4006 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4007 enum tree_code code2, tree op2a, tree op2b)
4008 {
4009 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4010 if (t)
4011 return t;
4012 else
4013 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4014 }
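
/* Usage sketch with hypothetical trees X and FIVE of the same integral
   type:

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, five,
					  GE_EXPR, x, five);

   should fold to a false node, since X < 5 AND X >= 5 describes an
   empty range.  If the first ordering of the comparisons does not
   simplify, the swapped ordering is tried before giving up.  */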
4015
4016 /* Helper function for or_comparisons_1: try to simplify the OR of the
4017 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4018 If INVERT is true, invert the value of VAR before doing the OR.
4019 Return NULL_TREE if we can't simplify this to a single expression. */
4020
4021 static tree
4022 or_var_with_comparison (tree var, bool invert,
4023 enum tree_code code2, tree op2a, tree op2b)
4024 {
4025 tree t;
4026 gimple stmt = SSA_NAME_DEF_STMT (var);
4027
4028 /* We can only deal with variables whose definitions are assignments. */
4029 if (!is_gimple_assign (stmt))
4030 return NULL_TREE;
4031
4032 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4033 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4034 Then we only have to consider the simpler non-inverted cases. */
4035 if (invert)
4036 t = and_var_with_comparison_1 (stmt,
4037 invert_tree_comparison (code2, false),
4038 op2a, op2b);
4039 else
4040 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4041 return canonicalize_bool (t, invert);
4042 }
4043
4044 /* Try to simplify the OR of the ssa variable defined by the assignment
4045 STMT with the comparison specified by (OP2A CODE2 OP2B).
4046 Return NULL_TREE if we can't simplify this to a single expression. */
4047
4048 static tree
4049 or_var_with_comparison_1 (gimple stmt,
4050 enum tree_code code2, tree op2a, tree op2b)
4051 {
4052 tree var = gimple_assign_lhs (stmt);
4053 tree true_test_var = NULL_TREE;
4054 tree false_test_var = NULL_TREE;
4055 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4056
4057 /* Check for identities like (var OR (var != 0)) => true. */
4058 if (TREE_CODE (op2a) == SSA_NAME
4059 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4060 {
4061 if ((code2 == NE_EXPR && integer_zerop (op2b))
4062 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4063 {
4064 true_test_var = op2a;
4065 if (var == true_test_var)
4066 return var;
4067 }
4068 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4069 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4070 {
4071 false_test_var = op2a;
4072 if (var == false_test_var)
4073 return boolean_true_node;
4074 }
4075 }
4076
4077 /* If the definition is a comparison, recurse on it. */
4078 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4079 {
4080 tree t = or_comparisons_1 (innercode,
4081 gimple_assign_rhs1 (stmt),
4082 gimple_assign_rhs2 (stmt),
4083 code2,
4084 op2a,
4085 op2b);
4086 if (t)
4087 return t;
4088 }
4089
4090 /* If the definition is an AND or OR expression, we may be able to
4091 simplify by reassociating. */
4092 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4093 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4094 {
4095 tree inner1 = gimple_assign_rhs1 (stmt);
4096 tree inner2 = gimple_assign_rhs2 (stmt);
4097 gimple s;
4098 tree t;
4099 tree partial = NULL_TREE;
4100 bool is_or = (innercode == BIT_IOR_EXPR);
4101
4102 /* Check for boolean identities that don't require recursive examination
4103 of inner1/inner2:
4104 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4105 inner1 OR (inner1 AND inner2) => inner1
4106 !inner1 OR (inner1 OR inner2) => true
4107 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4108 */
4109 if (inner1 == true_test_var)
4110 return (is_or ? var : inner1);
4111 else if (inner2 == true_test_var)
4112 return (is_or ? var : inner2);
4113 else if (inner1 == false_test_var)
4114 return (is_or
4115 ? boolean_true_node
4116 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4117 else if (inner2 == false_test_var)
4118 return (is_or
4119 ? boolean_true_node
4120 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4121
4122 /* Next, redistribute/reassociate the OR across the inner tests.
4123 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4124 if (TREE_CODE (inner1) == SSA_NAME
4125 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4126 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4127 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4128 gimple_assign_rhs1 (s),
4129 gimple_assign_rhs2 (s),
4130 code2, op2a, op2b)))
4131 {
4132 /* Handle the OR case, where we are reassociating:
4133 (inner1 OR inner2) OR (op2a code2 op2b)
4134 => (t OR inner2)
4135 If the partial result t is a constant, we win. Otherwise
4136 continue on to try reassociating with the other inner test. */
4137 if (is_or)
4138 {
4139 if (integer_onep (t))
4140 return boolean_true_node;
4141 else if (integer_zerop (t))
4142 return inner2;
4143 }
4144
4145 /* Handle the AND case, where we are redistributing:
4146 (inner1 AND inner2) OR (op2a code2 op2b)
4147 => (t AND (inner2 OR (op2a code op2b))) */
4148 else if (integer_zerop (t))
4149 return boolean_false_node;
4150
4151 /* Save partial result for later. */
4152 partial = t;
4153 }
4154
4155 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4156 if (TREE_CODE (inner2) == SSA_NAME
4157 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4158 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4159 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4160 gimple_assign_rhs1 (s),
4161 gimple_assign_rhs2 (s),
4162 code2, op2a, op2b)))
4163 {
4164 /* Handle the OR case, where we are reassociating:
4165 (inner1 OR inner2) OR (op2a code2 op2b)
4166 => (inner1 OR t)
4167 => (t OR partial) */
4168 if (is_or)
4169 {
4170 if (integer_zerop (t))
4171 return inner1;
4172 else if (integer_onep (t))
4173 return boolean_true_node;
4174 /* If both are the same, we can apply the identity
4175 (x OR x) == x. */
4176 else if (partial && same_bool_result_p (t, partial))
4177 return t;
4178 }
4179
4180 /* Handle the AND case, where we are redistributing:
4181 (inner1 AND inner2) OR (op2a code2 op2b)
4182 => (t AND (inner1 OR (op2a code2 op2b)))
4183 => (t AND partial) */
4184 else
4185 {
4186 if (integer_zerop (t))
4187 return boolean_false_node;
4188 else if (partial)
4189 {
4190 /* We already got a simplification for the other
4191 operand to the redistributed AND expression. The
4192 interesting case is when at least one is true.
4193 Or, if both are the same, we can apply the identity
4194 (x AND x) == x. */
4195 if (integer_onep (partial))
4196 return t;
4197 else if (integer_onep (t))
4198 return partial;
4199 else if (same_bool_result_p (t, partial))
4200 return t;
4201 }
4202 }
4203 }
4204 }
4205 return NULL_TREE;
4206 }
4207
4208 /* Try to simplify the OR of two comparisons defined by
4209 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4210 If this can be done without constructing an intermediate value,
4211 return the resulting tree; otherwise NULL_TREE is returned.
4212 This function is deliberately asymmetric as it recurses on SSA_DEFs
4213 in the first comparison but not the second. */
4214
4215 static tree
4216 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4217 enum tree_code code2, tree op2a, tree op2b)
4218 {
4219 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4220
4221 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4222 if (operand_equal_p (op1a, op2a, 0)
4223 && operand_equal_p (op1b, op2b, 0))
4224 {
4225 /* Result will be either NULL_TREE, or a combined comparison. */
4226 tree t = combine_comparisons (UNKNOWN_LOCATION,
4227 TRUTH_ORIF_EXPR, code1, code2,
4228 truth_type, op1a, op1b);
4229 if (t)
4230 return t;
4231 }
4232
4233 /* Likewise the swapped case of the above. */
4234 if (operand_equal_p (op1a, op2b, 0)
4235 && operand_equal_p (op1b, op2a, 0))
4236 {
4237 /* Result will be either NULL_TREE, or a combined comparison. */
4238 tree t = combine_comparisons (UNKNOWN_LOCATION,
4239 TRUTH_ORIF_EXPR, code1,
4240 swap_tree_comparison (code2),
4241 truth_type, op1a, op1b);
4242 if (t)
4243 return t;
4244 }
4245
4246 /* If both comparisons are of the same value against constants, we might
4247 be able to merge them. */
4248 if (operand_equal_p (op1a, op2a, 0)
4249 && TREE_CODE (op1b) == INTEGER_CST
4250 && TREE_CODE (op2b) == INTEGER_CST)
4251 {
4252 int cmp = tree_int_cst_compare (op1b, op2b);
4253
4254 /* If we have (op1a != op1b), we should either be able to
4255 return that or TRUE, depending on whether the constant op1b
4256 also satisfies the other comparison against op2b. */
4257 if (code1 == NE_EXPR)
4258 {
4259 bool done = true;
4260 bool val;
4261 switch (code2)
4262 {
4263 case EQ_EXPR: val = (cmp == 0); break;
4264 case NE_EXPR: val = (cmp != 0); break;
4265 case LT_EXPR: val = (cmp < 0); break;
4266 case GT_EXPR: val = (cmp > 0); break;
4267 case LE_EXPR: val = (cmp <= 0); break;
4268 case GE_EXPR: val = (cmp >= 0); break;
4269 default: done = false;
4270 }
4271 if (done)
4272 {
4273 if (val)
4274 return boolean_true_node;
4275 else
4276 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4277 }
4278 }
4279 /* Likewise if the second comparison is a != comparison. */
4280 else if (code2 == NE_EXPR)
4281 {
4282 bool done = true;
4283 bool val;
4284 switch (code1)
4285 {
4286 case EQ_EXPR: val = (cmp == 0); break;
4287 case NE_EXPR: val = (cmp != 0); break;
4288 case LT_EXPR: val = (cmp > 0); break;
4289 case GT_EXPR: val = (cmp < 0); break;
4290 case LE_EXPR: val = (cmp >= 0); break;
4291 case GE_EXPR: val = (cmp <= 0); break;
4292 default: done = false;
4293 }
4294 if (done)
4295 {
4296 if (val)
4297 return boolean_true_node;
4298 else
4299 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4300 }
4301 }
4302
4303 /* See if an equality test is redundant with the other comparison. */
4304 else if (code1 == EQ_EXPR)
4305 {
4306 bool val;
4307 switch (code2)
4308 {
4309 case EQ_EXPR: val = (cmp == 0); break;
4310 case NE_EXPR: val = (cmp != 0); break;
4311 case LT_EXPR: val = (cmp < 0); break;
4312 case GT_EXPR: val = (cmp > 0); break;
4313 case LE_EXPR: val = (cmp <= 0); break;
4314 case GE_EXPR: val = (cmp >= 0); break;
4315 default:
4316 val = false;
4317 }
4318 if (val)
4319 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4320 }
4321 else if (code2 == EQ_EXPR)
4322 {
4323 bool val;
4324 switch (code1)
4325 {
4326 case EQ_EXPR: val = (cmp == 0); break;
4327 case NE_EXPR: val = (cmp != 0); break;
4328 case LT_EXPR: val = (cmp > 0); break;
4329 case GT_EXPR: val = (cmp < 0); break;
4330 case LE_EXPR: val = (cmp >= 0); break;
4331 case GE_EXPR: val = (cmp <= 0); break;
4332 default:
4333 val = false;
4334 }
4335 if (val)
4336 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4337 }
4338
4339 /* Choose the less restrictive of two < or <= comparisons. */
4340 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4341 && (code2 == LT_EXPR || code2 == LE_EXPR))
4342 {
4343 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4344 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4345 else
4346 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4347 }
4348
4349 /* Likewise choose the less restrictive of two > or >= comparisons. */
4350 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4351 && (code2 == GT_EXPR || code2 == GE_EXPR))
4352 {
4353 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4354 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4355 else
4356 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4357 }
4358
4359 /* Check for singleton ranges. */
4360 else if (cmp == 0
4361 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4362 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4363 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4364
4365 /* Check for less/greater pairs that don't restrict the range at all. */
4366 else if (cmp >= 0
4367 && (code1 == LT_EXPR || code1 == LE_EXPR)
4368 && (code2 == GT_EXPR || code2 == GE_EXPR))
4369 return boolean_true_node;
4370 else if (cmp <= 0
4371 && (code1 == GT_EXPR || code1 == GE_EXPR)
4372 && (code2 == LT_EXPR || code2 == LE_EXPR))
4373 return boolean_true_node;
4374 }
4375
4376 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4377 NAME's definition is a truth value. See if there are any simplifications
4378 that can be done against the NAME's definition. */
4379 if (TREE_CODE (op1a) == SSA_NAME
4380 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4381 && (integer_zerop (op1b) || integer_onep (op1b)))
4382 {
4383 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4384 || (code1 == NE_EXPR && integer_onep (op1b)));
4385 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4386 switch (gimple_code (stmt))
4387 {
4388 case GIMPLE_ASSIGN:
4389 /* Try to simplify by copy-propagating the definition. */
4390 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4391
4392 case GIMPLE_PHI:
4393 /* If every argument to the PHI produces the same result when
4394 ORed with the second comparison, we win.
4395 Do not do this unless the type is bool since we need a bool
4396 result here anyway. */
4397 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4398 {
4399 tree result = NULL_TREE;
4400 unsigned i;
4401 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4402 {
4403 tree arg = gimple_phi_arg_def (stmt, i);
4404
4405 /* If this PHI has itself as an argument, ignore it.
4406 If all the other args produce the same result,
4407 we're still OK. */
4408 if (arg == gimple_phi_result (stmt))
4409 continue;
4410 else if (TREE_CODE (arg) == INTEGER_CST)
4411 {
4412 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4413 {
4414 if (!result)
4415 result = boolean_true_node;
4416 else if (!integer_onep (result))
4417 return NULL_TREE;
4418 }
4419 else if (!result)
4420 result = fold_build2 (code2, boolean_type_node,
4421 op2a, op2b);
4422 else if (!same_bool_comparison_p (result,
4423 code2, op2a, op2b))
4424 return NULL_TREE;
4425 }
4426 else if (TREE_CODE (arg) == SSA_NAME
4427 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4428 {
4429 tree temp;
4430 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4431 /* In simple cases we can look through PHI nodes,
4432 but we have to be careful with loops.
4433 See PR49073. */
4434 if (! dom_info_available_p (CDI_DOMINATORS)
4435 || gimple_bb (def_stmt) == gimple_bb (stmt)
4436 || dominated_by_p (CDI_DOMINATORS,
4437 gimple_bb (def_stmt),
4438 gimple_bb (stmt)))
4439 return NULL_TREE;
4440 temp = or_var_with_comparison (arg, invert, code2,
4441 op2a, op2b);
4442 if (!temp)
4443 return NULL_TREE;
4444 else if (!result)
4445 result = temp;
4446 else if (!same_bool_result_p (result, temp))
4447 return NULL_TREE;
4448 }
4449 else
4450 return NULL_TREE;
4451 }
4452 return result;
4453 }
4454
4455 default:
4456 break;
4457 }
4458 }
4459 return NULL_TREE;
4460 }
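
/* Dual examples for the OR case above, again against a common
   operand X:

     (X != 5) OR (X <  10)  =>  true     (5 also satisfies < 10)
     (X <  5) OR (X <= 10)  =>  X <= 10  (less restrictive bound)
     (X <  5) OR (X >  5)   =>  X != 5   (singleton gap)
     (X <= 5) OR (X >= 5)   =>  true     (covers the whole range)  */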
4461
4462 /* Try to simplify the OR of two comparisons, specified by
4463 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4464 If this can be simplified to a single expression (without requiring
4465 introducing more SSA variables to hold intermediate values),
4466 return the resulting tree. Otherwise return NULL_TREE.
4467 If the result expression is non-null, it has boolean type. */
4468
4469 tree
4470 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4471 enum tree_code code2, tree op2a, tree op2b)
4472 {
4473 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4474 if (t)
4475 return t;
4476 else
4477 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4478 }
4479
4480
4481 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4482
4483 Either NULL_TREE, a simplified but non-constant expression, or a
4484 constant is returned.
4485
4486 ??? This should go into a gimple-fold-inline.h file to be eventually
4487 privatized with the single valueize function used in the various TUs
4488 to avoid the indirect function call overhead. */
4489
4490 tree
4491 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4492 tree (*gvalueize) (tree))
4493 {
4494 code_helper rcode;
4495 tree ops[3] = {};
4496 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4497 edges if there are intermediate VARYING defs. For this reason
4498 do not follow SSA edges here even though SCCVN could technically
4499 deal with that just fine. */
4500 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
4501 && rcode.is_tree_code ()
4502 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4503 || ((tree_code) rcode) == ADDR_EXPR)
4504 && is_gimple_val (ops[0]))
4505 {
4506 tree res = ops[0];
4507 if (dump_file && (dump_flags & TDF_DETAILS))
4508 {
4509 fprintf (dump_file, "Match-and-simplified ");
4510 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4511 fprintf (dump_file, " to ");
4512 print_generic_expr (dump_file, res, 0);
4513 fprintf (dump_file, "\n");
4514 }
4515 return res;
4516 }
4517
4518 location_t loc = gimple_location (stmt);
4519 switch (gimple_code (stmt))
4520 {
4521 case GIMPLE_ASSIGN:
4522 {
4523 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4524
4525 switch (get_gimple_rhs_class (subcode))
4526 {
4527 case GIMPLE_SINGLE_RHS:
4528 {
4529 tree rhs = gimple_assign_rhs1 (stmt);
4530 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4531
4532 if (TREE_CODE (rhs) == SSA_NAME)
4533 {
4534 /* If the RHS is an SSA_NAME, return its known constant value,
4535 if any. */
4536 return (*valueize) (rhs);
4537 }
4538 /* Handle propagating invariant addresses into address
4539 operations. */
4540 else if (TREE_CODE (rhs) == ADDR_EXPR
4541 && !is_gimple_min_invariant (rhs))
4542 {
4543 HOST_WIDE_INT offset = 0;
4544 tree base;
4545 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4546 &offset,
4547 valueize);
4548 if (base
4549 && (CONSTANT_CLASS_P (base)
4550 || decl_address_invariant_p (base)))
4551 return build_invariant_address (TREE_TYPE (rhs),
4552 base, offset);
4553 }
4554 else if (TREE_CODE (rhs) == CONSTRUCTOR
4555 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4556 && (CONSTRUCTOR_NELTS (rhs)
4557 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4558 {
4559 unsigned i;
4560 tree val, *vec;
4561
4562 vec = XALLOCAVEC (tree,
4563 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4564 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4565 {
4566 val = (*valueize) (val);
4567 if (TREE_CODE (val) == INTEGER_CST
4568 || TREE_CODE (val) == REAL_CST
4569 || TREE_CODE (val) == FIXED_CST)
4570 vec[i] = val;
4571 else
4572 return NULL_TREE;
4573 }
4574
4575 return build_vector (TREE_TYPE (rhs), vec);
4576 }
4577 if (subcode == OBJ_TYPE_REF)
4578 {
4579 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4580 /* If callee is constant, we can fold away the wrapper. */
4581 if (is_gimple_min_invariant (val))
4582 return val;
4583 }
4584
4585 if (kind == tcc_reference)
4586 {
4587 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4588 || TREE_CODE (rhs) == REALPART_EXPR
4589 || TREE_CODE (rhs) == IMAGPART_EXPR)
4590 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4591 {
4592 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4593 return fold_unary_loc (EXPR_LOCATION (rhs),
4594 TREE_CODE (rhs),
4595 TREE_TYPE (rhs), val);
4596 }
4597 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4598 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4599 {
4600 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4601 return fold_ternary_loc (EXPR_LOCATION (rhs),
4602 TREE_CODE (rhs),
4603 TREE_TYPE (rhs), val,
4604 TREE_OPERAND (rhs, 1),
4605 TREE_OPERAND (rhs, 2));
4606 }
4607 else if (TREE_CODE (rhs) == MEM_REF
4608 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4609 {
4610 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4611 if (TREE_CODE (val) == ADDR_EXPR
4612 && is_gimple_min_invariant (val))
4613 {
4614 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4615 unshare_expr (val),
4616 TREE_OPERAND (rhs, 1));
4617 if (tem)
4618 rhs = tem;
4619 }
4620 }
4621 return fold_const_aggregate_ref_1 (rhs, valueize);
4622 }
4623 else if (kind == tcc_declaration)
4624 return get_symbol_constant_value (rhs);
4625 return rhs;
4626 }
4627
4628 case GIMPLE_UNARY_RHS:
4629 return NULL_TREE;
4630
4631 case GIMPLE_BINARY_RHS:
4632 {
4633 /* Handle binary operators that can appear in GIMPLE form. */
4634 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4635 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4636
4637 /* Translate &x + CST into an invariant form suitable for
4638 further propagation. */
4639 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4640 && TREE_CODE (op0) == ADDR_EXPR
4641 && TREE_CODE (op1) == INTEGER_CST)
4642 {
4643 tree off = fold_convert (ptr_type_node, op1);
4644 return build_fold_addr_expr_loc
4645 (loc,
4646 fold_build2 (MEM_REF,
4647 TREE_TYPE (TREE_TYPE (op0)),
4648 unshare_expr (op0), off));
4649 }
4650
4651 return fold_binary_loc (loc, subcode,
4652 gimple_expr_type (stmt), op0, op1);
4653 }
4654
4655 case GIMPLE_TERNARY_RHS:
4656 {
4657 /* Handle ternary operators that can appear in GIMPLE form. */
4658 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4659 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4660 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4661
4662 /* Fold embedded expressions in ternary codes. */
4663 if ((subcode == COND_EXPR
4664 || subcode == VEC_COND_EXPR)
4665 && COMPARISON_CLASS_P (op0))
4666 {
4667 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4668 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4669 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4670 TREE_TYPE (op0), op00, op01);
4671 if (tem)
4672 op0 = tem;
4673 }
4674
4675 return fold_ternary_loc (loc, subcode,
4676 gimple_expr_type (stmt), op0, op1, op2);
4677 }
4678
4679 default:
4680 gcc_unreachable ();
4681 }
4682 }
4683
4684 case GIMPLE_CALL:
4685 {
4686 tree fn;
4687
4688 if (gimple_call_internal_p (stmt))
4689 {
4690 enum tree_code subcode = ERROR_MARK;
4691 switch (gimple_call_internal_fn (stmt))
4692 {
4693 case IFN_UBSAN_CHECK_ADD:
4694 subcode = PLUS_EXPR;
4695 break;
4696 case IFN_UBSAN_CHECK_SUB:
4697 subcode = MINUS_EXPR;
4698 break;
4699 case IFN_UBSAN_CHECK_MUL:
4700 subcode = MULT_EXPR;
4701 break;
4702 default:
4703 return NULL_TREE;
4704 }
4705 tree arg0 = gimple_call_arg (stmt, 0);
4706 tree arg1 = gimple_call_arg (stmt, 1);
4707 tree op0 = (*valueize) (arg0);
4708 tree op1 = (*valueize) (arg1);
4709
4710 if (TREE_CODE (op0) != INTEGER_CST
4711 || TREE_CODE (op1) != INTEGER_CST)
4712 {
4713 switch (subcode)
4714 {
4715 case MULT_EXPR:
4716 /* x * 0 = 0 * x = 0 without overflow. */
4717 if (integer_zerop (op0) || integer_zerop (op1))
4718 return build_zero_cst (TREE_TYPE (arg0));
4719 break;
4720 case MINUS_EXPR:
4721 /* y - y = 0 without overflow. */
4722 if (operand_equal_p (op0, op1, 0))
4723 return build_zero_cst (TREE_TYPE (arg0));
4724 break;
4725 default:
4726 break;
4727 }
4728 }
4729 tree res
4730 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4731 if (res
4732 && TREE_CODE (res) == INTEGER_CST
4733 && !TREE_OVERFLOW (res))
4734 return res;
4735 return NULL_TREE;
4736 }
4737
4738 fn = (*valueize) (gimple_call_fn (stmt));
4739 if (TREE_CODE (fn) == ADDR_EXPR
4740 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4741 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4742 && gimple_builtin_call_types_compatible_p (stmt,
4743 TREE_OPERAND (fn, 0)))
4744 {
4745 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4746 tree call, retval;
4747 unsigned i;
4748 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4749 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4750 call = build_call_array_loc (loc,
4751 gimple_call_return_type (stmt),
4752 fn, gimple_call_num_args (stmt), args);
4753 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4754 if (retval)
4755 {
4756 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4757 STRIP_NOPS (retval);
4758 retval = fold_convert (gimple_call_return_type (stmt), retval);
4759 }
4760 return retval;
4761 }
4762 return NULL_TREE;
4763 }
4764
4765 default:
4766 return NULL_TREE;
4767 }
4768 }
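
/* For instance, in the GIMPLE_BINARY_RHS case above a statement such
   as

     ptr_1 = &x + 4;

   where &x is invariant is rewritten into the invariant address
   &MEM[&x + 4] (an ADDR_EXPR of a MEM_REF), which later propagation
   steps can consume directly.  */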
4769
4770 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4771 Returns NULL_TREE if folding to a constant is not possible, otherwise
4772 returns a constant according to is_gimple_min_invariant. */
4773
4774 tree
4775 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4776 {
4777 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4778 if (res && is_gimple_min_invariant (res))
4779 return res;
4780 return NULL_TREE;
4781 }
4782
4783
4784 /* The following set of functions are supposed to fold references using
4785 their constant initializers. */
4786
4787 static tree fold_ctor_reference (tree type, tree ctor,
4788 unsigned HOST_WIDE_INT offset,
4789 unsigned HOST_WIDE_INT size, tree);
4790
4791 /* See if we can find the constructor defining the value of BASE.
4792 When we know the constructor at a constant offset (such as when
4793 BASE is array[40] and we know the constructor of the array), then
4794 BIT_OFFSET is adjusted accordingly.
4795
4796 As a special case, return error_mark_node when the constructor
4797 is not explicitly available but is known to be zero,
4798 as for 'static const int a;'. */
4799 static tree
4800 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4801 tree (*valueize)(tree))
4802 {
4803 HOST_WIDE_INT bit_offset2, size, max_size;
4804 if (TREE_CODE (base) == MEM_REF)
4805 {
4806 if (!integer_zerop (TREE_OPERAND (base, 1)))
4807 {
4808 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4809 return NULL_TREE;
4810 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4811 * BITS_PER_UNIT);
4812 }
4813
4814 if (valueize
4815 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4816 base = valueize (TREE_OPERAND (base, 0));
4817 if (!base || TREE_CODE (base) != ADDR_EXPR)
4818 return NULL_TREE;
4819 base = TREE_OPERAND (base, 0);
4820 }
4821
4822 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4823 DECL_INITIAL. If BASE is a nested reference into another
4824 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4825 the inner reference. */
4826 switch (TREE_CODE (base))
4827 {
4828 case VAR_DECL:
4829 case CONST_DECL:
4830 {
4831 tree init = ctor_for_folding (base);
4832
4833 /* Our semantics are the exact opposite of ctor_for_folding's:
4834 NULL means unknown, while error_mark_node means 0. */
4835 if (init == error_mark_node)
4836 return NULL_TREE;
4837 if (!init)
4838 return error_mark_node;
4839 return init;
4840 }
4841
4842 case ARRAY_REF:
4843 case COMPONENT_REF:
4844 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4845 if (max_size == -1 || size != max_size)
4846 return NULL_TREE;
4847 *bit_offset += bit_offset2;
4848 return get_base_constructor (base, bit_offset, valueize);
4849
4850 case STRING_CST:
4851 case CONSTRUCTOR:
4852 return base;
4853
4854 default:
4855 return NULL_TREE;
4856 }
4857 }
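
/* Example: for a BASE of the form MEM[&arr + 8] referencing

     static const int arr[4] = {1, 2, 3, 4};

   the MEM_REF case adds 8 * BITS_PER_UNIT to *BIT_OFFSET and strips
   the address, and the VAR_DECL case then returns the CONSTRUCTOR
   {1, 2, 3, 4} via ctor_for_folding.  A static const variable without
   an explicit initializer instead yields error_mark_node, meaning a
   known zero.  */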
4858
4859 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4860 SIZE to the memory at bit OFFSET. */
4861
4862 static tree
4863 fold_array_ctor_reference (tree type, tree ctor,
4864 unsigned HOST_WIDE_INT offset,
4865 unsigned HOST_WIDE_INT size,
4866 tree from_decl)
4867 {
4868 unsigned HOST_WIDE_INT cnt;
4869 tree cfield, cval;
4870 offset_int low_bound;
4871 offset_int elt_size;
4872 offset_int index, max_index;
4873 offset_int access_index;
4874 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4875 HOST_WIDE_INT inner_offset;
4876
4877 /* Compute low bound and elt size. */
4878 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4879 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4880 if (domain_type && TYPE_MIN_VALUE (domain_type))
4881 {
4882 /* Static constructors for variably sized objects make no sense. */
4883 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4884 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4885 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4886 }
4887 else
4888 low_bound = 0;
4889 /* Static constructors for variably sized objects make no sense. */
4890 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4891 == INTEGER_CST);
4892 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4893
4894 /* We can handle only constant-size accesses that are known to be
4895 no larger than the size of the array element. */
4896 if (!TYPE_SIZE_UNIT (type)
4897 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4898 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4899 || elt_size == 0)
4900 return NULL_TREE;
4901
4902 /* Compute the array index we look for. */
4903 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4904 elt_size);
4905 access_index += low_bound;
4906 if (index_type)
4907 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4908 TYPE_SIGN (index_type));
4909
4910 /* And offset within the access. */
4911 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4912
4913 /* See if the array element is large enough to span the whole access.
4914 We do not care to fold accesses spanning multiple array indices. */
4915 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4916 return NULL_TREE;
4917
4918 index = low_bound - 1;
4919 if (index_type)
4920 index = wi::ext (index, TYPE_PRECISION (index_type),
4921 TYPE_SIGN (index_type));
4922
4923 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4924 {
4925 /* An array constructor may set the index explicitly, specify a range,
4926 or leave the index NULL, meaning that it is the next index after
4927 the previous one. */
4928 if (cfield)
4929 {
4930 if (TREE_CODE (cfield) == INTEGER_CST)
4931 max_index = index = wi::to_offset (cfield);
4932 else
4933 {
4934 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4935 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4936 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4937 }
4938 }
4939 else
4940 {
4941 index += 1;
4942 if (index_type)
4943 index = wi::ext (index, TYPE_PRECISION (index_type),
4944 TYPE_SIGN (index_type));
4945 max_index = index;
4946 }
4947
4948 /* Do we have a match? */
4949 if (wi::cmpu (access_index, index) >= 0
4950 && wi::cmpu (access_index, max_index) <= 0)
4951 return fold_ctor_reference (type, cval, inner_offset, size,
4952 from_decl);
4953 }
4954 /* When memory is not explicitly mentioned in the constructor,
4955 it is 0 (or out of range). */
4956 return build_zero_cst (type);
4957 }
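
/* Worked example for the index arithmetic above: a 32-bit read at bit
   OFFSET 96 from int arr[4] (elt_size 4 bytes) gives

     access_index = 96 / BITS_PER_UNIT / 4 = 3, inner_offset = 0,

   so the reference folds to the constructor element for index 3, or
   to zero if that slot was left implicit.  */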
4958
4959 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4960 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4961
4962 static tree
4963 fold_nonarray_ctor_reference (tree type, tree ctor,
4964 unsigned HOST_WIDE_INT offset,
4965 unsigned HOST_WIDE_INT size,
4966 tree from_decl)
4967 {
4968 unsigned HOST_WIDE_INT cnt;
4969 tree cfield, cval;
4970
4971 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4972 cval)
4973 {
4974 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4975 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4976 tree field_size = DECL_SIZE (cfield);
4977 offset_int bitoffset;
4978 offset_int bitoffset_end, access_end;
4979
4980 /* Variable-sized objects in static constructors make no sense,
4981 but field_size can be NULL for flexible array members. */
4982 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4983 && TREE_CODE (byte_offset) == INTEGER_CST
4984 && (field_size != NULL_TREE
4985 ? TREE_CODE (field_size) == INTEGER_CST
4986 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4987
4988 /* Compute bit offset of the field. */
4989 bitoffset = (wi::to_offset (field_offset)
4990 + wi::lshift (wi::to_offset (byte_offset),
4991 LOG2_BITS_PER_UNIT));
4992 /* Compute bit offset where the field ends. */
4993 if (field_size != NULL_TREE)
4994 bitoffset_end = bitoffset + wi::to_offset (field_size);
4995 else
4996 bitoffset_end = 0;
4997
4998 access_end = offset_int (offset) + size;
4999
5000 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5001 [BITOFFSET, BITOFFSET_END)? */
5002 if (wi::cmps (access_end, bitoffset) > 0
5003 && (field_size == NULL_TREE
5004 || wi::lts_p (offset, bitoffset_end)))
5005 {
5006 offset_int inner_offset = offset_int (offset) - bitoffset;
5007 /* We do have overlap. Now see if the field is large enough to
5008 cover the access. Give up for accesses spanning multiple
5009 fields. */
5010 if (wi::cmps (access_end, bitoffset_end) > 0)
5011 return NULL_TREE;
5012 if (wi::lts_p (offset, bitoffset))
5013 return NULL_TREE;
5014 return fold_ctor_reference (type, cval,
5015 inner_offset.to_uhwi (), size,
5016 from_decl);
5017 }
5018 }
5019 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5020 return build_zero_cst (type);
5021 }
5022
5023 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5024 to the memory at bit OFFSET. */
5025
5026 static tree
5027 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5028 unsigned HOST_WIDE_INT size, tree from_decl)
5029 {
5030 tree ret;
5031
5032 /* We found the field with exact match. */
5033 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5034 && !offset)
5035 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5036
5037 /* We are at the end of the walk; see if we can view-convert the
5038 result. */
5039 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5040 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5041 && !compare_tree_int (TYPE_SIZE (type), size)
5042 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5043 {
5044 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5045 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5046 if (ret)
5047 STRIP_NOPS (ret);
5048 return ret;
5049 }
5050 /* For constants and byte-aligned/sized reads try to go through
5051 native_encode/interpret. */
5052 if (CONSTANT_CLASS_P (ctor)
5053 && BITS_PER_UNIT == 8
5054 && offset % BITS_PER_UNIT == 0
5055 && size % BITS_PER_UNIT == 0
5056 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5057 {
5058 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5059 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5060 offset / BITS_PER_UNIT) > 0)
5061 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5062 }
5063 if (TREE_CODE (ctor) == CONSTRUCTOR)
5064 {
5065
5066 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5067 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5068 return fold_array_ctor_reference (type, ctor, offset, size,
5069 from_decl);
5070 else
5071 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5072 from_decl);
5073 }
5074
5075 return NULL_TREE;
5076 }
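
/* For example, reading a 4-byte float at offset 0 from the
   initializer of

     static const int i = 42;

   fails the exact-match test but has matching sizes, so either the
   VIEW_CONVERT_EXPR path or the native_encode/native_interpret
   fallback for byte-aligned reads can still produce the
   bit-reinterpreted constant.  */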
5077
5078 /* Return the tree representing the element referenced by T if T is an
5079 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5080 names using VALUEIZE. Return NULL_TREE otherwise. */
5081
5082 tree
5083 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5084 {
5085 tree ctor, idx, base;
5086 HOST_WIDE_INT offset, size, max_size;
5087 tree tem;
5088
5089 if (TREE_THIS_VOLATILE (t))
5090 return NULL_TREE;
5091
5092 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5093 return get_symbol_constant_value (t);
5094
5095 tem = fold_read_from_constant_string (t);
5096 if (tem)
5097 return tem;
5098
5099 switch (TREE_CODE (t))
5100 {
5101 case ARRAY_REF:
5102 case ARRAY_RANGE_REF:
5103 /* Constant indexes are handled well by get_base_constructor.
5104 Only special case variable offsets.
5105 FIXME: This code can't handle nested references with variable indexes
5106 (they will be handled only by iteration of ccp). Perhaps we can bring
5107 get_ref_base_and_extent here and make it use a valueize callback. */
5108 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5109 && valueize
5110 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5111 && TREE_CODE (idx) == INTEGER_CST)
5112 {
5113 tree low_bound, unit_size;
5114
5115 /* If the resulting bit-offset is constant, track it. */
5116 if ((low_bound = array_ref_low_bound (t),
5117 TREE_CODE (low_bound) == INTEGER_CST)
5118 && (unit_size = array_ref_element_size (t),
5119 tree_fits_uhwi_p (unit_size)))
5120 {
5121 offset_int woffset
5122 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5123 TYPE_PRECISION (TREE_TYPE (idx)));
5124
5125 if (wi::fits_shwi_p (woffset))
5126 {
5127 offset = woffset.to_shwi ();
5128 /* TODO: This code seems wrong, multiply then check
5129 to see if it fits. */
5130 offset *= tree_to_uhwi (unit_size);
5131 offset *= BITS_PER_UNIT;
5132
5133 base = TREE_OPERAND (t, 0);
5134 ctor = get_base_constructor (base, &offset, valueize);
5135 /* Empty constructor. Always fold to 0. */
5136 if (ctor == error_mark_node)
5137 return build_zero_cst (TREE_TYPE (t));
5138 /* Out of bound array access. Value is undefined,
5139 but don't fold. */
5140 if (offset < 0)
5141 return NULL_TREE;
5142 /* We cannot determine the ctor. */
5143 if (!ctor)
5144 return NULL_TREE;
5145 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5146 tree_to_uhwi (unit_size)
5147 * BITS_PER_UNIT,
5148 base);
5149 }
5150 }
5151 }
5152 /* Fallthru. */
5153
5154 case COMPONENT_REF:
5155 case BIT_FIELD_REF:
5156 case TARGET_MEM_REF:
5157 case MEM_REF:
5158 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5159 ctor = get_base_constructor (base, &offset, valueize);
5160
5161 /* Empty constructor. Always fold to 0. */
5162 if (ctor == error_mark_node)
5163 return build_zero_cst (TREE_TYPE (t));
5164 /* We do not know precise address. */
5165 if (max_size == -1 || max_size != size)
5166 return NULL_TREE;
5167 /* We cannot determine the ctor. */
5168 if (!ctor)
5169 return NULL_TREE;
5170
5171 /* Out of bound array access. Value is undefined, but don't fold. */
5172 if (offset < 0)
5173 return NULL_TREE;
5174
5175 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5176 base);
5177
5178 case REALPART_EXPR:
5179 case IMAGPART_EXPR:
5180 {
5181 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5182 if (c && TREE_CODE (c) == COMPLEX_CST)
5183 return fold_build1_loc (EXPR_LOCATION (t),
5184 TREE_CODE (t), TREE_TYPE (t), c);
5185 break;
5186 }
5187
5188 default:
5189 break;
5190 }
5191
5192 return NULL_TREE;
5193 }
5194
5195 tree
5196 fold_const_aggregate_ref (tree t)
5197 {
5198 return fold_const_aggregate_ref_1 (t, NULL);
5199 }
5200
5201 /* Look up the virtual method with index TOKEN in the virtual table V
5202 at OFFSET.
5203 If CAN_REFER is non-NULL, set it to false when the method is not
5204 referable or when the virtual table is ill-formed (such as one rewritten
5205 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5206
5207 tree
5208 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5209 tree v,
5210 unsigned HOST_WIDE_INT offset,
5211 bool *can_refer)
5212 {
5213 tree vtable = v, init, fn;
5214 unsigned HOST_WIDE_INT size;
5215 unsigned HOST_WIDE_INT elt_size, access_index;
5216 tree domain_type;
5217
5218 if (can_refer)
5219 *can_refer = true;
5220
5221 /* First of all, double-check that we have a virtual table. */
5222 if (TREE_CODE (v) != VAR_DECL
5223 || !DECL_VIRTUAL_P (v))
5224 {
5225 gcc_assert (in_lto_p);
5226 /* Pass down that we lost track of the target. */
5227 if (can_refer)
5228 *can_refer = false;
5229 return NULL_TREE;
5230 }
5231
5232 init = ctor_for_folding (v);
5233
5234 /* The virtual tables should always be born with constructors,
5235 and we should always assume that they are available for
5236 folding. At the moment we do not stream them in all cases,
5237 but it should never happen that the ctor seems unreachable. */
5238 gcc_assert (init);
5239 if (init == error_mark_node)
5240 {
5241 gcc_assert (in_lto_p);
5242 /* Pass down that we lost track of the target. */
5243 if (can_refer)
5244 *can_refer = false;
5245 return NULL_TREE;
5246 }
5247 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5248 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5249 offset *= BITS_PER_UNIT;
5250 offset += token * size;
5251
5252 /* Look up the value in the constructor, which is assumed to be an array.
5253 This is equivalent to
5254 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5255 offset, size, NULL);
5256 but in constant time. We expect that the front end produced a simple
5257 array without indexed initializers. */
5258
5259 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5260 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5261 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5262 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5263
5264 access_index = offset / BITS_PER_UNIT / elt_size;
5265 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5266
5267 /* This code assumes that there are no indexed fields
5268 produced by the C++ FE, so we can directly index the array. */
5269 if (access_index < CONSTRUCTOR_NELTS (init))
5270 {
5271 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5272 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5273 STRIP_NOPS (fn);
5274 }
5275 else
5276 fn = NULL;
5277
5278 /* For a type-inconsistent program we may end up looking up a virtual
5279 method in a virtual table that does not contain TOKEN entries. We may
5280 overrun the virtual table and pick up a constant or RTTI info pointer.
5281 In any case the call is undefined. */
5282 if (!fn
5283 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5284 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5285 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5286 else
5287 {
5288 fn = TREE_OPERAND (fn, 0);
5289
5290 /* When cgraph node is missing and function is not public, we cannot
5291 devirtualize. This can happen in WHOPR when the actual method
5292 ends up in other partition, because we found devirtualization
5293 possibility too late. */
5294 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5295 {
5296 if (can_refer)
5297 {
5298 *can_refer = false;
5299 return fn;
5300 }
5301 return NULL_TREE;
5302 }
5303 }
5304
5305 /* Make sure we create a cgraph node for functions we'll reference.
5306 They can be non-existent if the reference comes from an entry
5307 of an external vtable for example. */
5308 cgraph_node::get_create (fn);
5309
5310 return fn;
5311 }
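
/* Illustration of the lookup arithmetic above: with 8-byte vtable
   slots (so TYPE_SIZE describes 64-bit entries), OFFSET 16 and
   TOKEN 3 give

     offset = 16 * BITS_PER_UNIT + 3 * 64 = 320 bits
     access_index = 320 / BITS_PER_UNIT / 8 = 5

   so CONSTRUCTOR_ELT (init, 5) is the candidate method pointer.  */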
5312
5313 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5314 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5315 KNOWN_BINFO carries the binfo describing the true type of
5316 OBJ_TYPE_REF_OBJECT(REF).
5317 If CAN_REFER is non-NULL, set it to false when the method is not
5318 referable or when the virtual table is ill-formed (such as one rewritten
5319 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5320
5321 tree
5322 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5323 bool *can_refer)
5324 {
5325 unsigned HOST_WIDE_INT offset;
5326 tree v;
5327
5328 v = BINFO_VTABLE (known_binfo);
5329 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5330 if (!v)
5331 return NULL_TREE;
5332
5333 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5334 {
5335 if (can_refer)
5336 *can_refer = false;
5337 return NULL_TREE;
5338 }
5339 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5340 }

/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

          /* Fall through.  */
        default:
          return false;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined for other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, SIGNED);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              return false;
            }
        }
    }

  return false;
}
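
/* Illustrative sketch (added for exposition, not from the original
   sources) of queries the predicate above answers positively:

     y_2 = x_1 * x_1;                  // identical operands
     z_4 = __builtin_fabs (w_3);       // fabs is always nonnegative
     p_6 = __builtin_powi (q_5, 4);    // even integer exponent

   A value it cannot prove nonnegative from the single defining
   statement, e.g. the default definition of a parameter, yields
   false.  */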

/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may differ from the
   pointed-to type, but only in the sense that it is still
   compatible from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
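
/* Usage sketch (added for exposition, not from the original sources):
   for T = &a p+ 8 where A is a v4si vector and the indirection has
   type int, the POINTER_PLUS_EXPR case above folds the load to

     BIT_FIELD_REF <a, 32, 64>

   i.e. element 2 of the vector (32-bit width at a 64-bit offset).  */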

/* Return true if CODE is an operation that, when operating on signed
   integer types, involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic, by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
                     force_gimple_operand (fold_convert (type,
                                                         gimple_op (stmt, i)),
                                           &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
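
/* For example (an illustrative sketch added for exposition), given

     a_1 = b_2 + c_3;    // signed int addition, overflow undefined

   the rewrite above yields a sequence equivalent to

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;       // unsigned, wraps on overflow
     a_1 = (int) _6;

   so a pass may move the arithmetic to a place where it executes
   unconditionally without introducing undefined behavior.  */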


/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type, NULL);
      else
        res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign_with_ops (code, res,
                                             build1 (code, type, op0),
                                             NULL_TREE);
      else
        stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type, NULL);
      else
        res = create_tmp_reg (type, NULL);
      gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
        res = make_ssa_name (type, NULL);
      else
        res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign_with_ops (code, res,
                                             build3 (BIT_FIELD_REF, type,
                                                     op0, op1, op2),
                                             NULL_TREE);
      else
        stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
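
/* Usage sketch for the expression overloads above (added for
   exposition; assumes the declarations in gimple-fold.h default
   VALUEIZE to NULL): building "(a + b) * a" with eager folding:

     gimple_seq stmts = NULL;
     tree tem = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
     tree res = gimple_build (&stmts, loc, MULT_EXPR, type, tem, a);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   When gimple_simplify folds a step (say B is known to be zero), no
   statement is emitted for it and the simplified value is returned
   directly.  */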

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type, NULL);
          else
            res = create_tmp_reg (type, NULL);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type, NULL);
          else
            res = create_tmp_reg (type, NULL);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2,
              tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          if (gimple_in_ssa_p (cfun))
            res = make_ssa_name (type, NULL);
          else
            res = create_tmp_reg (type, NULL);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
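
/* Usage sketch for the built-in call overloads and gimple_convert
   (added for exposition; assumes the gimple-fold.h declarations
   default VALUEIZE to NULL): emitting sqrt ((double) x):

     gimple_seq stmts = NULL;
     tree d = gimple_convert (&stmts, loc, double_type_node, x);
     tree s = gimple_build (&stmts, loc, BUILT_IN_SQRT,
                            double_type_node, d);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   gimple_convert emits nothing when the conversion is useless in
   GIMPLE, and gimple_build emits nothing when the call folds.  */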