/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

static bool get_range_strlen (tree, tree[2], bitmap *, int,
                              int, bool *, unsigned, tree *);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
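      /* A body that exists only as an inline copy inside its callers
         (node->global.inlined_to set) will not be emitted on its own,
         so a new reference to it cannot be resolved.  */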
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable
     may refer to a symbol keyed to another compilation unit.  The
     other compilation unit may be in a separate DSO and the symbol
     may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check whether we still have a
     definition or whether the definition is going to be output in
     another partition.  Bypass this when gimplifying; all needed
     functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions, it may be tempting not to give up, because the copy
     will be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit
     was compiled with LTO it is possible that the vtable was kept
     public while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll
             reference.  They can be non-existent if the reference comes
             from an entry of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the
         exact SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
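  /* The replacement is a register assignment or a nop, neither of
     which stores to memory, so the call's virtual definition can be
     unlinked and released.  */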
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

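  /* E.g., a size_t value known to be either zero or greater than
     SSIZE_MAX (as when a non-positive ssize_t value has been converted
     to size_t) leaves zero as the only value in its range once that
     range is intersected with the valid object sizes [0, SSIZE_MAX]
     below.  */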
  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (VR_RANGE,
                           build_int_cst (type, 0),
                           wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Calls to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

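/* ENDP encodes what the folded call is expected to return and whether
   the operands may overlap (inferred from the use of ENDP in the code
   below): 0 for a memcpy/memmove-style call returning DEST, 1 for a
   mempcpy-style call returning DEST + LEN, 2 for a call returning
   DEST + LEN - 1, and 3 for memmove, whose source and destination may
   overlap.  */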
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  bool nowarn = gimple_no_warning_p (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2)
          && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect invalid bounds and overlapping copies and issue
                 either -Warray-bounds or -Wrestrict.  */
              if (!nowarn
                  && check_bounds_or_overlap (as_a <gcall *>(stmt),
                                              dest, src, len, len))
                gimple_set_no_warning (stmt, true);

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         will no longer have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (src, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  src, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Detect invalid bounds and overlapping copies and issue either
         -Warray-bounds or -Wrestrict.  */
      if (!nowarn)
        check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  Use an unsigned char[] type to
         perform the copying to preserve padding and to avoid any issues
         with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
                                         tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
        srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      new_stmt
        = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
                               fold_build2 (MEM_REF, srctype, src, off0));
    set_vop_and_replace:
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

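  /* Compute the value the original built-in would have returned (see
     the description of ENDP above) and assign it to the lhs.  */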
 done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
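      /* CVAL now holds the low byte of C replicated into every byte,
         e.g. 0xab becomes 0xabababababababab.  The final shift is split
         in two so the shift count stays below 32, keeping the
         expression well-defined even were HOST_WIDE_INT only 32 bits
         wide (a shift by the full width of the type is undefined).  */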
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, tree length[2], bitmap *visited, int type,
                       int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), length,
                                     visited, type, fuzzy, flexp,
                                     eltsize, nonstr);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (type == 2)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      /* If we potentially had a non-terminated string, then
         bubble that information up to the caller.  */
      if (!val && lendata.decl)
        {
          *nonstr = lendata.decl;
          *minlen = lendata.minlen;
          *maxlen = lendata.minlen;
          return type == 0 ? false : true;
        }
    }

  if (!val && fuzzy)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                 visited, type, fuzzy, flexp,
                                 eltsize, nonstr);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val || integer_zerop (val))
            return false;

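          /* Subtract one for the terminating nul: an array of SIZE
             bytes can hold a string of at most SIZE - 1 characters.  */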
1362 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1363 integer_one_node);
1364
1365 /* Set the minimum size to zero since the string in
1366 the array could have zero length. */
1367 *minlen = ssize_int (0);
1368
1369 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1370 && optype == TREE_TYPE (TREE_OPERAND (arg, 0))
1371 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1372 *flexp = true;
1373 }
1374 else if (TREE_CODE (arg) == COMPONENT_REF
1375 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1376 == ARRAY_TYPE))
1377 {
1378 /* Use the type of the member array to determine the upper
1379 bound on the length of the array. This may be overly
1380 optimistic if the array itself isn't NUL-terminated and
1381 the caller relies on the subsequent member to contain
1382 the NUL but that would only be considered valid if
1383 the array were the last member of a struct.
1384 Set *FLEXP to true if the array whose bound is being
1385 used is at the end of a struct. */
1386 if (array_at_struct_end_p (arg))
1387 *flexp = true;
1388
1389 tree fld = TREE_OPERAND (arg, 1);
1390
1391 tree optype = TREE_TYPE (fld);
1392
1393 /* Determine the "innermost" array type. */
1394 while (TREE_CODE (optype) == ARRAY_TYPE
1395 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1396 optype = TREE_TYPE (optype);
1397
1398 /* Fail when the array bound is unknown or zero. */
1399 val = TYPE_SIZE_UNIT (optype);
1400 if (!val || integer_zerop (val))
1401 return false;
1402 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1403 integer_one_node);
1404
1405 /* Set the minimum size to zero since the string in
1406 the array could have zero length. */
1407 *minlen = ssize_int (0);
1408 }
1409
1410 if (VAR_P (arg))
1411 {
1412 tree type = TREE_TYPE (arg);
1413 if (POINTER_TYPE_P (type))
1414 type = TREE_TYPE (type);
1415
1416 if (TREE_CODE (type) == ARRAY_TYPE)
1417 {
1418 val = TYPE_SIZE_UNIT (type);
1419 if (!val
1420 || TREE_CODE (val) != INTEGER_CST
1421 || integer_zerop (val))
1422 return false;
1423 val = wide_int_to_tree (TREE_TYPE (val),
1424 wi::sub (wi::to_wide (val), 1));
1425
1426 /* Set the minimum size to zero since the string in
1427 the array could have zero length. */
1428 *minlen = ssize_int (0);
1429 }
1430 }
1431 }
1432
1433 if (!val)
1434 return false;
1435
1436 /* Adjust the lower bound on the string length as necessary. */
1437 if (!*minlen
1438 || (type > 0
1439 && TREE_CODE (*minlen) == INTEGER_CST
1440 && TREE_CODE (val) == INTEGER_CST
1441 && tree_int_cst_lt (val, *minlen)))
1442 *minlen = val;
1443
1444 if (*maxlen)
1445 {
1446 /* Adjust the more conservative bound if possible/necessary
1447 and fail otherwise. */
1448 if (type > 0)
1449 {
1450 if (TREE_CODE (*maxlen) != INTEGER_CST
1451 || TREE_CODE (val) != INTEGER_CST)
1452 return false;
1453
1454 if (tree_int_cst_lt (*maxlen, val))
1455 *maxlen = val;
1456 return true;
1457 }
1458 else if (simple_cst_equal (val, *maxlen) != 1)
1459 {
1460 /* Fail if the length of this ARG is different from that
1461 previously determined from another ARG. */
1462 return false;
1463 }
1464 }
1465
1466 *maxlen = val;
1467 return true;
1468 }
1469
1470 /* Obtain the minimum and maximum string length or minimum and maximum
1471 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1472 If ARG is an SSA name variable, follow its use-def chains. When
1473 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1474 if we are unable to determine the length or value, return false.
1475 VISITED is a bitmap of visited variables.
1476 TYPE is 0 if string length should be obtained, 1 for maximum string
1477 length and 2 for maximum value ARG can have.
1478 When FUZZY is non-zero and the length of a string cannot be determined,
1479 the function instead considers as the maximum possible length the
1480 size of a character array it may refer to. If FUZZY is 2, it will handle
1481 PHIs and COND_EXPRs optimistically, if we can determine string length
1482 minimum and maximum, it will use the minimum from the ones where it
1483 can be determined.
1484 Set *FLEXP to true if the range of the string lengths has been
1485 obtained from the upper bound of an array at the end of a struct.
1486 Such an array may hold a string that's longer than its upper bound
1487 due to it being used as a poor-man's flexible array member.
1488 Pass NONSTR through to children.
1489 ELTSIZE is 1 for normal single byte character strings, and 2 or
1490 4 for wide characer strings. ELTSIZE is by default 1. */
1491
1492 static bool
1493 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1494 int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
1495 {
1496
1497 if (TREE_CODE (arg) != SSA_NAME)
1498 return get_range_strlen_tree (arg, length, visited, type, fuzzy, flexp,
1499 eltsize, nonstr);
1500
1501 /* If ARG is registered for SSA update we cannot look at its defining
1502 statement. */
1503 if (name_registered_for_update_p (arg))
1504 return false;
1505
1506 /* If we were already here, break the infinite cycle. */
1507 if (!*visited)
1508 *visited = BITMAP_ALLOC (NULL);
1509 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1510 return true;
1511
1512 tree var = arg;
1513 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1514
1515 /* The minimum and maximum length. */
1516 tree *const maxlen = length + 1;
1517
1518 switch (gimple_code (def_stmt))
1519 {
1520 case GIMPLE_ASSIGN:
1521 /* The RHS of the statement defining VAR must either have a
1522 constant length or come from another SSA_NAME with a constant
1523 length. */
1524 if (gimple_assign_single_p (def_stmt)
1525 || gimple_assign_unary_nop_p (def_stmt))
1526 {
1527 tree rhs = gimple_assign_rhs1 (def_stmt);
1528 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
1529 eltsize, nonstr);
1530 }
1531 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1532 {
1533 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1534 gimple_assign_rhs3 (def_stmt) };
1535
1536 for (unsigned int i = 0; i < 2; i++)
1537 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1538 flexp, eltsize, nonstr))
1539 {
1540 if (fuzzy == 2)
1541 *maxlen = build_all_ones_cst (size_type_node);
1542 else
1543 return false;
1544 }
1545 return true;
1546 }
1547 return false;
1548
1549 case GIMPLE_PHI:
1550 /* All the arguments of the PHI node must have the same constant
1551 length. */
1552 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1553 {
1554 tree arg = gimple_phi_arg (def_stmt, i)->def;
1555
1556 /* If this PHI has itself as an argument, we cannot
1557 determine the string length of this argument. However,
1558 if we can find a constant string length for the other
1559 PHI args then we can still be sure that this is a
1560 constant string length. So be optimistic and just
1561 continue with the next argument. */
1562 if (arg == gimple_phi_result (def_stmt))
1563 continue;
1564
1565 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
1566 eltsize, nonstr))
1567 {
1568 if (fuzzy == 2)
1569 *maxlen = build_all_ones_cst (size_type_node);
1570 else
1571 return false;
1572 }
1573 }
1574 return true;
1575
1576 default:
1577 return false;
1578 }
1579 }
1580 /* Determine the minimum and maximum value or string length that ARG
1581 refers to and store each in the first two elements of MINMAXLEN.
1582 For expressions that point to strings of unknown lengths that are
1583 character arrays, use the upper bound of the array as the maximum
1584 length. For example, given an expression like 'x ? array : "xyz"'
1585 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1586 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1587 stored in array.
1588 Return true if the range of the string lengths has been obtained
1589 from the upper bound of an array at the end of a struct. Such
1590 an array may hold a string that's longer than its upper bound
1591 due to it being used as a poor-man's flexible array member.
1592
1593 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1594 and false if PHIs and COND_EXPRs are to be handled optimistically,
1595 if we can determine string length minimum and maximum; it will use
1596 the minimum from the ones where it can be determined.
1597 STRICT false should be only used for warning code.
1598 When non-null, clear *NONSTR if ARG refers to a constant array
1599 that is known not be nul-terminated. Otherwise set it to
1600 the declaration of the constant non-terminated array.
1601
1602 ELTSIZE is 1 for normal single byte character strings, and 2 or
1603 4 for wide characer strings. ELTSIZE is by default 1. */
1604
1605 bool
1606 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1607 bool strict, tree *nonstr /* = NULL */)
1608 {
1609 bitmap visited = NULL;
1610
1611 minmaxlen[0] = NULL_TREE;
1612 minmaxlen[1] = NULL_TREE;
1613
1614 tree nonstrbuf;
1615 if (!nonstr)
1616 nonstr = &nonstrbuf;
1617 *nonstr = NULL_TREE;
1618
1619 bool flexarray = false;
1620 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1621 &flexarray, eltsize, nonstr))
1622 {
1623 minmaxlen[0] = NULL_TREE;
1624 minmaxlen[1] = NULL_TREE;
1625 }
1626
1627 if (visited)
1628 BITMAP_FREE (visited);
1629
1630 return flexarray;
1631 }
1632
1633 /* Return the maximum string length for ARG, counting by TYPE
1634 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1635 if the caller is prepared to handle unterminated strings.
1636
1637 If an unterminated string is discovered and our caller handles
1638 unterminated strings, then bubble up the offending DECL and
1639 return the maximum size. Otherwise return NULL. */
1640
1641 tree
1642 get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
1643 {
1644 bitmap visited = NULL;
1645 tree len[2] = { NULL_TREE, NULL_TREE };
1646
1647 bool dummy;
1648 /* Set to non-null if ARG refers to an untermianted array. */
1649 tree mynonstr = NULL_TREE;
1650 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
1651 len[1] = NULL_TREE;
1652 if (visited)
1653 BITMAP_FREE (visited);
1654
1655 if (nonstr)
1656 {
1657 /* For callers prepared to handle unterminated arrays set
1658 *NONSTR to point to the declaration of the array and return
1659 the maximum length/size. */
1660 *nonstr = mynonstr;
1661 return len[1];
1662 }
1663
1664 /* Fail if the constant array isn't nul-terminated. */
1665 return mynonstr ? NULL_TREE : len[1];
1666 }
1667
1668
1669 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1670 If LEN is not NULL, it represents the length of the string to be
1671 copied. Return NULL_TREE if no simplification can be made. */
1672
1673 static bool
1674 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1675 tree dest, tree src)
1676 {
1677 gimple *stmt = gsi_stmt (*gsi);
1678 location_t loc = gimple_location (stmt);
1679 tree fn;
1680
1681 /* If SRC and DEST are the same (and not volatile), return DEST. */
1682 if (operand_equal_p (src, dest, 0))
1683 {
1684 /* Issue -Wrestrict unless the pointers are null (those do
1685 not point to objects and so do not indicate an overlap;
1686 such calls could be the result of sanitization and jump
1687 threading). */
1688 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1689 {
1690 tree func = gimple_call_fndecl (stmt);
1691
1692 warning_at (loc, OPT_Wrestrict,
1693 "%qD source argument is the same as destination",
1694 func);
1695 }
1696
1697 replace_call_with_value (gsi, dest);
1698 return true;
1699 }
1700
1701 if (optimize_function_for_size_p (cfun))
1702 return false;
1703
1704 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1705 if (!fn)
1706 return false;
1707
1708 /* Set to non-null if ARG refers to an unterminated array. */
1709 tree nonstr = NULL;
1710 tree len = get_maxval_strlen (src, 0, &nonstr);
1711
1712 if (nonstr)
1713 {
1714 /* Avoid folding calls with unterminated arrays. */
1715 if (!gimple_no_warning_p (stmt))
1716 warn_string_no_nul (loc, "strcpy", src, nonstr);
1717 gimple_set_no_warning (stmt, true);
1718 return false;
1719 }
1720
1721 if (!len)
1722 return false;
1723
1724 len = fold_convert_loc (loc, size_type_node, len);
1725 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1726 len = force_gimple_operand_gsi (gsi, len, true,
1727 NULL_TREE, true, GSI_SAME_STMT);
1728 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1729 replace_call_with_call_and_fold (gsi, repl);
1730 return true;
1731 }
1732
1733 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1734 If SLEN is not NULL, it represents the length of the source string.
1735 Return NULL_TREE if no simplification can be made. */
1736
1737 static bool
1738 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1739 tree dest, tree src, tree len)
1740 {
1741 gimple *stmt = gsi_stmt (*gsi);
1742 location_t loc = gimple_location (stmt);
1743 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1744
1745 /* If the LEN parameter is zero, return DEST. */
1746 if (integer_zerop (len))
1747 {
1748 /* Avoid warning if the destination refers to a an array/pointer
1749 decorate with attribute nonstring. */
1750 if (!nonstring)
1751 {
1752 tree fndecl = gimple_call_fndecl (stmt);
1753
1754 /* Warn about the lack of nul termination: the result is not
1755 a (nul-terminated) string. */
1756 tree slen = get_maxval_strlen (src, 0);
1757 if (slen && !integer_zerop (slen))
1758 warning_at (loc, OPT_Wstringop_truncation,
1759 "%G%qD destination unchanged after copying no bytes "
1760 "from a string of length %E",
1761 stmt, fndecl, slen);
1762 else
1763 warning_at (loc, OPT_Wstringop_truncation,
1764 "%G%qD destination unchanged after copying no bytes",
1765 stmt, fndecl);
1766 }
1767
1768 replace_call_with_value (gsi, dest);
1769 return true;
1770 }
1771
1772 /* We can't compare slen with len as constants below if len is not a
1773 constant. */
1774 if (TREE_CODE (len) != INTEGER_CST)
1775 return false;
1776
1777 /* Now, we must be passed a constant src ptr parameter. */
1778 tree slen = get_maxval_strlen (src, 0);
1779 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1780 return false;
1781
1782 /* The size of the source string including the terminating nul. */
1783 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1784
1785 /* We do not support simplification of this case, though we do
1786 support it when expanding trees into RTL. */
1787 /* FIXME: generate a call to __builtin_memset. */
1788 if (tree_int_cst_lt (ssize, len))
1789 return false;
1790
1791 /* Diagnose truncation that leaves the copy unterminated. */
1792 maybe_diag_stxncpy_trunc (*gsi, src, len);
1793
1794 /* OK transform into builtin memcpy. */
1795 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1796 if (!fn)
1797 return false;
1798
1799 len = fold_convert_loc (loc, size_type_node, len);
1800 len = force_gimple_operand_gsi (gsi, len, true,
1801 NULL_TREE, true, GSI_SAME_STMT);
1802 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1803 replace_call_with_call_and_fold (gsi, repl);
1804
1805 return true;
1806 }
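
/* Illustrative sketch, not part of the original sources: with a
   constant bound no larger than the source size, the fold above turns

     strncpy (d, "abc", 4);   // slen == 3, so ssize == 4 >= len

   into

     memcpy (d, "abc", 4);

   The two calls write exactly the same bytes here because no nul
   padding is needed.  A call like strncpy (d, "abc", 3) is folded the
   same way, but maybe_diag_stxncpy_trunc first warns that the result
   is not nul-terminated.  */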
1807
1808 /* Fold function call to builtin strchr or strrchr.
1809 If both arguments are constant, evaluate and fold the result,
1810 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1811 In general strlen is significantly faster than strchr
1812 due to being a simpler operation. */
1813 static bool
1814 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1815 {
1816 gimple *stmt = gsi_stmt (*gsi);
1817 tree str = gimple_call_arg (stmt, 0);
1818 tree c = gimple_call_arg (stmt, 1);
1819 location_t loc = gimple_location (stmt);
1820 const char *p;
1821 char ch;
1822
1823 if (!gimple_call_lhs (stmt))
1824 return false;
1825
1826 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1827 {
1828 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1829
1830 if (p1 == NULL)
1831 {
1832 replace_call_with_value (gsi, integer_zero_node);
1833 return true;
1834 }
1835
1836 tree len = build_int_cst (size_type_node, p1 - p);
1837 gimple_seq stmts = NULL;
1838 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1839 POINTER_PLUS_EXPR, str, len);
1840 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1841 gsi_replace_with_seq_vops (gsi, stmts);
1842 return true;
1843 }
1844
1845 if (!integer_zerop (c))
1846 return false;
1847
1848 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1849 if (is_strrchr && optimize_function_for_size_p (cfun))
1850 {
1851 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1852
1853 if (strchr_fn)
1854 {
1855 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1856 replace_call_with_call_and_fold (gsi, repl);
1857 return true;
1858 }
1859
1860 return false;
1861 }
1862
1863 tree len;
1864 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1865
1866 if (!strlen_fn)
1867 return false;
1868
1869 /* Create newstr = strlen (str). */
1870 gimple_seq stmts = NULL;
1871 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1872 gimple_set_location (new_stmt, loc);
1873 len = create_tmp_reg_or_ssa_name (size_type_node);
1874 gimple_call_set_lhs (new_stmt, len);
1875 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1876
1877 /* Create (str p+ strlen (str)). */
1878 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1879 POINTER_PLUS_EXPR, str, len);
1880 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1881 gsi_replace_with_seq_vops (gsi, stmts);
1882 /* gsi now points at the assignment to the lhs, get a
1883 stmt iterator to the strlen.
1884 ??? We can't use gsi_for_stmt as that doesn't work when the
1885 CFG isn't built yet. */
1886 gimple_stmt_iterator gsi2 = *gsi;
1887 gsi_prev (&gsi2);
1888 fold_stmt (&gsi2);
1889 return true;
1890 }
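
/* Illustrative sketch, not part of the original sources: the fold
   above handles both the constant case

     strchr ("hello", 'l')   =>  "hello" + 2

   and, for a hypothetical pointer S, the search-for-nul case

     strchr (s, 0)           =>  tmp = strlen (s);  s + tmp

   the latter because strlen is usually the cheaper operation.  */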
1891
1892 /* Fold function call to builtin strstr.
1893 If both arguments are constant, evaluate and fold the result,
1894 additionally fold strstr (x, "") into x and strstr (x, "c")
1895 into strchr (x, 'c'). */
1896 static bool
1897 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1898 {
1899 gimple *stmt = gsi_stmt (*gsi);
1900 tree haystack = gimple_call_arg (stmt, 0);
1901 tree needle = gimple_call_arg (stmt, 1);
1902 const char *p, *q;
1903
1904 if (!gimple_call_lhs (stmt))
1905 return false;
1906
1907 q = c_getstr (needle);
1908 if (q == NULL)
1909 return false;
1910
1911 if ((p = c_getstr (haystack)))
1912 {
1913 const char *r = strstr (p, q);
1914
1915 if (r == NULL)
1916 {
1917 replace_call_with_value (gsi, integer_zero_node);
1918 return true;
1919 }
1920
1921 tree len = build_int_cst (size_type_node, r - p);
1922 gimple_seq stmts = NULL;
1923 gimple *new_stmt
1924 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1925 haystack, len);
1926 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1927 gsi_replace_with_seq_vops (gsi, stmts);
1928 return true;
1929 }
1930
1931 /* For strstr (x, "") return x. */
1932 if (q[0] == '\0')
1933 {
1934 replace_call_with_value (gsi, haystack);
1935 return true;
1936 }
1937
1938 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1939 if (q[1] == '\0')
1940 {
1941 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1942 if (strchr_fn)
1943 {
1944 tree c = build_int_cst (integer_type_node, q[0]);
1945 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1946 replace_call_with_call_and_fold (gsi, repl);
1947 return true;
1948 }
1949 }
1950
1951 return false;
1952 }
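
/* Illustrative sketch, not part of the original sources: the three
   cases folded above are, for a hypothetical pointer X,

     strstr ("haystack", "st")  =>  "haystack" + 3   // both constant
     strstr (x, "")             =>  x
     strstr (x, "c")            =>  strchr (x, 'c')  // one-char needle
*/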
1953
1954 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1955 to the call.
1956
1957 Return false if no simplification was possible, true if the call was
1958 simplified: either replaced by DST when SRC is the empty string, or
1959 expanded into a strlen of DST followed by a memcpy of SRC (including
1960 its terminating nul) to DST + strlen (DST). */
1971
1972 static bool
1973 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1974 {
1975 gimple *stmt = gsi_stmt (*gsi);
1976 location_t loc = gimple_location (stmt);
1977
1978 const char *p = c_getstr (src);
1979
1980 /* If the string length is zero, return the dst parameter. */
1981 if (p && *p == '\0')
1982 {
1983 replace_call_with_value (gsi, dst);
1984 return true;
1985 }
1986
1987 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1988 return false;
1989
1990 /* See if we can store by pieces into (dst + strlen(dst)). */
1991 tree newdst;
1992 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1993 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1994
1995 if (!strlen_fn || !memcpy_fn)
1996 return false;
1997
1998 /* If the length of the source string isn't computable don't
1999 split strcat into strlen and memcpy. */
2000 tree len = get_maxval_strlen (src, 0);
2001 if (! len)
2002 return false;
2003
2004 /* Create strlen (dst). */
2005 gimple_seq stmts = NULL, stmts2;
2006 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2007 gimple_set_location (repl, loc);
2008 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2009 gimple_call_set_lhs (repl, newdst);
2010 gimple_seq_add_stmt_without_update (&stmts, repl);
2011
2012 /* Create (dst p+ strlen (dst)). */
2013 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2014 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2015 gimple_seq_add_seq_without_update (&stmts, stmts2);
2016
2017 len = fold_convert_loc (loc, size_type_node, len);
2018 len = size_binop_loc (loc, PLUS_EXPR, len,
2019 build_int_cst (size_type_node, 1));
2020 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2021 gimple_seq_add_seq_without_update (&stmts, stmts2);
2022
2023 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2024 gimple_seq_add_stmt_without_update (&stmts, repl);
2025 if (gimple_call_lhs (stmt))
2026 {
2027 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2028 gimple_seq_add_stmt_without_update (&stmts, repl);
2029 gsi_replace_with_seq_vops (gsi, stmts);
2030 /* gsi now points at the assignment to the lhs, get a
2031 stmt iterator to the memcpy call.
2032 ??? We can't use gsi_for_stmt as that doesn't work when the
2033 CFG isn't built yet. */
2034 gimple_stmt_iterator gsi2 = *gsi;
2035 gsi_prev (&gsi2);
2036 fold_stmt (&gsi2);
2037 }
2038 else
2039 {
2040 gsi_replace_with_seq_vops (gsi, stmts);
2041 fold_stmt (gsi);
2042 }
2043 return true;
2044 }
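
/* Illustrative sketch, not part of the original sources: when the
   source length is known, the fold above expands

     strcat (d, "xy");

   into the equivalent of

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);   // two chars plus the terminating nul

   so that the copy can later be done by pieces.  */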
2045
2046 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2047 are the arguments to the call. */
2048
2049 static bool
2050 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2051 {
2052 gimple *stmt = gsi_stmt (*gsi);
2053 tree dest = gimple_call_arg (stmt, 0);
2054 tree src = gimple_call_arg (stmt, 1);
2055 tree size = gimple_call_arg (stmt, 2);
2056 tree fn;
2057 const char *p;
2058
2059
2060 p = c_getstr (src);
2061 /* If the SRC parameter is "", return DEST. */
2062 if (p && *p == '\0')
2063 {
2064 replace_call_with_value (gsi, dest);
2065 return true;
2066 }
2067
2068 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2069 return false;
2070
2071 /* If __builtin_strcat_chk is used, assume strcat is available. */
2072 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2073 if (!fn)
2074 return false;
2075
2076 gimple *repl = gimple_build_call (fn, 2, dest, src);
2077 replace_call_with_call_and_fold (gsi, repl);
2078 return true;
2079 }
2080
2081 /* Simplify a call to the strncat builtin. */
2082
2083 static bool
2084 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2085 {
2086 gimple *stmt = gsi_stmt (*gsi);
2087 tree dst = gimple_call_arg (stmt, 0);
2088 tree src = gimple_call_arg (stmt, 1);
2089 tree len = gimple_call_arg (stmt, 2);
2090
2091 const char *p = c_getstr (src);
2092
2093 /* If the requested length is zero, or the src parameter string
2094 length is zero, return the dst parameter. */
2095 if (integer_zerop (len) || (p && *p == '\0'))
2096 {
2097 replace_call_with_value (gsi, dst);
2098 return true;
2099 }
2100
2101 if (TREE_CODE (len) != INTEGER_CST || !p)
2102 return false;
2103
2104 unsigned srclen = strlen (p);
2105
2106 int cmpsrc = compare_tree_int (len, srclen);
2107
2108 /* Return early if the requested len is less than the string length.
2109 Warnings will be issued elsewhere later. */
2110 if (cmpsrc < 0)
2111 return false;
2112
2113 unsigned HOST_WIDE_INT dstsize;
2114
2115 bool nowarn = gimple_no_warning_p (stmt);
2116
2117 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2118 {
2119 int cmpdst = compare_tree_int (len, dstsize);
2120
2121 if (cmpdst >= 0)
2122 {
2123 tree fndecl = gimple_call_fndecl (stmt);
2124
2125 /* Strncat copies (at most) LEN bytes and always appends
2126 the terminating NUL so the specified bound should never
2127 be equal to (or greater than) the size of the destination.
2128 If it is, the copy could overflow. */
2129 location_t loc = gimple_location (stmt);
2130 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2131 cmpdst == 0
2132 ? G_("%G%qD specified bound %E equals "
2133 "destination size")
2134 : G_("%G%qD specified bound %E exceeds "
2135 "destination size %wu"),
2136 stmt, fndecl, len, dstsize);
2137 if (nowarn)
2138 gimple_set_no_warning (stmt, true);
2139 }
2140 }
2141
2142 if (!nowarn && cmpsrc == 0)
2143 {
2144 tree fndecl = gimple_call_fndecl (stmt);
2145 location_t loc = gimple_location (stmt);
2146
2147 /* To avoid possible overflow the specified bound should also
2148 not be equal to the length of the source, even when the size
2149 of the destination is unknown (it's not an uncommon mistake
2150 to specify as the bound to strncat the length of the source). */
2151 if (warning_at (loc, OPT_Wstringop_overflow_,
2152 "%G%qD specified bound %E equals source length",
2153 stmt, fndecl, len))
2154 gimple_set_no_warning (stmt, true);
2155 }
2156
2157 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2158
2159 /* If the replacement _DECL isn't initialized, don't do the
2160 transformation. */
2161 if (!fn)
2162 return false;
2163
2164 /* Otherwise, emit a call to strcat. */
2165 gcall *repl = gimple_build_call (fn, 2, dst, src);
2166 replace_call_with_call_and_fold (gsi, repl);
2167 return true;
2168 }
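
/* Illustrative sketch, not part of the original sources: once the
   constant bound is known to exceed the constant source length, it is
   irrelevant and the fold above turns

     strncat (d, "ab", 5);   // srclen == 2 < 5

   into

     strcat (d, "ab");

   Bounds equal to the source length or reaching the destination size
   are diagnosed first, as they usually indicate a miscomputed bound.  */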
2169
2170 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2171 LEN, and SIZE. */
2172
2173 static bool
2174 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2175 {
2176 gimple *stmt = gsi_stmt (*gsi);
2177 tree dest = gimple_call_arg (stmt, 0);
2178 tree src = gimple_call_arg (stmt, 1);
2179 tree len = gimple_call_arg (stmt, 2);
2180 tree size = gimple_call_arg (stmt, 3);
2181 tree fn;
2182 const char *p;
2183
2184 p = c_getstr (src);
2185 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2186 if ((p && *p == '\0')
2187 || integer_zerop (len))
2188 {
2189 replace_call_with_value (gsi, dest);
2190 return true;
2191 }
2192
2193 if (! tree_fits_uhwi_p (size))
2194 return false;
2195
2196 if (! integer_all_onesp (size))
2197 {
2198 tree src_len = c_strlen (src, 1);
2199 if (src_len
2200 && tree_fits_uhwi_p (src_len)
2201 && tree_fits_uhwi_p (len)
2202 && ! tree_int_cst_lt (len, src_len))
2203 {
2204 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2205 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2206 if (!fn)
2207 return false;
2208
2209 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2210 replace_call_with_call_and_fold (gsi, repl);
2211 return true;
2212 }
2213 return false;
2214 }
2215
2216 /* If __builtin_strncat_chk is used, assume strncat is available. */
2217 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2218 if (!fn)
2219 return false;
2220
2221 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2222 replace_call_with_call_and_fold (gsi, repl);
2223 return true;
2224 }
2225
2226 /* Build and append gimple statements to STMTS that would load a first
2227 character of a memory location identified by STR. LOC is location
2228 of the statement. */
2229
2230 static tree
2231 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2232 {
2233 tree var;
2234
2235 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2236 tree cst_uchar_ptr_node
2237 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2238 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2239
2240 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2241 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2242 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2243
2244 gimple_assign_set_lhs (stmt, var);
2245 gimple_seq_add_stmt_without_update (stmts, stmt);
2246
2247 return var;
2248 }
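
/* Illustrative sketch, not part of the original sources: for a
   pointer STR the helper above appends gimple roughly equivalent to

     var = *(const unsigned char *) str;   // MEM_REF at offset 0

   and returns VAR, letting the string-compare folds below reduce a
   library call to a single character load.  */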
2249
2250 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2251 iterator. FCODE, taken from the callee, selects the variant. */
2252
2253 static bool
2254 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2255 {
2256 gimple *stmt = gsi_stmt (*gsi);
2257 tree callee = gimple_call_fndecl (stmt);
2258 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2259
2260 tree type = integer_type_node;
2261 tree str1 = gimple_call_arg (stmt, 0);
2262 tree str2 = gimple_call_arg (stmt, 1);
2263 tree lhs = gimple_call_lhs (stmt);
2264 HOST_WIDE_INT length = -1;
2265
2266 /* Handle strncmp and strncasecmp functions. */
2267 if (gimple_call_num_args (stmt) == 3)
2268 {
2269 tree len = gimple_call_arg (stmt, 2);
2270 if (tree_fits_uhwi_p (len))
2271 length = tree_to_uhwi (len);
2272 }
2273
2274 /* If the LEN parameter is zero, return zero. */
2275 if (length == 0)
2276 {
2277 replace_call_with_value (gsi, integer_zero_node);
2278 return true;
2279 }
2280
2281 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2282 if (operand_equal_p (str1, str2, 0))
2283 {
2284 replace_call_with_value (gsi, integer_zero_node);
2285 return true;
2286 }
2287
2288 const char *p1 = c_getstr (str1);
2289 const char *p2 = c_getstr (str2);
2290
2291 /* For known strings, return an immediate value. */
2292 if (p1 && p2)
2293 {
2294 int r = 0;
2295 bool known_result = false;
2296
2297 switch (fcode)
2298 {
2299 case BUILT_IN_STRCMP:
2300 case BUILT_IN_STRCMP_EQ:
2301 {
2302 r = strcmp (p1, p2);
2303 known_result = true;
2304 break;
2305 }
2306 case BUILT_IN_STRNCMP:
2307 case BUILT_IN_STRNCMP_EQ:
2308 {
2309 if (length == -1)
2310 break;
2311 r = strncmp (p1, p2, length);
2312 known_result = true;
2313 break;
2314 }
2315 /* The only handleable situation is where the strings are equal
2316 (result 0), which is already covered by the operand_equal_p case. */
2317 case BUILT_IN_STRCASECMP:
2318 break;
2319 case BUILT_IN_STRNCASECMP:
2320 {
2321 if (length == -1)
2322 break;
2323 r = strncmp (p1, p2, length);
2324 if (r == 0)
2325 known_result = true;
2326 break;
2327 }
2328 default:
2329 gcc_unreachable ();
2330 }
2331
2332 if (known_result)
2333 {
2334 replace_call_with_value (gsi, build_cmp_result (type, r));
2335 return true;
2336 }
2337 }
2338
2339 bool nonzero_length = length >= 1
2340 || fcode == BUILT_IN_STRCMP
2341 || fcode == BUILT_IN_STRCMP_EQ
2342 || fcode == BUILT_IN_STRCASECMP;
2343
2344 location_t loc = gimple_location (stmt);
2345
2346 /* If the second arg is "", return *(const unsigned char*)arg1. */
2347 if (p2 && *p2 == '\0' && nonzero_length)
2348 {
2349 gimple_seq stmts = NULL;
2350 tree var = gimple_load_first_char (loc, str1, &stmts);
2351 if (lhs)
2352 {
2353 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2354 gimple_seq_add_stmt_without_update (&stmts, stmt);
2355 }
2356
2357 gsi_replace_with_seq_vops (gsi, stmts);
2358 return true;
2359 }
2360
2361 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2362 if (p1 && *p1 == '\0' && nonzero_length)
2363 {
2364 gimple_seq stmts = NULL;
2365 tree var = gimple_load_first_char (loc, str2, &stmts);
2366
2367 if (lhs)
2368 {
2369 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2370 stmt = gimple_build_assign (c, NOP_EXPR, var);
2371 gimple_seq_add_stmt_without_update (&stmts, stmt);
2372
2373 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2374 gimple_seq_add_stmt_without_update (&stmts, stmt);
2375 }
2376
2377 gsi_replace_with_seq_vops (gsi, stmts);
2378 return true;
2379 }
2380
2381 /* If the len parameter is one, return an expression corresponding to
2382 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2383 if (fcode == BUILT_IN_STRNCMP && length == 1)
2384 {
2385 gimple_seq stmts = NULL;
2386 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2387 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2388
2389 if (lhs)
2390 {
2391 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2392 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2393 gimple_seq_add_stmt_without_update (&stmts, convert1);
2394
2395 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2396 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2397 gimple_seq_add_stmt_without_update (&stmts, convert2);
2398
2399 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2400 gimple_seq_add_stmt_without_update (&stmts, stmt);
2401 }
2402
2403 gsi_replace_with_seq_vops (gsi, stmts);
2404 return true;
2405 }
2406
2407 /* If the bound is larger than the length of one of the constant
2408 strings, replace strncmp with the corresponding strcmp. */
2409 if (fcode == BUILT_IN_STRNCMP
2410 && length > 0
2411 && ((p2 && (size_t) length > strlen (p2))
2412 || (p1 && (size_t) length > strlen (p1))))
2413 {
2414 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2415 if (!fn)
2416 return false;
2417 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2418 replace_call_with_call_and_fold (gsi, repl);
2419 return true;
2420 }
2421
2422 return false;
2423 }
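
/* Illustrative sketch, not part of the original sources: for
   hypothetical pointers S, S1, and S2 the folds above include

     strcmp (s, s)          =>  0
     strcmp ("a", "b")      =>  -1                 // both constant
     strcmp (s, "")         =>  *(const unsigned char *) s
     strncmp (s1, s2, 1)    =>  *(const unsigned char *) s1
                                - *(const unsigned char *) s2
     strncmp (s, "ab", 9)   =>  strcmp (s, "ab")   // bound > strlen

   For strncasecmp only a zero result from strncmp is trusted, since
   letter case could change any other outcome.  */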
2424
2425 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2426
2427 static bool
2428 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2429 {
2430 gimple *stmt = gsi_stmt (*gsi);
2431 tree lhs = gimple_call_lhs (stmt);
2432 tree arg1 = gimple_call_arg (stmt, 0);
2433 tree arg2 = gimple_call_arg (stmt, 1);
2434 tree len = gimple_call_arg (stmt, 2);
2435
2436 /* If the LEN parameter is zero, return zero. */
2437 if (integer_zerop (len))
2438 {
2439 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2440 return true;
2441 }
2442
2443 char c;
2444 if (TREE_CODE (arg2) != INTEGER_CST
2445 || !tree_fits_uhwi_p (len)
2446 || !target_char_cst_p (arg2, &c))
2447 return false;
2448
2449 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2450 unsigned HOST_WIDE_INT string_length;
2451 const char *p1 = c_getstr (arg1, &string_length);
2452
2453 if (p1)
2454 {
2455 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2456 if (r == NULL)
2457 {
2458 if (length <= string_length)
2459 {
2460 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2461 return true;
2462 }
2463 }
2464 else
2465 {
2466 unsigned HOST_WIDE_INT offset = r - p1;
2467 gimple_seq stmts = NULL;
2468 if (lhs != NULL_TREE)
2469 {
2470 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2471 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2472 arg1, offset_cst);
2473 gimple_seq_add_stmt_without_update (&stmts, stmt);
2474 }
2475 else
2476 gimple_seq_add_stmt_without_update (&stmts,
2477 gimple_build_nop ());
2478
2479 gsi_replace_with_seq_vops (gsi, stmts);
2480 return true;
2481 }
2482 }
2483
2484 return false;
2485 }
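
/* Illustrative sketch, not part of the original sources: with a
   constant string and character the fold above gives

     memchr ("abcd", 'c', 4)   =>  "abcd" + 2
     memchr ("abcd", 'x', 4)   =>  (void *) 0      // miss within bound

   A miss with a bound larger than the known string is left alone,
   since the trailing bytes are unknown.  */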
2486
2487 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2488 to the call. UNLOCKED is true if this is actually a call to
2489 fputs_unlocked. Return false if no simplification was possible,
2490 true if the call was replaced (with fputc or fwrite) or removed. */
2493
2494 static bool
2495 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2496 tree arg0, tree arg1,
2497 bool unlocked)
2498 {
2499 gimple *stmt = gsi_stmt (*gsi);
2500
2501 /* If we're using an unlocked function, assume the other unlocked
2502 functions exist explicitly. */
2503 tree const fn_fputc = (unlocked
2504 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2505 : builtin_decl_implicit (BUILT_IN_FPUTC));
2506 tree const fn_fwrite = (unlocked
2507 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2508 : builtin_decl_implicit (BUILT_IN_FWRITE));
2509
2510 /* If the return value is used, don't do the transformation. */
2511 if (gimple_call_lhs (stmt))
2512 return false;
2513
2514 /* Get the length of the string passed to fputs. If the length
2515 can't be determined, punt. */
2516 tree len = get_maxval_strlen (arg0, 0);
2517 if (!len
2518 || TREE_CODE (len) != INTEGER_CST)
2519 return false;
2520
2521 switch (compare_tree_int (len, 1))
2522 {
2523 case -1: /* length is 0, delete the call entirely. */
2524 replace_call_with_value (gsi, integer_zero_node);
2525 return true;
2526
2527 case 0: /* length is 1, call fputc. */
2528 {
2529 const char *p = c_getstr (arg0);
2530 if (p != NULL)
2531 {
2532 if (!fn_fputc)
2533 return false;
2534
2535 gimple *repl = gimple_build_call (fn_fputc, 2,
2536 build_int_cst
2537 (integer_type_node, p[0]), arg1);
2538 replace_call_with_call_and_fold (gsi, repl);
2539 return true;
2540 }
2541 }
2542 /* FALLTHROUGH */
2543 case 1: /* length is greater than 1, call fwrite. */
2544 {
2545 /* If optimizing for size keep fputs. */
2546 if (optimize_function_for_size_p (cfun))
2547 return false;
2548 /* New argument list transforming fputs(string, stream) to
2549 fwrite(string, 1, len, stream). */
2550 if (!fn_fwrite)
2551 return false;
2552
2553 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2554 size_one_node, len, arg1);
2555 replace_call_with_call_and_fold (gsi, repl);
2556 return true;
2557 }
2558 default:
2559 gcc_unreachable ();
2560 }
2561 return false;
2562 }
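
/* Illustrative sketch, not part of the original sources (FP is a
   hypothetical stream): with the result unused, the string length
   selects the fold above:

     fputs ("", fp)      =>  removed
     fputs ("x", fp)     =>  fputc ('x', fp)
     fputs ("abc", fp)   =>  fwrite ("abc", 1, 3, fp)   // not at -Os
*/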
2563
2564 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2565 DEST, SRC, LEN, and SIZE are the arguments to the call.
2566 FCODE is the BUILT_IN_* code of the builtin. Return false if no
2567 simplification was possible; otherwise replace the call with the
2568 unchecked variant (or a value) and return true. */
2569
2570 static bool
2571 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2572 tree dest, tree src, tree len, tree size,
2573 enum built_in_function fcode)
2574 {
2575 gimple *stmt = gsi_stmt (*gsi);
2576 location_t loc = gimple_location (stmt);
2577 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2578 tree fn;
2579
2580 /* If SRC and DEST are the same (and not volatile), return DEST
2581 (resp. DEST+LEN for __mempcpy_chk). */
2582 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2583 {
2584 if (fcode != BUILT_IN_MEMPCPY_CHK)
2585 {
2586 replace_call_with_value (gsi, dest);
2587 return true;
2588 }
2589 else
2590 {
2591 gimple_seq stmts = NULL;
2592 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2593 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2594 TREE_TYPE (dest), dest, len);
2595 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2596 replace_call_with_value (gsi, temp);
2597 return true;
2598 }
2599 }
2600
2601 if (! tree_fits_uhwi_p (size))
2602 return false;
2603
2604 tree maxlen = get_maxval_strlen (len, 2);
2605 if (! integer_all_onesp (size))
2606 {
2607 if (! tree_fits_uhwi_p (len))
2608 {
2609 /* If LEN is not constant, try MAXLEN too.
2610 For MAXLEN only allow optimizing into non-_ocs function
2611 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2612 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2613 {
2614 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2615 {
2616 /* (void) __mempcpy_chk () can be optimized into
2617 (void) __memcpy_chk (). */
2618 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2619 if (!fn)
2620 return false;
2621
2622 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2623 replace_call_with_call_and_fold (gsi, repl);
2624 return true;
2625 }
2626 return false;
2627 }
2628 }
2629 else
2630 maxlen = len;
2631
2632 if (tree_int_cst_lt (size, maxlen))
2633 return false;
2634 }
2635
2636 fn = NULL_TREE;
2637 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2638 mem{cpy,pcpy,move,set} is available. */
2639 switch (fcode)
2640 {
2641 case BUILT_IN_MEMCPY_CHK:
2642 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2643 break;
2644 case BUILT_IN_MEMPCPY_CHK:
2645 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2646 break;
2647 case BUILT_IN_MEMMOVE_CHK:
2648 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2649 break;
2650 case BUILT_IN_MEMSET_CHK:
2651 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2652 break;
2653 default:
2654 break;
2655 }
2656
2657 if (!fn)
2658 return false;
2659
2660 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2661 replace_call_with_call_and_fold (gsi, repl);
2662 return true;
2663 }
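
/* Illustrative sketch, not part of the original sources: when the
   object size is unknown (SIZE is (size_t)-1) or provably no smaller
   than the copied length, the check cannot fail and the fold above
   turns

     __memcpy_chk (d, s, n, -1)

   into the plain

     memcpy (d, s, n);
*/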
2664
2665 /* Fold a call to the __st[rp]cpy_chk builtin.
2666 DEST, SRC, and SIZE are the arguments to the call.
2667 FCODE is the BUILT_IN_* code of the builtin. Return false if no
2668 simplification was possible, true if the call was replaced. */
2670
2671 static bool
2672 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2673 tree dest,
2674 tree src, tree size,
2675 enum built_in_function fcode)
2676 {
2677 gimple *stmt = gsi_stmt (*gsi);
2678 location_t loc = gimple_location (stmt);
2679 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2680 tree len, fn;
2681
2682 /* If SRC and DEST are the same (and not volatile), return DEST. */
2683 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2684 {
2685 /* Issue -Wrestrict unless the pointers are null (those do
2686 not point to objects and so do not indicate an overlap;
2687 such calls could be the result of sanitization and jump
2688 threading). */
2689 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2690 {
2691 tree func = gimple_call_fndecl (stmt);
2692
2693 warning_at (loc, OPT_Wrestrict,
2694 "%qD source argument is the same as destination",
2695 func);
2696 }
2697
2698 replace_call_with_value (gsi, dest);
2699 return true;
2700 }
2701
2702 if (! tree_fits_uhwi_p (size))
2703 return false;
2704
2705 tree maxlen = get_maxval_strlen (src, 1);
2706 if (! integer_all_onesp (size))
2707 {
2708 len = c_strlen (src, 1);
2709 if (! len || ! tree_fits_uhwi_p (len))
2710 {
2711 /* If LEN is not constant, try MAXLEN too.
2712 For MAXLEN only allow optimizing into non-_ocs function
2713 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2714 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2715 {
2716 if (fcode == BUILT_IN_STPCPY_CHK)
2717 {
2718 if (! ignore)
2719 return false;
2720
2721 /* If return value of __stpcpy_chk is ignored,
2722 optimize into __strcpy_chk. */
2723 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2724 if (!fn)
2725 return false;
2726
2727 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2728 replace_call_with_call_and_fold (gsi, repl);
2729 return true;
2730 }
2731
2732 if (! len || TREE_SIDE_EFFECTS (len))
2733 return false;
2734
2735 /* If c_strlen returned something, but not a constant,
2736 transform __strcpy_chk into __memcpy_chk. */
2737 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2738 if (!fn)
2739 return false;
2740
2741 gimple_seq stmts = NULL;
2742 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2743 len = gimple_convert (&stmts, loc, size_type_node, len);
2744 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2745 build_int_cst (size_type_node, 1));
2746 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2747 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2748 replace_call_with_call_and_fold (gsi, repl);
2749 return true;
2750 }
2751 }
2752 else
2753 maxlen = len;
2754
2755 if (! tree_int_cst_lt (maxlen, size))
2756 return false;
2757 }
2758
2759 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2760 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2761 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2762 if (!fn)
2763 return false;
2764
2765 gimple *repl = gimple_build_call (fn, 2, dest, src);
2766 replace_call_with_call_and_fold (gsi, repl);
2767 return true;
2768 }
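
/* Illustrative sketch, not part of the original sources (D is a
   hypothetical destination): the fold above gives

     __strcpy_chk (d, s, -1)     =>  strcpy (d, s)     // size unknown
     __strcpy_chk (d, "ab", 8)   =>  strcpy (d, "ab")  // 2 < 8

   while a known but non-constant source length is instead rewritten
   as a __memcpy_chk of length + 1.  */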
2769
2770 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and
2771 SIZE are the arguments to the call. FCODE is the BUILT_IN_* code of
2772 the builtin. Return false if no simplification was possible, true
2773 if the call was replaced with the unchecked variant. */
2774
2775 static bool
2776 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2777 tree dest, tree src,
2778 tree len, tree size,
2779 enum built_in_function fcode)
2780 {
2781 gimple *stmt = gsi_stmt (*gsi);
2782 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2783 tree fn;
2784
2785 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2786 {
2787 /* If return value of __stpncpy_chk is ignored,
2788 optimize into __strncpy_chk. */
2789 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2790 if (fn)
2791 {
2792 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2793 replace_call_with_call_and_fold (gsi, repl);
2794 return true;
2795 }
2796 }
2797
2798 if (! tree_fits_uhwi_p (size))
2799 return false;
2800
2801 tree maxlen = get_maxval_strlen (len, 2);
2802 if (! integer_all_onesp (size))
2803 {
2804 if (! tree_fits_uhwi_p (len))
2805 {
2806 /* If LEN is not constant, try MAXLEN too.
2807 For MAXLEN only allow optimizing into non-_ocs function
2808 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2809 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2810 return false;
2811 }
2812 else
2813 maxlen = len;
2814
2815 if (tree_int_cst_lt (size, maxlen))
2816 return false;
2817 }
2818
2819 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2820 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2821 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2822 if (!fn)
2823 return false;
2824
2825 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2826 replace_call_with_call_and_fold (gsi, repl);
2827 return true;
2828 }
2829
2830 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2831 Return false if no simplification can be made. */
2832
2833 static bool
2834 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2835 {
2836 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2837 location_t loc = gimple_location (stmt);
2838 tree dest = gimple_call_arg (stmt, 0);
2839 tree src = gimple_call_arg (stmt, 1);
2840 tree fn, lenp1;
2841
2842 /* If the result is unused, replace stpcpy with strcpy. */
2843 if (gimple_call_lhs (stmt) == NULL_TREE)
2844 {
2845 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2846 if (!fn)
2847 return false;
2848 gimple_call_set_fndecl (stmt, fn);
2849 fold_stmt (gsi);
2850 return true;
2851 }
2852
2853 /* Set to non-null if SRC refers to an unterminated array. */
2854 c_strlen_data data = { };
2855 tree len = c_strlen (src, 1, &data, 1);
2856 if (!len
2857 || TREE_CODE (len) != INTEGER_CST)
2858 {
2859 data.decl = unterminated_array (src);
2860 if (!data.decl)
2861 return false;
2862 }
2863
2864 if (data.decl)
2865 {
2866 /* Avoid folding calls with unterminated arrays. */
2867 if (!gimple_no_warning_p (stmt))
2868 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2869 gimple_set_no_warning (stmt, true);
2870 return false;
2871 }
2872
2873 if (optimize_function_for_size_p (cfun)
2874 /* If length is zero it's small enough. */
2875 && !integer_zerop (len))
2876 return false;
2877
2878 /* If the source has a known length replace stpcpy with memcpy. */
2879 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2880 if (!fn)
2881 return false;
2882
2883 gimple_seq stmts = NULL;
2884 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2885 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2886 tem, build_int_cst (size_type_node, 1));
2887 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2888 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2889 gimple_set_vuse (repl, gimple_vuse (stmt));
2890 gimple_set_vdef (repl, gimple_vdef (stmt));
2891 if (gimple_vdef (repl)
2892 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2893 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2894 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2895 /* Replace the result with dest + len. */
2896 stmts = NULL;
2897 tem = gimple_convert (&stmts, loc, sizetype, len);
2898 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2899 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2900 POINTER_PLUS_EXPR, dest, tem);
2901 gsi_replace (gsi, ret, false);
2902 /* Finally fold the memcpy call. */
2903 gimple_stmt_iterator gsi2 = *gsi;
2904 gsi_prev (&gsi2);
2905 fold_stmt (&gsi2);
2906 return true;
2907 }
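
/* Illustrative sketch, not part of the original sources: with a
   known constant source length the fold above rewrites

     p = stpcpy (d, "abc");

   into the equivalent of

     memcpy (d, "abc", 4);   // three chars plus the terminating nul
     p = d + 3;              // stpcpy returns a pointer to the nul

   and when the result is unused the call simply becomes strcpy.  */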
2908
2909 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2910 Return false if a normal call should be emitted rather than
2911 simplifying the call. FCODE is either BUILT_IN_SNPRINTF_CHK or
2912 BUILT_IN_VSNPRINTF_CHK. */
2914
2915 static bool
2916 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2917 enum built_in_function fcode)
2918 {
2919 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2920 tree dest, size, len, fn, fmt, flag;
2921 const char *fmt_str;
2922
2923 /* Verify the required arguments in the original call. */
2924 if (gimple_call_num_args (stmt) < 5)
2925 return false;
2926
2927 dest = gimple_call_arg (stmt, 0);
2928 len = gimple_call_arg (stmt, 1);
2929 flag = gimple_call_arg (stmt, 2);
2930 size = gimple_call_arg (stmt, 3);
2931 fmt = gimple_call_arg (stmt, 4);
2932
2933 if (! tree_fits_uhwi_p (size))
2934 return false;
2935
2936 if (! integer_all_onesp (size))
2937 {
2938 tree maxlen = get_maxval_strlen (len, 2);
2939 if (! tree_fits_uhwi_p (len))
2940 {
2941 /* If LEN is not constant, try MAXLEN too.
2942 For MAXLEN only allow optimizing into non-_ocs function
2943 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2944 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2945 return false;
2946 }
2947 else
2948 maxlen = len;
2949
2950 if (tree_int_cst_lt (size, maxlen))
2951 return false;
2952 }
2953
2954 if (!init_target_chars ())
2955 return false;
2956
2957 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2958 or if format doesn't contain % chars or is "%s". */
2959 if (! integer_zerop (flag))
2960 {
2961 fmt_str = c_getstr (fmt);
2962 if (fmt_str == NULL)
2963 return false;
2964 if (strchr (fmt_str, target_percent) != NULL
2965 && strcmp (fmt_str, target_percent_s))
2966 return false;
2967 }
2968
2969 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2970 available. */
2971 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2972 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2973 if (!fn)
2974 return false;
2975
2976 /* Replace the called function, and the first 5 arguments by 3,
2977 retaining the trailing varargs. */
2978 gimple_call_set_fndecl (stmt, fn);
2979 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2980 gimple_call_set_arg (stmt, 0, dest);
2981 gimple_call_set_arg (stmt, 1, len);
2982 gimple_call_set_arg (stmt, 2, fmt);
2983 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2984 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2985 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2986 fold_stmt (gsi);
2987 return true;
2988 }
2989
2990 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
2991 Return false if a normal call should be emitted rather than
2992 simplifying the call. FCODE is either BUILT_IN_SPRINTF_CHK
2993 or BUILT_IN_VSPRINTF_CHK. */
2994
2995 static bool
2996 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2997 enum built_in_function fcode)
2998 {
2999 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3000 tree dest, size, len, fn, fmt, flag;
3001 const char *fmt_str;
3002 unsigned nargs = gimple_call_num_args (stmt);
3003
3004 /* Verify the required arguments in the original call. */
3005 if (nargs < 4)
3006 return false;
3007 dest = gimple_call_arg (stmt, 0);
3008 flag = gimple_call_arg (stmt, 1);
3009 size = gimple_call_arg (stmt, 2);
3010 fmt = gimple_call_arg (stmt, 3);
3011
3012 if (! tree_fits_uhwi_p (size))
3013 return false;
3014
3015 len = NULL_TREE;
3016
3017 if (!init_target_chars ())
3018 return false;
3019
3020 /* Check whether the format is a literal string constant. */
3021 fmt_str = c_getstr (fmt);
3022 if (fmt_str != NULL)
3023 {
3024 /* If the format doesn't contain % args or %%, we know the size. */
3025 if (strchr (fmt_str, target_percent) == 0)
3026 {
3027 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3028 len = build_int_cstu (size_type_node, strlen (fmt_str));
3029 }
3030 /* If the format is "%s" and first ... argument is a string literal,
3031 we know the size too. */
3032 else if (fcode == BUILT_IN_SPRINTF_CHK
3033 && strcmp (fmt_str, target_percent_s) == 0)
3034 {
3035 tree arg;
3036
3037 if (nargs == 5)
3038 {
3039 arg = gimple_call_arg (stmt, 4);
3040 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3041 {
3042 len = c_strlen (arg, 1);
3043 if (! len || ! tree_fits_uhwi_p (len))
3044 len = NULL_TREE;
3045 }
3046 }
3047 }
3048 }
3049
3050 if (! integer_all_onesp (size))
3051 {
3052 if (! len || ! tree_int_cst_lt (len, size))
3053 return false;
3054 }
3055
3056 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3057 or if format doesn't contain % chars or is "%s". */
3058 if (! integer_zerop (flag))
3059 {
3060 if (fmt_str == NULL)
3061 return false;
3062 if (strchr (fmt_str, target_percent) != NULL
3063 && strcmp (fmt_str, target_percent_s))
3064 return false;
3065 }
3066
3067 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3068 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3069 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3070 if (!fn)
3071 return false;
3072
3073 /* Replace the called function, and the first 4 arguments by 2,
3074 retaining the trailing varargs. */
3075 gimple_call_set_fndecl (stmt, fn);
3076 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3077 gimple_call_set_arg (stmt, 0, dest);
3078 gimple_call_set_arg (stmt, 1, fmt);
3079 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3080 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3081 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3082 fold_stmt (gsi);
3083 return true;
3084 }
3085
3086 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3087 ORIG may be null if this is a 2-argument call. We don't attempt to
3088 simplify calls with more than 3 arguments.
3089
3090 Return true if simplification was possible, otherwise false. */
3091
3092 bool
3093 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3094 {
3095 gimple *stmt = gsi_stmt (*gsi);
3096 tree dest = gimple_call_arg (stmt, 0);
3097 tree fmt = gimple_call_arg (stmt, 1);
3098 tree orig = NULL_TREE;
3099 const char *fmt_str = NULL;
3100
3101 /* Verify the required arguments in the original call. We deal with two
3102 types of sprintf() calls: 'sprintf (str, fmt)' and
3103 'sprintf (dest, "%s", orig)'. */
3104 if (gimple_call_num_args (stmt) > 3)
3105 return false;
3106
3107 if (gimple_call_num_args (stmt) == 3)
3108 orig = gimple_call_arg (stmt, 2);
3109
3110 /* Check whether the format is a literal string constant. */
3111 fmt_str = c_getstr (fmt);
3112 if (fmt_str == NULL)
3113 return false;
3114
3115 if (!init_target_chars ())
3116 return false;
3117
3118 /* If the format doesn't contain % args or %%, use strcpy. */
3119 if (strchr (fmt_str, target_percent) == NULL)
3120 {
3121 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3122
3123 if (!fn)
3124 return false;
3125
3126 /* Don't optimize sprintf (buf, "abc", ptr++). */
3127 if (orig)
3128 return false;
3129
3130 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3131 'format' is known to contain no % formats. */
3132 gimple_seq stmts = NULL;
3133 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3134
3135 /* Propagate the NO_WARNING bit to avoid issuing the same
3136 warning more than once. */
3137 if (gimple_no_warning_p (stmt))
3138 gimple_set_no_warning (repl, true);
3139
3140 gimple_seq_add_stmt_without_update (&stmts, repl);
3141 if (gimple_call_lhs (stmt))
3142 {
3143 repl = gimple_build_assign (gimple_call_lhs (stmt),
3144 build_int_cst (integer_type_node,
3145 strlen (fmt_str)));
3146 gimple_seq_add_stmt_without_update (&stmts, repl);
3147 gsi_replace_with_seq_vops (gsi, stmts);
3148 /* gsi now points at the assignment to the lhs, get a
3149 stmt iterator to the strcpy call.
3150 ??? We can't use gsi_for_stmt as that doesn't work when the
3151 CFG isn't built yet. */
3152 gimple_stmt_iterator gsi2 = *gsi;
3153 gsi_prev (&gsi2);
3154 fold_stmt (&gsi2);
3155 }
3156 else
3157 {
3158 gsi_replace_with_seq_vops (gsi, stmts);
3159 fold_stmt (gsi);
3160 }
3161 return true;
3162 }
3163
3164 /* If the format is "%s", use strcpy (a known source length is required only when the result is used). */
3165 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3166 {
3167 tree fn;
3168 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3169
3170 if (!fn)
3171 return false;
3172
3173 /* Don't crash on sprintf (str1, "%s"). */
3174 if (!orig)
3175 return false;
3176
3177 tree orig_len = NULL_TREE;
3178 if (gimple_call_lhs (stmt))
3179 {
3180 orig_len = get_maxval_strlen (orig, 0);
3181 if (!orig_len)
3182 return false;
3183 }
3184
3185 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3186 gimple_seq stmts = NULL;
3187 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3188
3189 /* Propagate the NO_WARNING bit to avoid issuing the same
3190 warning more than once. */
3191 if (gimple_no_warning_p (stmt))
3192 gimple_set_no_warning (repl, true);
3193
3194 gimple_seq_add_stmt_without_update (&stmts, repl);
3195 if (gimple_call_lhs (stmt))
3196 {
3197 if (!useless_type_conversion_p (integer_type_node,
3198 TREE_TYPE (orig_len)))
3199 orig_len = fold_convert (integer_type_node, orig_len);
3200 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3201 gimple_seq_add_stmt_without_update (&stmts, repl);
3202 gsi_replace_with_seq_vops (gsi, stmts);
3203 /* gsi now points at the assignment to the lhs, get a
3204 stmt iterator to the strcpy call.
3205 ??? We can't use gsi_for_stmt as that doesn't work when the
3206 CFG isn't built yet. */
3207 gimple_stmt_iterator gsi2 = *gsi;
3208 gsi_prev (&gsi2);
3209 fold_stmt (&gsi2);
3210 }
3211 else
3212 {
3213 gsi_replace_with_seq_vops (gsi, stmts);
3214 fold_stmt (gsi);
3215 }
3216 return true;
3217 }
3218 return false;
3219 }
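
/* Illustrative sketch, not part of the original sources: the two
   forms handled above fold as

     sprintf (d, "abc")     =>  strcpy (d, "abc")   // lhs, if any, = 3
     sprintf (d, "%s", s)   =>  strcpy (d, s)       // lhs = known strlen

   with the length assignment emitted only when the sprintf result is
   actually used.  */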
3220
3221 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3222 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3223 attempt to simplify calls with more than 4 arguments.
3224
3225 Return true if simplification was possible, otherwise false. */
3226
3227 bool
3228 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3229 {
3230 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3231 tree dest = gimple_call_arg (stmt, 0);
3232 tree destsize = gimple_call_arg (stmt, 1);
3233 tree fmt = gimple_call_arg (stmt, 2);
3234 tree orig = NULL_TREE;
3235 const char *fmt_str = NULL;
3236
3237 if (gimple_call_num_args (stmt) > 4)
3238 return false;
3239
3240 if (gimple_call_num_args (stmt) == 4)
3241 orig = gimple_call_arg (stmt, 3);
3242
3243 if (!tree_fits_uhwi_p (destsize))
3244 return false;
3245 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3246
3247 /* Check whether the format is a literal string constant. */
3248 fmt_str = c_getstr (fmt);
3249 if (fmt_str == NULL)
3250 return false;
3251
3252 if (!init_target_chars ())
3253 return false;
3254
3255 /* If the format doesn't contain % args or %%, use strcpy. */
3256 if (strchr (fmt_str, target_percent) == NULL)
3257 {
3258 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3259 if (!fn)
3260 return false;
3261
3262 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3263 if (orig)
3264 return false;
3265
3266 /* We could expand this as
3267 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3268 or to
3269 memcpy (str, fmt_with_nul_at_cstm1, cst);
3270 but in the former case that might increase code size
3271 and in the latter case grow .rodata section too much.
3272 So punt for now. */
3273 size_t len = strlen (fmt_str);
3274 if (len >= destlen)
3275 return false;
3276
3277 gimple_seq stmts = NULL;
3278 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3279 gimple_seq_add_stmt_without_update (&stmts, repl);
3280 if (gimple_call_lhs (stmt))
3281 {
3282 repl = gimple_build_assign (gimple_call_lhs (stmt),
3283 build_int_cst (integer_type_node, len));
3284 gimple_seq_add_stmt_without_update (&stmts, repl);
3285 gsi_replace_with_seq_vops (gsi, stmts);
3286 /* gsi now points at the assignment to the lhs, get a
3287 stmt iterator to the strcpy call.
3288 ??? We can't use gsi_for_stmt as that doesn't work when the
3289 CFG isn't built yet. */
3290 gimple_stmt_iterator gsi2 = *gsi;
3291 gsi_prev (&gsi2);
3292 fold_stmt (&gsi2);
3293 }
3294 else
3295 {
3296 gsi_replace_with_seq_vops (gsi, stmts);
3297 fold_stmt (gsi);
3298 }
3299 return true;
3300 }
3301
3302 /* If the format is "%s", use strcpy when the source length is known to fit. */
3303 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3304 {
3305 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3306 if (!fn)
3307 return false;
3308
3309 /* Don't crash on snprintf (str1, cst, "%s"). */
3310 if (!orig)
3311 return false;
3312
3313 tree orig_len = get_maxval_strlen (orig, 0);
3314 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3315 return false;
3316
3317 /* We could expand this as
3318 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3319 or to
3320 memcpy (str1, str2_with_nul_at_cstm1, cst);
3321 but in the former case that might increase code size
3322 and in the latter case grow .rodata section too much.
3323 So punt for now. */
3324 if (compare_tree_int (orig_len, destlen) >= 0)
3325 return false;
3326
3327 /* Convert snprintf (str1, cst, "%s", str2) into
3328 strcpy (str1, str2) if strlen (str2) < cst. */
3329 gimple_seq stmts = NULL;
3330 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3331 gimple_seq_add_stmt_without_update (&stmts, repl);
3332 if (gimple_call_lhs (stmt))
3333 {
3334 if (!useless_type_conversion_p (integer_type_node,
3335 TREE_TYPE (orig_len)))
3336 orig_len = fold_convert (integer_type_node, orig_len);
3337 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3338 gimple_seq_add_stmt_without_update (&stmts, repl);
3339 gsi_replace_with_seq_vops (gsi, stmts);
3340 /* gsi now points at the assignment to the lhs, get a
3341 stmt iterator to the strcpy call.
3342 ??? We can't use gsi_for_stmt as that doesn't work when the
3343 CFG isn't built yet. */
3344 gimple_stmt_iterator gsi2 = *gsi;
3345 gsi_prev (&gsi2);
3346 fold_stmt (&gsi2);
3347 }
3348 else
3349 {
3350 gsi_replace_with_seq_vops (gsi, stmts);
3351 fold_stmt (gsi);
3352 }
3353 return true;
3354 }
3355 return false;
3356 }
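
/* Illustrative sketch, not part of the original sources: provided
   the known output length is strictly smaller than the destination
   size, the folds above give

     snprintf (buf, 8, "abc")        =>  strcpy (buf, "abc")  // lhs = 3
     snprintf (buf, 8, "%s", "ab")   =>  strcpy (buf, "ab")   // lhs = 2

   Anything that might truncate is punted on.  */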
3357
3358 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
3359 builtins. FP, FMT, and ARG are the arguments to the call. We don't
3360 fold calls with more than 3 arguments, and ARG may be null in the
3361 2-argument case.
3362
3363 Return false if no simplification was possible, true if the call was
3364 replaced. FCODE is the BUILT_IN_* code of the function to be simplified. */
3365
3366 static bool
3367 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3368 tree fp, tree fmt, tree arg,
3369 enum built_in_function fcode)
3370 {
3371 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3372 tree fn_fputc, fn_fputs;
3373 const char *fmt_str = NULL;
3374
3375 /* If the return value is used, don't do the transformation. */
3376 if (gimple_call_lhs (stmt) != NULL_TREE)
3377 return false;
3378
3379 /* Check whether the format is a literal string constant. */
3380 fmt_str = c_getstr (fmt);
3381 if (fmt_str == NULL)
3382 return false;
3383
3384 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3385 {
3386 /* If we're using an unlocked function, assume the other
3387 unlocked functions exist explicitly. */
3388 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3389 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3390 }
3391 else
3392 {
3393 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3394 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3395 }
3396
3397 if (!init_target_chars ())
3398 return false;
3399
3400 /* If the format doesn't contain % args or %%, use fputs. */
3401 if (strchr (fmt_str, target_percent) == NULL)
3402 {
3403 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3404 && arg)
3405 return false;
3406
3407 /* If the format specifier was "", fprintf does nothing. */
3408 if (fmt_str[0] == '\0')
3409 {
3410 replace_call_with_value (gsi, NULL_TREE);
3411 return true;
3412 }
3413
3414 /* When "string" doesn't contain %, replace all cases of
3415 fprintf (fp, string) with fputs (string, fp). The fputs
3416 builtin will take care of special cases like length == 1. */
3417 if (fn_fputs)
3418 {
3419 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3420 replace_call_with_call_and_fold (gsi, repl);
3421 return true;
3422 }
3423 }
3424
3425 /* The other optimizations can be done only on the non-va_list variants. */
3426 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3427 return false;
3428
3429 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3430 else if (strcmp (fmt_str, target_percent_s) == 0)
3431 {
3432 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3433 return false;
3434 if (fn_fputs)
3435 {
3436 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3437 replace_call_with_call_and_fold (gsi, repl);
3438 return true;
3439 }
3440 }
3441
3442 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3443 else if (strcmp (fmt_str, target_percent_c) == 0)
3444 {
3445 if (!arg
3446 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3447 return false;
3448 if (fn_fputc)
3449 {
3450 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3451 replace_call_with_call_and_fold (gsi, repl);
3452 return true;
3453 }
3454 }
3455
3456 return false;
3457 }
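
/* Illustrative sketch, not part of the original sources: with the
   result unused, the folds above give

     fprintf (fp, "")        =>  removed
     fprintf (fp, "hi")      =>  fputs ("hi", fp)
     fprintf (fp, "%s", s)   =>  fputs (s, fp)
     fprintf (fp, "%c", c)   =>  fputc (c, fp)
*/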
3458
3459 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3460 FMT and ARG are the arguments to the call; we don't fold cases with
3461 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3462
3463 Return false if no simplification was possible, true if the call was
3464 replaced. FCODE is the BUILT_IN_* code of the function to be
3465 simplified. */
3466
3467 static bool
3468 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3469 tree arg, enum built_in_function fcode)
3470 {
3471 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3472 tree fn_putchar, fn_puts, newarg;
3473 const char *fmt_str = NULL;
3474
3475 /* If the return value is used, don't do the transformation. */
3476 if (gimple_call_lhs (stmt) != NULL_TREE)
3477 return false;
3478
3479 /* Check whether the format is a literal string constant. */
3480 fmt_str = c_getstr (fmt);
3481 if (fmt_str == NULL)
3482 return false;
3483
3484 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3485 {
3486 /* If we're using an unlocked function, assume the other
3487 unlocked functions exist explicitly. */
3488 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3489 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3490 }
3491 else
3492 {
3493 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3494 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3495 }
3496
3497 if (!init_target_chars ())
3498 return false;
3499
3500 if (strcmp (fmt_str, target_percent_s) == 0
3501 || strchr (fmt_str, target_percent) == NULL)
3502 {
3503 const char *str;
3504
3505 if (strcmp (fmt_str, target_percent_s) == 0)
3506 {
3507 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3508 return false;
3509
3510 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3511 return false;
3512
3513 str = c_getstr (arg);
3514 if (str == NULL)
3515 return false;
3516 }
3517 else
3518 {
3519 /* The format specifier doesn't contain any '%' characters. */
3520 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3521 && arg)
3522 return false;
3523 str = fmt_str;
3524 }
3525
3526 /* If the string was "", printf does nothing. */
3527 if (str[0] == '\0')
3528 {
3529 replace_call_with_value (gsi, NULL_TREE);
3530 return true;
3531 }
3532
3533 /* If the string has length of 1, call putchar. */
3534 if (str[1] == '\0')
3535 {
3536 /* Given printf ("c"), where c is any one character, convert
3537 "c"[0] to an int and pass that to the replacement function. */
3539 newarg = build_int_cst (integer_type_node, str[0]);
3540 if (fn_putchar)
3541 {
3542 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3543 replace_call_with_call_and_fold (gsi, repl);
3544 return true;
3545 }
3546 }
3547 else
3548 {
3549 /* If the string was "string\n", call puts("string"). */
3550 size_t len = strlen (str);
3551 if ((unsigned char)str[len - 1] == target_newline
3552 && (size_t) (int) len == len
3553 && (int) len > 0)
3554 {
3555 char *newstr;
3556
3557 /* Create a NUL-terminated string that's one char shorter
3558 than the original, stripping off the trailing '\n'. */
3559 newstr = xstrdup (str);
3560 newstr[len - 1] = '\0';
3561 newarg = build_string_literal (len, newstr);
3562 free (newstr);
3563 if (fn_puts)
3564 {
3565 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3566 replace_call_with_call_and_fold (gsi, repl);
3567 return true;
3568 }
3569 }
3570 else
3571 /* We'd like to arrange to call fputs(string,stdout) here,
3572 but we need stdout and don't have a way to get it yet. */
3573 return false;
3574 }
3575 }
3576
3577 /* The other optimizations can be done only on the non-va_list variants. */
3578 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3579 return false;
3580
3581 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3582 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3583 {
3584 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3585 return false;
3586 if (fn_puts)
3587 {
3588 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3589 replace_call_with_call_and_fold (gsi, repl);
3590 return true;
3591 }
3592 }
3593
3594 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3595 else if (strcmp (fmt_str, target_percent_c) == 0)
3596 {
3597 if (!arg || ! useless_type_conversion_p (integer_type_node,
3598 TREE_TYPE (arg)))
3599 return false;
3600 if (fn_putchar)
3601 {
3602 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3603 replace_call_with_call_and_fold (gsi, repl);
3604 return true;
3605 }
3606 }
3607
3608 return false;
3609 }
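/* For illustration (editorial sketch, grounded in the folds above): with
   the return value unused and the corresponding puts/putchar decls
   available, the transformations amount to

     printf ("hello\n");   =>  puts ("hello");
     printf ("x");         =>  putchar ('x');
     printf ("");          =>  (call removed)
     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);  */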
3610
3611
3612
3613 /* Fold a call to __builtin_strlen; fold to a constant when the argument's length is known exactly, otherwise record the length range on the result. */
3614
3615 static bool
3616 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3617 {
3618 gimple *stmt = gsi_stmt (*gsi);
3619 tree arg = gimple_call_arg (stmt, 0);
3620
3621 wide_int minlen;
3622 wide_int maxlen;
3623
3624 /* Set to non-null if ARG refers to an unterminated array. */
3625 tree nonstr;
3626 tree lenrange[2];
3627 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3628 && !nonstr
3629 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3630 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3631 {
3632 /* The range of lengths refers to either a single constant
3633 string or to the longest and shortest constant string
3634 referenced by the argument of the strlen() call, or to
3635 the strings that can possibly be stored in the arrays
3636 the argument refers to. */
3637 minlen = wi::to_wide (lenrange[0]);
3638 maxlen = wi::to_wide (lenrange[1]);
3639 }
3640 else
3641 {
3642 unsigned prec = TYPE_PRECISION (sizetype);
3643
3644 minlen = wi::shwi (0, prec);
3645 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3646 }
3647
3648 if (minlen == maxlen)
3649 {
3650 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3651 true, GSI_SAME_STMT);
3652 replace_call_with_value (gsi, lenrange[0]);
3653 return true;
3654 }
3655
3656 if (tree lhs = gimple_call_lhs (stmt))
3657 if (TREE_CODE (lhs) == SSA_NAME
3658 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3659 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3660
3661 return false;
3662 }
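/* For illustration: a strlen of a string whose length is known exactly
   folds to a constant, e.g.

     n = __builtin_strlen ("hello");   =>  n = 5;

   and when only bounds are known, the [minlen, maxlen] range is recorded
   on the lhs SSA name for later passes to exploit.  */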
3663
3664 /* Fold a call to __builtin_acc_on_device. */
3665
3666 static bool
3667 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3668 {
3669 /* Defer folding until we know which compiler we're in (host or accelerator). */
3670 if (symtab->state != EXPANSION)
3671 return false;
3672
3673 unsigned val_host = GOMP_DEVICE_HOST;
3674 unsigned val_dev = GOMP_DEVICE_NONE;
3675
3676 #ifdef ACCEL_COMPILER
3677 val_host = GOMP_DEVICE_NOT_HOST;
3678 val_dev = ACCEL_COMPILER_acc_device;
3679 #endif
3680
3681 location_t loc = gimple_location (gsi_stmt (*gsi));
3682
3683 tree host_eq = make_ssa_name (boolean_type_node);
3684 gimple *host_ass = gimple_build_assign
3685 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3686 gimple_set_location (host_ass, loc);
3687 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3688
3689 tree dev_eq = make_ssa_name (boolean_type_node);
3690 gimple *dev_ass = gimple_build_assign
3691 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3692 gimple_set_location (dev_ass, loc);
3693 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3694
3695 tree result = make_ssa_name (boolean_type_node);
3696 gimple *result_ass = gimple_build_assign
3697 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3698 gimple_set_location (result_ass, loc);
3699 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3700
3701 replace_call_with_value (gsi, result);
3702
3703 return true;
3704 }
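/* In effect, acc_on_device (d) is expanded above as the boolean
   computation

     t1 = d == val_host;
     t2 = d == val_dev;
     result = t1 | t2;

   where val_host/val_dev depend on whether this is the host or the
   accelerator compiler.  */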
3705
3706 /* Fold realloc (0, n) -> malloc (n). */
3707
3708 static bool
3709 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3710 {
3711 gimple *stmt = gsi_stmt (*gsi);
3712 tree arg = gimple_call_arg (stmt, 0);
3713 tree size = gimple_call_arg (stmt, 1);
3714
3715 if (operand_equal_p (arg, null_pointer_node, 0))
3716 {
3717 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3718 if (fn_malloc)
3719 {
3720 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3721 replace_call_with_call_and_fold (gsi, repl);
3722 return true;
3723 }
3724 }
3725 return false;
3726 }
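/* This relies on the C guarantee that realloc (NULL, n) behaves like
   malloc (n); the fold is applied only when an implicit malloc decl is
   available.  */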
3727
3728 /* Fold the non-target builtin at *GSI and return whether any simplification
3729 was made. */
3730
3731 static bool
3732 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3733 {
3734 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3735 tree callee = gimple_call_fndecl (stmt);
3736
3737 /* Give up for always_inline inline builtins until they are
3738 inlined. */
3739 if (avoid_folding_inline_builtin (callee))
3740 return false;
3741
3742 unsigned n = gimple_call_num_args (stmt);
3743 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3744 switch (fcode)
3745 {
3746 case BUILT_IN_BCMP:
3747 return gimple_fold_builtin_bcmp (gsi);
3748 case BUILT_IN_BCOPY:
3749 return gimple_fold_builtin_bcopy (gsi);
3750 case BUILT_IN_BZERO:
3751 return gimple_fold_builtin_bzero (gsi);
3752
3753 case BUILT_IN_MEMSET:
3754 return gimple_fold_builtin_memset (gsi,
3755 gimple_call_arg (stmt, 1),
3756 gimple_call_arg (stmt, 2));
3757 case BUILT_IN_MEMCPY:
3758 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3759 gimple_call_arg (stmt, 1), 0);
3760 case BUILT_IN_MEMPCPY:
3761 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3762 gimple_call_arg (stmt, 1), 1);
3763 case BUILT_IN_MEMMOVE:
3764 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3765 gimple_call_arg (stmt, 1), 3);
3766 case BUILT_IN_SPRINTF_CHK:
3767 case BUILT_IN_VSPRINTF_CHK:
3768 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3769 case BUILT_IN_STRCAT_CHK:
3770 return gimple_fold_builtin_strcat_chk (gsi);
3771 case BUILT_IN_STRNCAT_CHK:
3772 return gimple_fold_builtin_strncat_chk (gsi);
3773 case BUILT_IN_STRLEN:
3774 return gimple_fold_builtin_strlen (gsi);
3775 case BUILT_IN_STRCPY:
3776 return gimple_fold_builtin_strcpy (gsi,
3777 gimple_call_arg (stmt, 0),
3778 gimple_call_arg (stmt, 1));
3779 case BUILT_IN_STRNCPY:
3780 return gimple_fold_builtin_strncpy (gsi,
3781 gimple_call_arg (stmt, 0),
3782 gimple_call_arg (stmt, 1),
3783 gimple_call_arg (stmt, 2));
3784 case BUILT_IN_STRCAT:
3785 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3786 gimple_call_arg (stmt, 1));
3787 case BUILT_IN_STRNCAT:
3788 return gimple_fold_builtin_strncat (gsi);
3789 case BUILT_IN_INDEX:
3790 case BUILT_IN_STRCHR:
3791 return gimple_fold_builtin_strchr (gsi, false);
3792 case BUILT_IN_RINDEX:
3793 case BUILT_IN_STRRCHR:
3794 return gimple_fold_builtin_strchr (gsi, true);
3795 case BUILT_IN_STRSTR:
3796 return gimple_fold_builtin_strstr (gsi);
3797 case BUILT_IN_STRCMP:
3798 case BUILT_IN_STRCMP_EQ:
3799 case BUILT_IN_STRCASECMP:
3800 case BUILT_IN_STRNCMP:
3801 case BUILT_IN_STRNCMP_EQ:
3802 case BUILT_IN_STRNCASECMP:
3803 return gimple_fold_builtin_string_compare (gsi);
3804 case BUILT_IN_MEMCHR:
3805 return gimple_fold_builtin_memchr (gsi);
3806 case BUILT_IN_FPUTS:
3807 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3808 gimple_call_arg (stmt, 1), false);
3809 case BUILT_IN_FPUTS_UNLOCKED:
3810 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3811 gimple_call_arg (stmt, 1), true);
3812 case BUILT_IN_MEMCPY_CHK:
3813 case BUILT_IN_MEMPCPY_CHK:
3814 case BUILT_IN_MEMMOVE_CHK:
3815 case BUILT_IN_MEMSET_CHK:
3816 return gimple_fold_builtin_memory_chk (gsi,
3817 gimple_call_arg (stmt, 0),
3818 gimple_call_arg (stmt, 1),
3819 gimple_call_arg (stmt, 2),
3820 gimple_call_arg (stmt, 3),
3821 fcode);
3822 case BUILT_IN_STPCPY:
3823 return gimple_fold_builtin_stpcpy (gsi);
3824 case BUILT_IN_STRCPY_CHK:
3825 case BUILT_IN_STPCPY_CHK:
3826 return gimple_fold_builtin_stxcpy_chk (gsi,
3827 gimple_call_arg (stmt, 0),
3828 gimple_call_arg (stmt, 1),
3829 gimple_call_arg (stmt, 2),
3830 fcode);
3831 case BUILT_IN_STRNCPY_CHK:
3832 case BUILT_IN_STPNCPY_CHK:
3833 return gimple_fold_builtin_stxncpy_chk (gsi,
3834 gimple_call_arg (stmt, 0),
3835 gimple_call_arg (stmt, 1),
3836 gimple_call_arg (stmt, 2),
3837 gimple_call_arg (stmt, 3),
3838 fcode);
3839 case BUILT_IN_SNPRINTF_CHK:
3840 case BUILT_IN_VSNPRINTF_CHK:
3841 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3842
3843 case BUILT_IN_FPRINTF:
3844 case BUILT_IN_FPRINTF_UNLOCKED:
3845 case BUILT_IN_VFPRINTF:
3846 if (n == 2 || n == 3)
3847 return gimple_fold_builtin_fprintf (gsi,
3848 gimple_call_arg (stmt, 0),
3849 gimple_call_arg (stmt, 1),
3850 n == 3
3851 ? gimple_call_arg (stmt, 2)
3852 : NULL_TREE,
3853 fcode);
3854 break;
3855 case BUILT_IN_FPRINTF_CHK:
3856 case BUILT_IN_VFPRINTF_CHK:
3857 if (n == 3 || n == 4)
3858 return gimple_fold_builtin_fprintf (gsi,
3859 gimple_call_arg (stmt, 0),
3860 gimple_call_arg (stmt, 2),
3861 n == 4
3862 ? gimple_call_arg (stmt, 3)
3863 : NULL_TREE,
3864 fcode);
3865 break;
3866 case BUILT_IN_PRINTF:
3867 case BUILT_IN_PRINTF_UNLOCKED:
3868 case BUILT_IN_VPRINTF:
3869 if (n == 1 || n == 2)
3870 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3871 n == 2
3872 ? gimple_call_arg (stmt, 1)
3873 : NULL_TREE, fcode);
3874 break;
3875 case BUILT_IN_PRINTF_CHK:
3876 case BUILT_IN_VPRINTF_CHK:
3877 if (n == 2 || n == 3)
3878 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3879 n == 3
3880 ? gimple_call_arg (stmt, 2)
3881 : NULL_TREE, fcode);
3882 break;
3883 case BUILT_IN_ACC_ON_DEVICE:
3884 return gimple_fold_builtin_acc_on_device (gsi,
3885 gimple_call_arg (stmt, 0));
3886 case BUILT_IN_REALLOC:
3887 return gimple_fold_builtin_realloc (gsi);
3888
3889 default:;
3890 }
3891
3892 /* Try the generic builtin folder. */
3893 bool ignore = (gimple_call_lhs (stmt) == NULL);
3894 tree result = fold_call_stmt (stmt, ignore);
3895 if (result)
3896 {
3897 if (ignore)
3898 STRIP_NOPS (result);
3899 else
3900 result = fold_convert (gimple_call_return_type (stmt), result);
3901 if (!update_call_from_tree (gsi, result))
3902 gimplify_and_update_call_from_tree (gsi, result);
3903 return true;
3904 }
3905
3906 return false;
3907 }
3908
3909 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3910 function calls to constants, where possible. */
3911
3912 static tree
3913 fold_internal_goacc_dim (const gimple *call)
3914 {
3915 int axis = oacc_get_ifn_dim_arg (call);
3916 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3917 tree result = NULL_TREE;
3918 tree type = TREE_TYPE (gimple_call_lhs (call));
3919
3920 switch (gimple_call_internal_fn (call))
3921 {
3922 case IFN_GOACC_DIM_POS:
3923 /* If the size is 1, we know the answer. */
3924 if (size == 1)
3925 result = build_int_cst (type, 0);
3926 break;
3927 case IFN_GOACC_DIM_SIZE:
3928 /* If the size is not dynamic, we know the answer. */
3929 if (size)
3930 result = build_int_cst (type, size);
3931 break;
3932 default:
3933 break;
3934 }
3935
3936 return result;
3937 }
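/* For example, if a dimension is known at compile time to have size 1,
   the only valid position in it is 0, so GOACC_DIM_POS folds to 0; a
   nonzero compile-time size lets GOACC_DIM_SIZE fold to that constant.  */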
3938
3939 /* Return true if STMT is an __atomic_compare_exchange_N call that is suitable
3940 for conversion into ATOMIC_COMPARE_EXCHANGE, i.e. when its second argument is
3941 &var and var is only addressable because of such calls. */
3942
3943 bool
3944 optimize_atomic_compare_exchange_p (gimple *stmt)
3945 {
3946 if (gimple_call_num_args (stmt) != 6
3947 || !flag_inline_atomics
3948 || !optimize
3949 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3950 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3951 || !gimple_vdef (stmt)
3952 || !gimple_vuse (stmt))
3953 return false;
3954
3955 tree fndecl = gimple_call_fndecl (stmt);
3956 switch (DECL_FUNCTION_CODE (fndecl))
3957 {
3958 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3959 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3960 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3961 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3962 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3963 break;
3964 default:
3965 return false;
3966 }
3967
3968 tree expected = gimple_call_arg (stmt, 1);
3969 if (TREE_CODE (expected) != ADDR_EXPR
3970 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3971 return false;
3972
3973 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3974 if (!is_gimple_reg_type (etype)
3975 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3976 || TREE_THIS_VOLATILE (etype)
3977 || VECTOR_TYPE_P (etype)
3978 || TREE_CODE (etype) == COMPLEX_TYPE
3979 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3980 might not preserve all the bits. See PR71716. */
3981 || SCALAR_FLOAT_TYPE_P (etype)
3982 || maybe_ne (TYPE_PRECISION (etype),
3983 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3984 return false;
3985
3986 tree weak = gimple_call_arg (stmt, 3);
3987 if (!integer_zerop (weak) && !integer_onep (weak))
3988 return false;
3989
3990 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3991 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3992 machine_mode mode = TYPE_MODE (itype);
3993
3994 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3995 == CODE_FOR_nothing
3996 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3997 return false;
3998
3999 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4000 return false;
4001
4002 return true;
4003 }
4004
4005 /* Fold
4006 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4007 into
4008 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4009 i = IMAGPART_EXPR <t>;
4010 r = (_Bool) i;
4011 e = REALPART_EXPR <t>; */
4012
4013 void
4014 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4015 {
4016 gimple *stmt = gsi_stmt (*gsi);
4017 tree fndecl = gimple_call_fndecl (stmt);
4018 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4019 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4020 tree ctype = build_complex_type (itype);
4021 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4022 bool throws = false;
4023 edge e = NULL;
4024 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4025 expected);
4026 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4027 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4028 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4029 {
4030 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4031 build1 (VIEW_CONVERT_EXPR, itype,
4032 gimple_assign_lhs (g)));
4033 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4034 }
4035 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4036 + int_size_in_bytes (itype);
4037 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4038 gimple_call_arg (stmt, 0),
4039 gimple_assign_lhs (g),
4040 gimple_call_arg (stmt, 2),
4041 build_int_cst (integer_type_node, flag),
4042 gimple_call_arg (stmt, 4),
4043 gimple_call_arg (stmt, 5));
4044 tree lhs = make_ssa_name (ctype);
4045 gimple_call_set_lhs (g, lhs);
4046 gimple_set_vdef (g, gimple_vdef (stmt));
4047 gimple_set_vuse (g, gimple_vuse (stmt));
4048 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4049 tree oldlhs = gimple_call_lhs (stmt);
4050 if (stmt_can_throw_internal (cfun, stmt))
4051 {
4052 throws = true;
4053 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4054 }
4055 gimple_call_set_nothrow (as_a <gcall *> (g),
4056 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4057 gimple_call_set_lhs (stmt, NULL_TREE);
4058 gsi_replace (gsi, g, true);
4059 if (oldlhs)
4060 {
4061 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4062 build1 (IMAGPART_EXPR, itype, lhs));
4063 if (throws)
4064 {
4065 gsi_insert_on_edge_immediate (e, g);
4066 *gsi = gsi_for_stmt (g);
4067 }
4068 else
4069 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4070 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4071 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4072 }
4073 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4074 build1 (REALPART_EXPR, itype, lhs));
4075 if (throws && oldlhs == NULL_TREE)
4076 {
4077 gsi_insert_on_edge_immediate (e, g);
4078 *gsi = gsi_for_stmt (g);
4079 }
4080 else
4081 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4082 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4083 {
4084 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4085 VIEW_CONVERT_EXPR,
4086 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4087 gimple_assign_lhs (g)));
4088 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4089 }
4090 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4091 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4092 *gsi = gsiret;
4093 }
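/* Note the encoding of the fourth argument of the internal call built
   above: FLAG is N + (w ? 256 : 0), i.e. the low byte holds the access
   size in bytes and bit 8 the weak/strong choice, matching the
   w * 256 + N form in the comment before this function.  */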
4094
4095 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
4096 doesn't fit into TYPE. The test for overflow is performed regardless of
4097 -fwrapv, and even for unsigned types. */
4098
4099 bool
4100 arith_overflowed_p (enum tree_code code, const_tree type,
4101 const_tree arg0, const_tree arg1)
4102 {
4103 widest2_int warg0 = widest2_int_cst (arg0);
4104 widest2_int warg1 = widest2_int_cst (arg1);
4105 widest2_int wres;
4106 switch (code)
4107 {
4108 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4109 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4110 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4111 default: gcc_unreachable ();
4112 }
4113 signop sign = TYPE_SIGN (type);
4114 if (sign == UNSIGNED && wi::neg_p (wres))
4115 return true;
4116 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4117 }
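/* For illustration: for PLUS_EXPR on an 8-bit unsigned type, 200 + 100
   evaluates to 300 in the double-width type; representing 300 needs
   9 bits of precision, which exceeds TYPE_PRECISION, so the function
   returns true.  */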
4118
4119 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4120 The statement may be replaced by another statement, e.g., if the call
4121 simplifies to a constant value. Return true if any changes were made.
4122 It is assumed that the operands have been previously folded. */
4123
4124 static bool
4125 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4126 {
4127 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4128 tree callee;
4129 bool changed = false;
4130 unsigned i;
4131
4132 /* Fold *& in call arguments. */
4133 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4134 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4135 {
4136 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4137 if (tmp)
4138 {
4139 gimple_call_set_arg (stmt, i, tmp);
4140 changed = true;
4141 }
4142 }
4143
4144 /* Check for virtual calls that became direct calls. */
4145 callee = gimple_call_fn (stmt);
4146 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4147 {
4148 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4149 {
4150 if (dump_file && virtual_method_call_p (callee)
4151 && !possible_polymorphic_call_target_p
4152 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4153 (OBJ_TYPE_REF_EXPR (callee)))))
4154 {
4155 fprintf (dump_file,
4156 "Type inheritance inconsistent devirtualization of ");
4157 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4158 fprintf (dump_file, " to ");
4159 print_generic_expr (dump_file, callee, TDF_SLIM);
4160 fprintf (dump_file, "\n");
4161 }
4162
4163 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4164 changed = true;
4165 }
4166 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4167 {
4168 bool final;
4169 vec <cgraph_node *>targets
4170 = possible_polymorphic_call_targets (callee, stmt, &final);
4171 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4172 {
4173 tree lhs = gimple_call_lhs (stmt);
4174 if (dump_enabled_p ())
4175 {
4176 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4177 "folding virtual function call to %s\n",
4178 targets.length () == 1
4179 ? targets[0]->name ()
4180 : "__builtin_unreachable");
4181 }
4182 if (targets.length () == 1)
4183 {
4184 tree fndecl = targets[0]->decl;
4185 gimple_call_set_fndecl (stmt, fndecl);
4186 changed = true;
4187 /* If changing the call to __cxa_pure_virtual
4188 or similar noreturn function, adjust gimple_call_fntype
4189 too. */
4190 if (gimple_call_noreturn_p (stmt)
4191 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4192 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4193 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4194 == void_type_node))
4195 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4196 /* If the call becomes noreturn, remove the lhs. */
4197 if (lhs
4198 && gimple_call_noreturn_p (stmt)
4199 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4200 || should_remove_lhs_p (lhs)))
4201 {
4202 if (TREE_CODE (lhs) == SSA_NAME)
4203 {
4204 tree var = create_tmp_var (TREE_TYPE (lhs));
4205 tree def = get_or_create_ssa_default_def (cfun, var);
4206 gimple *new_stmt = gimple_build_assign (lhs, def);
4207 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4208 }
4209 gimple_call_set_lhs (stmt, NULL_TREE);
4210 }
4211 maybe_remove_unused_call_args (cfun, stmt);
4212 }
4213 else
4214 {
4215 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4216 gimple *new_stmt = gimple_build_call (fndecl, 0);
4217 gimple_set_location (new_stmt, gimple_location (stmt));
4218 /* If the call had an SSA name as its lhs, morph that into
4219 an uninitialized value. */
4220 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4221 {
4222 tree var = create_tmp_var (TREE_TYPE (lhs));
4223 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4224 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4225 set_ssa_default_def (cfun, var, lhs);
4226 }
4227 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4228 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4229 gsi_replace (gsi, new_stmt, false);
4230 return true;
4231 }
4232 }
4233 }
4234 }
4235
4236 /* Check for indirect calls that became direct calls, and thus
4237 no longer require a static chain. */
4238 if (gimple_call_chain (stmt))
4239 {
4240 tree fn = gimple_call_fndecl (stmt);
4241 if (fn && !DECL_STATIC_CHAIN (fn))
4242 {
4243 gimple_call_set_chain (stmt, NULL);
4244 changed = true;
4245 }
4246 else
4247 {
4248 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4249 if (tmp)
4250 {
4251 gimple_call_set_chain (stmt, tmp);
4252 changed = true;
4253 }
4254 }
4255 }
4256
4257 if (inplace)
4258 return changed;
4259
4260 /* Check for builtins that CCP can handle using information not
4261 available in the generic fold routines. */
4262 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4263 {
4264 if (gimple_fold_builtin (gsi))
4265 changed = true;
4266 }
4267 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4268 {
4269 changed |= targetm.gimple_fold_builtin (gsi);
4270 }
4271 else if (gimple_call_internal_p (stmt))
4272 {
4273 enum tree_code subcode = ERROR_MARK;
4274 tree result = NULL_TREE;
4275 bool cplx_result = false;
4276 tree overflow = NULL_TREE;
4277 switch (gimple_call_internal_fn (stmt))
4278 {
4279 case IFN_BUILTIN_EXPECT:
4280 result = fold_builtin_expect (gimple_location (stmt),
4281 gimple_call_arg (stmt, 0),
4282 gimple_call_arg (stmt, 1),
4283 gimple_call_arg (stmt, 2),
4284 NULL_TREE);
4285 break;
4286 case IFN_UBSAN_OBJECT_SIZE:
4287 {
4288 tree offset = gimple_call_arg (stmt, 1);
4289 tree objsize = gimple_call_arg (stmt, 2);
4290 if (integer_all_onesp (objsize)
4291 || (TREE_CODE (offset) == INTEGER_CST
4292 && TREE_CODE (objsize) == INTEGER_CST
4293 && tree_int_cst_le (offset, objsize)))
4294 {
4295 replace_call_with_value (gsi, NULL_TREE);
4296 return true;
4297 }
4298 }
4299 break;
4300 case IFN_UBSAN_PTR:
4301 if (integer_zerop (gimple_call_arg (stmt, 1)))
4302 {
4303 replace_call_with_value (gsi, NULL_TREE);
4304 return true;
4305 }
4306 break;
4307 case IFN_UBSAN_BOUNDS:
4308 {
4309 tree index = gimple_call_arg (stmt, 1);
4310 tree bound = gimple_call_arg (stmt, 2);
4311 if (TREE_CODE (index) == INTEGER_CST
4312 && TREE_CODE (bound) == INTEGER_CST)
4313 {
4314 index = fold_convert (TREE_TYPE (bound), index);
4315 if (TREE_CODE (index) == INTEGER_CST
4316 && tree_int_cst_le (index, bound))
4317 {
4318 replace_call_with_value (gsi, NULL_TREE);
4319 return true;
4320 }
4321 }
4322 }
4323 break;
4324 case IFN_GOACC_DIM_SIZE:
4325 case IFN_GOACC_DIM_POS:
4326 result = fold_internal_goacc_dim (stmt);
4327 break;
4328 case IFN_UBSAN_CHECK_ADD:
4329 subcode = PLUS_EXPR;
4330 break;
4331 case IFN_UBSAN_CHECK_SUB:
4332 subcode = MINUS_EXPR;
4333 break;
4334 case IFN_UBSAN_CHECK_MUL:
4335 subcode = MULT_EXPR;
4336 break;
4337 case IFN_ADD_OVERFLOW:
4338 subcode = PLUS_EXPR;
4339 cplx_result = true;
4340 break;
4341 case IFN_SUB_OVERFLOW:
4342 subcode = MINUS_EXPR;
4343 cplx_result = true;
4344 break;
4345 case IFN_MUL_OVERFLOW:
4346 subcode = MULT_EXPR;
4347 cplx_result = true;
4348 break;
4349 default:
4350 break;
4351 }
4352 if (subcode != ERROR_MARK)
4353 {
4354 tree arg0 = gimple_call_arg (stmt, 0);
4355 tree arg1 = gimple_call_arg (stmt, 1);
4356 tree type = TREE_TYPE (arg0);
4357 if (cplx_result)
4358 {
4359 tree lhs = gimple_call_lhs (stmt);
4360 if (lhs == NULL_TREE)
4361 type = NULL_TREE;
4362 else
4363 type = TREE_TYPE (TREE_TYPE (lhs));
4364 }
4365 if (type == NULL_TREE)
4366 ;
4367 /* x = y + 0; x = y - 0; x = y * 0; */
4368 else if (integer_zerop (arg1))
4369 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4370 /* x = 0 + y; x = 0 * y; */
4371 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4372 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4373 /* x = y - y; */
4374 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4375 result = integer_zero_node;
4376 /* x = y * 1; x = 1 * y; */
4377 else if (subcode == MULT_EXPR && integer_onep (arg1))
4378 result = arg0;
4379 else if (subcode == MULT_EXPR && integer_onep (arg0))
4380 result = arg1;
4381 else if (TREE_CODE (arg0) == INTEGER_CST
4382 && TREE_CODE (arg1) == INTEGER_CST)
4383 {
4384 if (cplx_result)
4385 result = int_const_binop (subcode, fold_convert (type, arg0),
4386 fold_convert (type, arg1));
4387 else
4388 result = int_const_binop (subcode, arg0, arg1);
4389 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4390 {
4391 if (cplx_result)
4392 overflow = build_one_cst (type);
4393 else
4394 result = NULL_TREE;
4395 }
4396 }
4397 if (result)
4398 {
4399 if (result == integer_zero_node)
4400 result = build_zero_cst (type);
4401 else if (cplx_result && TREE_TYPE (result) != type)
4402 {
4403 if (TREE_CODE (result) == INTEGER_CST)
4404 {
4405 if (arith_overflowed_p (PLUS_EXPR, type, result,
4406 integer_zero_node))
4407 overflow = build_one_cst (type);
4408 }
4409 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4410 && TYPE_UNSIGNED (type))
4411 || (TYPE_PRECISION (type)
4412 < (TYPE_PRECISION (TREE_TYPE (result))
4413 + (TYPE_UNSIGNED (TREE_TYPE (result))
4414 && !TYPE_UNSIGNED (type)))))
4415 result = NULL_TREE;
4416 if (result)
4417 result = fold_convert (type, result);
4418 }
4419 }
4420 }
4421
4422 if (result)
4423 {
4424 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4425 result = drop_tree_overflow (result);
4426 if (cplx_result)
4427 {
4428 if (overflow == NULL_TREE)
4429 overflow = build_zero_cst (TREE_TYPE (result));
4430 tree ctype = build_complex_type (TREE_TYPE (result));
4431 if (TREE_CODE (result) == INTEGER_CST
4432 && TREE_CODE (overflow) == INTEGER_CST)
4433 result = build_complex (ctype, result, overflow);
4434 else
4435 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4436 ctype, result, overflow);
4437 }
4438 if (!update_call_from_tree (gsi, result))
4439 gimplify_and_update_call_from_tree (gsi, result);
4440 changed = true;
4441 }
4442 }
4443
4444 return changed;
4445 }
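/* For illustration: an IFN_ADD_OVERFLOW call with constant operands
   folds above to a COMPLEX_CST whose real part is the wrapped sum and
   whose imaginary part is 1 iff the infinite-precision sum does not fit
   in the result type.  */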
4446
4447
4448 /* Return true if NAME has a use on STMT. */
4449
4450 static bool
4451 has_use_on_stmt (tree name, gimple *stmt)
4452 {
4453 imm_use_iterator iter;
4454 use_operand_p use_p;
4455 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4456 if (USE_STMT (use_p) == stmt)
4457 return true;
4458 return false;
4459 }
4460
4461 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
4462 gimple_simplify.
4463
4464 Replaces *GSI with the simplification result in RES_OP
4465 and the associated statements in *SEQ. Does the replacement
4466 according to INPLACE and returns true if the operation succeeded. */
4467
4468 static bool
4469 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4470 gimple_match_op *res_op,
4471 gimple_seq *seq, bool inplace)
4472 {
4473 gimple *stmt = gsi_stmt (*gsi);
4474 tree *ops = res_op->ops;
4475 unsigned int num_ops = res_op->num_ops;
4476
4477 /* Play safe and do not allow abnormals to be mentioned in
4478 newly created statements. See also maybe_push_res_to_seq.
4479 As an exception allow such uses if there was a use of the
4480 same SSA name on the old stmt. */
4481 for (unsigned int i = 0; i < num_ops; ++i)
4482 if (TREE_CODE (ops[i]) == SSA_NAME
4483 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4484 && !has_use_on_stmt (ops[i], stmt))
4485 return false;
4486
4487 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4488 for (unsigned int i = 0; i < 2; ++i)
4489 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4490 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4491 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4492 return false;
4493
4494 /* Don't insert new statements when INPLACE is true, even if we could
4495 reuse STMT for the final statement. */
4496 if (inplace && !gimple_seq_empty_p (*seq))
4497 return false;
4498
4499 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4500 {
4501 gcc_assert (res_op->code.is_tree_code ());
4502 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4503 /* A GIMPLE_COND's condition may not throw. */
4504 && (!flag_exceptions
4505 || !cfun->can_throw_non_call_exceptions
4506 || !operation_could_trap_p (res_op->code,
4507 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4508 false, NULL_TREE)))
4509 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4510 else if (res_op->code == SSA_NAME)
4511 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4512 build_zero_cst (TREE_TYPE (ops[0])));
4513 else if (res_op->code == INTEGER_CST)
4514 {
4515 if (integer_zerop (ops[0]))
4516 gimple_cond_make_false (cond_stmt);
4517 else
4518 gimple_cond_make_true (cond_stmt);
4519 }
4520 else if (!inplace)
4521 {
4522 tree res = maybe_push_res_to_seq (res_op, seq);
4523 if (!res)
4524 return false;
4525 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4526 build_zero_cst (TREE_TYPE (res)));
4527 }
4528 else
4529 return false;
4530 if (dump_file && (dump_flags & TDF_DETAILS))
4531 {
4532 fprintf (dump_file, "gimple_simplified to ");
4533 if (!gimple_seq_empty_p (*seq))
4534 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4535 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4536 0, TDF_SLIM);
4537 }
4538 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4539 return true;
4540 }
4541 else if (is_gimple_assign (stmt)
4542 && res_op->code.is_tree_code ())
4543 {
4544 if (!inplace
4545 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4546 {
4547 maybe_build_generic_op (res_op);
4548 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4549 res_op->op_or_null (0),
4550 res_op->op_or_null (1),
4551 res_op->op_or_null (2));
4552 if (dump_file && (dump_flags & TDF_DETAILS))
4553 {
4554 fprintf (dump_file, "gimple_simplified to ");
4555 if (!gimple_seq_empty_p (*seq))
4556 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4557 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4558 0, TDF_SLIM);
4559 }
4560 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4561 return true;
4562 }
4563 }
4564 else if (res_op->code.is_fn_code ()
4565 && gimple_call_combined_fn (stmt) == res_op->code)
4566 {
4567 gcc_assert (num_ops == gimple_call_num_args (stmt));
4568 for (unsigned int i = 0; i < num_ops; ++i)
4569 gimple_call_set_arg (stmt, i, ops[i]);
4570 if (dump_file && (dump_flags & TDF_DETAILS))
4571 {
4572 fprintf (dump_file, "gimple_simplified to ");
4573 if (!gimple_seq_empty_p (*seq))
4574 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4575 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4576 }
4577 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4578 return true;
4579 }
4580 else if (!inplace)
4581 {
4582 if (gimple_has_lhs (stmt))
4583 {
4584 tree lhs = gimple_get_lhs (stmt);
4585 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4586 return false;
4587 if (dump_file && (dump_flags & TDF_DETAILS))
4588 {
4589 fprintf (dump_file, "gimple_simplified to ");
4590 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4591 }
4592 gsi_replace_with_seq_vops (gsi, *seq);
4593 return true;
4594 }
4595 else
4596 gcc_unreachable ();
4597 }
4598
4599 return false;
4600 }
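/* For example, when a GIMPLE_COND's condition simplifies to the
   constant 0 it is replaced via gimple_cond_make_false (and nonzero via
   gimple_cond_make_true), while a simplification to a bare SSA name
   turns the condition into name != 0.  */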
4601
4602 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
4603
4604 static bool
4605 maybe_canonicalize_mem_ref_addr (tree *t)
4606 {
4607 bool res = false;
4608
4609 if (TREE_CODE (*t) == ADDR_EXPR)
4610 t = &TREE_OPERAND (*t, 0);
4611
4612 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4613 generic vector extension. The actual vector referenced is
4614 view-converted to an array type for this purpose. If the index
4615 is constant the canonical representation in the middle-end is a
4616 BIT_FIELD_REF so re-write the former to the latter here. */
4617 if (TREE_CODE (*t) == ARRAY_REF
4618 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4619 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4620 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4621 {
4622 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4623 if (VECTOR_TYPE_P (vtype))
4624 {
4625 tree low = array_ref_low_bound (*t);
4626 if (TREE_CODE (low) == INTEGER_CST)
4627 {
4628 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4629 {
4630 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4631 wi::to_widest (low));
4632 idx = wi::mul (idx, wi::to_widest
4633 (TYPE_SIZE (TREE_TYPE (*t))));
4634 widest_int ext
4635 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4636 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4637 {
4638 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4639 TREE_TYPE (*t),
4640 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4641 TYPE_SIZE (TREE_TYPE (*t)),
4642 wide_int_to_tree (bitsizetype, idx));
4643 res = true;
4644 }
4645 }
4646 }
4647 }
4648 }
4649
4650 while (handled_component_p (*t))
4651 t = &TREE_OPERAND (*t, 0);
4652
4653 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
4654 of invariant addresses into an SSA name MEM_REF address. */
4655 if (TREE_CODE (*t) == MEM_REF
4656 || TREE_CODE (*t) == TARGET_MEM_REF)
4657 {
4658 tree addr = TREE_OPERAND (*t, 0);
4659 if (TREE_CODE (addr) == ADDR_EXPR
4660 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4661 || handled_component_p (TREE_OPERAND (addr, 0))))
4662 {
4663 tree base;
4664 poly_int64 coffset;
4665 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4666 &coffset);
4667 if (!base)
4668 gcc_unreachable ();
4669
4670 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4671 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4672 TREE_OPERAND (*t, 1),
4673 size_int (coffset));
4674 res = true;
4675 }
4676 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4677 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4678 }
4679
4680 /* Canonicalize back MEM_REFs to plain reference trees if the object
4681 accessed is a decl that has the same access semantics as the MEM_REF. */
4682 if (TREE_CODE (*t) == MEM_REF
4683 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4684 && integer_zerop (TREE_OPERAND (*t, 1))
4685 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4686 {
4687 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4688 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4689 if (/* Same volatile qualification. */
4690 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4691 /* Same TBAA behavior with -fstrict-aliasing. */
4692 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4693 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4694 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4695 /* Same alignment. */
4696 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4697 /* We have to be careful not to drop a required conversion
4698 from the rhs to the lhs if *t appears on the lhs, or vice versa
4699 if it appears on the rhs. Thus require strict type
4700 compatibility. */
4701 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4702 {
4703 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4704 res = true;
4705 }
4706 }
4707
4708 /* Canonicalize TARGET_MEM_REF in particular with respect to
4709 the indexes becoming constant. */
4710 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4711 {
4712 tree tem = maybe_fold_tmr (*t);
4713 if (tem)
4714 {
4715 *t = tem;
4716 res = true;
4717 }
4718 }
4719
4720 return res;
4721 }
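/* For illustration: MEM[&a, 0], where the access has the same type,
   volatility and alignment as the decl a, is canonicalized back to the
   plain reference a, and an ARRAY_REF of a view-converted vector with a
   constant in-range index is rewritten to a BIT_FIELD_REF of the vector
   itself.  */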
4722
4723 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4724 distinguishes both cases. */
4725
4726 static bool
4727 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4728 {
4729 bool changed = false;
4730 gimple *stmt = gsi_stmt (*gsi);
4731 bool nowarning = gimple_no_warning_p (stmt);
4732 unsigned i;
4733 fold_defer_overflow_warnings ();
4734
4735 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4736 after propagation.
4737 ??? This shouldn't be done in generic folding but in the
4738 propagation helpers which also know whether an address was
4739 propagated.
4740 Also canonicalize operand order. */
4741 switch (gimple_code (stmt))
4742 {
4743 case GIMPLE_ASSIGN:
4744 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4745 {
4746 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4747 if ((REFERENCE_CLASS_P (*rhs)
4748 || TREE_CODE (*rhs) == ADDR_EXPR)
4749 && maybe_canonicalize_mem_ref_addr (rhs))
4750 changed = true;
4751 tree *lhs = gimple_assign_lhs_ptr (stmt);
4752 if (REFERENCE_CLASS_P (*lhs)
4753 && maybe_canonicalize_mem_ref_addr (lhs))
4754 changed = true;
4755 }
4756 else
4757 {
4758 /* Canonicalize operand order. */
4759 enum tree_code code = gimple_assign_rhs_code (stmt);
4760 if (TREE_CODE_CLASS (code) == tcc_comparison
4761 || commutative_tree_code (code)
4762 || commutative_ternary_tree_code (code))
4763 {
4764 tree rhs1 = gimple_assign_rhs1 (stmt);
4765 tree rhs2 = gimple_assign_rhs2 (stmt);
4766 if (tree_swap_operands_p (rhs1, rhs2))
4767 {
4768 gimple_assign_set_rhs1 (stmt, rhs2);
4769 gimple_assign_set_rhs2 (stmt, rhs1);
4770 if (TREE_CODE_CLASS (code) == tcc_comparison)
4771 gimple_assign_set_rhs_code (stmt,
4772 swap_tree_comparison (code));
4773 changed = true;
4774 }
4775 }
4776 }
4777 break;
4778 case GIMPLE_CALL:
4779 {
4780 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4781 {
4782 tree *arg = gimple_call_arg_ptr (stmt, i);
4783 if (REFERENCE_CLASS_P (*arg)
4784 && maybe_canonicalize_mem_ref_addr (arg))
4785 changed = true;
4786 }
4787 tree *lhs = gimple_call_lhs_ptr (stmt);
4788 if (*lhs
4789 && REFERENCE_CLASS_P (*lhs)
4790 && maybe_canonicalize_mem_ref_addr (lhs))
4791 changed = true;
4792 break;
4793 }
4794 case GIMPLE_ASM:
4795 {
4796 gasm *asm_stmt = as_a <gasm *> (stmt);
4797 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4798 {
4799 tree link = gimple_asm_output_op (asm_stmt, i);
4800 tree op = TREE_VALUE (link);
4801 if (REFERENCE_CLASS_P (op)
4802 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4803 changed = true;
4804 }
4805 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4806 {
4807 tree link = gimple_asm_input_op (asm_stmt, i);
4808 tree op = TREE_VALUE (link);
4809 if ((REFERENCE_CLASS_P (op)
4810 || TREE_CODE (op) == ADDR_EXPR)
4811 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4812 changed = true;
4813 }
4814 }
4815 break;
4816 case GIMPLE_DEBUG:
4817 if (gimple_debug_bind_p (stmt))
4818 {
4819 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4820 if (*val
4821 && (REFERENCE_CLASS_P (*val)
4822 || TREE_CODE (*val) == ADDR_EXPR)
4823 && maybe_canonicalize_mem_ref_addr (val))
4824 changed = true;
4825 }
4826 break;
4827 case GIMPLE_COND:
4828 {
4829 /* Canonicalize operand order. */
4830 tree lhs = gimple_cond_lhs (stmt);
4831 tree rhs = gimple_cond_rhs (stmt);
4832 if (tree_swap_operands_p (lhs, rhs))
4833 {
4834 gcond *gc = as_a <gcond *> (stmt);
4835 gimple_cond_set_lhs (gc, rhs);
4836 gimple_cond_set_rhs (gc, lhs);
4837 gimple_cond_set_code (gc,
4838 swap_tree_comparison (gimple_cond_code (gc)));
4839 changed = true;
4840 }
4841 }
4842 default:;
4843 }
4844
4845 /* Dispatch to pattern-based folding. */
4846 if (!inplace
4847 || is_gimple_assign (stmt)
4848 || gimple_code (stmt) == GIMPLE_COND)
4849 {
4850 gimple_seq seq = NULL;
4851 gimple_match_op res_op;
4852 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4853 valueize, valueize))
4854 {
4855 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4856 changed = true;
4857 else
4858 gimple_seq_discard (seq);
4859 }
4860 }
4861
4862 stmt = gsi_stmt (*gsi);
4863
4864 /* Fold the main computation performed by the statement. */
4865 switch (gimple_code (stmt))
4866 {
4867 case GIMPLE_ASSIGN:
4868 {
4869 /* Try to canonicalize for boolean-typed X the comparisons
4870 X == 0, X == 1, X != 0, and X != 1. */
4871 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4872 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4873 {
4874 tree lhs = gimple_assign_lhs (stmt);
4875 tree op1 = gimple_assign_rhs1 (stmt);
4876 tree op2 = gimple_assign_rhs2 (stmt);
4877 tree type = TREE_TYPE (op1);
4878
4879 /* Check whether the comparison operands are of the same boolean
4880 type as the result type.
4881 Check that the second operand is an integer constant with value
4882 one or zero. */
4883 if (TREE_CODE (op2) == INTEGER_CST
4884 && (integer_zerop (op2) || integer_onep (op2))
4885 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4886 {
4887 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4888 bool is_logical_not = false;
4889
4890 /* X == 0 and X != 1 is a logical-not of X;
4891 X == 1 and X != 0 is X. */
4892 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4893 || (cmp_code == NE_EXPR && integer_onep (op2)))
4894 is_logical_not = true;
4895
4896 if (is_logical_not == false)
4897 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4898 /* Only for one-bit precision typed X is the transformation
4899 !X -> ~X valid. */
4900 else if (TYPE_PRECISION (type) == 1)
4901 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4902 /* Otherwise we use !X -> X ^ 1. */
4903 else
4904 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4905 build_int_cst (type, 1));
4906 changed = true;
4907 break;
4908 }
4909 }
4910
4911 unsigned old_num_ops = gimple_num_ops (stmt);
4912 tree lhs = gimple_assign_lhs (stmt);
4913 tree new_rhs = fold_gimple_assign (gsi);
4914 if (new_rhs
4915 && !useless_type_conversion_p (TREE_TYPE (lhs),
4916 TREE_TYPE (new_rhs)))
4917 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4918 if (new_rhs
4919 && (!inplace
4920 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4921 {
4922 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4923 changed = true;
4924 }
4925 break;
4926 }
4927
4928 case GIMPLE_CALL:
4929 changed |= gimple_fold_call (gsi, inplace);
4930 break;
4931
4932 case GIMPLE_ASM:
4933 /* Fold *& in asm operands. */
4934 {
4935 gasm *asm_stmt = as_a <gasm *> (stmt);
4936 size_t noutputs;
4937 const char **oconstraints;
4938 const char *constraint;
4939 bool allows_mem, allows_reg;
4940
4941 noutputs = gimple_asm_noutputs (asm_stmt);
4942 oconstraints = XALLOCAVEC (const char *, noutputs);
4943
4944 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4945 {
4946 tree link = gimple_asm_output_op (asm_stmt, i);
4947 tree op = TREE_VALUE (link);
4948 oconstraints[i]
4949 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4950 if (REFERENCE_CLASS_P (op)
4951 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4952 {
4953 TREE_VALUE (link) = op;
4954 changed = true;
4955 }
4956 }
4957 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4958 {
4959 tree link = gimple_asm_input_op (asm_stmt, i);
4960 tree op = TREE_VALUE (link);
4961 constraint
4962 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4963 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4964 oconstraints, &allows_mem, &allows_reg);
4965 if (REFERENCE_CLASS_P (op)
4966 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4967 != NULL_TREE)
4968 {
4969 TREE_VALUE (link) = op;
4970 changed = true;
4971 }
4972 }
4973 }
4974 break;
4975
4976 case GIMPLE_DEBUG:
4977 if (gimple_debug_bind_p (stmt))
4978 {
4979 tree val = gimple_debug_bind_get_value (stmt);
4980 if (val
4981 && REFERENCE_CLASS_P (val))
4982 {
4983 tree tem = maybe_fold_reference (val, false);
4984 if (tem)
4985 {
4986 gimple_debug_bind_set_value (stmt, tem);
4987 changed = true;
4988 }
4989 }
4990 else if (val
4991 && TREE_CODE (val) == ADDR_EXPR)
4992 {
4993 tree ref = TREE_OPERAND (val, 0);
4994 tree tem = maybe_fold_reference (ref, false);
4995 if (tem)
4996 {
4997 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4998 gimple_debug_bind_set_value (stmt, tem);
4999 changed = true;
5000 }
5001 }
5002 }
5003 break;
5004
5005 case GIMPLE_RETURN:
5006 {
5007 greturn *ret_stmt = as_a<greturn *> (stmt);
5008 tree ret = gimple_return_retval(ret_stmt);
5009
5010 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5011 {
5012 tree val = valueize (ret);
5013 if (val && val != ret
5014 && may_propagate_copy (ret, val))
5015 {
5016 gimple_return_set_retval (ret_stmt, val);
5017 changed = true;
5018 }
5019 }
5020 }
5021 break;
5022
5023 default:;
5024 }
5025
5026 stmt = gsi_stmt (*gsi);
5027
5028 /* Fold *& on the lhs. */
5029 if (gimple_has_lhs (stmt))
5030 {
5031 tree lhs = gimple_get_lhs (stmt);
5032 if (lhs && REFERENCE_CLASS_P (lhs))
5033 {
5034 tree new_lhs = maybe_fold_reference (lhs, true);
5035 if (new_lhs)
5036 {
5037 gimple_set_lhs (stmt, new_lhs);
5038 changed = true;
5039 }
5040 }
5041 }
5042
5043 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5044 return changed;
5045 }
5046
5047 /* Valueization callback that ends up not following SSA edges. */
5048
5049 tree
5050 no_follow_ssa_edges (tree)
5051 {
5052 return NULL_TREE;
5053 }
5054
5055 /* Valueization callback that ends up following single-use SSA edges only. */
5056
5057 tree
5058 follow_single_use_edges (tree val)
5059 {
5060 if (TREE_CODE (val) == SSA_NAME
5061 && !has_single_use (val))
5062 return NULL_TREE;
5063 return val;
5064 }
5065
5066 /* Valueization callback that follows all SSA edges. */
5067
5068 tree
5069 follow_all_ssa_edges (tree val)
5070 {
5071 return val;
5072 }
5073
5074 /* Fold the statement pointed to by GSI. In some cases, this function may
5075 replace the whole statement with a new one. Returns true iff folding
5076 makes any changes.
5077 The statement pointed to by GSI should be in valid gimple form but may
5078 be in unfolded state as resulting from for example constant propagation
5079 which can produce *&x = 0. */
5080
5081 bool
5082 fold_stmt (gimple_stmt_iterator *gsi)
5083 {
5084 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5085 }
5086
5087 bool
5088 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5089 {
5090 return fold_stmt_1 (gsi, false, valueize);
5091 }
5092
5093 /* Perform the minimal folding on statement *GSI. Only operations like
5094 *&x created by constant propagation are handled. The statement cannot
5095 be replaced with a new one. Return true if the statement was
5096 changed, false otherwise.
5097 The statement *GSI should be in valid gimple form but may
5098 be in unfolded state as resulting from for example constant propagation
5099 which can produce *&x = 0. */
5100
5101 bool
5102 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5103 {
5104 gimple *stmt = gsi_stmt (*gsi);
5105 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5106 gcc_assert (gsi_stmt (*gsi) == stmt);
5107 return changed;
5108 }
5109
5110 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5111 if EXPR is null or we don't know how.
5112 If non-null, the result always has boolean type. */
5113
5114 static tree
5115 canonicalize_bool (tree expr, bool invert)
5116 {
5117 if (!expr)
5118 return NULL_TREE;
5119 else if (invert)
5120 {
5121 if (integer_nonzerop (expr))
5122 return boolean_false_node;
5123 else if (integer_zerop (expr))
5124 return boolean_true_node;
5125 else if (TREE_CODE (expr) == SSA_NAME)
5126 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5127 build_int_cst (TREE_TYPE (expr), 0));
5128 else if (COMPARISON_CLASS_P (expr))
5129 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5130 boolean_type_node,
5131 TREE_OPERAND (expr, 0),
5132 TREE_OPERAND (expr, 1));
5133 else
5134 return NULL_TREE;
5135 }
5136 else
5137 {
5138 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5139 return expr;
5140 if (integer_nonzerop (expr))
5141 return boolean_true_node;
5142 else if (integer_zerop (expr))
5143 return boolean_false_node;
5144 else if (TREE_CODE (expr) == SSA_NAME)
5145 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5146 build_int_cst (TREE_TYPE (expr), 0));
5147 else if (COMPARISON_CLASS_P (expr))
5148 return fold_build2 (TREE_CODE (expr),
5149 boolean_type_node,
5150 TREE_OPERAND (expr, 0),
5151 TREE_OPERAND (expr, 1));
5152 else
5153 return NULL_TREE;
5154 }
5155 }
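/* For example, canonicalize_bool (x, true) for an SSA name x builds the
   boolean comparison x == 0, while for a comparison a < b it builds the
   inverted comparison a >= b.  */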
5156
5157 /* Check to see if a boolean expression EXPR is logically equivalent to the
5158 comparison (OP1 CODE OP2). Check for various identities involving
5159 SSA_NAMEs. */
5160
5161 static bool
5162 same_bool_comparison_p (const_tree expr, enum tree_code code,
5163 const_tree op1, const_tree op2)
5164 {
5165 gimple *s;
5166
5167 /* The obvious case. */
5168 if (TREE_CODE (expr) == code
5169 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5170 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5171 return true;
5172
5173 /* Check for comparing (name, name != 0) and the case where expr
5174 is an SSA_NAME with a definition matching the comparison. */
5175 if (TREE_CODE (expr) == SSA_NAME
5176 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5177 {
5178 if (operand_equal_p (expr, op1, 0))
5179 return ((code == NE_EXPR && integer_zerop (op2))
5180 || (code == EQ_EXPR && integer_nonzerop (op2)));
5181 s = SSA_NAME_DEF_STMT (expr);
5182 if (is_gimple_assign (s)
5183 && gimple_assign_rhs_code (s) == code
5184 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5185 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5186 return true;
5187 }
5188
5189 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5190 of name is a comparison, recurse. */
5191 if (TREE_CODE (op1) == SSA_NAME
5192 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5193 {
5194 s = SSA_NAME_DEF_STMT (op1);
5195 if (is_gimple_assign (s)
5196 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5197 {
5198 enum tree_code c = gimple_assign_rhs_code (s);
5199 if ((c == NE_EXPR && integer_zerop (op2))
5200 || (c == EQ_EXPR && integer_nonzerop (op2)))
5201 return same_bool_comparison_p (expr, c,
5202 gimple_assign_rhs1 (s),
5203 gimple_assign_rhs2 (s));
5204 if ((c == EQ_EXPR && integer_zerop (op2))
5205 || (c == NE_EXPR && integer_nonzerop (op2)))
5206 return same_bool_comparison_p (expr,
5207 invert_tree_comparison (c, false),
5208 gimple_assign_rhs1 (s),
5209 gimple_assign_rhs2 (s));
5210 }
5211 }
5212 return false;
5213 }
5214
5215 /* Check to see if two boolean expressions OP1 and OP2 are logically
5216 equivalent. */
5217
5218 static bool
5219 same_bool_result_p (const_tree op1, const_tree op2)
5220 {
5221 /* Simple cases first. */
5222 if (operand_equal_p (op1, op2, 0))
5223 return true;
5224
5225 /* Check the cases where at least one of the operands is a comparison.
5226 These are a bit smarter than operand_equal_p in that they apply some
5227 identities on SSA_NAMEs. */
5228 if (COMPARISON_CLASS_P (op2)
5229 && same_bool_comparison_p (op1, TREE_CODE (op2),
5230 TREE_OPERAND (op2, 0),
5231 TREE_OPERAND (op2, 1)))
5232 return true;
5233 if (COMPARISON_CLASS_P (op1)
5234 && same_bool_comparison_p (op2, TREE_CODE (op1),
5235 TREE_OPERAND (op1, 0),
5236 TREE_OPERAND (op1, 1)))
5237 return true;
5238
5239 /* Default case. */
5240 return false;
5241 }
5242
5243 /* Forward declarations for some mutually recursive functions. */
5244
5245 static tree
5246 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5247 enum tree_code code2, tree op2a, tree op2b);
5248 static tree
5249 and_var_with_comparison (tree var, bool invert,
5250 enum tree_code code2, tree op2a, tree op2b);
5251 static tree
5252 and_var_with_comparison_1 (gimple *stmt,
5253 enum tree_code code2, tree op2a, tree op2b);
5254 static tree
5255 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5256 enum tree_code code2, tree op2a, tree op2b);
5257 static tree
5258 or_var_with_comparison (tree var, bool invert,
5259 enum tree_code code2, tree op2a, tree op2b);
5260 static tree
5261 or_var_with_comparison_1 (gimple *stmt,
5262 enum tree_code code2, tree op2a, tree op2b);
5263
5264 /* Helper function for and_comparisons_1: try to simplify the AND of the
5265 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5266 If INVERT is true, invert the value of the VAR before doing the AND.
5267 Return NULL_TREE if we can't simplify this to a single expression. */
5268
5269 static tree
5270 and_var_with_comparison (tree var, bool invert,
5271 enum tree_code code2, tree op2a, tree op2b)
5272 {
5273 tree t;
5274 gimple *stmt = SSA_NAME_DEF_STMT (var);
5275
5276 /* We can only deal with variables whose definitions are assignments. */
5277 if (!is_gimple_assign (stmt))
5278 return NULL_TREE;
5279
5280 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5281 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5282 Then we only have to consider the simpler non-inverted cases. */
5283 if (invert)
5284 t = or_var_with_comparison_1 (stmt,
5285 invert_tree_comparison (code2, false),
5286 op2a, op2b);
5287 else
5288 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5289 return canonicalize_bool (t, invert);
5290 }
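/* For example, with INVERT set and a second comparison b < c, the above
   computes !(var OR !(b < c)), i.e. !(var OR (b >= c)), so only the
   non-inverted OR helper has to handle the recursion.  */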
5291
5292 /* Try to simplify the AND of the ssa variable defined by the assignment
5293 STMT with the comparison specified by (OP2A CODE2 OP2B).
5294 Return NULL_TREE if we can't simplify this to a single expression. */
5295
5296 static tree
5297 and_var_with_comparison_1 (gimple *stmt,
5298 enum tree_code code2, tree op2a, tree op2b)
5299 {
5300 tree var = gimple_assign_lhs (stmt);
5301 tree true_test_var = NULL_TREE;
5302 tree false_test_var = NULL_TREE;
5303 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5304
5305 /* Check for identities like (var AND (var == 0)) => false. */
5306 if (TREE_CODE (op2a) == SSA_NAME
5307 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5308 {
5309 if ((code2 == NE_EXPR && integer_zerop (op2b))
5310 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5311 {
5312 true_test_var = op2a;
5313 if (var == true_test_var)
5314 return var;
5315 }
5316 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5317 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5318 {
5319 false_test_var = op2a;
5320 if (var == false_test_var)
5321 return boolean_false_node;
5322 }
5323 }
5324
5325 /* If the definition is a comparison, recurse on it. */
5326 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5327 {
5328 tree t = and_comparisons_1 (innercode,
5329 gimple_assign_rhs1 (stmt),
5330 gimple_assign_rhs2 (stmt),
5331 code2,
5332 op2a,
5333 op2b);
5334 if (t)
5335 return t;
5336 }
5337
5338 /* If the definition is an AND or OR expression, we may be able to
5339 simplify by reassociating. */
5340 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5341 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5342 {
5343 tree inner1 = gimple_assign_rhs1 (stmt);
5344 tree inner2 = gimple_assign_rhs2 (stmt);
5345 gimple *s;
5346 tree t;
5347 tree partial = NULL_TREE;
5348 bool is_and = (innercode == BIT_AND_EXPR);
5349
5350 /* Check for boolean identities that don't require recursive examination
5351 of inner1/inner2:
5352 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5353 inner1 AND (inner1 OR inner2) => inner1
5354 !inner1 AND (inner1 AND inner2) => false
5355 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5356 Likewise for similar cases involving inner2. */
5357 if (inner1 == true_test_var)
5358 return (is_and ? var : inner1);
5359 else if (inner2 == true_test_var)
5360 return (is_and ? var : inner2);
5361 else if (inner1 == false_test_var)
5362 return (is_and
5363 ? boolean_false_node
5364 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5365 else if (inner2 == false_test_var)
5366 return (is_and
5367 ? boolean_false_node
5368 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5369
5370 /* Next, redistribute/reassociate the AND across the inner tests.
5371 Compute the first partial result, (inner1 AND (op2a code2 op2b)). */
5372 if (TREE_CODE (inner1) == SSA_NAME
5373 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5374 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5375 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5376 gimple_assign_rhs1 (s),
5377 gimple_assign_rhs2 (s),
5378 code2, op2a, op2b)))
5379 {
5380 /* Handle the AND case, where we are reassociating:
5381 (inner1 AND inner2) AND (op2a code2 op2b)
5382 => (t AND inner2)
5383 If the partial result t is a constant, we win. Otherwise
5384 continue on to try reassociating with the other inner test. */
5385 if (is_and)
5386 {
5387 if (integer_onep (t))
5388 return inner2;
5389 else if (integer_zerop (t))
5390 return boolean_false_node;
5391 }
5392
5393 /* Handle the OR case, where we are redistributing:
5394 (inner1 OR inner2) AND (op2a code2 op2b)
5395 => (t OR (inner2 AND (op2a code2 op2b))) */
5396 else if (integer_onep (t))
5397 return boolean_true_node;
5398
5399 /* Save partial result for later. */
5400 partial = t;
5401 }
5402
5403 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)). */
5404 if (TREE_CODE (inner2) == SSA_NAME
5405 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5406 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5407 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5408 gimple_assign_rhs1 (s),
5409 gimple_assign_rhs2 (s),
5410 code2, op2a, op2b)))
5411 {
5412 /* Handle the AND case, where we are reassociating:
5413 (inner1 AND inner2) AND (op2a code2 op2b)
5414 => (inner1 AND t) */
5415 if (is_and)
5416 {
5417 if (integer_onep (t))
5418 return inner1;
5419 else if (integer_zerop (t))
5420 return boolean_false_node;
5421 /* If both are the same, we can apply the identity
5422 (x AND x) == x. */
5423 else if (partial && same_bool_result_p (t, partial))
5424 return t;
5425 }
5426
5427 /* Handle the OR case, where we are redistributing:
5428 (inner1 OR inner2) AND (op2a code2 op2b)
5429 => (t OR (inner1 AND (op2a code2 op2b)))
5430 => (t OR partial) */
5431 else
5432 {
5433 if (integer_onep (t))
5434 return boolean_true_node;
5435 else if (partial)
5436 {
5437 /* We already got a simplification for the other
5438 operand to the redistributed OR expression. The
5439 interesting case is when at least one is false.
5440 Or, if both are the same, we can apply the identity
5441 (x OR x) == x. */
5442 if (integer_zerop (partial))
5443 return t;
5444 else if (integer_zerop (t))
5445 return partial;
5446 else if (same_bool_result_p (t, partial))
5447 return t;
5448 }
5449 }
5450 }
5451 }
5452 return NULL_TREE;
5453 }
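
/* Reassociation example (a sketch): if VAR is defined as

     var_1 = _2 | _3;

   and the comparison defining _2 ANDs with (op2a code2 op2b) to
   constant true, the redistribution above folds
   (var_1 AND (op2a code2 op2b)) directly to boolean_true_node
   without looking at _3.  If instead that partial result is constant
   false, it is saved in PARTIAL and the simplified
   (inner2 AND (op2a code2 op2b)), if any, is returned on its own,
   since (false OR x) is x.  */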
5454
5455 /* Try to simplify the AND of two comparisons defined by
5456 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5457 If this can be done without constructing an intermediate value,
5458 return the resulting tree; otherwise NULL_TREE is returned.
5459 This function is deliberately asymmetric as it recurses on SSA_DEFs
5460 in the first comparison but not the second. */
5461
5462 static tree
5463 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5464 enum tree_code code2, tree op2a, tree op2b)
5465 {
5466 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5467
5468 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5469 if (operand_equal_p (op1a, op2a, 0)
5470 && operand_equal_p (op1b, op2b, 0))
5471 {
5472 /* Result will be either NULL_TREE, or a combined comparison. */
5473 tree t = combine_comparisons (UNKNOWN_LOCATION,
5474 TRUTH_ANDIF_EXPR, code1, code2,
5475 truth_type, op1a, op1b);
5476 if (t)
5477 return t;
5478 }
5479
5480 /* Likewise the swapped case of the above. */
5481 if (operand_equal_p (op1a, op2b, 0)
5482 && operand_equal_p (op1b, op2a, 0))
5483 {
5484 /* Result will be either NULL_TREE, or a combined comparison. */
5485 tree t = combine_comparisons (UNKNOWN_LOCATION,
5486 TRUTH_ANDIF_EXPR, code1,
5487 swap_tree_comparison (code2),
5488 truth_type, op1a, op1b);
5489 if (t)
5490 return t;
5491 }
5492
5493 /* If both comparisons are of the same value against constants, we might
5494 be able to merge them. */
5495 if (operand_equal_p (op1a, op2a, 0)
5496 && TREE_CODE (op1b) == INTEGER_CST
5497 && TREE_CODE (op2b) == INTEGER_CST)
5498 {
5499 int cmp = tree_int_cst_compare (op1b, op2b);
5500
5501 /* If we have (op1a == op1b), we should either be able to
5502 return that or FALSE, depending on whether the constant op1b
5503 also satisfies the other comparison against op2b. */
5504 if (code1 == EQ_EXPR)
5505 {
5506 bool done = true;
5507 bool val;
5508 switch (code2)
5509 {
5510 case EQ_EXPR: val = (cmp == 0); break;
5511 case NE_EXPR: val = (cmp != 0); break;
5512 case LT_EXPR: val = (cmp < 0); break;
5513 case GT_EXPR: val = (cmp > 0); break;
5514 case LE_EXPR: val = (cmp <= 0); break;
5515 case GE_EXPR: val = (cmp >= 0); break;
5516 default: done = false;
5517 }
5518 if (done)
5519 {
5520 if (val)
5521 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5522 else
5523 return boolean_false_node;
5524 }
5525 }
5526 /* Likewise if the second comparison is an == comparison. */
5527 else if (code2 == EQ_EXPR)
5528 {
5529 bool done = true;
5530 bool val;
5531 switch (code1)
5532 {
5533 case EQ_EXPR: val = (cmp == 0); break;
5534 case NE_EXPR: val = (cmp != 0); break;
5535 case LT_EXPR: val = (cmp > 0); break;
5536 case GT_EXPR: val = (cmp < 0); break;
5537 case LE_EXPR: val = (cmp >= 0); break;
5538 case GE_EXPR: val = (cmp <= 0); break;
5539 default: done = false;
5540 }
5541 if (done)
5542 {
5543 if (val)
5544 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5545 else
5546 return boolean_false_node;
5547 }
5548 }
5549
5550 /* Same business with inequality tests. */
5551 else if (code1 == NE_EXPR)
5552 {
5553 bool val;
5554 switch (code2)
5555 {
5556 case EQ_EXPR: val = (cmp != 0); break;
5557 case NE_EXPR: val = (cmp == 0); break;
5558 case LT_EXPR: val = (cmp >= 0); break;
5559 case GT_EXPR: val = (cmp <= 0); break;
5560 case LE_EXPR: val = (cmp > 0); break;
5561 case GE_EXPR: val = (cmp < 0); break;
5562 default:
5563 val = false;
5564 }
5565 if (val)
5566 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5567 }
5568 else if (code2 == NE_EXPR)
5569 {
5570 bool val;
5571 switch (code1)
5572 {
5573 case EQ_EXPR: val = (cmp == 0); break;
5574 case NE_EXPR: val = (cmp != 0); break;
5575 case LT_EXPR: val = (cmp <= 0); break;
5576 case GT_EXPR: val = (cmp >= 0); break;
5577 case LE_EXPR: val = (cmp < 0); break;
5578 case GE_EXPR: val = (cmp > 0); break;
5579 default:
5580 val = false;
5581 }
5582 if (val)
5583 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5584 }
5585
5586 /* Choose the more restrictive of two < or <= comparisons. */
5587 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5588 && (code2 == LT_EXPR || code2 == LE_EXPR))
5589 {
5590 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5591 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5592 else
5593 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5594 }
5595
5596 /* Likewise choose the more restrictive of two > or >= comparisons. */
5597 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5598 && (code2 == GT_EXPR || code2 == GE_EXPR))
5599 {
5600 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5601 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5602 else
5603 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5604 }
5605
5606 /* Check for singleton ranges. */
5607 else if (cmp == 0
5608 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5609 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5610 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5611
5612 /* Check for disjoint ranges. */
5613 else if (cmp <= 0
5614 && (code1 == LT_EXPR || code1 == LE_EXPR)
5615 && (code2 == GT_EXPR || code2 == GE_EXPR))
5616 return boolean_false_node;
5617 else if (cmp >= 0
5618 && (code1 == GT_EXPR || code1 == GE_EXPR)
5619 && (code2 == LT_EXPR || code2 == LE_EXPR))
5620 return boolean_false_node;
5621 }
5622
5623 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5624 NAME's definition is a truth value. See if there are any simplifications
5625 that can be done against NAME's definition. */
5626 if (TREE_CODE (op1a) == SSA_NAME
5627 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5628 && (integer_zerop (op1b) || integer_onep (op1b)))
5629 {
5630 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5631 || (code1 == NE_EXPR && integer_onep (op1b)));
5632 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5633 switch (gimple_code (stmt))
5634 {
5635 case GIMPLE_ASSIGN:
5636 /* Try to simplify by copy-propagating the definition. */
5637 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5638
5639 case GIMPLE_PHI:
5640 /* If every argument to the PHI produces the same result when
5641 ANDed with the second comparison, we win.
5642 Do not do this unless the type is bool since we need a bool
5643 result here anyway. */
5644 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5645 {
5646 tree result = NULL_TREE;
5647 unsigned i;
5648 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5649 {
5650 tree arg = gimple_phi_arg_def (stmt, i);
5651
5652 /* If this PHI has itself as an argument, ignore it.
5653 If all the other args produce the same result,
5654 we're still OK. */
5655 if (arg == gimple_phi_result (stmt))
5656 continue;
5657 else if (TREE_CODE (arg) == INTEGER_CST)
5658 {
5659 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5660 {
5661 if (!result)
5662 result = boolean_false_node;
5663 else if (!integer_zerop (result))
5664 return NULL_TREE;
5665 }
5666 else if (!result)
5667 result = fold_build2 (code2, boolean_type_node,
5668 op2a, op2b);
5669 else if (!same_bool_comparison_p (result,
5670 code2, op2a, op2b))
5671 return NULL_TREE;
5672 }
5673 else if (TREE_CODE (arg) == SSA_NAME
5674 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5675 {
5676 tree temp;
5677 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5678 /* In simple cases we can look through PHI nodes,
5679 but we have to be careful with loops.
5680 See PR49073. */
5681 if (! dom_info_available_p (CDI_DOMINATORS)
5682 || gimple_bb (def_stmt) == gimple_bb (stmt)
5683 || dominated_by_p (CDI_DOMINATORS,
5684 gimple_bb (def_stmt),
5685 gimple_bb (stmt)))
5686 return NULL_TREE;
5687 temp = and_var_with_comparison (arg, invert, code2,
5688 op2a, op2b);
5689 if (!temp)
5690 return NULL_TREE;
5691 else if (!result)
5692 result = temp;
5693 else if (!same_bool_result_p (result, temp))
5694 return NULL_TREE;
5695 }
5696 else
5697 return NULL_TREE;
5698 }
5699 return result;
5700 }
5701
5702 default:
5703 break;
5704 }
5705 }
5706 return NULL_TREE;
5707 }
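
/* Worked example for the constant-merging logic above (a sketch; X is
   assumed to be an integer SSA name and the constants INTEGER_CSTs):

     (x < 5) AND (x < 10)   => x < 5    (more restrictive test)
     (x <= 3) AND (x >= 3)  => x == 3   (singleton range)
     (x < 3) AND (x > 5)    => false    (disjoint ranges)

   In each case CMP is the tree_int_cst_compare result of the two
   right-hand constants.  */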
5708
5709 /* Try to simplify the AND of two comparisons, specified by
5710 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5711 If this can be simplified to a single expression (without requiring
5712 introducing more SSA variables to hold intermediate values),
5713 return the resulting tree. Otherwise return NULL_TREE.
5714 If the result expression is non-null, it has boolean type. */
5715
5716 tree
5717 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5718 enum tree_code code2, tree op2a, tree op2b)
5719 {
5720 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5721 if (t)
5722 return t;
5723 else
5724 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5725 }
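
/* Usage sketch (hypothetical caller, simplified): a pass holding two
   compares of the same SSA name X can try

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, cst5,
                                          LT_EXPR, x, cst10);

   where CST5 and CST10 are INTEGER_CSTs; a non-null T is a
   boolean-typed tree, here (x < 5), that can replace the conjunction.
   Calling and_comparisons_1 with both operand orders compensates for
   its deliberate asymmetry.  */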
5726
5727 /* Helper function for or_comparisons_1: try to simplify the OR of the
5728 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5729 If INVERT is true, invert the value of VAR before doing the OR.
5730 Return NULL_TREE if we can't simplify this to a single expression. */
5731
5732 static tree
5733 or_var_with_comparison (tree var, bool invert,
5734 enum tree_code code2, tree op2a, tree op2b)
5735 {
5736 tree t;
5737 gimple *stmt = SSA_NAME_DEF_STMT (var);
5738
5739 /* We can only deal with variables whose definitions are assignments. */
5740 if (!is_gimple_assign (stmt))
5741 return NULL_TREE;
5742
5743 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5744 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5745 Then we only have to consider the simpler non-inverted cases. */
5746 if (invert)
5747 t = and_var_with_comparison_1 (stmt,
5748 invert_tree_comparison (code2, false),
5749 op2a, op2b);
5750 else
5751 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5752 return canonicalize_bool (t, invert);
5753 }
5754
5755 /* Try to simplify the OR of the ssa variable defined by the assignment
5756 STMT with the comparison specified by (OP2A CODE2 OP2B).
5757 Return NULL_TREE if we can't simplify this to a single expression. */
5758
5759 static tree
5760 or_var_with_comparison_1 (gimple *stmt,
5761 enum tree_code code2, tree op2a, tree op2b)
5762 {
5763 tree var = gimple_assign_lhs (stmt);
5764 tree true_test_var = NULL_TREE;
5765 tree false_test_var = NULL_TREE;
5766 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5767
5768 /* Check for identities like (var OR (var != 0)) => true. */
5769 if (TREE_CODE (op2a) == SSA_NAME
5770 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5771 {
5772 if ((code2 == NE_EXPR && integer_zerop (op2b))
5773 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5774 {
5775 true_test_var = op2a;
5776 if (var == true_test_var)
5777 return var;
5778 }
5779 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5780 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5781 {
5782 false_test_var = op2a;
5783 if (var == false_test_var)
5784 return boolean_true_node;
5785 }
5786 }
5787
5788 /* If the definition is a comparison, recurse on it. */
5789 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5790 {
5791 tree t = or_comparisons_1 (innercode,
5792 gimple_assign_rhs1 (stmt),
5793 gimple_assign_rhs2 (stmt),
5794 code2,
5795 op2a,
5796 op2b);
5797 if (t)
5798 return t;
5799 }
5800
5801 /* If the definition is an AND or OR expression, we may be able to
5802 simplify by reassociating. */
5803 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5804 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5805 {
5806 tree inner1 = gimple_assign_rhs1 (stmt);
5807 tree inner2 = gimple_assign_rhs2 (stmt);
5808 gimple *s;
5809 tree t;
5810 tree partial = NULL_TREE;
5811 bool is_or = (innercode == BIT_IOR_EXPR);
5812
5813 /* Check for boolean identities that don't require recursive examination
5814 of inner1/inner2:
5815 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5816 inner1 OR (inner1 AND inner2) => inner1
5817 !inner1 OR (inner1 OR inner2) => true
5818 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5819 */
5820 if (inner1 == true_test_var)
5821 return (is_or ? var : inner1);
5822 else if (inner2 == true_test_var)
5823 return (is_or ? var : inner2);
5824 else if (inner1 == false_test_var)
5825 return (is_or
5826 ? boolean_true_node
5827 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5828 else if (inner2 == false_test_var)
5829 return (is_or
5830 ? boolean_true_node
5831 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5832
5833 /* Next, redistribute/reassociate the OR across the inner tests.
5834 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
5835 if (TREE_CODE (inner1) == SSA_NAME
5836 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5837 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5838 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5839 gimple_assign_rhs1 (s),
5840 gimple_assign_rhs2 (s),
5841 code2, op2a, op2b)))
5842 {
5843 /* Handle the OR case, where we are reassociating:
5844 (inner1 OR inner2) OR (op2a code2 op2b)
5845 => (t OR inner2)
5846 If the partial result t is a constant, we win. Otherwise
5847 continue on to try reassociating with the other inner test. */
5848 if (is_or)
5849 {
5850 if (integer_onep (t))
5851 return boolean_true_node;
5852 else if (integer_zerop (t))
5853 return inner2;
5854 }
5855
5856 /* Handle the AND case, where we are redistributing:
5857 (inner1 AND inner2) OR (op2a code2 op2b)
5858 => (t AND (inner2 OR (op2a code2 op2b))) */
5859 else if (integer_zerop (t))
5860 return boolean_false_node;
5861
5862 /* Save partial result for later. */
5863 partial = t;
5864 }
5865
5866 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
5867 if (TREE_CODE (inner2) == SSA_NAME
5868 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5869 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5870 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5871 gimple_assign_rhs1 (s),
5872 gimple_assign_rhs2 (s),
5873 code2, op2a, op2b)))
5874 {
5875 /* Handle the OR case, where we are reassociating:
5876 (inner1 OR inner2) OR (op2a code2 op2b)
5877 => (inner1 OR t)
5878 => (t OR partial) */
5879 if (is_or)
5880 {
5881 if (integer_zerop (t))
5882 return inner1;
5883 else if (integer_onep (t))
5884 return boolean_true_node;
5885 /* If both are the same, we can apply the identity
5886 (x OR x) == x. */
5887 else if (partial && same_bool_result_p (t, partial))
5888 return t;
5889 }
5890
5891 /* Handle the AND case, where we are redistributing:
5892 (inner1 AND inner2) OR (op2a code2 op2b)
5893 => (t AND (inner1 OR (op2a code2 op2b)))
5894 => (t AND partial) */
5895 else
5896 {
5897 if (integer_zerop (t))
5898 return boolean_false_node;
5899 else if (partial)
5900 {
5901 /* We already got a simplification for the other
5902 operand to the redistributed AND expression. The
5903 interesting case is when at least one is true.
5904 Or, if both are the same, we can apply the identity
5905 (x AND x) == x. */
5906 if (integer_onep (partial))
5907 return t;
5908 else if (integer_onep (t))
5909 return partial;
5910 else if (same_bool_result_p (t, partial))
5911 return t;
5912 }
5913 }
5914 }
5915 }
5916 return NULL_TREE;
5917 }
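
/* Mirror example for the OR case (a sketch): if VAR is defined as

     var_1 = _2 & _3;

   and ORing the comparison defining _2 with (op2a code2 op2b) yields
   constant false, the redistributed form
   (t AND (inner2 OR (op2a code2 op2b))) is false as well, so
   boolean_false_node is returned without examining _3.  */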
5918
5919 /* Try to simplify the OR of two comparisons defined by
5920 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5921 If this can be done without constructing an intermediate value,
5922 return the resulting tree; otherwise NULL_TREE is returned.
5923 This function is deliberately asymmetric as it recurses on SSA_DEFs
5924 in the first comparison but not the second. */
5925
5926 static tree
5927 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5928 enum tree_code code2, tree op2a, tree op2b)
5929 {
5930 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5931
5932 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5933 if (operand_equal_p (op1a, op2a, 0)
5934 && operand_equal_p (op1b, op2b, 0))
5935 {
5936 /* Result will be either NULL_TREE, or a combined comparison. */
5937 tree t = combine_comparisons (UNKNOWN_LOCATION,
5938 TRUTH_ORIF_EXPR, code1, code2,
5939 truth_type, op1a, op1b);
5940 if (t)
5941 return t;
5942 }
5943
5944 /* Likewise the swapped case of the above. */
5945 if (operand_equal_p (op1a, op2b, 0)
5946 && operand_equal_p (op1b, op2a, 0))
5947 {
5948 /* Result will be either NULL_TREE, or a combined comparison. */
5949 tree t = combine_comparisons (UNKNOWN_LOCATION,
5950 TRUTH_ORIF_EXPR, code1,
5951 swap_tree_comparison (code2),
5952 truth_type, op1a, op1b);
5953 if (t)
5954 return t;
5955 }
5956
5957 /* If both comparisons are of the same value against constants, we might
5958 be able to merge them. */
5959 if (operand_equal_p (op1a, op2a, 0)
5960 && TREE_CODE (op1b) == INTEGER_CST
5961 && TREE_CODE (op2b) == INTEGER_CST)
5962 {
5963 int cmp = tree_int_cst_compare (op1b, op2b);
5964
5965 /* If we have (op1a != op1b), we should either be able to
5966 return that or TRUE, depending on whether the constant op1b
5967 also satisfies the other comparison against op2b. */
5968 if (code1 == NE_EXPR)
5969 {
5970 bool done = true;
5971 bool val;
5972 switch (code2)
5973 {
5974 case EQ_EXPR: val = (cmp == 0); break;
5975 case NE_EXPR: val = (cmp != 0); break;
5976 case LT_EXPR: val = (cmp < 0); break;
5977 case GT_EXPR: val = (cmp > 0); break;
5978 case LE_EXPR: val = (cmp <= 0); break;
5979 case GE_EXPR: val = (cmp >= 0); break;
5980 default: done = false;
5981 }
5982 if (done)
5983 {
5984 if (val)
5985 return boolean_true_node;
5986 else
5987 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5988 }
5989 }
5990 /* Likewise if the second comparison is a != comparison. */
5991 else if (code2 == NE_EXPR)
5992 {
5993 bool done = true;
5994 bool val;
5995 switch (code1)
5996 {
5997 case EQ_EXPR: val = (cmp == 0); break;
5998 case NE_EXPR: val = (cmp != 0); break;
5999 case LT_EXPR: val = (cmp > 0); break;
6000 case GT_EXPR: val = (cmp < 0); break;
6001 case LE_EXPR: val = (cmp >= 0); break;
6002 case GE_EXPR: val = (cmp <= 0); break;
6003 default: done = false;
6004 }
6005 if (done)
6006 {
6007 if (val)
6008 return boolean_true_node;
6009 else
6010 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6011 }
6012 }
6013
6014 /* See if an equality test is redundant with the other comparison. */
6015 else if (code1 == EQ_EXPR)
6016 {
6017 bool val;
6018 switch (code2)
6019 {
6020 case EQ_EXPR: val = (cmp == 0); break;
6021 case NE_EXPR: val = (cmp != 0); break;
6022 case LT_EXPR: val = (cmp < 0); break;
6023 case GT_EXPR: val = (cmp > 0); break;
6024 case LE_EXPR: val = (cmp <= 0); break;
6025 case GE_EXPR: val = (cmp >= 0); break;
6026 default:
6027 val = false;
6028 }
6029 if (val)
6030 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6031 }
6032 else if (code2 == EQ_EXPR)
6033 {
6034 bool val;
6035 switch (code1)
6036 {
6037 case EQ_EXPR: val = (cmp == 0); break;
6038 case NE_EXPR: val = (cmp != 0); break;
6039 case LT_EXPR: val = (cmp > 0); break;
6040 case GT_EXPR: val = (cmp < 0); break;
6041 case LE_EXPR: val = (cmp >= 0); break;
6042 case GE_EXPR: val = (cmp <= 0); break;
6043 default:
6044 val = false;
6045 }
6046 if (val)
6047 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6048 }
6049
6050 /* Choose the less restrictive of two < or <= comparisons. */
6051 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6052 && (code2 == LT_EXPR || code2 == LE_EXPR))
6053 {
6054 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6055 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6056 else
6057 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6058 }
6059
6060 /* Likewise choose the less restrictive of two > or >= comparisons. */
6061 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6062 && (code2 == GT_EXPR || code2 == GE_EXPR))
6063 {
6064 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6065 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6066 else
6067 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6068 }
6069
6070 /* Check for singleton ranges. */
6071 else if (cmp == 0
6072 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6073 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6074 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6075
6076 /* Check for less/greater pairs that don't restrict the range at all. */
6077 else if (cmp >= 0
6078 && (code1 == LT_EXPR || code1 == LE_EXPR)
6079 && (code2 == GT_EXPR || code2 == GE_EXPR))
6080 return boolean_true_node;
6081 else if (cmp <= 0
6082 && (code1 == GT_EXPR || code1 == GE_EXPR)
6083 && (code2 == LT_EXPR || code2 == LE_EXPR))
6084 return boolean_true_node;
6085 }
6086
6087 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6088 NAME's definition is a truth value. See if there are any simplifications
6089 that can be done against NAME's definition. */
6090 if (TREE_CODE (op1a) == SSA_NAME
6091 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6092 && (integer_zerop (op1b) || integer_onep (op1b)))
6093 {
6094 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6095 || (code1 == NE_EXPR && integer_onep (op1b)));
6096 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6097 switch (gimple_code (stmt))
6098 {
6099 case GIMPLE_ASSIGN:
6100 /* Try to simplify by copy-propagating the definition. */
6101 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6102
6103 case GIMPLE_PHI:
6104 /* If every argument to the PHI produces the same result when
6105 ORed with the second comparison, we win.
6106 Do not do this unless the type is bool since we need a bool
6107 result here anyway. */
6108 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6109 {
6110 tree result = NULL_TREE;
6111 unsigned i;
6112 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6113 {
6114 tree arg = gimple_phi_arg_def (stmt, i);
6115
6116 /* If this PHI has itself as an argument, ignore it.
6117 If all the other args produce the same result,
6118 we're still OK. */
6119 if (arg == gimple_phi_result (stmt))
6120 continue;
6121 else if (TREE_CODE (arg) == INTEGER_CST)
6122 {
6123 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6124 {
6125 if (!result)
6126 result = boolean_true_node;
6127 else if (!integer_onep (result))
6128 return NULL_TREE;
6129 }
6130 else if (!result)
6131 result = fold_build2 (code2, boolean_type_node,
6132 op2a, op2b);
6133 else if (!same_bool_comparison_p (result,
6134 code2, op2a, op2b))
6135 return NULL_TREE;
6136 }
6137 else if (TREE_CODE (arg) == SSA_NAME
6138 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6139 {
6140 tree temp;
6141 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6142 /* In simple cases we can look through PHI nodes,
6143 but we have to be careful with loops.
6144 See PR49073. */
6145 if (! dom_info_available_p (CDI_DOMINATORS)
6146 || gimple_bb (def_stmt) == gimple_bb (stmt)
6147 || dominated_by_p (CDI_DOMINATORS,
6148 gimple_bb (def_stmt),
6149 gimple_bb (stmt)))
6150 return NULL_TREE;
6151 temp = or_var_with_comparison (arg, invert, code2,
6152 op2a, op2b);
6153 if (!temp)
6154 return NULL_TREE;
6155 else if (!result)
6156 result = temp;
6157 else if (!same_bool_result_p (result, temp))
6158 return NULL_TREE;
6159 }
6160 else
6161 return NULL_TREE;
6162 }
6163 return result;
6164 }
6165
6166 default:
6167 break;
6168 }
6169 }
6170 return NULL_TREE;
6171 }
6172
6173 /* Try to simplify the OR of two comparisons, specified by
6174 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6175 If this can be simplified to a single expression (without requiring
6176 introducing more SSA variables to hold intermediate values),
6177 return the resulting tree. Otherwise return NULL_TREE.
6178 If the result expression is non-null, it has boolean type. */
6179
6180 tree
6181 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6182 enum tree_code code2, tree op2a, tree op2b)
6183 {
6184 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6185 if (t)
6186 return t;
6187 else
6188 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6189 }
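
/* Usage sketch (hypothetical, mirroring maybe_fold_and_comparisons):

     tree t = maybe_fold_or_comparisons (LT_EXPR, x, cst3,
                                         GT_EXPR, x, cst3);

   returns the tree for (x != 3) via the singleton-range check in
   or_comparisons_1, while (x < 5) OR (x >= 5) folds to
   boolean_true_node because the pair does not restrict the range
   at all.  */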
6190
6191
6192 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6193
6194 Either NULL_TREE, a simplified but non-constant expression,
6195 or a constant is returned.
6196
6197 ??? This should go into a gimple-fold-inline.h file to be eventually
6198 privatized with the single valueize function used in the various TUs
6199 to avoid the indirect function call overhead. */
6200
6201 tree
6202 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6203 tree (*gvalueize) (tree))
6204 {
6205 gimple_match_op res_op;
6206 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6207 edges if there are intermediate VARYING defs. For this reason
6208 do not follow SSA edges here even though SCCVN can technically
6209 deal with that just fine. */
6210 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6211 {
6212 tree res = NULL_TREE;
6213 if (gimple_simplified_result_is_gimple_val (&res_op))
6214 res = res_op.ops[0];
6215 else if (mprts_hook)
6216 res = mprts_hook (&res_op);
6217 if (res)
6218 {
6219 if (dump_file && dump_flags & TDF_DETAILS)
6220 {
6221 fprintf (dump_file, "Match-and-simplified ");
6222 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6223 fprintf (dump_file, " to ");
6224 print_generic_expr (dump_file, res);
6225 fprintf (dump_file, "\n");
6226 }
6227 return res;
6228 }
6229 }
6230
6231 location_t loc = gimple_location (stmt);
6232 switch (gimple_code (stmt))
6233 {
6234 case GIMPLE_ASSIGN:
6235 {
6236 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6237
6238 switch (get_gimple_rhs_class (subcode))
6239 {
6240 case GIMPLE_SINGLE_RHS:
6241 {
6242 tree rhs = gimple_assign_rhs1 (stmt);
6243 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6244
6245 if (TREE_CODE (rhs) == SSA_NAME)
6246 {
6247 /* If the RHS is an SSA_NAME, return its known constant value,
6248 if any. */
6249 return (*valueize) (rhs);
6250 }
6251 /* Handle propagating invariant addresses into address
6252 operations. */
6253 else if (TREE_CODE (rhs) == ADDR_EXPR
6254 && !is_gimple_min_invariant (rhs))
6255 {
6256 poly_int64 offset = 0;
6257 tree base;
6258 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6259 &offset,
6260 valueize);
6261 if (base
6262 && (CONSTANT_CLASS_P (base)
6263 || decl_address_invariant_p (base)))
6264 return build_invariant_address (TREE_TYPE (rhs),
6265 base, offset);
6266 }
6267 else if (TREE_CODE (rhs) == CONSTRUCTOR
6268 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6269 && known_eq (CONSTRUCTOR_NELTS (rhs),
6270 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6271 {
6272 unsigned i, nelts;
6273 tree val;
6274
6275 nelts = CONSTRUCTOR_NELTS (rhs);
6276 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6277 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6278 {
6279 val = (*valueize) (val);
6280 if (TREE_CODE (val) == INTEGER_CST
6281 || TREE_CODE (val) == REAL_CST
6282 || TREE_CODE (val) == FIXED_CST)
6283 vec.quick_push (val);
6284 else
6285 return NULL_TREE;
6286 }
6287
6288 return vec.build ();
6289 }
6290 if (subcode == OBJ_TYPE_REF)
6291 {
6292 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6293 /* If callee is constant, we can fold away the wrapper. */
6294 if (is_gimple_min_invariant (val))
6295 return val;
6296 }
6297
6298 if (kind == tcc_reference)
6299 {
6300 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6301 || TREE_CODE (rhs) == REALPART_EXPR
6302 || TREE_CODE (rhs) == IMAGPART_EXPR)
6303 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6304 {
6305 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6306 return fold_unary_loc (EXPR_LOCATION (rhs),
6307 TREE_CODE (rhs),
6308 TREE_TYPE (rhs), val);
6309 }
6310 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6311 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6312 {
6313 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6314 return fold_ternary_loc (EXPR_LOCATION (rhs),
6315 TREE_CODE (rhs),
6316 TREE_TYPE (rhs), val,
6317 TREE_OPERAND (rhs, 1),
6318 TREE_OPERAND (rhs, 2));
6319 }
6320 else if (TREE_CODE (rhs) == MEM_REF
6321 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6322 {
6323 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6324 if (TREE_CODE (val) == ADDR_EXPR
6325 && is_gimple_min_invariant (val))
6326 {
6327 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6328 unshare_expr (val),
6329 TREE_OPERAND (rhs, 1));
6330 if (tem)
6331 rhs = tem;
6332 }
6333 }
6334 return fold_const_aggregate_ref_1 (rhs, valueize);
6335 }
6336 else if (kind == tcc_declaration)
6337 return get_symbol_constant_value (rhs);
6338 return rhs;
6339 }
6340
6341 case GIMPLE_UNARY_RHS:
6342 return NULL_TREE;
6343
6344 case GIMPLE_BINARY_RHS:
6345 /* Translate &x + CST into an invariant form suitable for
6346 further propagation. */
6347 if (subcode == POINTER_PLUS_EXPR)
6348 {
6349 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6350 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6351 if (TREE_CODE (op0) == ADDR_EXPR
6352 && TREE_CODE (op1) == INTEGER_CST)
6353 {
6354 tree off = fold_convert (ptr_type_node, op1);
6355 return build_fold_addr_expr_loc
6356 (loc,
6357 fold_build2 (MEM_REF,
6358 TREE_TYPE (TREE_TYPE (op0)),
6359 unshare_expr (op0), off));
6360 }
6361 }
6362 /* Canonicalize bool != 0 and bool == 0 appearing after
6363 valueization. While gimple_simplify handles this,
6364 it can get confused by the ~X == 1 -> X == 0 transform
6365 which we can't reduce to an SSA name or a constant
6366 (and we have no way to tell gimple_simplify to not
6367 consider those transforms in the first place). */
6368 else if (subcode == EQ_EXPR
6369 || subcode == NE_EXPR)
6370 {
6371 tree lhs = gimple_assign_lhs (stmt);
6372 tree op0 = gimple_assign_rhs1 (stmt);
6373 if (useless_type_conversion_p (TREE_TYPE (lhs),
6374 TREE_TYPE (op0)))
6375 {
6376 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6377 op0 = (*valueize) (op0);
6378 if (TREE_CODE (op0) == INTEGER_CST)
6379 std::swap (op0, op1);
6380 if (TREE_CODE (op1) == INTEGER_CST
6381 && ((subcode == NE_EXPR && integer_zerop (op1))
6382 || (subcode == EQ_EXPR && integer_onep (op1))))
6383 return op0;
6384 }
6385 }
6386 return NULL_TREE;
6387
6388 case GIMPLE_TERNARY_RHS:
6389 {
6390 /* Handle ternary operators that can appear in GIMPLE form. */
6391 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6392 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6393 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6394 return fold_ternary_loc (loc, subcode,
6395 gimple_expr_type (stmt), op0, op1, op2);
6396 }
6397
6398 default:
6399 gcc_unreachable ();
6400 }
6401 }
6402
6403 case GIMPLE_CALL:
6404 {
6405 tree fn;
6406 gcall *call_stmt = as_a <gcall *> (stmt);
6407
6408 if (gimple_call_internal_p (stmt))
6409 {
6410 enum tree_code subcode = ERROR_MARK;
6411 switch (gimple_call_internal_fn (stmt))
6412 {
6413 case IFN_UBSAN_CHECK_ADD:
6414 subcode = PLUS_EXPR;
6415 break;
6416 case IFN_UBSAN_CHECK_SUB:
6417 subcode = MINUS_EXPR;
6418 break;
6419 case IFN_UBSAN_CHECK_MUL:
6420 subcode = MULT_EXPR;
6421 break;
6422 case IFN_BUILTIN_EXPECT:
6423 {
6424 tree arg0 = gimple_call_arg (stmt, 0);
6425 tree op0 = (*valueize) (arg0);
6426 if (TREE_CODE (op0) == INTEGER_CST)
6427 return op0;
6428 return NULL_TREE;
6429 }
6430 default:
6431 return NULL_TREE;
6432 }
6433 tree arg0 = gimple_call_arg (stmt, 0);
6434 tree arg1 = gimple_call_arg (stmt, 1);
6435 tree op0 = (*valueize) (arg0);
6436 tree op1 = (*valueize) (arg1);
6437
6438 if (TREE_CODE (op0) != INTEGER_CST
6439 || TREE_CODE (op1) != INTEGER_CST)
6440 {
6441 switch (subcode)
6442 {
6443 case MULT_EXPR:
6444 /* x * 0 = 0 * x = 0 without overflow. */
6445 if (integer_zerop (op0) || integer_zerop (op1))
6446 return build_zero_cst (TREE_TYPE (arg0));
6447 break;
6448 case MINUS_EXPR:
6449 /* y - y = 0 without overflow. */
6450 if (operand_equal_p (op0, op1, 0))
6451 return build_zero_cst (TREE_TYPE (arg0));
6452 break;
6453 default:
6454 break;
6455 }
6456 }
6457 tree res
6458 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6459 if (res
6460 && TREE_CODE (res) == INTEGER_CST
6461 && !TREE_OVERFLOW (res))
6462 return res;
6463 return NULL_TREE;
6464 }
6465
6466 fn = (*valueize) (gimple_call_fn (stmt));
6467 if (TREE_CODE (fn) == ADDR_EXPR
6468 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6469 && gimple_builtin_call_types_compatible_p (stmt,
6470 TREE_OPERAND (fn, 0)))
6471 {
6472 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6473 tree retval;
6474 unsigned i;
6475 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6476 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6477 retval = fold_builtin_call_array (loc,
6478 gimple_call_return_type (call_stmt),
6479 fn, gimple_call_num_args (stmt), args);
6480 if (retval)
6481 {
6482 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6483 STRIP_NOPS (retval);
6484 retval = fold_convert (gimple_call_return_type (call_stmt),
6485 retval);
6486 }
6487 return retval;
6488 }
6489 return NULL_TREE;
6490 }
6491
6492 default:
6493 return NULL_TREE;
6494 }
6495 }
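
/* Example of the POINTER_PLUS_EXPR handling above (a sketch): if
   valueization shows that in

     _1 = p_2 + 4;

   p_2 has the known value &a, the statement folds to an ADDR_EXPR of
   a MEM_REF of &a at offset 4, an invariant address suitable for
   further propagation; without a constant operand, NULL_TREE is
   returned.  */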
6496
6497 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6498 Returns NULL_TREE if folding to a constant is not possible, otherwise
6499 returns a constant according to is_gimple_min_invariant. */
6500
6501 tree
6502 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6503 {
6504 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6505 if (res && is_gimple_min_invariant (res))
6506 return res;
6507 return NULL_TREE;
6508 }
6509
6510
6511 /* The following set of functions are supposed to fold references using
6512 their constant initializers. */
6513
6514 /* See if we can find the constructor defining the value of BASE.
6515 When we know the constructor at a constant offset (such as when
6516 BASE is array[40] and we do know the constructor of the array),
6517 BIT_OFFSET is adjusted accordingly.
6518
6519 As a special case, return error_mark_node when constructor
6520 is not explicitly available, but it is known to be zero
6521 such as 'static const int a;'. */
6522 static tree
6523 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6524 tree (*valueize)(tree))
6525 {
6526 poly_int64 bit_offset2, size, max_size;
6527 bool reverse;
6528
6529 if (TREE_CODE (base) == MEM_REF)
6530 {
6531 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6532 if (!boff.to_shwi (bit_offset))
6533 return NULL_TREE;
6534
6535 if (valueize
6536 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6537 base = valueize (TREE_OPERAND (base, 0));
6538 if (!base || TREE_CODE (base) != ADDR_EXPR)
6539 return NULL_TREE;
6540 base = TREE_OPERAND (base, 0);
6541 }
6542 else if (valueize
6543 && TREE_CODE (base) == SSA_NAME)
6544 base = valueize (base);
6545
6546 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6547 DECL_INITIAL. If BASE is a nested reference into another
6548 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6549 the inner reference. */
6550 switch (TREE_CODE (base))
6551 {
6552 case VAR_DECL:
6553 case CONST_DECL:
6554 {
6555 tree init = ctor_for_folding (base);
6556
6557 /* Our semantics are the exact opposite of ctor_for_folding's:
6558 NULL means unknown, while error_mark_node means 0. */
6559 if (init == error_mark_node)
6560 return NULL_TREE;
6561 if (!init)
6562 return error_mark_node;
6563 return init;
6564 }
6565
6566 case VIEW_CONVERT_EXPR:
6567 return get_base_constructor (TREE_OPERAND (base, 0),
6568 bit_offset, valueize);
6569
6570 case ARRAY_REF:
6571 case COMPONENT_REF:
6572 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6573 &reverse);
6574 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6575 return NULL_TREE;
6576 *bit_offset += bit_offset2;
6577 return get_base_constructor (base, bit_offset, valueize);
6578
6579 case CONSTRUCTOR:
6580 return base;
6581
6582 default:
6583 if (CONSTANT_CLASS_P (base))
6584 return base;
6585
6586 return NULL_TREE;
6587 }
6588 }
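
/* Example (a sketch): for

     static const int a[3] = { 1, 2, 3 };

   a reference to a[2] reaches the VAR_DECL case and returns the
   CONSTRUCTOR { 1, 2, 3 } with the caller's bit offset adjusted to
   point at element 2, while 'static const int b;' with no explicit
   initializer returns error_mark_node, i.e. a value known to be
   zero.  */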
6589
6590 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6591 to the memory at bit OFFSET. When non-null, TYPE is the expected
6592 type of the reference; otherwise the type of the referenced element
6593 is used instead. When SIZE is zero, attempt to fold a reference to
6594 the entire element which OFFSET refers to. Increment *SUBOFF by
6595 the bit offset of the accessed element. */
6596
6597 static tree
6598 fold_array_ctor_reference (tree type, tree ctor,
6599 unsigned HOST_WIDE_INT offset,
6600 unsigned HOST_WIDE_INT size,
6601 tree from_decl,
6602 unsigned HOST_WIDE_INT *suboff)
6603 {
6604 offset_int low_bound;
6605 offset_int elt_size;
6606 offset_int access_index;
6607 tree domain_type = NULL_TREE;
6608 HOST_WIDE_INT inner_offset;
6609
6610 /* Compute low bound and elt size. */
6611 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6612 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6613 if (domain_type && TYPE_MIN_VALUE (domain_type))
6614 {
6615 /* Static constructors for variably sized objects make no sense. */
6616 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6617 return NULL_TREE;
6618 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6619 }
6620 else
6621 low_bound = 0;
6622 /* Static constructors for variably sized objects make no sense. */
6623 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6624 return NULL_TREE;
6625 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6626
6627 /* When TYPE is non-null, verify that it specifies a constant-sized
6628 access not larger than the size of an array element. */
6629 if (type
6630 && (!TYPE_SIZE_UNIT (type)
6631 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6632 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6633 || elt_size == 0))
6634 return NULL_TREE;
6635
6636 /* Compute the array index we look for. */
6637 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6638 elt_size);
6639 access_index += low_bound;
6640
6641 /* And offset within the access. */
6642 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6643
6644 /* See if the array field is large enough to span the whole access. We do not
6645 care to fold accesses spanning multiple array indexes. */
6646 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6647 return NULL_TREE;
6648 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6649 {
6650 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6651 {
6652 /* For the final reference to the entire accessed element
6653 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6654 may be null) in favor of the type of the element, and set
6655 SIZE to the size of the accessed element. */
6656 inner_offset = 0;
6657 type = TREE_TYPE (val);
6658 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6659 }
6660
6661 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6662 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6663 suboff);
6664 }
6665
6666 /* Memory not explicitly mentioned in constructor is 0 (or
6667 the reference is out of range). */
6668 return type ? build_zero_cst (type) : NULL_TREE;
6669 }
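
/* Worked example (a sketch): folding a 32-bit read at bit offset 64
   from the CONSTRUCTOR of

     static const int a[4] = { 10, 20, 30, 40 };

   computes elt_size = 4 bytes, access_index = 64/8/4 = 2 and
   inner_offset = 0, so the INTEGER_CST 30 is returned.  A read that
   straddles two elements fails the span check above and yields
   NULL_TREE.  */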
6670
6671 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6672 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6673 is the expected type of the reference; otherwise the type of
6674 the referenced member is used instead. When SIZE is zero,
6675 attempt to fold a reference to the entire member which OFFSET
6676 refers to. Increment *SUBOFF by the bit offset
6677 of the accessed member. */
6678
6679 static tree
6680 fold_nonarray_ctor_reference (tree type, tree ctor,
6681 unsigned HOST_WIDE_INT offset,
6682 unsigned HOST_WIDE_INT size,
6683 tree from_decl,
6684 unsigned HOST_WIDE_INT *suboff)
6685 {
6686 unsigned HOST_WIDE_INT cnt;
6687 tree cfield, cval;
6688
6689 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6690 cval)
6691 {
6692 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6693 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6694 tree field_size = DECL_SIZE (cfield);
6695
6696 if (!field_size)
6697 {
6698 /* Determine the size of the flexible array member from
6699 the size of the initializer provided for it. */
6700 field_size = TYPE_SIZE (TREE_TYPE (cval));
6701 }
6702
6703 /* Variable-sized objects in static constructors make no sense,
6704 but field_size can be NULL for flexible array members. */
6705 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6706 && TREE_CODE (byte_offset) == INTEGER_CST
6707 && (field_size != NULL_TREE
6708 ? TREE_CODE (field_size) == INTEGER_CST
6709 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6710
6711 /* Compute bit offset of the field. */
6712 offset_int bitoffset
6713 = (wi::to_offset (field_offset)
6714 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6715 /* Compute bit offset where the field ends. */
6716 offset_int bitoffset_end;
6717 if (field_size != NULL_TREE)
6718 bitoffset_end = bitoffset + wi::to_offset (field_size);
6719 else
6720 bitoffset_end = 0;
6721
6722 /* Compute the bit offset of the end of the desired access.
6723 As a special case, if the size of the desired access is
6724 zero, assume the access is to the entire field (and let
6725 the caller make any necessary adjustments by storing
6726 the actual bounds of the field in FIELDBOUNDS). */
6727 offset_int access_end = offset_int (offset);
6728 if (size)
6729 access_end += size;
6730 else
6731 access_end = bitoffset_end;
6732
6733 /* Is there any overlap between the desired access at
6734 [OFFSET, OFFSET+SIZE) and the offset of the field within
6735 the object at [BITOFFSET, BITOFFSET_END)? */
6736 if (wi::cmps (access_end, bitoffset) > 0
6737 && (field_size == NULL_TREE
6738 || wi::lts_p (offset, bitoffset_end)))
6739 {
6740 *suboff += bitoffset.to_uhwi ();
6741
6742 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6743 {
6744 /* For the final reference to the entire accessed member
6745 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6746 be null) in favor of the type of the member, and set
6747 SIZE to the size of the accessed member. */
6748 offset = bitoffset.to_uhwi ();
6749 type = TREE_TYPE (cval);
6750 size = (bitoffset_end - bitoffset).to_uhwi ();
6751 }
6752
6753 /* We do have overlap. Now see if the field is large enough
6754 to cover the access. Give up for accesses that extend
6755 beyond the end of the object or that span multiple fields. */
6756 if (wi::cmps (access_end, bitoffset_end) > 0)
6757 return NULL_TREE;
6758 if (offset < bitoffset)
6759 return NULL_TREE;
6760
6761 offset_int inner_offset = offset_int (offset) - bitoffset;
6762 return fold_ctor_reference (type, cval,
6763 inner_offset.to_uhwi (), size,
6764 from_decl, suboff);
6765 }
6766 }
6767 /* Memory not explicitly mentioned in constructor is 0. */
6768 return type ? build_zero_cst (type) : NULL_TREE;
6769 }
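
/* Example (a sketch, assuming a target with 32-bit int): for the
   initializer of

     struct S { int i; char c; } s = { 7, 'x' };

   a 32-bit read at bit offset 0 overlaps field I exactly, so the
   INTEGER_CST 7 is returned; a read covering both I and C spans
   multiple fields and yields NULL_TREE per the checks above.  */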
6770
6771 /* CTOR is value initializing memory. Fold a reference of TYPE and
6772 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6773 is zero, attempt to fold a reference to the entire subobject
6774 which OFFSET refers to. This is used when folding accesses to
6775 string members of aggregates. When non-null, set *SUBOFF to
6776 the bit offset of the accessed subobject. */
6777
6778 tree
6779 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6780 const poly_uint64 &poly_size, tree from_decl,
6781 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6782 {
6783 tree ret;
6784
6785 /* We found the field with exact match. */
6786 if (type
6787 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6788 && known_eq (poly_offset, 0U))
6789 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6790
6791 /* The remaining optimizations need a constant size and offset. */
6792 unsigned HOST_WIDE_INT size, offset;
6793 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6794 return NULL_TREE;
6795
6796 /* We are at the end of the walk; see if we can view-convert the
6797 result. */
6798 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6799 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6800 && !compare_tree_int (TYPE_SIZE (type), size)
6801 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6802 {
6803 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6804 if (ret)
6805 {
6806 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6807 if (ret)
6808 STRIP_USELESS_TYPE_CONVERSION (ret);
6809 }
6810 return ret;
6811 }
6812 /* For constants and byte-aligned/sized reads try to go through
6813 native_encode/interpret. */
6814 if (CONSTANT_CLASS_P (ctor)
6815 && BITS_PER_UNIT == 8
6816 && offset % BITS_PER_UNIT == 0
6817 && size % BITS_PER_UNIT == 0
6818 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6819 {
6820 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6821 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6822 offset / BITS_PER_UNIT);
6823 if (len > 0)
6824 return native_interpret_expr (type, buf, len);
6825 }
6826 if (TREE_CODE (ctor) == CONSTRUCTOR)
6827 {
6828 unsigned HOST_WIDE_INT dummy = 0;
6829 if (!suboff)
6830 suboff = &dummy;
6831
6832 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6833 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6834 return fold_array_ctor_reference (type, ctor, offset, size,
6835 from_decl, suboff);
6836
6837 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6838 from_decl, suboff);
6839 }
6840
6841 return NULL_TREE;
6842 }
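
/* Dispatch summary (informal): an exact type match at offset zero is
   returned directly; byte-aligned reads from a CONSTANT_CLASS_P
   initializer, e.g. one word of a larger INTEGER_CST, go through
   native_encode_expr/native_interpret_expr; CONSTRUCTORs are handed
   to fold_array_ctor_reference or fold_nonarray_ctor_reference
   depending on whether the constructed type is an array/vector or
   another aggregate.  */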
6843
6844 /* Return the tree representing the element referenced by T if T is an
6845 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6846 names using VALUEIZE. Return NULL_TREE otherwise. */
6847
6848 tree
6849 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6850 {
6851 tree ctor, idx, base;
6852 poly_int64 offset, size, max_size;
6853 tree tem;
6854 bool reverse;
6855
6856 if (TREE_THIS_VOLATILE (t))
6857 return NULL_TREE;
6858
6859 if (DECL_P (t))
6860 return get_symbol_constant_value (t);
6861
6862 tem = fold_read_from_constant_string (t);
6863 if (tem)
6864 return tem;
6865
6866 switch (TREE_CODE (t))
6867 {
6868 case ARRAY_REF:
6869 case ARRAY_RANGE_REF:
6870 /* Constant indexes are handled well by get_base_constructor.
6871 Only special-case variable offsets.
6872 FIXME: This code can't handle nested references with variable indexes
6873 (they will be handled only by iteration of ccp). Perhaps we can bring
6874 get_ref_base_and_extent here and make it use a valueize callback. */
6875 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6876 && valueize
6877 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6878 && poly_int_tree_p (idx))
6879 {
6880 tree low_bound, unit_size;
6881
6882 /* If the resulting bit-offset is constant, track it. */
6883 if ((low_bound = array_ref_low_bound (t),
6884 poly_int_tree_p (low_bound))
6885 && (unit_size = array_ref_element_size (t),
6886 tree_fits_uhwi_p (unit_size)))
6887 {
6888 poly_offset_int woffset
6889 = wi::sext (wi::to_poly_offset (idx)
6890 - wi::to_poly_offset (low_bound),
6891 TYPE_PRECISION (TREE_TYPE (idx)));
6892
6893 if (woffset.to_shwi (&offset))
6894 {
6895 /* TODO: This code seems wrong; it should multiply first and
6896 then check that the result fits. */
6897 offset *= tree_to_uhwi (unit_size);
6898 offset *= BITS_PER_UNIT;
6899
6900 base = TREE_OPERAND (t, 0);
6901 ctor = get_base_constructor (base, &offset, valueize);
6902 /* Empty constructor. Always fold to 0. */
6903 if (ctor == error_mark_node)
6904 return build_zero_cst (TREE_TYPE (t));
6905 /* Out-of-bounds array access. Value is undefined,
6906 but don't fold. */
6907 if (maybe_lt (offset, 0))
6908 return NULL_TREE;
6909 /* We cannot determine the ctor. */
6910 if (!ctor)
6911 return NULL_TREE;
6912 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6913 tree_to_uhwi (unit_size)
6914 * BITS_PER_UNIT,
6915 base);
6916 }
6917 }
6918 }
6919 /* Fallthru. */
6920
6921 case COMPONENT_REF:
6922 case BIT_FIELD_REF:
6923 case TARGET_MEM_REF:
6924 case MEM_REF:
6925 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6926 ctor = get_base_constructor (base, &offset, valueize);
6927
6928 /* Empty constructor. Always fold to 0. */
6929 if (ctor == error_mark_node)
6930 return build_zero_cst (TREE_TYPE (t));
6931 /* We do not know the precise address. */
6932 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6933 return NULL_TREE;
6934 /* We cannot determine the ctor. */
6935 if (!ctor)
6936 return NULL_TREE;
6937
6938 /* Out-of-bounds array access. Value is undefined, but don't fold. */
6939 if (maybe_lt (offset, 0))
6940 return NULL_TREE;
6941
6942 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6943 base);
6944
6945 case REALPART_EXPR:
6946 case IMAGPART_EXPR:
6947 {
6948 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6949 if (c && TREE_CODE (c) == COMPLEX_CST)
6950 return fold_build1_loc (EXPR_LOCATION (t),
6951 TREE_CODE (t), TREE_TYPE (t), c);
6952 break;
6953 }
6954
6955 default:
6956 break;
6957 }
6958
6959 return NULL_TREE;
6960 }
6961
6962 tree
6963 fold_const_aggregate_ref (tree t)
6964 {
6965 return fold_const_aggregate_ref_1 (t, NULL);
6966 }
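
/* Usage sketch (hypothetical): for

     static const int a[2] = { 1, 2 };
     ... = a[1];

   fold_const_aggregate_ref on the ARRAY_REF returns the INTEGER_CST 2;
   the _1 variant can additionally resolve a variable index whose SSA
   name valueizes to a constant.  */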
6967
6968 /* Look up the virtual method with index TOKEN in the virtual table V
6969 at OFFSET.
6970 If CAN_REFER is non-NULL, set it to false if the method is not
6971 referable or if the virtual table is ill-formed (such as one rewritten
6972 by a non-C++-produced symbol); otherwise just return NULL in that case. */
6973
6974 tree
6975 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6976 tree v,
6977 unsigned HOST_WIDE_INT offset,
6978 bool *can_refer)
6979 {
6980 tree vtable = v, init, fn;
6981 unsigned HOST_WIDE_INT size;
6982 unsigned HOST_WIDE_INT elt_size, access_index;
6983 tree domain_type;
6984
6985 if (can_refer)
6986 *can_refer = true;
6987
6988 /* First of all, double-check that we have a virtual table. */
6989 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6990 {
6991 /* Pass down that we lost track of the target. */
6992 if (can_refer)
6993 *can_refer = false;
6994 return NULL_TREE;
6995 }
6996
6997 init = ctor_for_folding (v);
6998
6999 /* The virtual tables should always be born with constructors
7000 and we should always assume that they are available for
7001 folding. At the moment we do not stream them in all cases,
7002 but it should never happen that the ctor seems unreachable. */
7003 gcc_assert (init);
7004 if (init == error_mark_node)
7005 {
7006 /* Pass down that we lost track of the target. */
7007 if (can_refer)
7008 *can_refer = false;
7009 return NULL_TREE;
7010 }
7011 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7012 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7013 offset *= BITS_PER_UNIT;
7014 offset += token * size;
7015
7016 /* Look up the value in the constructor, which is assumed to be an array.
7017 This is equivalent to
7018 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7019 offset, size, NULL);
7020 but in constant time. We expect that the frontend produced a simple
7021 array without indexed initializers. */
7022
7023 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7024 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7025 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7026 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7027
7028 access_index = offset / BITS_PER_UNIT / elt_size;
7029 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7030
7031 /* The C++ FE can now produce indexed fields, and we check whether the indexes
7032 match. */
7033 if (access_index < CONSTRUCTOR_NELTS (init))
7034 {
7035 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7036 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7037 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7038 STRIP_NOPS (fn);
7039 }
7040 else
7041 fn = NULL;
7042
7043 /* For a type-inconsistent program we may end up looking up a virtual method
7044 in a virtual table that does not contain TOKEN entries. We may overrun
7045 the virtual table and pick up a constant or RTTI info pointer.
7046 In any case the call is undefined. */
7047 if (!fn
7048 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7049 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7050 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7051 else
7052 {
7053 fn = TREE_OPERAND (fn, 0);
7054
7055 /* When the cgraph node is missing and the function is not public, we cannot
7056 devirtualize. This can happen in WHOPR when the actual method
7057 ends up in another partition, because we found the devirtualization
7058 possibility too late. */
7059 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7060 {
7061 if (can_refer)
7062 {
7063 *can_refer = false;
7064 return fn;
7065 }
7066 return NULL_TREE;
7067 }
7068 }
7069
7070 /* Make sure we create a cgraph node for functions we'll reference.
7071 They can be non-existent if the reference comes from an entry
7072 of an external vtable for example. */
7073 cgraph_node::get_create (fn);
7074
7075 return fn;
7076 }
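
/* Example of the index computation above (a sketch, assuming 64-bit
   vtable slots, i.e. size = 64 bits and elt_size = 8 bytes): with
   TOKEN = 2 and OFFSET = 16 the bit offset becomes
   16*8 + 2*64 = 256, so access_index = 256/8/8 = 4 and the fifth
   constructor element is the candidate FUNCTION_DECL address.  */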
7077
7078 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7079 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7080 KNOWN_BINFO carries the binfo describing the true type of
7081 OBJ_TYPE_REF_OBJECT(REF).
7082 Set CAN_REFER if non-NULL to false if method
7083 is not referable or if the virtual table is ill-formed (such as rewriten
7084 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7085
7086 tree
7087 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7088 bool *can_refer)
7089 {
7090 unsigned HOST_WIDE_INT offset;
7091 tree v;
7092
7093 v = BINFO_VTABLE (known_binfo);
7094 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
7095 if (!v)
7096 return NULL_TREE;
7097
7098 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7099 {
7100 if (can_refer)
7101 *can_refer = false;
7102 return NULL_TREE;
7103 }
7104 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7105 }
7106
7107 /* Given a pointer value T, return a simplified version of an
7108 indirection through T, or NULL_TREE if no simplification is
7109 possible. Note that the resulting type may differ from the
7110 pointed-to type, but only in ways that still leave the two
7111 compatible from the langhooks point of view. */
7112
7113 tree
7114 gimple_fold_indirect_ref (tree t)
7115 {
7116 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7117 tree sub = t;
7118 tree subtype;
7119
7120 STRIP_NOPS (sub);
7121 subtype = TREE_TYPE (sub);
7122 if (!POINTER_TYPE_P (subtype)
7123 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7124 return NULL_TREE;
7125
7126 if (TREE_CODE (sub) == ADDR_EXPR)
7127 {
7128 tree op = TREE_OPERAND (sub, 0);
7129 tree optype = TREE_TYPE (op);
7130 /* *&p => p */
7131 if (useless_type_conversion_p (type, optype))
7132 return op;
7133
7134 /* *(foo *)&fooarray => fooarray[0] */
7135 if (TREE_CODE (optype) == ARRAY_TYPE
7136 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7137 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7138 {
7139 tree type_domain = TYPE_DOMAIN (optype);
7140 tree min_val = size_zero_node;
7141 if (type_domain && TYPE_MIN_VALUE (type_domain))
7142 min_val = TYPE_MIN_VALUE (type_domain);
7143 if (TREE_CODE (min_val) == INTEGER_CST)
7144 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7145 }
7146 /* *(foo *)&complexfoo => __real__ complexfoo */
7147 else if (TREE_CODE (optype) == COMPLEX_TYPE
7148 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7149 return fold_build1 (REALPART_EXPR, type, op);
7150 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7151 else if (TREE_CODE (optype) == VECTOR_TYPE
7152 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7153 {
7154 tree part_width = TYPE_SIZE (type);
7155 tree index = bitsize_int (0);
7156 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7157 }
7158 }
7159
7160 /* *(p + CST) -> ... */
7161 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7162 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7163 {
7164 tree addr = TREE_OPERAND (sub, 0);
7165 tree off = TREE_OPERAND (sub, 1);
7166 tree addrtype;
7167
7168 STRIP_NOPS (addr);
7169 addrtype = TREE_TYPE (addr);
7170
7171 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7172 if (TREE_CODE (addr) == ADDR_EXPR
7173 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7174 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7175 && tree_fits_uhwi_p (off))
7176 {
7177 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7178 tree part_width = TYPE_SIZE (type);
7179 unsigned HOST_WIDE_INT part_widthi
7180 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7181 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7182 tree index = bitsize_int (indexi);
7183 if (known_lt (offset / part_widthi,
7184 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7185 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7186 part_width, index);
7187 }
7188
7189 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7190 if (TREE_CODE (addr) == ADDR_EXPR
7191 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7192 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7193 {
7194 tree size = TYPE_SIZE_UNIT (type);
7195 if (tree_int_cst_equal (size, off))
7196 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7197 }
7198
7199 /* *(p + CST) -> MEM_REF <p, CST>. */
7200 if (TREE_CODE (addr) != ADDR_EXPR
7201 || DECL_P (TREE_OPERAND (addr, 0)))
7202 return fold_build2 (MEM_REF, type,
7203 addr,
7204 wide_int_to_tree (ptype, wi::to_wide (off)));
7205 }
7206
7207 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7208 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7209 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7210 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7211 {
7212 tree type_domain;
7213 tree min_val = size_zero_node;
7214 tree osub = sub;
7215 sub = gimple_fold_indirect_ref (sub);
7216 if (! sub)
7217 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7218 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7219 if (type_domain && TYPE_MIN_VALUE (type_domain))
7220 min_val = TYPE_MIN_VALUE (type_domain);
7221 if (TREE_CODE (min_val) == INTEGER_CST)
7222 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7223 }
7224
7225 return NULL_TREE;
7226 }
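
/* Worked examples (editorial sketch).  At the source level the folds
   above correspond to, e.g.:

     float v4[4];  _Complex double c;

     *(float *) &v4        =>  v4[0]            (array case)
     *(double *) &c        =>  __real__ c       (complex case)
     ((double *) &c)[1]    =>  __imag__ c       (complex, offset case)
     *(p + 16)             =>  MEM_REF <p, 16>  (constant byte offset)

   where "p + 16" stands for a POINTER_PLUS_EXPR whose offset is
   already expressed in bytes, as it is in GIMPLE.  */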
7227
7228 /* Return true if CODE is an operation that when operating on signed
7229 integer types involves undefined behavior on overflow and the
7230 operation can be expressed with unsigned arithmetic. */
7231
7232 bool
7233 arith_code_with_undefined_signed_overflow (tree_code code)
7234 {
7235 switch (code)
7236 {
7237 case PLUS_EXPR:
7238 case MINUS_EXPR:
7239 case MULT_EXPR:
7240 case NEGATE_EXPR:
7241 case POINTER_PLUS_EXPR:
7242 return true;
7243 default:
7244 return false;
7245 }
7246 }
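
/* Typical caller pattern (editorial sketch; GSI and STMT are assumed to
   be in scope, and TYPE is the type of the assignment's LHS):

     if (INTEGRAL_TYPE_P (type)
         && TYPE_OVERFLOW_UNDEFINED (type)
         && arith_code_with_undefined_signed_overflow
              (gimple_assign_rhs_code (stmt)))
       gsi_replace_with_seq (&gsi, rewrite_to_defined_overflow (stmt), true);

   i.e. the predicate gates the rewrite defined just below.  */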
7247
7248 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7249 operation that can be transformed to unsigned arithmetic by converting
7250 its operands, carrying out the operation in the corresponding unsigned
7251 type and converting the result back to the original type.
7252
7253 Returns a sequence of statements that replace STMT and also contain
7254 a modified form of STMT itself. */
7255
7256 gimple_seq
7257 rewrite_to_defined_overflow (gimple *stmt)
7258 {
7259 if (dump_file && (dump_flags & TDF_DETAILS))
7260 {
7261 fprintf (dump_file, "rewriting stmt with undefined signed "
7262 "overflow ");
7263 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7264 }
7265
7266 tree lhs = gimple_assign_lhs (stmt);
7267 tree type = unsigned_type_for (TREE_TYPE (lhs));
7268 gimple_seq stmts = NULL;
7269 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7270 {
7271 tree op = gimple_op (stmt, i);
7272 op = gimple_convert (&stmts, type, op);
7273 gimple_set_op (stmt, i, op);
7274 }
7275 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7276 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7277 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7278 gimple_seq_add_stmt (&stmts, stmt);
7279 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7280 gimple_seq_add_stmt (&stmts, cvt);
7281
7282 return stmts;
7283 }
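
/* For example (editorial sketch of the resulting GIMPLE): a signed
   addition

     a_1 = b_2 + c_3;

   whose overflow would be undefined is replaced by the returned
   sequence

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;

   where the unsigned addition wraps and is therefore well defined.  */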
7284
7285
7286 /* The valueization hook we use for the gimple_build API simplification.
7287 This makes us match fold_buildN behavior by only combining with
7288 statements in the sequence(s) we are currently building. */
7289
7290 static tree
7291 gimple_build_valueize (tree op)
7292 {
7293 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7294 return op;
7295 return NULL_TREE;
7296 }
7297
7298 /* Build the expression CODE OP0 of type TYPE with location LOC,
7299 simplifying it first if possible. Returns the built
7300 expression value and appends statements possibly defining it
7301 to SEQ. */
7302
7303 tree
7304 gimple_build (gimple_seq *seq, location_t loc,
7305 enum tree_code code, tree type, tree op0)
7306 {
7307 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7308 if (!res)
7309 {
7310 res = create_tmp_reg_or_ssa_name (type);
7311 gimple *stmt;
7312 if (code == REALPART_EXPR
7313 || code == IMAGPART_EXPR
7314 || code == VIEW_CONVERT_EXPR)
7315 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7316 else
7317 stmt = gimple_build_assign (res, code, op0);
7318 gimple_set_location (stmt, loc);
7319 gimple_seq_add_stmt_without_update (seq, stmt);
7320 }
7321 return res;
7322 }
7323
7324 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7325 simplifying it first if possible. Returns the built
7326 expression value and appends statements possibly defining it
7327 to SEQ. */
7328
7329 tree
7330 gimple_build (gimple_seq *seq, location_t loc,
7331 enum tree_code code, tree type, tree op0, tree op1)
7332 {
7333 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7334 if (!res)
7335 {
7336 res = create_tmp_reg_or_ssa_name (type);
7337 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7338 gimple_set_location (stmt, loc);
7339 gimple_seq_add_stmt_without_update (seq, stmt);
7340 }
7341 return res;
7342 }
7343
7344 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7345 simplifying it first if possible. Returns the built
7346 expression value and appends statements possibly defining it
7347 to SEQ. */
7348
7349 tree
7350 gimple_build (gimple_seq *seq, location_t loc,
7351 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7352 {
7353 tree res = gimple_simplify (code, type, op0, op1, op2,
7354 seq, gimple_build_valueize);
7355 if (!res)
7356 {
7357 res = create_tmp_reg_or_ssa_name (type);
7358 gimple *stmt;
7359 if (code == BIT_FIELD_REF)
7360 stmt = gimple_build_assign (res, code,
7361 build3 (code, type, op0, op1, op2));
7362 else
7363 stmt = gimple_build_assign (res, code, op0, op1, op2);
7364 gimple_set_location (stmt, loc);
7365 gimple_seq_add_stmt_without_update (seq, stmt);
7366 }
7367 return res;
7368 }
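
/* Usage sketch for the tree_code overloads above (editorial; LOC, A and
   B are assumed to be in scope):

     gimple_seq seq = NULL;
     tree sum  = gimple_build (&seq, loc, PLUS_EXPR, TREE_TYPE (a), a, b);
     tree prod = gimple_build (&seq, loc, MULT_EXPR, TREE_TYPE (a), sum, a);

   If gimple_simplify can fold a request (say A is the constant 0 for
   the PLUS_EXPR), no statement is emitted and the simplified value is
   returned directly; otherwise a new SSA name is created and its
   defining assignment appended to SEQ.  */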
7369
7370 /* Build the call FN (ARG0) with a result of type TYPE
7371 (or no result if TYPE is void) with location LOC,
7372 simplifying it first if possible. Returns the built
7373 expression value (or NULL_TREE if TYPE is void) and appends
7374 statements possibly defining it to SEQ. */
7375
7376 tree
7377 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7378 tree type, tree arg0)
7379 {
7380 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7381 if (!res)
7382 {
7383 gcall *stmt;
7384 if (internal_fn_p (fn))
7385 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7386 else
7387 {
7388 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7389 stmt = gimple_build_call (decl, 1, arg0);
7390 }
7391 if (!VOID_TYPE_P (type))
7392 {
7393 res = create_tmp_reg_or_ssa_name (type);
7394 gimple_call_set_lhs (stmt, res);
7395 }
7396 gimple_set_location (stmt, loc);
7397 gimple_seq_add_stmt_without_update (seq, stmt);
7398 }
7399 return res;
7400 }
7401
7402 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7403 (or no result if TYPE is void) with location LOC,
7404 simplifying it first if possible. Returns the built
7405 expression value (or NULL_TREE if TYPE is void) and appends
7406 statements possibly defining it to SEQ. */
7407
7408 tree
7409 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7410 tree type, tree arg0, tree arg1)
7411 {
7412 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7413 if (!res)
7414 {
7415 gcall *stmt;
7416 if (internal_fn_p (fn))
7417 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7418 else
7419 {
7420 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7421 stmt = gimple_build_call (decl, 2, arg0, arg1);
7422 }
7423 if (!VOID_TYPE_P (type))
7424 {
7425 res = create_tmp_reg_or_ssa_name (type);
7426 gimple_call_set_lhs (stmt, res);
7427 }
7428 gimple_set_location (stmt, loc);
7429 gimple_seq_add_stmt_without_update (seq, stmt);
7430 }
7431 return res;
7432 }
7433
7434 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7435 (or no result if TYPE is void) with location LOC,
7436 simplifying it first if possible. Returns the built
7437 expression value (or NULL_TREE if TYPE is void) and appends
7438 statements possibly defining it to SEQ. */
7439
7440 tree
7441 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7442 tree type, tree arg0, tree arg1, tree arg2)
7443 {
7444 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7445 seq, gimple_build_valueize);
7446 if (!res)
7447 {
7448 gcall *stmt;
7449 if (internal_fn_p (fn))
7450 stmt = gimple_build_call_internal (as_internal_fn (fn),
7451 3, arg0, arg1, arg2);
7452 else
7453 {
7454 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7455 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7456 }
7457 if (!VOID_TYPE_P (type))
7458 {
7459 res = create_tmp_reg_or_ssa_name (type);
7460 gimple_call_set_lhs (stmt, res);
7461 }
7462 gimple_set_location (stmt, loc);
7463 gimple_seq_add_stmt_without_update (seq, stmt);
7464 }
7465 return res;
7466 }
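
/* Usage sketch for the combined_fn overloads above (editorial; X is an
   assumed operand of type double):

     gimple_seq seq = NULL;
     tree r = gimple_build (&seq, loc, CFN_BUILT_IN_SQRT,
                            double_type_node, x);

   For a constant X this simplifies to a constant via gimple_simplify;
   otherwise a call to __builtin_sqrt with LHS R is appended to SEQ.
   Note that for non-internal functions the builtin decl must be
   implicitly available (builtin_decl_implicit).  */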
7467
7468 /* Build the conversion (TYPE) OP with a result of type TYPE
7469 with location LOC if such conversion is necessary in GIMPLE,
7470 simplifying it first.
7471 Returns the built expression value and appends
7472 statements possibly defining it to SEQ. */
7473
7474 tree
7475 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7476 {
7477 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7478 return op;
7479 return gimple_build (seq, loc, NOP_EXPR, type, op);
7480 }
7481
7482 /* Build the conversion (ptrofftype) OP with a result of a type
7483 compatible with ptrofftype with location LOC if such conversion
7484 is necessary in GIMPLE, simplifying it first.
7485 Returns the built expression value and appends
7486 statements possibly defining it to SEQ. */
7487
7488 tree
7489 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7490 {
7491 if (ptrofftype_p (TREE_TYPE (op)))
7492 return op;
7493 return gimple_convert (seq, loc, sizetype, op);
7494 }
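
/* Usage sketch (editorial): converting an integer index to the pointer
   offset type before forming a POINTER_PLUS_EXPR:

     tree off  = gimple_convert_to_ptrofftype (&seq, loc, idx);
     tree ptr2 = gimple_build (&seq, loc, POINTER_PLUS_EXPR,
                               TREE_TYPE (ptr), ptr, off);

   Both conversion helpers are no-ops when the operand already has a
   suitable type, so callers need not check first.  */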
7495
7496 /* Build a vector of type TYPE in which each element has the value OP.
7497 Return a gimple value for the result, appending any new statements
7498 to SEQ. */
7499
7500 tree
7501 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7502 tree op)
7503 {
7504 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7505 && !CONSTANT_CLASS_P (op))
7506 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7507
7508 tree res, vec = build_vector_from_val (type, op);
7509 if (is_gimple_val (vec))
7510 return vec;
7511 if (gimple_in_ssa_p (cfun))
7512 res = make_ssa_name (type);
7513 else
7514 res = create_tmp_reg (type);
7515 gimple *stmt = gimple_build_assign (res, vec);
7516 gimple_set_location (stmt, loc);
7517 gimple_seq_add_stmt_without_update (seq, stmt);
7518 return res;
7519 }
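
/* Usage sketch (editorial; VECTYPE and X are assumed):

     tree splat = gimple_build_vector_from_val (&seq, loc, vectype, x);

   For a constant X the result is a VECTOR_CST and no statement is
   emitted; for variable-length vectors with a non-constant X a
   VEC_DUPLICATE_EXPR is built instead.  */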
7520
7521 /* Build a vector from BUILDER, handling the case in which some elements
7522 are non-constant. Return a gimple value for the result, appending any
7523 new instructions to SEQ.
7524
7525 BUILDER must not have a stepped encoding on entry. This is because
7526 the function is not geared up to handle the arithmetic that would
7527 be needed in the variable case, and any code building a vector that
7528 is known to be constant should use BUILDER->build () directly. */
7529
7530 tree
7531 gimple_build_vector (gimple_seq *seq, location_t loc,
7532 tree_vector_builder *builder)
7533 {
7534 gcc_assert (builder->nelts_per_pattern () <= 2);
7535 unsigned int encoded_nelts = builder->encoded_nelts ();
7536 for (unsigned int i = 0; i < encoded_nelts; ++i)
7537 if (!TREE_CONSTANT ((*builder)[i]))
7538 {
7539 tree type = builder->type ();
7540 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7541 vec<constructor_elt, va_gc> *v;
7542 vec_alloc (v, nelts);
7543 for (i = 0; i < nelts; ++i)
7544 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7545
7546 tree res;
7547 if (gimple_in_ssa_p (cfun))
7548 res = make_ssa_name (type);
7549 else
7550 res = create_tmp_reg (type);
7551 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7552 gimple_set_location (stmt, loc);
7553 gimple_seq_add_stmt_without_update (seq, stmt);
7554 return res;
7555 }
7556 return builder->build ();
7557 }
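
/* Usage sketch (editorial; ELTS is an assumed array of NELTS scalar
   values):

     tree_vector_builder builder (vectype, nelts, 1);
     for (unsigned int i = 0; i < nelts; ++i)
       builder.quick_push (elts[i]);
     tree vec = gimple_build_vector (&seq, loc, &builder);

   If every encoded element is constant the VECTOR_CST is returned
   directly; otherwise a CONSTRUCTOR is assigned to a new temporary.  */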
7558
7559 /* Return true if the result of assignment STMT is known to be non-negative.
7560 If the return value is based on the assumption that signed overflow is
7561 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7562 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7563
7564 static bool
7565 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7566 int depth)
7567 {
7568 enum tree_code code = gimple_assign_rhs_code (stmt);
7569 switch (get_gimple_rhs_class (code))
7570 {
7571 case GIMPLE_UNARY_RHS:
7572 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7573 gimple_expr_type (stmt),
7574 gimple_assign_rhs1 (stmt),
7575 strict_overflow_p, depth);
7576 case GIMPLE_BINARY_RHS:
7577 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7578 gimple_expr_type (stmt),
7579 gimple_assign_rhs1 (stmt),
7580 gimple_assign_rhs2 (stmt),
7581 strict_overflow_p, depth);
7582 case GIMPLE_TERNARY_RHS:
7583 return false;
7584 case GIMPLE_SINGLE_RHS:
7585 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7586 strict_overflow_p, depth);
7587 case GIMPLE_INVALID_RHS:
7588 break;
7589 }
7590 gcc_unreachable ();
7591 }
7592
7593 /* Return true if the return value of call STMT is known to be non-negative.
7594 If the return value is based on the assumption that signed overflow is
7595 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7596 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7597
7598 static bool
7599 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7600 int depth)
7601 {
7602 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7603 gimple_call_arg (stmt, 0) : NULL_TREE;
7604 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7605 gimple_call_arg (stmt, 1) : NULL_TREE;
7606
7607 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7608 gimple_call_combined_fn (stmt),
7609 arg0,
7610 arg1,
7611 strict_overflow_p, depth);
7612 }
7613
7614 /* Return true if the result of PHI node STMT is known to be non-negative.
7615 If the return value is based on the assumption that signed overflow is
7616 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7617 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7618
7619 static bool
7620 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7621 int depth)
7622 {
7623 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7624 {
7625 tree arg = gimple_phi_arg_def (stmt, i);
7626 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7627 return false;
7628 }
7629 return true;
7630 }
7631
7632 /* Return true if STMT is known to compute a non-negative value.
7633 If the return value is based on the assumption that signed overflow is
7634 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7635 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7636
7637 bool
7638 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7639 int depth)
7640 {
7641 switch (gimple_code (stmt))
7642 {
7643 case GIMPLE_ASSIGN:
7644 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7645 depth);
7646 case GIMPLE_CALL:
7647 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7648 depth);
7649 case GIMPLE_PHI:
7650 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7651 depth);
7652 default:
7653 return false;
7654 }
7655 }
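
/* Usage sketch (editorial; DEF_STMT is an assumed defining statement):

     bool strict_overflow_p = false;
     if (gimple_stmt_nonnegative_warnv_p (def_stmt, &strict_overflow_p, 0))
       ... the computed value is known to be non-negative ...

   If STRICT_OVERFLOW_P comes back true, the conclusion relies on signed
   overflow being undefined, and a caller performing a fold on that
   basis may want to warn under -Wstrict-overflow.  */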
7656
7657 /* Return true if the floating-point value computed by assignment STMT
7658 is known to have an integer value. We also allow +Inf, -Inf and NaN
7659 to be considered integer values. Return false for signaling NaN.
7660
7661 DEPTH is the current nesting depth of the query. */
7662
7663 static bool
7664 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7665 {
7666 enum tree_code code = gimple_assign_rhs_code (stmt);
7667 switch (get_gimple_rhs_class (code))
7668 {
7669 case GIMPLE_UNARY_RHS:
7670 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7671 gimple_assign_rhs1 (stmt), depth);
7672 case GIMPLE_BINARY_RHS:
7673 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7674 gimple_assign_rhs1 (stmt),
7675 gimple_assign_rhs2 (stmt), depth);
7676 case GIMPLE_TERNARY_RHS:
7677 return false;
7678 case GIMPLE_SINGLE_RHS:
7679 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7680 case GIMPLE_INVALID_RHS:
7681 break;
7682 }
7683 gcc_unreachable ();
7684 }
7685
7686 /* Return true if the floating-point value computed by call STMT is known
7687 to have an integer value. We also allow +Inf, -Inf and NaN to be
7688 considered integer values. Return false for signaling NaN.
7689
7690 DEPTH is the current nesting depth of the query. */
7691
7692 static bool
7693 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7694 {
7695 tree arg0 = (gimple_call_num_args (stmt) > 0
7696 ? gimple_call_arg (stmt, 0)
7697 : NULL_TREE);
7698 tree arg1 = (gimple_call_num_args (stmt) > 1
7699 ? gimple_call_arg (stmt, 1)
7700 : NULL_TREE);
7701 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7702 arg0, arg1, depth);
7703 }
7704
7705 /* Return true if the floating-point result of phi STMT is known to have
7706 an integer value. We also allow +Inf, -Inf and NaN to be considered
7707 integer values. Return false for signaling NaN.
7708
7709 DEPTH is the current nesting depth of the query. */
7710
7711 static bool
7712 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7713 {
7714 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7715 {
7716 tree arg = gimple_phi_arg_def (stmt, i);
7717 if (!integer_valued_real_single_p (arg, depth + 1))
7718 return false;
7719 }
7720 return true;
7721 }
7722
7723 /* Return true if the floating-point value computed by STMT is known
7724 to have an integer value. We also allow +Inf, -Inf and NaN to be
7725 considered integer values. Return false for signaling NaN.
7726
7727 DEPTH is the current nesting depth of the query. */
7728
7729 bool
7730 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7731 {
7732 switch (gimple_code (stmt))
7733 {
7734 case GIMPLE_ASSIGN:
7735 return gimple_assign_integer_valued_real_p (stmt, depth);
7736 case GIMPLE_CALL:
7737 return gimple_call_integer_valued_real_p (stmt, depth);
7738 case GIMPLE_PHI:
7739 return gimple_phi_integer_valued_real_p (stmt, depth);
7740 default:
7741 return false;
7742 }
7743 }
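
/* Usage sketch (editorial): a caller considering folding trunc (X) to X
   can query the defining statement of X:

     gimple *def = SSA_NAME_DEF_STMT (x);
     if (gimple_stmt_integer_valued_real_p (def, 0))
       ... X is already integer-valued (or an infinity or quiet NaN),
           so the rounding call is redundant ...  */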