c++: Handle COMPOUND_EXPRs in ocp_convert [PR94339]
gcc/gimple-fold.c
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     We are concerned only when DECL comes from the initializer of
     an external var, or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used, and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable, for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
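
/* As an illustrative sketch of the above (the exact trees depend on the
   front end): given

     static const int answer = 42;

   a load of ANSWER folds to the INTEGER_CST 42, while a 'const' register-type
   variable without an initializer that cannot be overridden at link or run
   time folds to zero via build_zero_cst.  */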


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
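
/* As an illustrative sketch (virtual operand names invented): replacing a
   single store "*p = x" carrying VUSE .MEM_1 / VDEF .MEM_2 with a two-store
   sequence conceptually rewires the virtual operands as

     *q = y;   // VUSE .MEM_1, VDEF .MEM_3 (fresh SSA name)
     *p = x;   // VUSE .MEM_3, VDEF .MEM_2 (reuses the original VDEF)

   so the first VUSE and the last VDEF of the sequence match those of the
   replaced statement, keeping the surrounding VDEF chain intact.  */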

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
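
/* Worked example of one case (illustrative sketch only): with a 64-bit
   size_t, SSIZE_MAX above is 2^63 - 1.  If SIZE was produced by converting
   a signed value known to be <= 0, its recorded range excludes
   [1, SSIZE_MAX], so intersecting it with the valid range [0, SSIZE_MAX]
   leaves only the value zero and the function returns true.  */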

/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      /* Now that we chose an access type, express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  Use an unsigned char[] type to
	 perform the copying to preserve padding and to avoid any issues
	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
	srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      new_stmt
	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
    set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
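
/* As a concrete illustration of the fast path above (a hedged sketch; the
   exact outcome depends on the target's alignment and move hooks): on a
   typical 64-bit target a call such as

     memcpy (&d, &s, 8);   // d and s are suitably aligned 8-byte objects

   can be folded to a single 64-bit load followed by a single 64-bit store,
   since 8 <= MOVE_MAX and 8 is a power of two.  */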

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }
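
  /* The shifts above splat the low byte of C across the word: e.g. for
     c == 0xab, cval becomes 0xabababababababab on a 64-bit host.  The
     final shift is split as "(cval << 31) << 1" so that no single shift
     equals the width of a 32-bit HOST_WIDE_INT, which would be undefined
     behavior.  (Explanatory note added for clarity.)  */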
1263
1264 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1265 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1266 gimple_move_vops (store, stmt);
1267 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1268 if (gimple_call_lhs (stmt))
1269 {
1270 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1271 gsi_replace (gsi, asgn, false);
1272 }
1273 else
1274 {
1275 gimple_stmt_iterator gsi2 = *gsi;
1276 gsi_prev (gsi);
1277 gsi_remove (&gsi2, true);
1278 }
1279
1280 return true;
1281 }
1282
1283 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1284
1285 static bool
1286 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1287 c_strlen_data *pdata, unsigned eltsize)
1288 {
1289 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1290
1291 /* The length computed by this invocation of the function. */
1292 tree val = NULL_TREE;
1293
1294 /* True if VAL is an optimistic (tight) bound determined from
1295 the size of the character array in which the string may be
1296 stored. In that case, the computed VAL is used to set
1297 PDATA->MAXBOUND. */
1298 bool tight_bound = false;
1299
1300 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1301 if (TREE_CODE (arg) == ADDR_EXPR
1302 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1303 {
1304 tree op = TREE_OPERAND (arg, 0);
1305 if (integer_zerop (TREE_OPERAND (op, 1)))
1306 {
1307 tree aop0 = TREE_OPERAND (op, 0);
1308 if (TREE_CODE (aop0) == INDIRECT_REF
1309 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1310 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1311 pdata, eltsize);
1312 }
1313 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1314 && rkind == SRK_LENRANGE)
1315 {
1316 /* Fail if an array is the last member of a struct object
1317 since it could be treated as a (fake) flexible array
1318 member. */
1319 tree idx = TREE_OPERAND (op, 1);
1320
1321 arg = TREE_OPERAND (op, 0);
1322 tree optype = TREE_TYPE (arg);
1323 if (tree dom = TYPE_DOMAIN (optype))
1324 if (tree bound = TYPE_MAX_VALUE (dom))
1325 if (TREE_CODE (bound) == INTEGER_CST
1326 && TREE_CODE (idx) == INTEGER_CST
1327 && tree_int_cst_lt (bound, idx))
1328 return false;
1329 }
1330 }
1331
1332 if (rkind == SRK_INT_VALUE)
1333 {
1334 /* We are computing the maximum value (not string length). */
1335 val = arg;
1336 if (TREE_CODE (val) != INTEGER_CST
1337 || tree_int_cst_sgn (val) < 0)
1338 return false;
1339 }
1340 else
1341 {
1342 c_strlen_data lendata = { };
1343 val = c_strlen (arg, 1, &lendata, eltsize);
1344
1345 if (!val && lendata.decl)
1346 {
1347 /* ARG refers to an unterminated const character array.
1348 DATA.DECL with size DATA.LEN. */
1349 val = lendata.minlen;
1350 pdata->decl = lendata.decl;
1351 }
1352 }
1353
1354 /* Set if VAL represents the maximum length based on array size (set
1355 when exact length cannot be determined). */
1356 bool maxbound = false;
1357
1358 if (!val && rkind == SRK_LENRANGE)
1359 {
1360 if (TREE_CODE (arg) == ADDR_EXPR)
1361 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1362 pdata, eltsize);
1363
1364 if (TREE_CODE (arg) == ARRAY_REF)
1365 {
1366 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1367
1368 /* Determine the "innermost" array type. */
1369 while (TREE_CODE (optype) == ARRAY_TYPE
1370 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1371 optype = TREE_TYPE (optype);
1372
1373 /* Avoid arrays of pointers. */
1374 tree eltype = TREE_TYPE (optype);
1375 if (TREE_CODE (optype) != ARRAY_TYPE
1376 || !INTEGRAL_TYPE_P (eltype))
1377 return false;
1378
1379 /* Fail when the array bound is unknown or zero. */
1380 val = TYPE_SIZE_UNIT (optype);
1381 if (!val
1382 || TREE_CODE (val) != INTEGER_CST
1383 || integer_zerop (val))
1384 return false;
1385
1386 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1387 integer_one_node);
1388
1389 /* Set the minimum size to zero since the string in
1390 the array could have zero length. */
1391 pdata->minlen = ssize_int (0);
1392
1393 tight_bound = true;
1394 }
1395 else if (TREE_CODE (arg) == COMPONENT_REF
1396 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1397 == ARRAY_TYPE))
1398 {
1399 /* Use the type of the member array to determine the upper
1400 bound on the length of the array. This may be overly
1401 optimistic if the array itself isn't NUL-terminated and
1402 the caller relies on the subsequent member to contain
1403 the NUL but that would only be considered valid if
1404 the array were the last member of a struct. */
1405
1406 tree fld = TREE_OPERAND (arg, 1);
1407
1408 tree optype = TREE_TYPE (fld);
1409
1410 /* Determine the "innermost" array type. */
1411 while (TREE_CODE (optype) == ARRAY_TYPE
1412 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1413 optype = TREE_TYPE (optype);
1414
1415 /* Fail when the array bound is unknown or zero. */
1416 val = TYPE_SIZE_UNIT (optype);
1417 if (!val
1418 || TREE_CODE (val) != INTEGER_CST
1419 || integer_zerop (val))
1420 return false;
1421 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1422 integer_one_node);
1423
1424 /* Set the minimum size to zero since the string in
1425 the array could have zero length. */
1426 pdata->minlen = ssize_int (0);
1427
1428 /* The array size determined above is an optimistic bound
1429 on the length. If the array isn't nul-terminated the
1430 length computed by the library function would be greater.
1431 Even though using strlen to cross the subobject boundary
1432 is undefined, avoid drawing conclusions from the member
1433 type about the length here. */
1434 tight_bound = true;
1435 }
1436 else if (TREE_CODE (arg) == MEM_REF
1437 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1438 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1439 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1440 {
1441 /* Handle a MEM_REF into a DECL accessing an array of integers,
1442 being conservative about references to extern structures with
1443 flexible array members that can be initialized to arbitrary
1444 numbers of elements as an extension (static structs are okay).
1445 FIXME: Make this less conservative -- see
1446 component_ref_size in tree.c. */
1447 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1448 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1449 && (decl_binds_to_current_def_p (ref)
1450 || !array_at_struct_end_p (arg)))
1451 {
1452 /* Fail if the offset is out of bounds. Such accesses
1453 should be diagnosed at some point. */
1454 val = DECL_SIZE_UNIT (ref);
1455 if (!val
1456 || TREE_CODE (val) != INTEGER_CST
1457 || integer_zerop (val))
1458 return false;
1459
1460 poly_offset_int psiz = wi::to_offset (val);
1461 poly_offset_int poff = mem_ref_offset (arg);
1462 if (known_le (psiz, poff))
1463 return false;
1464
1465 pdata->minlen = ssize_int (0);
1466
1467 /* Subtract the offset and one for the terminating nul. */
1468 psiz -= poff;
1469 psiz -= 1;
1470 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1471 /* Since VAL reflects the size of a declared object
1472 rather the type of the access it is not a tight bound. */
1473 }
1474 }
1475 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1476 {
1477 /* Avoid handling pointers to arrays. GCC might misuse
1478 a pointer to an array of one bound to point to an array
1479 object of a greater bound. */
1480 tree argtype = TREE_TYPE (arg);
1481 if (TREE_CODE (argtype) == ARRAY_TYPE)
1482 {
1483 val = TYPE_SIZE_UNIT (argtype);
1484 if (!val
1485 || TREE_CODE (val) != INTEGER_CST
1486 || integer_zerop (val))
1487 return false;
1488 val = wide_int_to_tree (TREE_TYPE (val),
1489 wi::sub (wi::to_wide (val), 1));
1490
1491 /* Set the minimum size to zero since the string in
1492 the array could have zero length. */
1493 pdata->minlen = ssize_int (0);
1494 }
1495 }
1496 maxbound = true;
1497 }
1498
1499 if (!val)
1500 return false;
1501
1502 /* Adjust the lower bound on the string length as necessary. */
1503 if (!pdata->minlen
1504 || (rkind != SRK_STRLEN
1505 && TREE_CODE (pdata->minlen) == INTEGER_CST
1506 && TREE_CODE (val) == INTEGER_CST
1507 && tree_int_cst_lt (val, pdata->minlen)))
1508 pdata->minlen = val;
1509
1510 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1511 {
1512 /* Adjust the tighter (more optimistic) string length bound
1513 if necessary and proceed to adjust the more conservative
1514 bound. */
1515 if (TREE_CODE (val) == INTEGER_CST)
1516 {
1517 if (tree_int_cst_lt (pdata->maxbound, val))
1518 pdata->maxbound = val;
1519 }
1520 else
1521 pdata->maxbound = val;
1522 }
1523 else if (pdata->maxbound || maxbound)
1524 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1525 if VAL corresponds to the maximum length determined based
1526 on the type of the object. */
1527 pdata->maxbound = val;
1528
1529 if (tight_bound)
1530 {
1531 /* VAL computed above represents an optimistically tight bound
1532 on the length of the string based on the referenced object's
1533 or subobject's type. Determine the conservative upper bound
1534 based on the enclosing object's size if possible. */
1535 if (rkind == SRK_LENRANGE)
1536 {
1537 poly_int64 offset;
1538 tree base = get_addr_base_and_unit_offset (arg, &offset);
1539 if (!base)
1540 {
1541 /* When the call above fails due to a non-constant offset
1542 assume the offset is zero and use the size of the whole
1543 enclosing object instead. */
1544 base = get_base_address (arg);
1545 offset = 0;
1546 }
1547 /* If the base object is a pointer no upper bound on the length
1548 can be determined. Otherwise the maximum length is equal to
1549 the size of the enclosing object minus the offset of
1550 the referenced subobject minus 1 (for the terminating nul). */
1551 tree type = TREE_TYPE (base);
1552 if (TREE_CODE (type) == POINTER_TYPE
1553 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1554 || !(val = DECL_SIZE_UNIT (base)))
1555 val = build_all_ones_cst (size_type_node);
1556 else
1557 {
1558 val = DECL_SIZE_UNIT (base);
1559 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1560 size_int (offset + 1));
1561 }
1562 }
1563 else
1564 return false;
1565 }
1566
1567 if (pdata->maxlen)
1568 {
1569 /* Adjust the more conservative bound if possible/necessary
1570 and fail otherwise. */
1571 if (rkind != SRK_STRLEN)
1572 {
1573 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1574 || TREE_CODE (val) != INTEGER_CST)
1575 return false;
1576
1577 if (tree_int_cst_lt (pdata->maxlen, val))
1578 pdata->maxlen = val;
1579 return true;
1580 }
1581 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1582 {
1583 /* Fail if the length of this ARG is different from that
1584 previously determined from another ARG. */
1585 return false;
1586 }
1587 }
1588
1589 pdata->maxlen = val;
1590 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1591 }
1592
1593 /* For an ARG referencing one or more strings, try to obtain the range
1594 of their lengths, or the size of the largest array ARG referes to if
1595 the range of lengths cannot be determined, and store all in *PDATA.
1596 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1597 the maximum constant value.
1598 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1599 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1600 length or if we are unable to determine the length, return false.
1601 VISITED is a bitmap of visited variables.
1602 RKIND determines the kind of value or range to obtain (see
1603 strlen_range_kind).
1604 Set PDATA->DECL if ARG refers to an unterminated constant array.
1605 On input, set ELTSIZE to 1 for normal single byte character strings,
1606 and either 2 or 4 for wide characer strings (the size of wchar_t).
1607 Return true if *PDATA was successfully populated and false otherwise. */
1608
1609 static bool
1610 get_range_strlen (tree arg, bitmap *visited,
1611 strlen_range_kind rkind,
1612 c_strlen_data *pdata, unsigned eltsize)
1613 {
1614
1615 if (TREE_CODE (arg) != SSA_NAME)
1616 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1617
1618 /* If ARG is registered for SSA update we cannot look at its defining
1619 statement. */
1620 if (name_registered_for_update_p (arg))
1621 return false;
1622
1623 /* If we were already here, break the infinite cycle. */
1624 if (!*visited)
1625 *visited = BITMAP_ALLOC (NULL);
1626 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1627 return true;
1628
1629 tree var = arg;
1630 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1631
1632 switch (gimple_code (def_stmt))
1633 {
1634 case GIMPLE_ASSIGN:
1635 /* The RHS of the statement defining VAR must either have a
1636 constant length or come from another SSA_NAME with a constant
1637 length. */
1638 if (gimple_assign_single_p (def_stmt)
1639 || gimple_assign_unary_nop_p (def_stmt))
1640 {
1641 tree rhs = gimple_assign_rhs1 (def_stmt);
1642 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1643 }
1644 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1645 {
1646 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1647 gimple_assign_rhs3 (def_stmt) };
1648
1649 for (unsigned int i = 0; i < 2; i++)
1650 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1651 {
1652 if (rkind != SRK_LENRANGE)
1653 return false;
1654 /* Set the upper bound to the maximum to prevent
1655 it from being adjusted in the next iteration but
1656 leave MINLEN and the more conservative MAXBOUND
1657 determined so far alone (or leave them null if
1658 they haven't been set yet). That the MINLEN is
1659 in fact zero can be determined from MAXLEN being
1660 unbounded but the discovered minimum is used for
1661 diagnostics. */
1662 pdata->maxlen = build_all_ones_cst (size_type_node);
1663 }
1664 return true;
1665 }
1666 return false;
1667
1668 case GIMPLE_PHI:
1669 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1670 must have a constant length. */
1671 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1672 {
1673 tree arg = gimple_phi_arg (def_stmt, i)->def;
1674
1675 /* If this PHI has itself as an argument, we cannot
1676 determine the string length of this argument. However,
1677 if we can find a constant string length for the other
1678 PHI args then we can still be sure that this is a
1679 constant string length. So be optimistic and just
1680 continue with the next argument. */
1681 if (arg == gimple_phi_result (def_stmt))
1682 continue;
1683
1684 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1685 {
1686 if (rkind != SRK_LENRANGE)
1687 return false;
1688 /* Set the upper bound to the maximum to prevent
1689 it from being adjusted in the next iteration but
1690 leave MINLEN and the more conservative MAXBOUND
1691 determined so far alone (or leave them null if
1692 they haven't been set yet). That the MINLEN is
1693 in fact zero can be determined from MAXLEN being
1694 unbounded but the discovered minimum is used for
1695 diagnostics. */
1696 pdata->maxlen = build_all_ones_cst (size_type_node);
1697 }
1698 }
1699 return true;
1700
1701 default:
1702 return false;
1703 }
1704 }
1705
1706 /* Try to obtain the range of the lengths of the string(s) referenced
1707 by ARG, or the size of the largest array ARG refers to if the range
1708 of lengths cannot be determined, and store all in *PDATA which must
1709 be zero-initialized on input except PDATA->MAXBOUND may be set to
1710 a non-null tree node other than INTEGER_CST to request to have it
1711 set to the length of the longest string in a PHI. ELTSIZE is
1712 the expected size of the string element in bytes: 1 for char and
1713 some power of 2 for wide characters.
1714 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1715 for optimization. Returning false means that a nonzero PDATA->MINLEN
1716 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1717 is -1 (in that case, the actual range is indeterminate, i.e.,
1718 [0, PTRDIFF_MAX - 2]. */
1719
1720 bool
1721 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1722 {
1723 bitmap visited = NULL;
1724 tree maxbound = pdata->maxbound;
1725
1726 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1727 {
1728 /* On failure extend the length range to an impossible maximum
1729 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1730 members can stay unchanged regardless. */
1731 pdata->minlen = ssize_int (0);
1732 pdata->maxlen = build_all_ones_cst (size_type_node);
1733 }
1734 else if (!pdata->minlen)
1735 pdata->minlen = ssize_int (0);
1736
1737 /* If it's unchanged from its initial non-null value, set the conservative
1738 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1739 if (maxbound && pdata->maxbound == maxbound)
1740 pdata->maxbound = build_all_ones_cst (size_type_node);
1741
1742 if (visited)
1743 BITMAP_FREE (visited);
1744
1745 return !integer_all_onesp (pdata->maxlen);
1746 }
1747
1748 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1749 For ARG of pointer types, NONSTR indicates if the caller is prepared
1750 to handle unterminated strings. For integer ARG and when RKIND ==
1751 SRK_INT_VALUE, NONSTR must be null.
1752
1753 If an unterminated array is discovered and our caller handles
1754 unterminated arrays, then bubble up the offending DECL and
1755 return the maximum size. Otherwise return NULL. */
1756
1757 static tree
1758 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1759 {
1760 /* A non-null NONSTR is meaningless when determining the maximum
1761 value of an integer ARG. */
1762 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1763 /* ARG must have an integral type when RKIND says so. */
1764 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1765
1766 bitmap visited = NULL;
1767
1768 /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
1769 is unbounded. */
1770 c_strlen_data lendata = { };
1771 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1772 lendata.maxlen = NULL_TREE;
1773 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1774 lendata.maxlen = NULL_TREE;
1775
1776 if (visited)
1777 BITMAP_FREE (visited);
1778
1779 if (nonstr)
1780 {
1781 /* For callers prepared to handle unterminated arrays set
1782 *NONSTR to point to the declaration of the array and return
1783 the maximum length/size. */
1784 *nonstr = lendata.decl;
1785 return lendata.maxlen;
1786 }
1787
1788 /* Fail if the constant array isn't nul-terminated. */
1789 return lendata.decl ? NULL_TREE : lendata.maxlen;
1790 }
1791
1792
1793 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1794 Return false if no simplification can be made, true if the call
1795 was folded and replaced at GSI. */
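/* As an illustration (D is a hypothetical destination), when the
   source length is known the folder below turns
     strcpy (d, "abc");
   into
     memcpy (d, "abc", 4);
   where 4 = strlen ("abc") + 1 includes the terminating nul. */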
1796
1797 static bool
1798 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1799 tree dest, tree src)
1800 {
1801 gimple *stmt = gsi_stmt (*gsi);
1802 location_t loc = gimple_location (stmt);
1803 tree fn;
1804
1805 /* If SRC and DEST are the same (and not volatile), return DEST. */
1806 if (operand_equal_p (src, dest, 0))
1807 {
1808 /* Issue -Wrestrict unless the pointers are null (those do
1809 not point to objects and so do not indicate an overlap;
1810 such calls could be the result of sanitization and jump
1811 threading). */
1812 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1813 {
1814 tree func = gimple_call_fndecl (stmt);
1815
1816 warning_at (loc, OPT_Wrestrict,
1817 "%qD source argument is the same as destination",
1818 func);
1819 }
1820
1821 replace_call_with_value (gsi, dest);
1822 return true;
1823 }
1824
1825 if (optimize_function_for_size_p (cfun))
1826 return false;
1827
1828 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1829 if (!fn)
1830 return false;
1831
1832 /* Set to non-null if SRC refers to an unterminated array. */
1833 tree nonstr = NULL;
1834 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1835
1836 if (nonstr)
1837 {
1838 /* Avoid folding calls with unterminated arrays. */
1839 if (!gimple_no_warning_p (stmt))
1840 warn_string_no_nul (loc, "strcpy", src, nonstr);
1841 gimple_set_no_warning (stmt, true);
1842 return false;
1843 }
1844
1845 if (!len)
1846 return false;
1847
1848 len = fold_convert_loc (loc, size_type_node, len);
1849 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1850 len = force_gimple_operand_gsi (gsi, len, true,
1851 NULL_TREE, true, GSI_SAME_STMT);
1852 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1855 }
1856
1857 /* Fold function call to builtin strncpy with arguments DEST, SRC,
1858 and LEN. Return false if no simplification can be made, true if
1859 the call was folded and replaced at GSI. */
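/* Sketch: with a constant source and a constant bound no larger than
   the source size (counting its nul),
     strncpy (d, "ab", 3);
   is rewritten as
     memcpy (d, "ab", 3);
   copying exactly LEN bytes, including the terminating nul here. */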
1860
1861 static bool
1862 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1863 tree dest, tree src, tree len)
1864 {
1865 gimple *stmt = gsi_stmt (*gsi);
1866 location_t loc = gimple_location (stmt);
1867 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1868
1869 /* If the LEN parameter is zero, return DEST. */
1870 if (integer_zerop (len))
1871 {
1872 /* Avoid warning if the destination refers to an array/pointer
1873 decorated with attribute nonstring. */
1874 if (!nonstring)
1875 {
1876 tree fndecl = gimple_call_fndecl (stmt);
1877
1878 /* Warn about the lack of nul termination: the result is not
1879 a (nul-terminated) string. */
1880 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1881 if (slen && !integer_zerop (slen))
1882 warning_at (loc, OPT_Wstringop_truncation,
1883 "%G%qD destination unchanged after copying no bytes "
1884 "from a string of length %E",
1885 stmt, fndecl, slen);
1886 else
1887 warning_at (loc, OPT_Wstringop_truncation,
1888 "%G%qD destination unchanged after copying no bytes",
1889 stmt, fndecl);
1890 }
1891
1892 replace_call_with_value (gsi, dest);
1893 return true;
1894 }
1895
1896 /* We can't compare slen with len as constants below if len is not a
1897 constant. */
1898 if (TREE_CODE (len) != INTEGER_CST)
1899 return false;
1900
1901 /* Now, we must be passed a constant src ptr parameter. */
1902 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1903 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1904 return false;
1905
1906 /* The size of the source string including the terminating nul. */
1907 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1908
1909 /* We do not support simplification of this case, though we do
1910 support it when expanding trees into RTL. */
1911 /* FIXME: generate a call to __builtin_memset. */
1912 if (tree_int_cst_lt (ssize, len))
1913 return false;
1914
1915 /* Diagnose truncation that leaves the copy unterminated. */
1916 maybe_diag_stxncpy_trunc (*gsi, src, len);
1917
1918 /* OK transform into builtin memcpy. */
1919 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1920 if (!fn)
1921 return false;
1922
1923 len = fold_convert_loc (loc, size_type_node, len);
1924 len = force_gimple_operand_gsi (gsi, len, true,
1925 NULL_TREE, true, GSI_SAME_STMT);
1926 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1927 replace_call_with_call_and_fold (gsi, repl);
1928
1929 return true;
1930 }
1931
1932 /* Fold function call to builtin strchr or strrchr.
1933 If both arguments are constant, evaluate and fold the result,
1934 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1935 In general strlen is significantly faster than strchr
1936 due to being a simpler operation. */
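/* Illustration: for a nul search character,
     p = strchr (s, 0);
   becomes the cheaper
     tmp = strlen (s);
     p = s + tmp;
   and fully constant calls such as strchr ("hello", 'l') fold to an
   offset into the string literal. */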
1937 static bool
1938 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1939 {
1940 gimple *stmt = gsi_stmt (*gsi);
1941 tree str = gimple_call_arg (stmt, 0);
1942 tree c = gimple_call_arg (stmt, 1);
1943 location_t loc = gimple_location (stmt);
1944 const char *p;
1945 char ch;
1946
1947 if (!gimple_call_lhs (stmt))
1948 return false;
1949
1950 /* Avoid folding if the first argument is not a nul-terminated array.
1951 Defer warning until later. */
1952 if (!check_nul_terminated_array (NULL_TREE, str))
1953 return false;
1954
1955 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1956 {
1957 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1958
1959 if (p1 == NULL)
1960 {
1961 replace_call_with_value (gsi, integer_zero_node);
1962 return true;
1963 }
1964
1965 tree len = build_int_cst (size_type_node, p1 - p);
1966 gimple_seq stmts = NULL;
1967 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1968 POINTER_PLUS_EXPR, str, len);
1969 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1970 gsi_replace_with_seq_vops (gsi, stmts);
1971 return true;
1972 }
1973
1974 if (!integer_zerop (c))
1975 return false;
1976
1977 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1978 if (is_strrchr && optimize_function_for_size_p (cfun))
1979 {
1980 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1981
1982 if (strchr_fn)
1983 {
1984 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1985 replace_call_with_call_and_fold (gsi, repl);
1986 return true;
1987 }
1988
1989 return false;
1990 }
1991
1992 tree len;
1993 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1994
1995 if (!strlen_fn)
1996 return false;
1997
1998 /* Create newstr = strlen (str). */
1999 gimple_seq stmts = NULL;
2000 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2001 gimple_set_location (new_stmt, loc);
2002 len = create_tmp_reg_or_ssa_name (size_type_node);
2003 gimple_call_set_lhs (new_stmt, len);
2004 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2005
2006 /* Create (str p+ strlen (str)). */
2007 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2008 POINTER_PLUS_EXPR, str, len);
2009 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2010 gsi_replace_with_seq_vops (gsi, stmts);
2011 /* gsi now points at the assignment to the lhs, get a
2012 stmt iterator to the strlen.
2013 ??? We can't use gsi_for_stmt as that doesn't work when the
2014 CFG isn't built yet. */
2015 gimple_stmt_iterator gsi2 = *gsi;
2016 gsi_prev (&gsi2);
2017 fold_stmt (&gsi2);
2018 return true;
2019 }
2020
2021 /* Fold function call to builtin strstr.
2022 If both arguments are constant, evaluate and fold the result,
2023 additionally fold strstr (x, "") into x and strstr (x, "c")
2024 into strchr (x, 'c'). */
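/* Illustrative foldings performed below (X is any string):
     strstr (x, "")   -> x
     strstr (x, "c")  -> strchr (x, 'c')
     strstr ("haystack", "st") -> "haystack" + 3, computed at
   compile time. */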
2025 static bool
2026 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2027 {
2028 gimple *stmt = gsi_stmt (*gsi);
2029 if (!gimple_call_lhs (stmt))
2030 return false;
2031
2032 tree haystack = gimple_call_arg (stmt, 0);
2033 tree needle = gimple_call_arg (stmt, 1);
2034
2035 /* Avoid folding if either argument is not a nul-terminated array.
2036 Defer warning until later. */
2037 if (!check_nul_terminated_array (NULL_TREE, haystack)
2038 || !check_nul_terminated_array (NULL_TREE, needle))
2039 return false;
2040
2041 const char *q = c_getstr (needle);
2042 if (q == NULL)
2043 return false;
2044
2045 if (const char *p = c_getstr (haystack))
2046 {
2047 const char *r = strstr (p, q);
2048
2049 if (r == NULL)
2050 {
2051 replace_call_with_value (gsi, integer_zero_node);
2052 return true;
2053 }
2054
2055 tree len = build_int_cst (size_type_node, r - p);
2056 gimple_seq stmts = NULL;
2057 gimple *new_stmt
2058 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2059 haystack, len);
2060 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2061 gsi_replace_with_seq_vops (gsi, stmts);
2062 return true;
2063 }
2064
2065 /* For strstr (x, "") return x. */
2066 if (q[0] == '\0')
2067 {
2068 replace_call_with_value (gsi, haystack);
2069 return true;
2070 }
2071
2072 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2073 if (q[1] == '\0')
2074 {
2075 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2076 if (strchr_fn)
2077 {
2078 tree c = build_int_cst (integer_type_node, q[0]);
2079 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2080 replace_call_with_call_and_fold (gsi, repl);
2081 return true;
2082 }
2083 }
2084
2085 return false;
2086 }
2087
2088 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2089 to the call.
2090
2091 Return true if the call was simplified, in which case the statement
2092 at GSI has been replaced (for example by a strlen call followed by
2093 a memcpy); otherwise return false. */
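/* Sketch of the strlen/memcpy split performed when the source length
   is known (D and S are illustrative):
     strcat (d, s);
   becomes, in effect,
     tmp = strlen (d);
     memcpy (d + tmp, s, strlen (s) + 1); */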
2105
2106 static bool
2107 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2108 {
2109 gimple *stmt = gsi_stmt (*gsi);
2110 location_t loc = gimple_location (stmt);
2111
2112 const char *p = c_getstr (src);
2113
2114 /* If the string length is zero, return the dst parameter. */
2115 if (p && *p == '\0')
2116 {
2117 replace_call_with_value (gsi, dst);
2118 return true;
2119 }
2120
2121 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2122 return false;
2123
2124 /* See if we can store by pieces into (dst + strlen(dst)). */
2125 tree newdst;
2126 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2127 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2128
2129 if (!strlen_fn || !memcpy_fn)
2130 return false;
2131
2132 /* If the length of the source string isn't computable don't
2133 split strcat into strlen and memcpy. */
2134 tree len = get_maxval_strlen (src, SRK_STRLEN);
2135 if (! len)
2136 return false;
2137
2138 /* Create strlen (dst). */
2139 gimple_seq stmts = NULL, stmts2;
2140 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2141 gimple_set_location (repl, loc);
2142 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2143 gimple_call_set_lhs (repl, newdst);
2144 gimple_seq_add_stmt_without_update (&stmts, repl);
2145
2146 /* Create (dst p+ strlen (dst)). */
2147 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2148 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2149 gimple_seq_add_seq_without_update (&stmts, stmts2);
2150
2151 len = fold_convert_loc (loc, size_type_node, len);
2152 len = size_binop_loc (loc, PLUS_EXPR, len,
2153 build_int_cst (size_type_node, 1));
2154 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2155 gimple_seq_add_seq_without_update (&stmts, stmts2);
2156
2157 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2158 gimple_seq_add_stmt_without_update (&stmts, repl);
2159 if (gimple_call_lhs (stmt))
2160 {
2161 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2162 gimple_seq_add_stmt_without_update (&stmts, repl);
2163 gsi_replace_with_seq_vops (gsi, stmts);
2164 /* gsi now points at the assignment to the lhs, get a
2165 stmt iterator to the memcpy call.
2166 ??? We can't use gsi_for_stmt as that doesn't work when the
2167 CFG isn't built yet. */
2168 gimple_stmt_iterator gsi2 = *gsi;
2169 gsi_prev (&gsi2);
2170 fold_stmt (&gsi2);
2171 }
2172 else
2173 {
2174 gsi_replace_with_seq_vops (gsi, stmts);
2175 fold_stmt (gsi);
2176 }
2177 return true;
2178 }
2179
2180 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2181 are the arguments to the call. */
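/* E.g., when SIZE is (size_t) -1, meaning the object size is unknown
   and the check is vacuous,
     __strcat_chk (d, s, (size_t) -1)
   is folded to plain strcat (d, s). */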
2182
2183 static bool
2184 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2185 {
2186 gimple *stmt = gsi_stmt (*gsi);
2187 tree dest = gimple_call_arg (stmt, 0);
2188 tree src = gimple_call_arg (stmt, 1);
2189 tree size = gimple_call_arg (stmt, 2);
2190 tree fn;
2191 const char *p;
2192
2194 p = c_getstr (src);
2195 /* If the SRC parameter is "", return DEST. */
2196 if (p && *p == '\0')
2197 {
2198 replace_call_with_value (gsi, dest);
2199 return true;
2200 }
2201
2202 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2203 return false;
2204
2205 /* If __builtin_strcat_chk is used, assume strcat is available. */
2206 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2207 if (!fn)
2208 return false;
2209
2210 gimple *repl = gimple_build_call (fn, 2, dest, src);
2211 replace_call_with_call_and_fold (gsi, repl);
2212 return true;
2213 }
2214
2215 /* Simplify a call to the strncat builtin. */
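/* For instance, once the bound is known to be no smaller than the
   constant source length,
     strncat (d, "abc", 5)
   behaves exactly like, and is rewritten as,
     strcat (d, "abc"); */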
2216
2217 static bool
2218 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2219 {
2220 gimple *stmt = gsi_stmt (*gsi);
2221 tree dst = gimple_call_arg (stmt, 0);
2222 tree src = gimple_call_arg (stmt, 1);
2223 tree len = gimple_call_arg (stmt, 2);
2224
2225 const char *p = c_getstr (src);
2226
2227 /* If the requested length is zero, or the src parameter string
2228 length is zero, return the dst parameter. */
2229 if (integer_zerop (len) || (p && *p == '\0'))
2230 {
2231 replace_call_with_value (gsi, dst);
2232 return true;
2233 }
2234
2235 if (TREE_CODE (len) != INTEGER_CST || !p)
2236 return false;
2237
2238 unsigned srclen = strlen (p);
2239
2240 int cmpsrc = compare_tree_int (len, srclen);
2241
2242 /* Return early if the requested len is less than the string length.
2243 Warnings will be issued elsewhere later. */
2244 if (cmpsrc < 0)
2245 return false;
2246
2247 unsigned HOST_WIDE_INT dstsize;
2248
2249 bool nowarn = gimple_no_warning_p (stmt);
2250
2251 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2252 {
2253 int cmpdst = compare_tree_int (len, dstsize);
2254
2255 if (cmpdst >= 0)
2256 {
2257 tree fndecl = gimple_call_fndecl (stmt);
2258
2259 /* Strncat copies (at most) LEN bytes and always appends
2260 the terminating NUL so the specified bound should never
2261 be equal to (or greater than) the size of the destination.
2262 If it is, the copy could overflow. */
2263 location_t loc = gimple_location (stmt);
2264 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2265 cmpdst == 0
2266 ? G_("%G%qD specified bound %E equals "
2267 "destination size")
2268 : G_("%G%qD specified bound %E exceeds "
2269 "destination size %wu"),
2270 stmt, fndecl, len, dstsize);
2271 if (nowarn)
2272 gimple_set_no_warning (stmt, true);
2273 }
2274 }
2275
2276 if (!nowarn && cmpsrc == 0)
2277 {
2278 tree fndecl = gimple_call_fndecl (stmt);
2279 location_t loc = gimple_location (stmt);
2280
2281 /* To avoid possible overflow the specified bound should also
2282 not be equal to the length of the source, even when the size
2283 of the destination is unknown (it's not an uncommon mistake
2284 to specify as the bound to strncat the length of the source). */
2285 if (warning_at (loc, OPT_Wstringop_overflow_,
2286 "%G%qD specified bound %E equals source length",
2287 stmt, fndecl, len))
2288 gimple_set_no_warning (stmt, true);
2289 }
2290
2291 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2292
2293 /* If the replacement _DECL isn't initialized, don't do the
2294 transformation. */
2295 if (!fn)
2296 return false;
2297
2298 /* Otherwise, emit a call to strcat. */
2299 gcall *repl = gimple_build_call (fn, 2, dst, src);
2300 replace_call_with_call_and_fold (gsi, repl);
2301 return true;
2302 }
2303
2304 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2305 LEN, and SIZE. */
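/* Sketch: with LEN >= strlen (SRC) the call
     __strncat_chk (d, s, n, sz)
   can be folded to __strcat_chk (d, s, sz), and with SZ equal to
   (size_t) -1 it degrades to plain strncat (d, s, n). */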
2306
2307 static bool
2308 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2309 {
2310 gimple *stmt = gsi_stmt (*gsi);
2311 tree dest = gimple_call_arg (stmt, 0);
2312 tree src = gimple_call_arg (stmt, 1);
2313 tree len = gimple_call_arg (stmt, 2);
2314 tree size = gimple_call_arg (stmt, 3);
2315 tree fn;
2316 const char *p;
2317
2318 p = c_getstr (src);
2319 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2320 if ((p && *p == '\0')
2321 || integer_zerop (len))
2322 {
2323 replace_call_with_value (gsi, dest);
2324 return true;
2325 }
2326
2327 if (! tree_fits_uhwi_p (size))
2328 return false;
2329
2330 if (! integer_all_onesp (size))
2331 {
2332 tree src_len = c_strlen (src, 1);
2333 if (src_len
2334 && tree_fits_uhwi_p (src_len)
2335 && tree_fits_uhwi_p (len)
2336 && ! tree_int_cst_lt (len, src_len))
2337 {
2338 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2339 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2340 if (!fn)
2341 return false;
2342
2343 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2344 replace_call_with_call_and_fold (gsi, repl);
2345 return true;
2346 }
2347 return false;
2348 }
2349
2350 /* If __builtin_strncat_chk is used, assume strncat is available. */
2351 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2352 if (!fn)
2353 return false;
2354
2355 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2356 replace_call_with_call_and_fold (gsi, repl);
2357 return true;
2358 }
2359
2360 /* Build and append gimple statements to STMTS that load the first
2361 character of the memory location identified by STR. LOC is the
2362 location of the statement. */
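/* E.g., for STR == s the emitted sequence is equivalent to
     unsigned char tmp = *(const unsigned char *) s;
   with TMP returned to the caller. */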
2363
2364 static tree
2365 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2366 {
2367 tree var;
2368
2369 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2370 tree cst_uchar_ptr_node
2371 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2372 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2373
2374 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2375 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2376 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2377
2378 gimple_assign_set_lhs (stmt, var);
2379 gimple_seq_add_stmt_without_update (stmts, stmt);
2380
2381 return var;
2382 }
2383
2384 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
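/* A few illustrative shapes handled below:
     strcmp (s, s)       -> 0
     strncmp (s1, s2, 0) -> 0
     strcmp (s, "")      -> *(const unsigned char *) s
     strncmp (s1, s2, 1) -> *(const unsigned char *) s1
                            - *(const unsigned char *) s2
   and calls with two constant strings fold to a constant result. */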
2385
2386 static bool
2387 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2388 {
2389 gimple *stmt = gsi_stmt (*gsi);
2390 tree callee = gimple_call_fndecl (stmt);
2391 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2392
2393 tree type = integer_type_node;
2394 tree str1 = gimple_call_arg (stmt, 0);
2395 tree str2 = gimple_call_arg (stmt, 1);
2396 tree lhs = gimple_call_lhs (stmt);
2397
2398 tree bound_node = NULL_TREE;
2399 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2400
2401 /* Handle strncmp and strncasecmp functions. */
2402 if (gimple_call_num_args (stmt) == 3)
2403 {
2404 bound_node = gimple_call_arg (stmt, 2);
2405 if (tree_fits_uhwi_p (bound_node))
2406 bound = tree_to_uhwi (bound_node);
2407 }
2408
2409 /* If the BOUND parameter is zero, return zero. */
2410 if (bound == 0)
2411 {
2412 replace_call_with_value (gsi, integer_zero_node);
2413 return true;
2414 }
2415
2416 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2417 if (operand_equal_p (str1, str2, 0))
2418 {
2419 replace_call_with_value (gsi, integer_zero_node);
2420 return true;
2421 }
2422
2423 /* Initially set to the number of characters, including the terminating
2424 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2425 the array Sx is not terminated by a nul.
2426 For nul-terminated strings LENx is then adjusted to the string
2427 length so that LENx == NULPOSx holds. */
2428 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2429 const char *p1 = c_getstr (str1, &len1);
2430 const char *p2 = c_getstr (str2, &len2);
2431
2432 /* The position of the terminating nul character if one exists, otherwise
2433 a value greater than LENx. */
2434 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2435
2436 if (p1)
2437 {
2438 size_t n = strnlen (p1, len1);
2439 if (n < len1)
2440 len1 = nulpos1 = n;
2441 }
2442
2443 if (p2)
2444 {
2445 size_t n = strnlen (p2, len2);
2446 if (n < len2)
2447 len2 = nulpos2 = n;
2448 }
2449
2450 /* For known strings, return an immediate value. */
2451 if (p1 && p2)
2452 {
2453 int r = 0;
2454 bool known_result = false;
2455
2456 switch (fcode)
2457 {
2458 case BUILT_IN_STRCMP:
2459 case BUILT_IN_STRCMP_EQ:
2460 if (len1 != nulpos1 || len2 != nulpos2)
2461 break;
2462
2463 r = strcmp (p1, p2);
2464 known_result = true;
2465 break;
2466
2467 case BUILT_IN_STRNCMP:
2468 case BUILT_IN_STRNCMP_EQ:
2469 {
2470 if (bound == HOST_WIDE_INT_M1U)
2471 break;
2472
2473 /* Reduce the bound to be no more than the length
2474 of the shorter of the two strings, or the sizes
2475 of the unterminated arrays. */
2476 unsigned HOST_WIDE_INT n = bound;
2477
2478 if (len1 == nulpos1 && len1 < n)
2479 n = len1 + 1;
2480 if (len2 == nulpos2 && len2 < n)
2481 n = len2 + 1;
2482
2483 if (MIN (nulpos1, nulpos2) + 1 < n)
2484 break;
2485
2486 r = strncmp (p1, p2, n);
2487 known_result = true;
2488 break;
2489 }
2490 /* The only situation we can handle is when the strings are equal
2491 (result 0), which is already covered by the operand_equal_p case. */
2492 case BUILT_IN_STRCASECMP:
2493 break;
2494 case BUILT_IN_STRNCASECMP:
2495 {
2496 if (bound == HOST_WIDE_INT_M1U)
2497 break;
2498 r = strncmp (p1, p2, bound);
2499 if (r == 0)
2500 known_result = true;
2501 break;
2502 }
2503 default:
2504 gcc_unreachable ();
2505 }
2506
2507 if (known_result)
2508 {
2509 replace_call_with_value (gsi, build_cmp_result (type, r));
2510 return true;
2511 }
2512 }
2513
2514 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2515 || fcode == BUILT_IN_STRCMP
2516 || fcode == BUILT_IN_STRCMP_EQ
2517 || fcode == BUILT_IN_STRCASECMP;
2518
2519 location_t loc = gimple_location (stmt);
2520
2521 /* If the second arg is "", return *(const unsigned char*)arg1. */
2522 if (p2 && *p2 == '\0' && nonzero_bound)
2523 {
2524 gimple_seq stmts = NULL;
2525 tree var = gimple_load_first_char (loc, str1, &stmts);
2526 if (lhs)
2527 {
2528 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2529 gimple_seq_add_stmt_without_update (&stmts, stmt);
2530 }
2531
2532 gsi_replace_with_seq_vops (gsi, stmts);
2533 return true;
2534 }
2535
2536 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2537 if (p1 && *p1 == '\0' && nonzero_bound)
2538 {
2539 gimple_seq stmts = NULL;
2540 tree var = gimple_load_first_char (loc, str2, &stmts);
2541
2542 if (lhs)
2543 {
2544 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2545 stmt = gimple_build_assign (c, NOP_EXPR, var);
2546 gimple_seq_add_stmt_without_update (&stmts, stmt);
2547
2548 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2549 gimple_seq_add_stmt_without_update (&stmts, stmt);
2550 }
2551
2552 gsi_replace_with_seq_vops (gsi, stmts);
2553 return true;
2554 }
2555
2556 /* If BOUND is one, return an expression corresponding to
2557 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2558 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2559 {
2560 gimple_seq stmts = NULL;
2561 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2562 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2563
2564 if (lhs)
2565 {
2566 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2567 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2568 gimple_seq_add_stmt_without_update (&stmts, convert1);
2569
2570 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2571 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2572 gimple_seq_add_stmt_without_update (&stmts, convert2);
2573
2574 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2575 gimple_seq_add_stmt_without_update (&stmts, stmt);
2576 }
2577
2578 gsi_replace_with_seq_vops (gsi, stmts);
2579 return true;
2580 }
2581
2582 /* If BOUND is greater than the length of one constant string,
2583 and the other argument is also a nul-terminated string, replace
2584 strncmp with strcmp. */
2585 if (fcode == BUILT_IN_STRNCMP
2586 && bound > 0 && bound < HOST_WIDE_INT_M1U
2587 && ((p2 && len2 < bound && len2 == nulpos2)
2588 || (p1 && len1 < bound && len1 == nulpos1)))
2589 {
2590 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2591 if (!fn)
2592 return false;
2593 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2594 replace_call_with_call_and_fold (gsi, repl);
2595 return true;
2596 }
2597
2598 return false;
2599 }
2600
2601 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
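/* Illustrative cases:
     memchr (s, c, 0)         -> NULL
     memchr ("hello", 'l', 5) -> "hello" + 2
   and a constant search known to miss within the underlying object
   folds to NULL outright. */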
2602
2603 static bool
2604 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2605 {
2606 gimple *stmt = gsi_stmt (*gsi);
2607 tree lhs = gimple_call_lhs (stmt);
2608 tree arg1 = gimple_call_arg (stmt, 0);
2609 tree arg2 = gimple_call_arg (stmt, 1);
2610 tree len = gimple_call_arg (stmt, 2);
2611
2612 /* If the LEN parameter is zero, return zero. */
2613 if (integer_zerop (len))
2614 {
2615 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2616 return true;
2617 }
2618
2619 char c;
2620 if (TREE_CODE (arg2) != INTEGER_CST
2621 || !tree_fits_uhwi_p (len)
2622 || !target_char_cst_p (arg2, &c))
2623 return false;
2624
2625 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2626 unsigned HOST_WIDE_INT string_length;
2627 const char *p1 = c_getstr (arg1, &string_length);
2628
2629 if (p1)
2630 {
2631 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2632 if (r == NULL)
2633 {
2634 tree mem_size, offset_node;
2635 string_constant (arg1, &offset_node, &mem_size, NULL);
2636 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2637 ? 0 : tree_to_uhwi (offset_node);
2638 /* MEM_SIZE is the size of the array the string literal
2639 is stored in. */
2640 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2641 gcc_checking_assert (string_length <= string_size);
2642 if (length <= string_size)
2643 {
2644 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2645 return true;
2646 }
2647 }
2648 else
2649 {
2650 unsigned HOST_WIDE_INT offset = r - p1;
2651 gimple_seq stmts = NULL;
2652 if (lhs != NULL_TREE)
2653 {
2654 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2655 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2656 arg1, offset_cst);
2657 gimple_seq_add_stmt_without_update (&stmts, stmt);
2658 }
2659 else
2660 gimple_seq_add_stmt_without_update (&stmts,
2661 gimple_build_nop ());
2662
2663 gsi_replace_with_seq_vops (gsi, stmts);
2664 return true;
2665 }
2666 }
2667
2668 return false;
2669 }
2670
2671 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2672 to the call. UNLOCKED is true if this is actually a call to
2673 fputs_unlocked. Return false if no simplification was possible,
2674 true if the call was folded and replaced at GSI. */
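/* With the result unused, the foldings below amount to
     fputs ("", f)   -> (removed)
     fputs ("a", f)  -> fputc ('a', f)
     fputs ("ab", f) -> fwrite ("ab", 1, 2, f)
   where F is an illustrative FILE pointer. */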
2677
2678 static bool
2679 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2680 tree arg0, tree arg1,
2681 bool unlocked)
2682 {
2683 gimple *stmt = gsi_stmt (*gsi);
2684
2685 /* If we're using an unlocked function, assume the other unlocked
2686 functions exist explicitly. */
2687 tree const fn_fputc = (unlocked
2688 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2689 : builtin_decl_implicit (BUILT_IN_FPUTC));
2690 tree const fn_fwrite = (unlocked
2691 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2692 : builtin_decl_implicit (BUILT_IN_FWRITE));
2693
2694 /* If the return value is used, don't do the transformation. */
2695 if (gimple_call_lhs (stmt))
2696 return false;
2697
2698 /* Get the length of the string passed to fputs. If the length
2699 can't be determined, punt. */
2700 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2701 if (!len
2702 || TREE_CODE (len) != INTEGER_CST)
2703 return false;
2704
2705 switch (compare_tree_int (len, 1))
2706 {
2707 case -1: /* length is 0, delete the call entirely. */
2708 replace_call_with_value (gsi, integer_zero_node);
2709 return true;
2710
2711 case 0: /* length is 1, call fputc. */
2712 {
2713 const char *p = c_getstr (arg0);
2714 if (p != NULL)
2715 {
2716 if (!fn_fputc)
2717 return false;
2718
2719 gimple *repl = gimple_build_call (fn_fputc, 2,
2720 build_int_cst
2721 (integer_type_node, p[0]), arg1);
2722 replace_call_with_call_and_fold (gsi, repl);
2723 return true;
2724 }
2725 }
2726 /* FALLTHROUGH */
2727 case 1: /* length is greater than 1, call fwrite. */
2728 {
2729 /* If optimizing for size keep fputs. */
2730 if (optimize_function_for_size_p (cfun))
2731 return false;
2732 /* New argument list transforming fputs(string, stream) to
2733 fwrite(string, 1, len, stream). */
2734 if (!fn_fwrite)
2735 return false;
2736
2737 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2738 size_one_node, len, arg1);
2739 replace_call_with_call_and_fold (gsi, repl);
2740 return true;
2741 }
2742 default:
2743 gcc_unreachable ();
2744 }
2745 return false;
2746 }
2747
2748 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2749 DEST, SRC, LEN, and SIZE are the arguments to the call.
2750 FCODE is the BUILT_IN_* code of the builtin. Return false
2751 if no simplification can be made. */
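/* Sketch: when SIZE is known to be no smaller than LEN (so the
   object-size check must succeed),
     __memcpy_chk (d, s, n, sz)
   is folded to the unchecked
     memcpy (d, s, n); */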
2753
2754 static bool
2755 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2756 tree dest, tree src, tree len, tree size,
2757 enum built_in_function fcode)
2758 {
2759 gimple *stmt = gsi_stmt (*gsi);
2760 location_t loc = gimple_location (stmt);
2761 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2762 tree fn;
2763
2764 /* If SRC and DEST are the same (and not volatile), return DEST
2765 (resp. DEST+LEN for __mempcpy_chk). */
2766 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2767 {
2768 if (fcode != BUILT_IN_MEMPCPY_CHK)
2769 {
2770 replace_call_with_value (gsi, dest);
2771 return true;
2772 }
2773 else
2774 {
2775 gimple_seq stmts = NULL;
2776 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2777 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2778 TREE_TYPE (dest), dest, len);
2779 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2780 replace_call_with_value (gsi, temp);
2781 return true;
2782 }
2783 }
2784
2785 if (! tree_fits_uhwi_p (size))
2786 return false;
2787
2788 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2789 if (! integer_all_onesp (size))
2790 {
2791 if (! tree_fits_uhwi_p (len))
2792 {
2793 /* If LEN is not constant, try MAXLEN too.
2794 For MAXLEN only allow optimizing into non-_ocs function
2795 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2796 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2797 {
2798 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2799 {
2800 /* (void) __mempcpy_chk () can be optimized into
2801 (void) __memcpy_chk (). */
2802 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2803 if (!fn)
2804 return false;
2805
2806 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2807 replace_call_with_call_and_fold (gsi, repl);
2808 return true;
2809 }
2810 return false;
2811 }
2812 }
2813 else
2814 maxlen = len;
2815
2816 if (tree_int_cst_lt (size, maxlen))
2817 return false;
2818 }
2819
2820 fn = NULL_TREE;
2821 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2822 mem{cpy,pcpy,move,set} is available. */
2823 switch (fcode)
2824 {
2825 case BUILT_IN_MEMCPY_CHK:
2826 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2827 break;
2828 case BUILT_IN_MEMPCPY_CHK:
2829 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2830 break;
2831 case BUILT_IN_MEMMOVE_CHK:
2832 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2833 break;
2834 case BUILT_IN_MEMSET_CHK:
2835 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2836 break;
2837 default:
2838 break;
2839 }
2840
2841 if (!fn)
2842 return false;
2843
2844 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2847 }
2848
2849 /* Fold a call to the __st[rp]cpy_chk builtin.
2850 DEST, SRC, and SIZE are the arguments to the call.
2851 FCODE is the BUILT_IN_* code of the builtin. Return false
2852 if no simplification can be made. */
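/* E.g., assuming the source length is provably below SIZE,
     __strcpy_chk (d, "abc", 16)
   becomes
     strcpy (d, "abc");
   (the names and the size 16 are purely illustrative). */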
2854
2855 static bool
2856 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2857 tree dest,
2858 tree src, tree size,
2859 enum built_in_function fcode)
2860 {
2861 gimple *stmt = gsi_stmt (*gsi);
2862 location_t loc = gimple_location (stmt);
2863 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2864 tree len, fn;
2865
2866 /* If SRC and DEST are the same (and not volatile), return DEST. */
2867 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2868 {
2869 /* Issue -Wrestrict unless the pointers are null (those do
2870 not point to objects and so do not indicate an overlap;
2871 such calls could be the result of sanitization and jump
2872 threading). */
2873 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2874 {
2875 tree func = gimple_call_fndecl (stmt);
2876
2877 warning_at (loc, OPT_Wrestrict,
2878 "%qD source argument is the same as destination",
2879 func);
2880 }
2881
2882 replace_call_with_value (gsi, dest);
2883 return true;
2884 }
2885
2886 if (! tree_fits_uhwi_p (size))
2887 return false;
2888
2889 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2890 if (! integer_all_onesp (size))
2891 {
2892 len = c_strlen (src, 1);
2893 if (! len || ! tree_fits_uhwi_p (len))
2894 {
2895 /* If LEN is not constant, try MAXLEN too.
2896 For MAXLEN only allow optimizing into non-_ocs function
2897 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2898 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2899 {
2900 if (fcode == BUILT_IN_STPCPY_CHK)
2901 {
2902 if (! ignore)
2903 return false;
2904
2905 /* If return value of __stpcpy_chk is ignored,
2906 optimize into __strcpy_chk. */
2907 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2908 if (!fn)
2909 return false;
2910
2911 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2912 replace_call_with_call_and_fold (gsi, repl);
2913 return true;
2914 }
2915
2916 if (! len || TREE_SIDE_EFFECTS (len))
2917 return false;
2918
2919 /* If c_strlen returned something, but not a constant,
2920 transform __strcpy_chk into __memcpy_chk. */
2921 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2922 if (!fn)
2923 return false;
2924
2925 gimple_seq stmts = NULL;
2926 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2927 len = gimple_convert (&stmts, loc, size_type_node, len);
2928 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2929 build_int_cst (size_type_node, 1));
2930 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2931 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2932 replace_call_with_call_and_fold (gsi, repl);
2933 return true;
2934 }
2935 }
2936 else
2937 maxlen = len;
2938
2939 if (! tree_int_cst_lt (maxlen, size))
2940 return false;
2941 }
2942
2943 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2944 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2945 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2946 if (!fn)
2947 return false;
2948
2949 gimple *repl = gimple_build_call (fn, 2, dest, src);
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2952 }
2953
2954 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN,
2955 and SIZE are the arguments to the call. FCODE is the BUILT_IN_*
2956 code of the builtin. Return false if no simplification can be
2957 made. */
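/* Sketch: when SIZE is known to be at least LEN,
     __strncpy_chk (d, s, n, sz)
   folds to strncpy (d, s, n); an ignored __stpncpy_chk result is
   first degraded to __strncpy_chk. */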
2958
2959 static bool
2960 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2961 tree dest, tree src,
2962 tree len, tree size,
2963 enum built_in_function fcode)
2964 {
2965 gimple *stmt = gsi_stmt (*gsi);
2966 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2967 tree fn;
2968
2969 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2970 {
2971 /* If return value of __stpncpy_chk is ignored,
2972 optimize into __strncpy_chk. */
2973 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2974 if (fn)
2975 {
2976 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2977 replace_call_with_call_and_fold (gsi, repl);
2978 return true;
2979 }
2980 }
2981
2982 if (! tree_fits_uhwi_p (size))
2983 return false;
2984
2985 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2986 if (! integer_all_onesp (size))
2987 {
2988 if (! tree_fits_uhwi_p (len))
2989 {
2990 /* If LEN is not constant, try MAXLEN too.
2991 For MAXLEN only allow optimizing into non-_ocs function
2992 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2993 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2994 return false;
2995 }
2996 else
2997 maxlen = len;
2998
2999 if (tree_int_cst_lt (size, maxlen))
3000 return false;
3001 }
3002
3003 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3004 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3005 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3006 if (!fn)
3007 return false;
3008
3009 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3010 replace_call_with_call_and_fold (gsi, repl);
3011 return true;
3012 }
3013
3014 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3015 Return false if no simplification can be made. */
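/* Illustration: with LEN = strlen (S) known at compile time,
     p = stpcpy (d, s);
   is emitted as
     memcpy (d, s, len + 1);
     p = d + len; */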
3016
3017 static bool
3018 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3019 {
3020 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3021 location_t loc = gimple_location (stmt);
3022 tree dest = gimple_call_arg (stmt, 0);
3023 tree src = gimple_call_arg (stmt, 1);
3024 tree fn, lenp1;
3025
3026 /* If the result is unused, replace stpcpy with strcpy. */
3027 if (gimple_call_lhs (stmt) == NULL_TREE)
3028 {
3029 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3030 if (!fn)
3031 return false;
3032 gimple_call_set_fndecl (stmt, fn);
3033 fold_stmt (gsi);
3034 return true;
3035 }
3036
3037 /* Set to non-null if SRC refers to an unterminated array. */
3038 c_strlen_data data = { };
3039 tree len = c_strlen (src, 1, &data, 1);
3040 if (!len
3041 || TREE_CODE (len) != INTEGER_CST)
3042 {
3043 data.decl = unterminated_array (src);
3044 if (!data.decl)
3045 return false;
3046 }
3047
3048 if (data.decl)
3049 {
3050 /* Avoid folding calls with unterminated arrays. */
3051 if (!gimple_no_warning_p (stmt))
3052 warn_string_no_nul (loc, "stpcpy", src, data.decl);
3053 gimple_set_no_warning (stmt, true);
3054 return false;
3055 }
3056
3057 if (optimize_function_for_size_p (cfun)
3058 /* If length is zero it's small enough. */
3059 && !integer_zerop (len))
3060 return false;
3061
3062 /* If the source has a known length replace stpcpy with memcpy. */
3063 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3064 if (!fn)
3065 return false;
3066
3067 gimple_seq stmts = NULL;
3068 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3069 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3070 tem, build_int_cst (size_type_node, 1));
3071 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3072 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3073 gimple_move_vops (repl, stmt);
3074 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3075 /* Replace the result with dest + len. */
3076 stmts = NULL;
3077 tem = gimple_convert (&stmts, loc, sizetype, len);
3078 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3079 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3080 POINTER_PLUS_EXPR, dest, tem);
3081 gsi_replace (gsi, ret, false);
3082 /* Finally fold the memcpy call. */
3083 gimple_stmt_iterator gsi2 = *gsi;
3084 gsi_prev (&gsi2);
3085 fold_stmt (&gsi2);
3086 return true;
3087 }
3088
3089 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a
3090 normal call should be emitted rather than expanding the function
3091 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3092 BUILT_IN_VSNPRINTF_CHK. */
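/* E.g., when SIZE is (size_t) -1 or known to be at least LEN,
     __snprintf_chk (d, n, 0, sz, "%d", i)
   is rewritten as the plain
     snprintf (d, n, "%d", i);
   (the argument names are illustrative). */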
3094
3095 static bool
3096 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3097 enum built_in_function fcode)
3098 {
3099 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3100 tree dest, size, len, fn, fmt, flag;
3101 const char *fmt_str;
3102
3103 /* Verify the required arguments in the original call. */
3104 if (gimple_call_num_args (stmt) < 5)
3105 return false;
3106
3107 dest = gimple_call_arg (stmt, 0);
3108 len = gimple_call_arg (stmt, 1);
3109 flag = gimple_call_arg (stmt, 2);
3110 size = gimple_call_arg (stmt, 3);
3111 fmt = gimple_call_arg (stmt, 4);
3112
3113 if (! tree_fits_uhwi_p (size))
3114 return false;
3115
3116 if (! integer_all_onesp (size))
3117 {
3118 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3119 if (! tree_fits_uhwi_p (len))
3120 {
3121 /* If LEN is not constant, try MAXLEN too.
3122 For MAXLEN only allow optimizing into non-_ocs function
3123 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3124 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3125 return false;
3126 }
3127 else
3128 maxlen = len;
3129
3130 if (tree_int_cst_lt (size, maxlen))
3131 return false;
3132 }
3133
3134 if (!init_target_chars ())
3135 return false;
3136
3137 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3138 or if format doesn't contain % chars or is "%s". */
3139 if (! integer_zerop (flag))
3140 {
3141 fmt_str = c_getstr (fmt);
3142 if (fmt_str == NULL)
3143 return false;
3144 if (strchr (fmt_str, target_percent) != NULL
3145 && strcmp (fmt_str, target_percent_s))
3146 return false;
3147 }
3148
3149 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3150 available. */
3151 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3152 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3153 if (!fn)
3154 return false;
3155
3156 /* Replace the called function and the first 5 arguments by 3,
3157 retaining the trailing varargs. */
3158 gimple_call_set_fndecl (stmt, fn);
3159 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3160 gimple_call_set_arg (stmt, 0, dest);
3161 gimple_call_set_arg (stmt, 1, len);
3162 gimple_call_set_arg (stmt, 2, fmt);
3163 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3164 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3165 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3166 fold_stmt (gsi);
3167 return true;
3168 }
3169
3170 /* Fold a call to __{,v}sprintf_chk at *GSI. Return false if a
3171 normal call should be emitted rather than expanding the function
3172 inline. FCODE is either BUILT_IN_SPRINTF_CHK
3173 or BUILT_IN_VSPRINTF_CHK. */
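/* Likewise, for illustration,
     __sprintf_chk (d, 0, sz, "hi")
   becomes
     sprintf (d, "hi");
   when SZ is (size_t) -1 or known to exceed the output length. */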
3174
3175 static bool
3176 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3177 enum built_in_function fcode)
3178 {
3179 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3180 tree dest, size, len, fn, fmt, flag;
3181 const char *fmt_str;
3182 unsigned nargs = gimple_call_num_args (stmt);
3183
3184 /* Verify the required arguments in the original call. */
3185 if (nargs < 4)
3186 return false;
3187 dest = gimple_call_arg (stmt, 0);
3188 flag = gimple_call_arg (stmt, 1);
3189 size = gimple_call_arg (stmt, 2);
3190 fmt = gimple_call_arg (stmt, 3);
3191
3192 if (! tree_fits_uhwi_p (size))
3193 return false;
3194
3195 len = NULL_TREE;
3196
3197 if (!init_target_chars ())
3198 return false;
3199
3200 /* Check whether the format is a literal string constant. */
3201 fmt_str = c_getstr (fmt);
3202 if (fmt_str != NULL)
3203 {
3204 /* If the format doesn't contain % args or %%, we know the size. */
3205 if (strchr (fmt_str, target_percent) == 0)
3206 {
3207 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3208 len = build_int_cstu (size_type_node, strlen (fmt_str));
3209 }
3210 /* If the format is "%s" and first ... argument is a string literal,
3211 we know the size too. */
3212 else if (fcode == BUILT_IN_SPRINTF_CHK
3213 && strcmp (fmt_str, target_percent_s) == 0)
3214 {
3215 tree arg;
3216
3217 if (nargs == 5)
3218 {
3219 arg = gimple_call_arg (stmt, 4);
3220 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3221 {
3222 len = c_strlen (arg, 1);
3223 if (! len || ! tree_fits_uhwi_p (len))
3224 len = NULL_TREE;
3225 }
3226 }
3227 }
3228 }
3229
3230 if (! integer_all_onesp (size))
3231 {
3232 if (! len || ! tree_int_cst_lt (len, size))
3233 return false;
3234 }
3235
3236 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3237 or if format doesn't contain % chars or is "%s". */
3238 if (! integer_zerop (flag))
3239 {
3240 if (fmt_str == NULL)
3241 return false;
3242 if (strchr (fmt_str, target_percent) != NULL
3243 && strcmp (fmt_str, target_percent_s))
3244 return false;
3245 }
3246
3247 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3248 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3249 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3250 if (!fn)
3251 return false;
3252
3253 /* Replace the called function and the first 4 arguments by 2,
3254 retaining the trailing varargs. */
3255 gimple_call_set_fndecl (stmt, fn);
3256 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3257 gimple_call_set_arg (stmt, 0, dest);
3258 gimple_call_set_arg (stmt, 1, fmt);
3259 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3260 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3261 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3262 fold_stmt (gsi);
3263 return true;
3264 }
3265
3266 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3267 ORIG may be null if this is a 2-argument call. We don't attempt to
3268 simplify calls with more than 3 arguments.
3269
3270 Return true if simplification was possible, otherwise false. */
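/* The two shapes handled below, roughly:
     sprintf (d, "hello")  -> strcpy (d, "hello"); result, if used,
                              is the constant 5
     sprintf (d, "%s", s)  -> strcpy (d, s); result, if used, is
                              strlen (s) when that is known. */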
3271
3272 bool
3273 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3274 {
3275 gimple *stmt = gsi_stmt (*gsi);
3276 tree dest = gimple_call_arg (stmt, 0);
3277 tree fmt = gimple_call_arg (stmt, 1);
3278 tree orig = NULL_TREE;
3279 const char *fmt_str = NULL;
3280
3281 /* Verify the required arguments in the original call. We deal with two
3282 types of sprintf() calls: 'sprintf (str, fmt)' and
3283 'sprintf (dest, "%s", orig)'. */
3284 if (gimple_call_num_args (stmt) > 3)
3285 return false;
3286
3287 if (gimple_call_num_args (stmt) == 3)
3288 orig = gimple_call_arg (stmt, 2);
3289
3290 /* Check whether the format is a literal string constant. */
3291 fmt_str = c_getstr (fmt);
3292 if (fmt_str == NULL)
3293 return false;
3294
3295 if (!init_target_chars ())
3296 return false;
3297
3298 /* If the format doesn't contain % args or %%, use strcpy. */
3299 if (strchr (fmt_str, target_percent) == NULL)
3300 {
3301 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3302
3303 if (!fn)
3304 return false;
3305
3306 /* Don't optimize sprintf (buf, "abc", ptr++). */
3307 if (orig)
3308 return false;
3309
3310 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3311 'format' is known to contain no % formats. */
3312 gimple_seq stmts = NULL;
3313 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3314
3315 /* Propagate the NO_WARNING bit to avoid issuing the same
3316 warning more than once. */
3317 if (gimple_no_warning_p (stmt))
3318 gimple_set_no_warning (repl, true);
3319
3320 gimple_seq_add_stmt_without_update (&stmts, repl);
3321 if (tree lhs = gimple_call_lhs (stmt))
3322 {
3323 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3324 strlen (fmt_str)));
3325 gimple_seq_add_stmt_without_update (&stmts, repl);
3326 gsi_replace_with_seq_vops (gsi, stmts);
3327 /* gsi now points at the assignment to the lhs, get a
3328 stmt iterator to the strcpy call.
3329 ??? We can't use gsi_for_stmt as that doesn't work when the
3330 CFG isn't built yet. */
3331 gimple_stmt_iterator gsi2 = *gsi;
3332 gsi_prev (&gsi2);
3333 fold_stmt (&gsi2);
3334 }
3335 else
3336 {
3337 gsi_replace_with_seq_vops (gsi, stmts);
3338 fold_stmt (gsi);
3339 }
3340 return true;
3341 }
3342
3343 /* If the format is "%s", use strcpy, setting the result (if used)
to the length of ORIG. */
3344 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3345 {
3346 tree fn;
3347 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3348
3349 if (!fn)
3350 return false;
3351
3352 /* Don't crash on sprintf (str1, "%s"). */
3353 if (!orig)
3354 return false;
3355
3356 tree orig_len = NULL_TREE;
3357 if (gimple_call_lhs (stmt))
3358 {
3359 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3360 if (!orig_len)
3361 return false;
3362 }
3363
3364 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3365 gimple_seq stmts = NULL;
3366 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3367
3368 /* Propagate the NO_WARNING bit to avoid issuing the same
3369 warning more than once. */
3370 if (gimple_no_warning_p (stmt))
3371 gimple_set_no_warning (repl, true);
3372
3373 gimple_seq_add_stmt_without_update (&stmts, repl);
3374 if (tree lhs = gimple_call_lhs (stmt))
3375 {
3376 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3377 TREE_TYPE (orig_len)))
3378 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3379 repl = gimple_build_assign (lhs, orig_len);
3380 gimple_seq_add_stmt_without_update (&stmts, repl);
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 /* gsi now points at the assignment to the lhs, get a
3383 stmt iterator to the strcpy call.
3384 ??? We can't use gsi_for_stmt as that doesn't work when the
3385 CFG isn't built yet. */
3386 gimple_stmt_iterator gsi2 = *gsi;
3387 gsi_prev (&gsi2);
3388 fold_stmt (&gsi2);
3389 }
3390 else
3391 {
3392 gsi_replace_with_seq_vops (gsi, stmts);
3393 fold_stmt (gsi);
3394 }
3395 return true;
3396 }
3397 return false;
3398 }
3399
3400 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3401 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3402 attempt to simplify calls with more than 4 arguments.
3403
3404 Return true if simplification was possible, otherwise false. */
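/* Roughly, provided the copied length provably fits in DESTSIZE:
     snprintf (d, 6, "hello")  -> strcpy (d, "hello")
     snprintf (d, 8, "%s", s)  -> strcpy (d, s), when strlen (s) is
                                  a known constant below 8. */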
3405
3406 bool
3407 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3408 {
3409 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3410 tree dest = gimple_call_arg (stmt, 0);
3411 tree destsize = gimple_call_arg (stmt, 1);
3412 tree fmt = gimple_call_arg (stmt, 2);
3413 tree orig = NULL_TREE;
3414 const char *fmt_str = NULL;
3415
3416 if (gimple_call_num_args (stmt) > 4)
3417 return false;
3418
3419 if (gimple_call_num_args (stmt) == 4)
3420 orig = gimple_call_arg (stmt, 3);
3421
3422 if (!tree_fits_uhwi_p (destsize))
3423 return false;
3424 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3425
3426 /* Check whether the format is a literal string constant. */
3427 fmt_str = c_getstr (fmt);
3428 if (fmt_str == NULL)
3429 return false;
3430
3431 if (!init_target_chars ())
3432 return false;
3433
3434 /* If the format doesn't contain % args or %%, use strcpy. */
3435 if (strchr (fmt_str, target_percent) == NULL)
3436 {
3437 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3438 if (!fn)
3439 return false;
3440
3441 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3442 if (orig)
3443 return false;
3444
3445 /* We could expand this as
3446 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3447 or to
3448 memcpy (str, fmt_with_nul_at_cstm1, cst);
3449 but in the former case that might increase code size
3450 and in the latter case grow .rodata section too much.
3451 So punt for now. */
3452 size_t len = strlen (fmt_str);
3453 if (len >= destlen)
3454 return false;
3455
3456 gimple_seq stmts = NULL;
3457 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3458 gimple_seq_add_stmt_without_update (&stmts, repl);
3459 if (tree lhs = gimple_call_lhs (stmt))
3460 {
3461 repl = gimple_build_assign (lhs,
3462 build_int_cst (TREE_TYPE (lhs), len));
3463 gimple_seq_add_stmt_without_update (&stmts, repl);
3464 gsi_replace_with_seq_vops (gsi, stmts);
3465 /* gsi now points at the assignment to the lhs, get a
3466 stmt iterator to the strcpy call.
3467 ??? We can't use gsi_for_stmt as that doesn't work when the
3468 CFG isn't built yet. */
3469 gimple_stmt_iterator gsi2 = *gsi;
3470 gsi_prev (&gsi2);
3471 fold_stmt (&gsi2);
3472 }
3473 else
3474 {
3475 gsi_replace_with_seq_vops (gsi, stmts);
3476 fold_stmt (gsi);
3477 }
3478 return true;
3479 }
3480
3481 /* If the format is "%s", use strcpy, setting the result (if used)
to the constant length of ORIG. */
3482 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3483 {
3484 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3485 if (!fn)
3486 return false;
3487
3488 /* Don't crash on snprintf (str1, cst, "%s"). */
3489 if (!orig)
3490 return false;
3491
3492 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3493 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3494 return false;
3495
3496 /* We could expand this as
3497 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3498 or to
3499 memcpy (str1, str2_with_nul_at_cstm1, cst);
3500 but in the former case that might increase code size
3501 and in the latter case grow .rodata section too much.
3502 So punt for now. */
3503 if (compare_tree_int (orig_len, destlen) >= 0)
3504 return false;
3505
3506 /* Convert snprintf (str1, cst, "%s", str2) into
3507 strcpy (str1, str2) if strlen (str2) < cst. */
3508 gimple_seq stmts = NULL;
3509 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3510 gimple_seq_add_stmt_without_update (&stmts, repl);
3511 if (tree lhs = gimple_call_lhs (stmt))
3512 {
3513 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3514 TREE_TYPE (orig_len)))
3515 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3516 repl = gimple_build_assign (lhs, orig_len);
3517 gimple_seq_add_stmt_without_update (&stmts, repl);
3518 gsi_replace_with_seq_vops (gsi, stmts);
3519 /* gsi now points at the assignment to the lhs, get a
3520 stmt iterator to the strcpy call.
3521 ??? We can't use gsi_for_stmt as that doesn't work when the
3522 CFG isn't built yet. */
3523 gimple_stmt_iterator gsi2 = *gsi;
3524 gsi_prev (&gsi2);
3525 fold_stmt (&gsi2);
3526 }
3527 else
3528 {
3529 gsi_replace_with_seq_vops (gsi, stmts);
3530 fold_stmt (gsi);
3531 }
3532 return true;
3533 }
3534 return false;
3535 }
3536
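/* As an illustration (not itself part of the folder; BUF and SRC are
   hypothetical), the two transformations above behave as if the user
   had written:

     char buf[32];
     snprintf (buf, sizeof buf, "hello");     // no '%' in the format
     // folds to: strcpy (buf, "hello");  result 5 if used

     snprintf (buf, sizeof buf, "%s", src);   // strlen (src) known, < 32
     // folds to: strcpy (buf, src);      result strlen (src) if used

   Both folds punt when the known length would not fit in the
   destination, preserving snprintf's truncation semantics. */
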
3537 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3538 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3539 more than 3 arguments, and ARG may be null in the 2-argument case.
3540
3541 Return true if simplification was possible, otherwise false.
3542 FCODE is the BUILT_IN_* code of the function to be
3543 simplified. */
3544
3545 static bool
3546 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3547 tree fp, tree fmt, tree arg,
3548 enum built_in_function fcode)
3549 {
3550 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3551 tree fn_fputc, fn_fputs;
3552 const char *fmt_str = NULL;
3553
3554 /* If the return value is used, don't do the transformation. */
3555 if (gimple_call_lhs (stmt) != NULL_TREE)
3556 return false;
3557
3558 /* Check whether the format is a literal string constant. */
3559 fmt_str = c_getstr (fmt);
3560 if (fmt_str == NULL)
3561 return false;
3562
3563 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3564 {
3565 /* If we're using an unlocked function, assume the other
3566 unlocked functions exist explicitly. */
3567 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3568 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3569 }
3570 else
3571 {
3572 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3573 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3574 }
3575
3576 if (!init_target_chars ())
3577 return false;
3578
3579 /* If the format doesn't contain % args or %%, use fputs. */
3580 if (strchr (fmt_str, target_percent) == NULL)
3581 {
3582 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3583 && arg)
3584 return false;
3585
3586 /* If the format specifier was "", fprintf does nothing. */
3587 if (fmt_str[0] == '\0')
3588 {
3589 replace_call_with_value (gsi, NULL_TREE);
3590 return true;
3591 }
3592
3593 /* When "string" doesn't contain %, replace all cases of
3594 fprintf (fp, string) with fputs (string, fp). The fputs
3595 builtin will take care of special cases like length == 1. */
3596 if (fn_fputs)
3597 {
3598 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3599 replace_call_with_call_and_fold (gsi, repl);
3600 return true;
3601 }
3602 }
3603
3604 /* The other optimizations can be done only on the non-va_list variants. */
3605 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3606 return false;
3607
3608 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3609 else if (strcmp (fmt_str, target_percent_s) == 0)
3610 {
3611 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3612 return false;
3613 if (fn_fputs)
3614 {
3615 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3616 replace_call_with_call_and_fold (gsi, repl);
3617 return true;
3618 }
3619 }
3620
3621 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3622 else if (strcmp (fmt_str, target_percent_c) == 0)
3623 {
3624 if (!arg
3625 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3626 return false;
3627 if (fn_fputc)
3628 {
3629 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3630 replace_call_with_call_and_fold (gsi, repl);
3631 return true;
3632 }
3633 }
3634
3635 return false;
3636 }
3637
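/* For example (illustrative only, assuming FP is a valid stream):

     fprintf (fp, "hello");    // no '%'  ->  fputs ("hello", fp)
     fprintf (fp, "%s", str);  //          ->  fputs (str, fp)
     fprintf (fp, "%c", c);    //          ->  fputc (c, fp)

   None of these fire when the fprintf return value is used, since
   fputs/fputc do not return the number of characters written. */
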
3638 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3639 FMT and ARG are the arguments to the call; we don't fold cases with
3640 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3641
3642 Return true if simplification was possible, otherwise false.
3643 FCODE is the BUILT_IN_* code of the function to be
3644 simplified. */
3645
3646 static bool
3647 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3648 tree arg, enum built_in_function fcode)
3649 {
3650 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3651 tree fn_putchar, fn_puts, newarg;
3652 const char *fmt_str = NULL;
3653
3654 /* If the return value is used, don't do the transformation. */
3655 if (gimple_call_lhs (stmt) != NULL_TREE)
3656 return false;
3657
3658 /* Check whether the format is a literal string constant. */
3659 fmt_str = c_getstr (fmt);
3660 if (fmt_str == NULL)
3661 return false;
3662
3663 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3664 {
3665 /* If we're using an unlocked function, assume the other
3666 unlocked functions exist explicitly. */
3667 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3668 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3669 }
3670 else
3671 {
3672 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3673 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3674 }
3675
3676 if (!init_target_chars ())
3677 return false;
3678
3679 if (strcmp (fmt_str, target_percent_s) == 0
3680 || strchr (fmt_str, target_percent) == NULL)
3681 {
3682 const char *str;
3683
3684 if (strcmp (fmt_str, target_percent_s) == 0)
3685 {
3686 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3687 return false;
3688
3689 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3690 return false;
3691
3692 str = c_getstr (arg);
3693 if (str == NULL)
3694 return false;
3695 }
3696 else
3697 {
3698 /* The format specifier doesn't contain any '%' characters. */
3699 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3700 && arg)
3701 return false;
3702 str = fmt_str;
3703 }
3704
3705 /* If the string was "", printf does nothing. */
3706 if (str[0] == '\0')
3707 {
3708 replace_call_with_value (gsi, NULL_TREE);
3709 return true;
3710 }
3711
3712 /* If the string has length 1, call putchar. */
3713 if (str[1] == '\0')
3714 {
3715 /* Given printf ("c"), where c is any one character,
3716 convert "c"[0] to an int and pass that to the replacement
3717 function. */
3718 newarg = build_int_cst (integer_type_node, str[0]);
3719 if (fn_putchar)
3720 {
3721 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3722 replace_call_with_call_and_fold (gsi, repl);
3723 return true;
3724 }
3725 }
3726 else
3727 {
3728 /* If the string was "string\n", call puts("string"). */
3729 size_t len = strlen (str);
3730 if ((unsigned char)str[len - 1] == target_newline
3731 && (size_t) (int) len == len
3732 && (int) len > 0)
3733 {
3734 char *newstr;
3735
3736 /* Create a NUL-terminated string that's one char shorter
3737 than the original, stripping off the trailing '\n'. */
3738 newstr = xstrdup (str);
3739 newstr[len - 1] = '\0';
3740 newarg = build_string_literal (len, newstr);
3741 free (newstr);
3742 if (fn_puts)
3743 {
3744 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3745 replace_call_with_call_and_fold (gsi, repl);
3746 return true;
3747 }
3748 }
3749 else
3750 /* We'd like to arrange to call fputs(string,stdout) here,
3751 but we need stdout and don't have a way to get it yet. */
3752 return false;
3753 }
3754 }
3755
3756 /* The other optimizations can be done only on the non-va_list variants. */
3757 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3758 return false;
3759
3760 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3761 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3762 {
3763 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3764 return false;
3765 if (fn_puts)
3766 {
3767 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3768 replace_call_with_call_and_fold (gsi, repl);
3769 return true;
3770 }
3771 }
3772
3773 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3774 else if (strcmp (fmt_str, target_percent_c) == 0)
3775 {
3776 if (!arg || ! useless_type_conversion_p (integer_type_node,
3777 TREE_TYPE (arg)))
3778 return false;
3779 if (fn_putchar)
3780 {
3781 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3782 replace_call_with_call_and_fold (gsi, repl);
3783 return true;
3784 }
3785 }
3786
3787 return false;
3788 }
3789
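/* For example (illustrative only):

     printf ("x");        ->  putchar ('x')
     printf ("hi\n");     ->  puts ("hi")
     printf ("%s\n", s);  ->  puts (s)
     printf ("%c", c);    ->  putchar (c)

   As with fprintf above, the folds are suppressed whenever the printf
   return value is used. */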
3790
3791
3792 /* Fold a call to __builtin_strlen if its argument's length is known; otherwise record the range of its result. */
3793
3794 static bool
3795 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3796 {
3797 gimple *stmt = gsi_stmt (*gsi);
3798 tree arg = gimple_call_arg (stmt, 0);
3799
3800 wide_int minlen;
3801 wide_int maxlen;
3802
3803 c_strlen_data lendata = { };
3804 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3805 && !lendata.decl
3806 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3807 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3808 {
3809 /* The range of lengths refers to either a single constant
3810 string or to the longest and shortest constant string
3811 referenced by the argument of the strlen() call, or to
3812 the strings that can possibly be stored in the arrays
3813 the argument refers to. */
3814 minlen = wi::to_wide (lendata.minlen);
3815 maxlen = wi::to_wide (lendata.maxlen);
3816 }
3817 else
3818 {
3819 unsigned prec = TYPE_PRECISION (sizetype);
3820
3821 minlen = wi::shwi (0, prec);
3822 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3823 }
3824
3825 if (minlen == maxlen)
3826 {
3827 /* Fold the strlen call to a constant. */
3828 tree type = TREE_TYPE (lendata.minlen);
3829 tree len = force_gimple_operand_gsi (gsi,
3830 wide_int_to_tree (type, minlen),
3831 true, NULL, true, GSI_SAME_STMT);
3832 replace_call_with_value (gsi, len);
3833 return true;
3834 }
3835
3836 /* Set the strlen() range to [MINLEN, MAXLEN]. */
3837 if (tree lhs = gimple_call_lhs (stmt))
3838 set_strlen_range (lhs, minlen, maxlen);
3839
3840 return false;
3841 }
3842
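/* For example (illustrative only), strlen ("abc") folds to the
   constant 3.  When only bounds are known the call is kept, but the
   range of its result is recorded for later passes, e.g. at worst
   [0, PTRDIFF_MAX - 2] when nothing is known about the argument. */
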
3843 /* Fold a call to __builtin_acc_on_device. */
3844
3845 static bool
3846 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3847 {
3848 /* Defer folding until we know which compiler we're in. */
3849 if (symtab->state != EXPANSION)
3850 return false;
3851
3852 unsigned val_host = GOMP_DEVICE_HOST;
3853 unsigned val_dev = GOMP_DEVICE_NONE;
3854
3855 #ifdef ACCEL_COMPILER
3856 val_host = GOMP_DEVICE_NOT_HOST;
3857 val_dev = ACCEL_COMPILER_acc_device;
3858 #endif
3859
3860 location_t loc = gimple_location (gsi_stmt (*gsi));
3861
3862 tree host_eq = make_ssa_name (boolean_type_node);
3863 gimple *host_ass = gimple_build_assign
3864 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3865 gimple_set_location (host_ass, loc);
3866 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3867
3868 tree dev_eq = make_ssa_name (boolean_type_node);
3869 gimple *dev_ass = gimple_build_assign
3870 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3871 gimple_set_location (dev_ass, loc);
3872 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3873
3874 tree result = make_ssa_name (boolean_type_node);
3875 gimple *result_ass = gimple_build_assign
3876 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3877 gimple_set_location (result_ass, loc);
3878 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3879
3880 replace_call_with_value (gsi, result);
3881
3882 return true;
3883 }
3884
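/* The sequence built above corresponds to the GIMPLE (SSA names
   illustrative):

     host_eq_1 = arg0 == val_host;
     dev_eq_2 = arg0 == val_dev;
     result_3 = host_eq_1 | dev_eq_2;

   i.e. acc_on_device (d) is true when D names either the host or the
   device this compiler targets. */
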
3885 /* Fold realloc (0, n) -> malloc (n). */
3886
3887 static bool
3888 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3889 {
3890 gimple *stmt = gsi_stmt (*gsi);
3891 tree arg = gimple_call_arg (stmt, 0);
3892 tree size = gimple_call_arg (stmt, 1);
3893
3894 if (operand_equal_p (arg, null_pointer_node, 0))
3895 {
3896 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3897 if (fn_malloc)
3898 {
3899 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3900 replace_call_with_call_and_fold (gsi, repl);
3901 return true;
3902 }
3903 }
3904 return false;
3905 }
3906
3907 /* Fold the non-target builtin at *GSI and return whether any simplification
3908 was made. */
3909
3910 static bool
3911 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3912 {
3913 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3914 tree callee = gimple_call_fndecl (stmt);
3915
3916 /* Give up for always_inline inline builtins until they are
3917 inlined. */
3918 if (avoid_folding_inline_builtin (callee))
3919 return false;
3920
3921 unsigned n = gimple_call_num_args (stmt);
3922 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3923 switch (fcode)
3924 {
3925 case BUILT_IN_BCMP:
3926 return gimple_fold_builtin_bcmp (gsi);
3927 case BUILT_IN_BCOPY:
3928 return gimple_fold_builtin_bcopy (gsi);
3929 case BUILT_IN_BZERO:
3930 return gimple_fold_builtin_bzero (gsi);
3931
3932 case BUILT_IN_MEMSET:
3933 return gimple_fold_builtin_memset (gsi,
3934 gimple_call_arg (stmt, 1),
3935 gimple_call_arg (stmt, 2));
3936 case BUILT_IN_MEMCPY:
3937 case BUILT_IN_MEMPCPY:
3938 case BUILT_IN_MEMMOVE:
3939 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3940 gimple_call_arg (stmt, 1), fcode);
3941 case BUILT_IN_SPRINTF_CHK:
3942 case BUILT_IN_VSPRINTF_CHK:
3943 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3944 case BUILT_IN_STRCAT_CHK:
3945 return gimple_fold_builtin_strcat_chk (gsi);
3946 case BUILT_IN_STRNCAT_CHK:
3947 return gimple_fold_builtin_strncat_chk (gsi);
3948 case BUILT_IN_STRLEN:
3949 return gimple_fold_builtin_strlen (gsi);
3950 case BUILT_IN_STRCPY:
3951 return gimple_fold_builtin_strcpy (gsi,
3952 gimple_call_arg (stmt, 0),
3953 gimple_call_arg (stmt, 1));
3954 case BUILT_IN_STRNCPY:
3955 return gimple_fold_builtin_strncpy (gsi,
3956 gimple_call_arg (stmt, 0),
3957 gimple_call_arg (stmt, 1),
3958 gimple_call_arg (stmt, 2));
3959 case BUILT_IN_STRCAT:
3960 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3961 gimple_call_arg (stmt, 1));
3962 case BUILT_IN_STRNCAT:
3963 return gimple_fold_builtin_strncat (gsi);
3964 case BUILT_IN_INDEX:
3965 case BUILT_IN_STRCHR:
3966 return gimple_fold_builtin_strchr (gsi, false);
3967 case BUILT_IN_RINDEX:
3968 case BUILT_IN_STRRCHR:
3969 return gimple_fold_builtin_strchr (gsi, true);
3970 case BUILT_IN_STRSTR:
3971 return gimple_fold_builtin_strstr (gsi);
3972 case BUILT_IN_STRCMP:
3973 case BUILT_IN_STRCMP_EQ:
3974 case BUILT_IN_STRCASECMP:
3975 case BUILT_IN_STRNCMP:
3976 case BUILT_IN_STRNCMP_EQ:
3977 case BUILT_IN_STRNCASECMP:
3978 return gimple_fold_builtin_string_compare (gsi);
3979 case BUILT_IN_MEMCHR:
3980 return gimple_fold_builtin_memchr (gsi);
3981 case BUILT_IN_FPUTS:
3982 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3983 gimple_call_arg (stmt, 1), false);
3984 case BUILT_IN_FPUTS_UNLOCKED:
3985 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3986 gimple_call_arg (stmt, 1), true);
3987 case BUILT_IN_MEMCPY_CHK:
3988 case BUILT_IN_MEMPCPY_CHK:
3989 case BUILT_IN_MEMMOVE_CHK:
3990 case BUILT_IN_MEMSET_CHK:
3991 return gimple_fold_builtin_memory_chk (gsi,
3992 gimple_call_arg (stmt, 0),
3993 gimple_call_arg (stmt, 1),
3994 gimple_call_arg (stmt, 2),
3995 gimple_call_arg (stmt, 3),
3996 fcode);
3997 case BUILT_IN_STPCPY:
3998 return gimple_fold_builtin_stpcpy (gsi);
3999 case BUILT_IN_STRCPY_CHK:
4000 case BUILT_IN_STPCPY_CHK:
4001 return gimple_fold_builtin_stxcpy_chk (gsi,
4002 gimple_call_arg (stmt, 0),
4003 gimple_call_arg (stmt, 1),
4004 gimple_call_arg (stmt, 2),
4005 fcode);
4006 case BUILT_IN_STRNCPY_CHK:
4007 case BUILT_IN_STPNCPY_CHK:
4008 return gimple_fold_builtin_stxncpy_chk (gsi,
4009 gimple_call_arg (stmt, 0),
4010 gimple_call_arg (stmt, 1),
4011 gimple_call_arg (stmt, 2),
4012 gimple_call_arg (stmt, 3),
4013 fcode);
4014 case BUILT_IN_SNPRINTF_CHK:
4015 case BUILT_IN_VSNPRINTF_CHK:
4016 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
4017
4018 case BUILT_IN_FPRINTF:
4019 case BUILT_IN_FPRINTF_UNLOCKED:
4020 case BUILT_IN_VFPRINTF:
4021 if (n == 2 || n == 3)
4022 return gimple_fold_builtin_fprintf (gsi,
4023 gimple_call_arg (stmt, 0),
4024 gimple_call_arg (stmt, 1),
4025 n == 3
4026 ? gimple_call_arg (stmt, 2)
4027 : NULL_TREE,
4028 fcode);
4029 break;
4030 case BUILT_IN_FPRINTF_CHK:
4031 case BUILT_IN_VFPRINTF_CHK:
4032 if (n == 3 || n == 4)
4033 return gimple_fold_builtin_fprintf (gsi,
4034 gimple_call_arg (stmt, 0),
4035 gimple_call_arg (stmt, 2),
4036 n == 4
4037 ? gimple_call_arg (stmt, 3)
4038 : NULL_TREE,
4039 fcode);
4040 break;
4041 case BUILT_IN_PRINTF:
4042 case BUILT_IN_PRINTF_UNLOCKED:
4043 case BUILT_IN_VPRINTF:
4044 if (n == 1 || n == 2)
4045 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4046 n == 2
4047 ? gimple_call_arg (stmt, 1)
4048 : NULL_TREE, fcode);
4049 break;
4050 case BUILT_IN_PRINTF_CHK:
4051 case BUILT_IN_VPRINTF_CHK:
4052 if (n == 2 || n == 3)
4053 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4054 n == 3
4055 ? gimple_call_arg (stmt, 2)
4056 : NULL_TREE, fcode);
4057 break;
4058 case BUILT_IN_ACC_ON_DEVICE:
4059 return gimple_fold_builtin_acc_on_device (gsi,
4060 gimple_call_arg (stmt, 0));
4061 case BUILT_IN_REALLOC:
4062 return gimple_fold_builtin_realloc (gsi);
4063
4064 default:;
4065 }
4066
4067 /* Try the generic builtin folder. */
4068 bool ignore = (gimple_call_lhs (stmt) == NULL);
4069 tree result = fold_call_stmt (stmt, ignore);
4070 if (result)
4071 {
4072 if (ignore)
4073 STRIP_NOPS (result);
4074 else
4075 result = fold_convert (gimple_call_return_type (stmt), result);
4076 if (!update_call_from_tree (gsi, result))
4077 gimplify_and_update_call_from_tree (gsi, result);
4078 return true;
4079 }
4080
4081 return false;
4082 }
4083
4084 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4085 function calls to constants, where possible. */
4086
4087 static tree
4088 fold_internal_goacc_dim (const gimple *call)
4089 {
4090 int axis = oacc_get_ifn_dim_arg (call);
4091 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4092 tree result = NULL_TREE;
4093 tree type = TREE_TYPE (gimple_call_lhs (call));
4094
4095 switch (gimple_call_internal_fn (call))
4096 {
4097 case IFN_GOACC_DIM_POS:
4098 /* If the size is 1, we know the answer. */
4099 if (size == 1)
4100 result = build_int_cst (type, 0);
4101 break;
4102 case IFN_GOACC_DIM_SIZE:
4103 /* If the size is not dynamic, we know the answer. */
4104 if (size)
4105 result = build_int_cst (type, size);
4106 break;
4107 default:
4108 break;
4109 }
4110
4111 return result;
4112 }
4113
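/* E.g. (illustrative) if the 'vector' axis of the current offloaded
   region is known to have size 1, GOACC_DIM_POS (vector) folds to 0;
   and whenever an axis size is known at compile time,
   GOACC_DIM_SIZE folds to that constant. */
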
4114 /* Return true if STMT is an __atomic_compare_exchange_N call suitable
4115 for conversion into ATOMIC_COMPARE_EXCHANGE when its second argument is
4116 &var where var is only addressable because of such calls. */
4117
4118 bool
4119 optimize_atomic_compare_exchange_p (gimple *stmt)
4120 {
4121 if (gimple_call_num_args (stmt) != 6
4122 || !flag_inline_atomics
4123 || !optimize
4124 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4125 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4126 || !gimple_vdef (stmt)
4127 || !gimple_vuse (stmt))
4128 return false;
4129
4130 tree fndecl = gimple_call_fndecl (stmt);
4131 switch (DECL_FUNCTION_CODE (fndecl))
4132 {
4133 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4134 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4135 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4136 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4137 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4138 break;
4139 default:
4140 return false;
4141 }
4142
4143 tree expected = gimple_call_arg (stmt, 1);
4144 if (TREE_CODE (expected) != ADDR_EXPR
4145 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4146 return false;
4147
4148 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4149 if (!is_gimple_reg_type (etype)
4150 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4151 || TREE_THIS_VOLATILE (etype)
4152 || VECTOR_TYPE_P (etype)
4153 || TREE_CODE (etype) == COMPLEX_TYPE
4154 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4155 might not preserve all the bits. See PR71716. */
4156 || SCALAR_FLOAT_TYPE_P (etype)
4157 || maybe_ne (TYPE_PRECISION (etype),
4158 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4159 return false;
4160
4161 tree weak = gimple_call_arg (stmt, 3);
4162 if (!integer_zerop (weak) && !integer_onep (weak))
4163 return false;
4164
4165 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4166 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4167 machine_mode mode = TYPE_MODE (itype);
4168
4169 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4170 == CODE_FOR_nothing
4171 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4172 return false;
4173
4174 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4175 return false;
4176
4177 return true;
4178 }
4179
4180 /* Fold
4181 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4182 into
4183 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4184 i = IMAGPART_EXPR <t>;
4185 r = (_Bool) i;
4186 e = REALPART_EXPR <t>; */
4187
4188 void
4189 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4190 {
4191 gimple *stmt = gsi_stmt (*gsi);
4192 tree fndecl = gimple_call_fndecl (stmt);
4193 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4194 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4195 tree ctype = build_complex_type (itype);
4196 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4197 bool throws = false;
4198 edge e = NULL;
4199 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4200 expected);
4201 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4202 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4203 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4204 {
4205 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4206 build1 (VIEW_CONVERT_EXPR, itype,
4207 gimple_assign_lhs (g)));
4208 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4209 }
4210 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4211 + int_size_in_bytes (itype);
4212 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4213 gimple_call_arg (stmt, 0),
4214 gimple_assign_lhs (g),
4215 gimple_call_arg (stmt, 2),
4216 build_int_cst (integer_type_node, flag),
4217 gimple_call_arg (stmt, 4),
4218 gimple_call_arg (stmt, 5));
4219 tree lhs = make_ssa_name (ctype);
4220 gimple_call_set_lhs (g, lhs);
4221 gimple_move_vops (g, stmt);
4222 tree oldlhs = gimple_call_lhs (stmt);
4223 if (stmt_can_throw_internal (cfun, stmt))
4224 {
4225 throws = true;
4226 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4227 }
4228 gimple_call_set_nothrow (as_a <gcall *> (g),
4229 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4230 gimple_call_set_lhs (stmt, NULL_TREE);
4231 gsi_replace (gsi, g, true);
4232 if (oldlhs)
4233 {
4234 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4235 build1 (IMAGPART_EXPR, itype, lhs));
4236 if (throws)
4237 {
4238 gsi_insert_on_edge_immediate (e, g);
4239 *gsi = gsi_for_stmt (g);
4240 }
4241 else
4242 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4243 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4244 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4245 }
4246 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4247 build1 (REALPART_EXPR, itype, lhs));
4248 if (throws && oldlhs == NULL_TREE)
4249 {
4250 gsi_insert_on_edge_immediate (e, g);
4251 *gsi = gsi_for_stmt (g);
4252 }
4253 else
4254 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4255 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4256 {
4257 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4258 VIEW_CONVERT_EXPR,
4259 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4260 gimple_assign_lhs (g)));
4261 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4262 }
4263 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4264 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4265 *gsi = gsiret;
4266 }
4267
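/* The FLAG operand built above packs the byte size N into the low
   bits and the weak flag into bit 8; e.g. (illustrative) a weak
   __atomic_compare_exchange_4 passes 1 * 256 + 4 == 260. */
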
4268 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
4269 doesn't fit into TYPE. The test for overflow is performed regardless of
4270 -fwrapv, and even for unsigned types. */
4271
4272 bool
4273 arith_overflowed_p (enum tree_code code, const_tree type,
4274 const_tree arg0, const_tree arg1)
4275 {
4276 widest2_int warg0 = widest2_int_cst (arg0);
4277 widest2_int warg1 = widest2_int_cst (arg1);
4278 widest2_int wres;
4279 switch (code)
4280 {
4281 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4282 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4283 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4284 default: gcc_unreachable ();
4285 }
4286 signop sign = TYPE_SIGN (type);
4287 if (sign == UNSIGNED && wi::neg_p (wres))
4288 return true;
4289 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4290 }
4291
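/* E.g. (illustrative) for an 8-bit unsigned TYPE, 200 + 100 = 300
   needs 9 value bits, so arith_overflowed_p (PLUS_EXPR, type, 200,
   100) is true; likewise 0 - 1 is negative in infinite precision and
   therefore overflows any unsigned type. */
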
4292 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4293 for the memory it references, otherwise return null. VECTYPE is the
4294 type of the memory vector. */
4295
4296 static tree
4297 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4298 {
4299 tree ptr = gimple_call_arg (call, 0);
4300 tree alias_align = gimple_call_arg (call, 1);
4301 tree mask = gimple_call_arg (call, 2);
4302 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4303 return NULL_TREE;
4304
4305 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4306 if (TYPE_ALIGN (vectype) != align)
4307 vectype = build_aligned_type (vectype, align);
4308 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4309 return fold_build2 (MEM_REF, vectype, ptr, offset);
4310 }
4311
4312 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4313
4314 static bool
4315 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4316 {
4317 tree lhs = gimple_call_lhs (call);
4318 if (!lhs)
4319 return false;
4320
4321 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4322 {
4323 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4324 gimple_set_location (new_stmt, gimple_location (call));
4325 gimple_move_vops (new_stmt, call);
4326 gsi_replace (gsi, new_stmt, false);
4327 return true;
4328 }
4329 return false;
4330 }
4331
4332 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4333
4334 static bool
4335 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4336 {
4337 tree rhs = gimple_call_arg (call, 3);
4338 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4339 {
4340 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4341 gimple_set_location (new_stmt, gimple_location (call));
4342 gimple_move_vops (new_stmt, call);
4343 gsi_replace (gsi, new_stmt, false);
4344 return true;
4345 }
4346 return false;
4347 }
4348
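/* For example (illustrative GIMPLE), a vectorized loop body containing

     vect_1 = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });

   has an all-ones mask and is rewritten into the unconditional load

     vect_1 = MEM <vector(4) int> [(int *) ptr];

   with .MASK_STORE handled symmetrically. */
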
4349 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4350 The statement may be replaced by another statement, e.g., if the call
4351 simplifies to a constant value. Return true if any changes were made.
4352 It is assumed that the operands have been previously folded. */
4353
4354 static bool
4355 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4356 {
4357 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4358 tree callee;
4359 bool changed = false;
4360 unsigned i;
4361
4362 /* Fold *& in call arguments. */
4363 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4364 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4365 {
4366 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4367 if (tmp)
4368 {
4369 gimple_call_set_arg (stmt, i, tmp);
4370 changed = true;
4371 }
4372 }
4373
4374 /* Check for virtual calls that became direct calls. */
4375 callee = gimple_call_fn (stmt);
4376 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4377 {
4378 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4379 {
4380 if (dump_file && virtual_method_call_p (callee)
4381 && !possible_polymorphic_call_target_p
4382 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4383 (OBJ_TYPE_REF_EXPR (callee)))))
4384 {
4385 fprintf (dump_file,
4386 "Type inheritance inconsistent devirtualization of ");
4387 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4388 fprintf (dump_file, " to ");
4389 print_generic_expr (dump_file, callee, TDF_SLIM);
4390 fprintf (dump_file, "\n");
4391 }
4392
4393 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4394 changed = true;
4395 }
4396 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4397 {
4398 bool final;
4399 vec <cgraph_node *>targets
4400 = possible_polymorphic_call_targets (callee, stmt, &final);
4401 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4402 {
4403 tree lhs = gimple_call_lhs (stmt);
4404 if (dump_enabled_p ())
4405 {
4406 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4407 "folding virtual function call to %s\n",
4408 targets.length () == 1
4409 ? targets[0]->name ()
4410 : "__builtin_unreachable");
4411 }
4412 if (targets.length () == 1)
4413 {
4414 tree fndecl = targets[0]->decl;
4415 gimple_call_set_fndecl (stmt, fndecl);
4416 changed = true;
4417 /* If changing the call to __cxa_pure_virtual
4418 or similar noreturn function, adjust gimple_call_fntype
4419 too. */
4420 if (gimple_call_noreturn_p (stmt)
4421 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4422 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4423 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4424 == void_type_node))
4425 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4426 /* If the call becomes noreturn, remove the lhs. */
4427 if (lhs
4428 && gimple_call_noreturn_p (stmt)
4429 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4430 || should_remove_lhs_p (lhs)))
4431 {
4432 if (TREE_CODE (lhs) == SSA_NAME)
4433 {
4434 tree var = create_tmp_var (TREE_TYPE (lhs));
4435 tree def = get_or_create_ssa_default_def (cfun, var);
4436 gimple *new_stmt = gimple_build_assign (lhs, def);
4437 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4438 }
4439 gimple_call_set_lhs (stmt, NULL_TREE);
4440 }
4441 maybe_remove_unused_call_args (cfun, stmt);
4442 }
4443 else
4444 {
4445 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4446 gimple *new_stmt = gimple_build_call (fndecl, 0);
4447 gimple_set_location (new_stmt, gimple_location (stmt));
4448 /* If the call had an SSA name as its lhs, morph that into
4449 an uninitialized value. */
4450 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4451 {
4452 tree var = create_tmp_var (TREE_TYPE (lhs));
4453 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4454 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4455 set_ssa_default_def (cfun, var, lhs);
4456 }
4457 gimple_move_vops (new_stmt, stmt);
4458 gsi_replace (gsi, new_stmt, false);
4459 return true;
4460 }
4461 }
4462 }
4463 }
4464
4465 /* Check for indirect calls that became direct calls, and then
4466 no longer require a static chain. */
4467 if (gimple_call_chain (stmt))
4468 {
4469 tree fn = gimple_call_fndecl (stmt);
4470 if (fn && !DECL_STATIC_CHAIN (fn))
4471 {
4472 gimple_call_set_chain (stmt, NULL);
4473 changed = true;
4474 }
4475 else
4476 {
4477 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4478 if (tmp)
4479 {
4480 gimple_call_set_chain (stmt, tmp);
4481 changed = true;
4482 }
4483 }
4484 }
4485
4486 if (inplace)
4487 return changed;
4488
4489 /* Check for builtins that CCP can handle using information not
4490 available in the generic fold routines. */
4491 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4492 {
4493 if (gimple_fold_builtin (gsi))
4494 changed = true;
4495 }
4496 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4497 {
4498 changed |= targetm.gimple_fold_builtin (gsi);
4499 }
4500 else if (gimple_call_internal_p (stmt))
4501 {
4502 enum tree_code subcode = ERROR_MARK;
4503 tree result = NULL_TREE;
4504 bool cplx_result = false;
4505 tree overflow = NULL_TREE;
4506 switch (gimple_call_internal_fn (stmt))
4507 {
4508 case IFN_BUILTIN_EXPECT:
4509 result = fold_builtin_expect (gimple_location (stmt),
4510 gimple_call_arg (stmt, 0),
4511 gimple_call_arg (stmt, 1),
4512 gimple_call_arg (stmt, 2),
4513 NULL_TREE);
4514 break;
4515 case IFN_UBSAN_OBJECT_SIZE:
4516 {
4517 tree offset = gimple_call_arg (stmt, 1);
4518 tree objsize = gimple_call_arg (stmt, 2);
4519 if (integer_all_onesp (objsize)
4520 || (TREE_CODE (offset) == INTEGER_CST
4521 && TREE_CODE (objsize) == INTEGER_CST
4522 && tree_int_cst_le (offset, objsize)))
4523 {
4524 replace_call_with_value (gsi, NULL_TREE);
4525 return true;
4526 }
4527 }
4528 break;
4529 case IFN_UBSAN_PTR:
4530 if (integer_zerop (gimple_call_arg (stmt, 1)))
4531 {
4532 replace_call_with_value (gsi, NULL_TREE);
4533 return true;
4534 }
4535 break;
4536 case IFN_UBSAN_BOUNDS:
4537 {
4538 tree index = gimple_call_arg (stmt, 1);
4539 tree bound = gimple_call_arg (stmt, 2);
4540 if (TREE_CODE (index) == INTEGER_CST
4541 && TREE_CODE (bound) == INTEGER_CST)
4542 {
4543 index = fold_convert (TREE_TYPE (bound), index);
4544 if (TREE_CODE (index) == INTEGER_CST
4545 && tree_int_cst_le (index, bound))
4546 {
4547 replace_call_with_value (gsi, NULL_TREE);
4548 return true;
4549 }
4550 }
4551 }
4552 break;
4553 case IFN_GOACC_DIM_SIZE:
4554 case IFN_GOACC_DIM_POS:
4555 result = fold_internal_goacc_dim (stmt);
4556 break;
4557 case IFN_UBSAN_CHECK_ADD:
4558 subcode = PLUS_EXPR;
4559 break;
4560 case IFN_UBSAN_CHECK_SUB:
4561 subcode = MINUS_EXPR;
4562 break;
4563 case IFN_UBSAN_CHECK_MUL:
4564 subcode = MULT_EXPR;
4565 break;
4566 case IFN_ADD_OVERFLOW:
4567 subcode = PLUS_EXPR;
4568 cplx_result = true;
4569 break;
4570 case IFN_SUB_OVERFLOW:
4571 subcode = MINUS_EXPR;
4572 cplx_result = true;
4573 break;
4574 case IFN_MUL_OVERFLOW:
4575 subcode = MULT_EXPR;
4576 cplx_result = true;
4577 break;
4578 case IFN_MASK_LOAD:
4579 changed |= gimple_fold_mask_load (gsi, stmt);
4580 break;
4581 case IFN_MASK_STORE:
4582 changed |= gimple_fold_mask_store (gsi, stmt);
4583 break;
4584 default:
4585 break;
4586 }
4587 if (subcode != ERROR_MARK)
4588 {
4589 tree arg0 = gimple_call_arg (stmt, 0);
4590 tree arg1 = gimple_call_arg (stmt, 1);
4591 tree type = TREE_TYPE (arg0);
4592 if (cplx_result)
4593 {
4594 tree lhs = gimple_call_lhs (stmt);
4595 if (lhs == NULL_TREE)
4596 type = NULL_TREE;
4597 else
4598 type = TREE_TYPE (TREE_TYPE (lhs));
4599 }
4600 if (type == NULL_TREE)
4601 ;
4602 /* x = y + 0; x = y - 0; x = y * 0; */
4603 else if (integer_zerop (arg1))
4604 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4605 /* x = 0 + y; x = 0 * y; */
4606 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4607 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4608 /* x = y - y; */
4609 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4610 result = integer_zero_node;
4611 /* x = y * 1; x = 1 * y; */
4612 else if (subcode == MULT_EXPR && integer_onep (arg1))
4613 result = arg0;
4614 else if (subcode == MULT_EXPR && integer_onep (arg0))
4615 result = arg1;
4616 else if (TREE_CODE (arg0) == INTEGER_CST
4617 && TREE_CODE (arg1) == INTEGER_CST)
4618 {
4619 if (cplx_result)
4620 result = int_const_binop (subcode, fold_convert (type, arg0),
4621 fold_convert (type, arg1));
4622 else
4623 result = int_const_binop (subcode, arg0, arg1);
4624 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4625 {
4626 if (cplx_result)
4627 overflow = build_one_cst (type);
4628 else
4629 result = NULL_TREE;
4630 }
4631 }
4632 if (result)
4633 {
4634 if (result == integer_zero_node)
4635 result = build_zero_cst (type);
4636 else if (cplx_result && TREE_TYPE (result) != type)
4637 {
4638 if (TREE_CODE (result) == INTEGER_CST)
4639 {
4640 if (arith_overflowed_p (PLUS_EXPR, type, result,
4641 integer_zero_node))
4642 overflow = build_one_cst (type);
4643 }
4644 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4645 && TYPE_UNSIGNED (type))
4646 || (TYPE_PRECISION (type)
4647 < (TYPE_PRECISION (TREE_TYPE (result))
4648 + (TYPE_UNSIGNED (TREE_TYPE (result))
4649 && !TYPE_UNSIGNED (type)))))
4650 result = NULL_TREE;
4651 if (result)
4652 result = fold_convert (type, result);
4653 }
4654 }
4655 }
4656
4657 if (result)
4658 {
4659 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4660 result = drop_tree_overflow (result);
4661 if (cplx_result)
4662 {
4663 if (overflow == NULL_TREE)
4664 overflow = build_zero_cst (TREE_TYPE (result));
4665 tree ctype = build_complex_type (TREE_TYPE (result));
4666 if (TREE_CODE (result) == INTEGER_CST
4667 && TREE_CODE (overflow) == INTEGER_CST)
4668 result = build_complex (ctype, result, overflow);
4669 else
4670 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4671 ctype, result, overflow);
4672 }
4673 if (!update_call_from_tree (gsi, result))
4674 gimplify_and_update_call_from_tree (gsi, result);
4675 changed = true;
4676 }
4677 }
4678
4679 return changed;
4680 }
4681
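/* Among the internal-function folds above, e.g. (illustrative)

     r_1 = .ADD_OVERFLOW (2, 3);

   with an unsigned char result becomes the complex constant
   __complex__ (5, 0): the real part is the value, the imaginary part
   the overflow flag that __builtin_add_overflow users test. */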
4682
4683 /* Return true if NAME has a use on STMT. */
4684
4685 static bool
4686 has_use_on_stmt (tree name, gimple *stmt)
4687 {
4688 imm_use_iterator iter;
4689 use_operand_p use_p;
4690 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4691 if (USE_STMT (use_p) == stmt)
4692 return true;
4693 return false;
4694 }
4695
4696 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4697 gimple_simplify.
4698
4699 Replaces *GSI with the simplification result in RES_OP
4700 and the associated statements in *SEQ. Does the replacement
4701 according to INPLACE and returns true if the operation succeeded. */
4702
4703 static bool
4704 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4705 gimple_match_op *res_op,
4706 gimple_seq *seq, bool inplace)
4707 {
4708 gimple *stmt = gsi_stmt (*gsi);
4709 tree *ops = res_op->ops;
4710 unsigned int num_ops = res_op->num_ops;
4711
4712 /* Play safe and do not allow abnormals to be mentioned in
4713 newly created statements. See also maybe_push_res_to_seq.
4714 As an exception allow such uses if there was a use of the
4715 same SSA name on the old stmt. */
4716 for (unsigned int i = 0; i < num_ops; ++i)
4717 if (TREE_CODE (ops[i]) == SSA_NAME
4718 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4719 && !has_use_on_stmt (ops[i], stmt))
4720 return false;
4721
4722 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4723 for (unsigned int i = 0; i < 2; ++i)
4724 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4725 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4726 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4727 return false;
4728
4729 /* Don't insert new statements when INPLACE is true, even if we could
4730 reuse STMT for the final statement. */
4731 if (inplace && !gimple_seq_empty_p (*seq))
4732 return false;
4733
4734 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4735 {
4736 gcc_assert (res_op->code.is_tree_code ());
4737 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4738 /* GIMPLE_CONDs condition may not throw. */
4739 && (!flag_exceptions
4740 || !cfun->can_throw_non_call_exceptions
4741 || !operation_could_trap_p (res_op->code,
4742 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4743 false, NULL_TREE)))
4744 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4745 else if (res_op->code == SSA_NAME)
4746 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4747 build_zero_cst (TREE_TYPE (ops[0])));
4748 else if (res_op->code == INTEGER_CST)
4749 {
4750 if (integer_zerop (ops[0]))
4751 gimple_cond_make_false (cond_stmt);
4752 else
4753 gimple_cond_make_true (cond_stmt);
4754 }
4755 else if (!inplace)
4756 {
4757 tree res = maybe_push_res_to_seq (res_op, seq);
4758 if (!res)
4759 return false;
4760 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4761 build_zero_cst (TREE_TYPE (res)));
4762 }
4763 else
4764 return false;
4765 if (dump_file && (dump_flags & TDF_DETAILS))
4766 {
4767 fprintf (dump_file, "gimple_simplified to ");
4768 if (!gimple_seq_empty_p (*seq))
4769 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4770 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4771 0, TDF_SLIM);
4772 }
4773 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4774 return true;
4775 }
4776 else if (is_gimple_assign (stmt)
4777 && res_op->code.is_tree_code ())
4778 {
4779 if (!inplace
4780 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4781 {
4782 maybe_build_generic_op (res_op);
4783 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4784 res_op->op_or_null (0),
4785 res_op->op_or_null (1),
4786 res_op->op_or_null (2));
4787 if (dump_file && (dump_flags & TDF_DETAILS))
4788 {
4789 fprintf (dump_file, "gimple_simplified to ");
4790 if (!gimple_seq_empty_p (*seq))
4791 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4792 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4793 0, TDF_SLIM);
4794 }
4795 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4796 return true;
4797 }
4798 }
4799 else if (res_op->code.is_fn_code ()
4800 && gimple_call_combined_fn (stmt) == res_op->code)
4801 {
4802 gcc_assert (num_ops == gimple_call_num_args (stmt));
4803 for (unsigned int i = 0; i < num_ops; ++i)
4804 gimple_call_set_arg (stmt, i, ops[i]);
4805 if (dump_file && (dump_flags & TDF_DETAILS))
4806 {
4807 fprintf (dump_file, "gimple_simplified to ");
4808 if (!gimple_seq_empty_p (*seq))
4809 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4810 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4811 }
4812 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4813 return true;
4814 }
4815 else if (!inplace)
4816 {
4817 if (gimple_has_lhs (stmt))
4818 {
4819 tree lhs = gimple_get_lhs (stmt);
4820 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4821 return false;
4822 if (dump_file && (dump_flags & TDF_DETAILS))
4823 {
4824 fprintf (dump_file, "gimple_simplified to ");
4825 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4826 }
4827 gsi_replace_with_seq_vops (gsi, *seq);
4828 return true;
4829 }
4830 else
4831 gcc_unreachable ();
4832 }
4833
4834 return false;
4835 }
4836
4837 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4838
4839 static bool
4840 maybe_canonicalize_mem_ref_addr (tree *t)
4841 {
4842 bool res = false;
4843
4844 if (TREE_CODE (*t) == ADDR_EXPR)
4845 t = &TREE_OPERAND (*t, 0);
4846
4847 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4848 generic vector extension. The actual vector referenced is
4849 view-converted to an array type for this purpose. If the index
4850 is constant the canonical representation in the middle-end is a
4851 BIT_FIELD_REF so re-write the former to the latter here. */
4852 if (TREE_CODE (*t) == ARRAY_REF
4853 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4854 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4855 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4856 {
4857 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4858 if (VECTOR_TYPE_P (vtype))
4859 {
4860 tree low = array_ref_low_bound (*t);
4861 if (TREE_CODE (low) == INTEGER_CST)
4862 {
4863 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4864 {
4865 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4866 wi::to_widest (low));
4867 idx = wi::mul (idx, wi::to_widest
4868 (TYPE_SIZE (TREE_TYPE (*t))));
4869 widest_int ext
4870 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4871 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4872 {
4873 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4874 TREE_TYPE (*t),
4875 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4876 TYPE_SIZE (TREE_TYPE (*t)),
4877 wide_int_to_tree (bitsizetype, idx));
4878 res = true;
4879 }
4880 }
4881 }
4882 }
4883 }
4884
4885 while (handled_component_p (*t))
4886 t = &TREE_OPERAND (*t, 0);
4887
4888 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4889 of invariant addresses into an SSA name MEM_REF address. */
4890 if (TREE_CODE (*t) == MEM_REF
4891 || TREE_CODE (*t) == TARGET_MEM_REF)
4892 {
4893 tree addr = TREE_OPERAND (*t, 0);
4894 if (TREE_CODE (addr) == ADDR_EXPR
4895 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4896 || handled_component_p (TREE_OPERAND (addr, 0))))
4897 {
4898 tree base;
4899 poly_int64 coffset;
4900 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4901 &coffset);
4902 if (!base)
4903 gcc_unreachable ();
4904
4905 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4906 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4907 TREE_OPERAND (*t, 1),
4908 size_int (coffset));
4909 res = true;
4910 }
4911 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4912 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4913 }
4914
4915 /* Canonicalize back MEM_REFs to plain reference trees if the object
4916 accessed is a decl that has the same access semantics as the MEM_REF. */
4917 if (TREE_CODE (*t) == MEM_REF
4918 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4919 && integer_zerop (TREE_OPERAND (*t, 1))
4920 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4921 {
4922 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4923 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4924 if (/* Same volatile qualification. */
4925 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4926 /* Same TBAA behavior with -fstrict-aliasing. */
4927 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4928 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4929 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4930 /* Same alignment. */
4931 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4932 /* We have to look out here to not drop a required conversion
4933 from the rhs to the lhs if *t appears on the lhs or vice-versa
4934 if it appears on the rhs. Thus require strict type
4935 compatibility. */
4936 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4937 {
4938 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4939 res = true;
4940 }
4941 }
4942
4943 /* Canonicalize TARGET_MEM_REF in particular with respect to
4944 the indexes becoming constant. */
4945 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4946 {
4947 tree tem = maybe_fold_tmr (*t);
4948 if (tem)
4949 {
4950 *t = tem;
4951 res = true;
4952 }
4953 }
4954
4955 return res;
4956 }
4957
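/* For example (illustrative): MEM[(int *) &a, 0] with matching type,
   alignment and qualifiers is rewritten back into the plain reference
   A, and an ARRAY_REF of a VIEW_CONVERT_EXPR of a vector with a
   constant in-bounds index becomes a BIT_FIELD_REF of the vector. */
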
4958 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4959 distinguishes both cases. */
4960
4961 static bool
4962 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4963 {
4964 bool changed = false;
4965 gimple *stmt = gsi_stmt (*gsi);
4966 bool nowarning = gimple_no_warning_p (stmt);
4967 unsigned i;
4968 fold_defer_overflow_warnings ();
4969
4970 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4971 after propagation.
4972 ??? This shouldn't be done in generic folding but in the
4973 propagation helpers which also know whether an address was
4974 propagated.
4975 Also canonicalize operand order. */
4976 switch (gimple_code (stmt))
4977 {
4978 case GIMPLE_ASSIGN:
4979 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4980 {
4981 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4982 if ((REFERENCE_CLASS_P (*rhs)
4983 || TREE_CODE (*rhs) == ADDR_EXPR)
4984 && maybe_canonicalize_mem_ref_addr (rhs))
4985 changed = true;
4986 tree *lhs = gimple_assign_lhs_ptr (stmt);
4987 if (REFERENCE_CLASS_P (*lhs)
4988 && maybe_canonicalize_mem_ref_addr (lhs))
4989 changed = true;
4990 }
4991 else
4992 {
4993 /* Canonicalize operand order. */
4994 enum tree_code code = gimple_assign_rhs_code (stmt);
4995 if (TREE_CODE_CLASS (code) == tcc_comparison
4996 || commutative_tree_code (code)
4997 || commutative_ternary_tree_code (code))
4998 {
4999 tree rhs1 = gimple_assign_rhs1 (stmt);
5000 tree rhs2 = gimple_assign_rhs2 (stmt);
5001 if (tree_swap_operands_p (rhs1, rhs2))
5002 {
5003 gimple_assign_set_rhs1 (stmt, rhs2);
5004 gimple_assign_set_rhs2 (stmt, rhs1);
5005 if (TREE_CODE_CLASS (code) == tcc_comparison)
5006 gimple_assign_set_rhs_code (stmt,
5007 swap_tree_comparison (code));
5008 changed = true;
5009 }
5010 }
5011 }
5012 break;
5013 case GIMPLE_CALL:
5014 {
5015 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5016 {
5017 tree *arg = gimple_call_arg_ptr (stmt, i);
5018 if (REFERENCE_CLASS_P (*arg)
5019 && maybe_canonicalize_mem_ref_addr (arg))
5020 changed = true;
5021 }
5022 tree *lhs = gimple_call_lhs_ptr (stmt);
5023 if (*lhs
5024 && REFERENCE_CLASS_P (*lhs)
5025 && maybe_canonicalize_mem_ref_addr (lhs))
5026 changed = true;
5027 break;
5028 }
5029 case GIMPLE_ASM:
5030 {
5031 gasm *asm_stmt = as_a <gasm *> (stmt);
5032 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5033 {
5034 tree link = gimple_asm_output_op (asm_stmt, i);
5035 tree op = TREE_VALUE (link);
5036 if (REFERENCE_CLASS_P (op)
5037 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5038 changed = true;
5039 }
5040 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5041 {
5042 tree link = gimple_asm_input_op (asm_stmt, i);
5043 tree op = TREE_VALUE (link);
5044 if ((REFERENCE_CLASS_P (op)
5045 || TREE_CODE (op) == ADDR_EXPR)
5046 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5047 changed = true;
5048 }
5049 }
5050 break;
5051 case GIMPLE_DEBUG:
5052 if (gimple_debug_bind_p (stmt))
5053 {
5054 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5055 if (*val
5056 && (REFERENCE_CLASS_P (*val)
5057 || TREE_CODE (*val) == ADDR_EXPR)
5058 && maybe_canonicalize_mem_ref_addr (val))
5059 changed = true;
5060 }
5061 break;
5062 case GIMPLE_COND:
5063 {
5064 /* Canonicalize operand order. */
5065 tree lhs = gimple_cond_lhs (stmt);
5066 tree rhs = gimple_cond_rhs (stmt);
5067 if (tree_swap_operands_p (lhs, rhs))
5068 {
5069 gcond *gc = as_a <gcond *> (stmt);
5070 gimple_cond_set_lhs (gc, rhs);
5071 gimple_cond_set_rhs (gc, lhs);
5072 gimple_cond_set_code (gc,
5073 swap_tree_comparison (gimple_cond_code (gc)));
5074 changed = true;
5075 }
5076 }
5077 default:;
5078 }
5079
5080 /* Dispatch to pattern-based folding. */
5081 if (!inplace
5082 || is_gimple_assign (stmt)
5083 || gimple_code (stmt) == GIMPLE_COND)
5084 {
5085 gimple_seq seq = NULL;
5086 gimple_match_op res_op;
5087 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
5088 valueize, valueize))
5089 {
5090 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
5091 changed = true;
5092 else
5093 gimple_seq_discard (seq);
5094 }
5095 }
5096
5097 stmt = gsi_stmt (*gsi);
5098
5099 /* Fold the main computation performed by the statement. */
5100 switch (gimple_code (stmt))
5101 {
5102 case GIMPLE_ASSIGN:
5103 {
5104 /* Try to canonicalize for boolean-typed X the comparisons
5105 X == 0, X == 1, X != 0, and X != 1. */
5106 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5107 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5108 {
5109 tree lhs = gimple_assign_lhs (stmt);
5110 tree op1 = gimple_assign_rhs1 (stmt);
5111 tree op2 = gimple_assign_rhs2 (stmt);
5112 tree type = TREE_TYPE (op1);
5113
5114 /* Check whether the comparison operands are of the same boolean
5115 type as the result type.
5116 Check that the second operand is an integer constant with value
5117 one or zero. */
5118 if (TREE_CODE (op2) == INTEGER_CST
5119 && (integer_zerop (op2) || integer_onep (op2))
5120 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5121 {
5122 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5123 bool is_logical_not = false;
5124
5125 /* X == 0 and X != 1 is a logical-not of X;
5126 X == 1 and X != 0 is X */
5127 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5128 || (cmp_code == NE_EXPR && integer_onep (op2)))
5129 is_logical_not = true;
5130
5131 if (is_logical_not == false)
5132 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5133 /* Only for one-bit precision typed X is the transformation
5134 !X -> ~X valid. */
5135 else if (TYPE_PRECISION (type) == 1)
5136 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5137 /* Otherwise we use !X -> X ^ 1. */
5138 else
5139 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5140 build_int_cst (type, 1));
5141 changed = true;
5142 break;
5143 }
5144 }
5145
5146 unsigned old_num_ops = gimple_num_ops (stmt);
5147 tree lhs = gimple_assign_lhs (stmt);
5148 tree new_rhs = fold_gimple_assign (gsi);
5149 if (new_rhs
5150 && !useless_type_conversion_p (TREE_TYPE (lhs),
5151 TREE_TYPE (new_rhs)))
5152 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5153 if (new_rhs
5154 && (!inplace
5155 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5156 {
5157 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5158 changed = true;
5159 }
5160 break;
5161 }
5162
5163 case GIMPLE_CALL:
5164 changed |= gimple_fold_call (gsi, inplace);
5165 break;
5166
5167 case GIMPLE_ASM:
5168 /* Fold *& in asm operands. */
5169 {
5170 gasm *asm_stmt = as_a <gasm *> (stmt);
5171 size_t noutputs;
5172 const char **oconstraints;
5173 const char *constraint;
5174 bool allows_mem, allows_reg;
5175
5176 noutputs = gimple_asm_noutputs (asm_stmt);
5177 oconstraints = XALLOCAVEC (const char *, noutputs);
5178
5179 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5180 {
5181 tree link = gimple_asm_output_op (asm_stmt, i);
5182 tree op = TREE_VALUE (link);
5183 oconstraints[i]
5184 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5185 if (REFERENCE_CLASS_P (op)
5186 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5187 {
5188 TREE_VALUE (link) = op;
5189 changed = true;
5190 }
5191 }
5192 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5193 {
5194 tree link = gimple_asm_input_op (asm_stmt, i);
5195 tree op = TREE_VALUE (link);
5196 constraint
5197 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5198 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5199 oconstraints, &allows_mem, &allows_reg);
5200 if (REFERENCE_CLASS_P (op)
5201 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5202 != NULL_TREE)
5203 {
5204 TREE_VALUE (link) = op;
5205 changed = true;
5206 }
5207 }
5208 }
5209 break;
5210
5211 case GIMPLE_DEBUG:
5212 if (gimple_debug_bind_p (stmt))
5213 {
5214 tree val = gimple_debug_bind_get_value (stmt);
5215 if (val
5216 && REFERENCE_CLASS_P (val))
5217 {
5218 tree tem = maybe_fold_reference (val, false);
5219 if (tem)
5220 {
5221 gimple_debug_bind_set_value (stmt, tem);
5222 changed = true;
5223 }
5224 }
5225 else if (val
5226 && TREE_CODE (val) == ADDR_EXPR)
5227 {
5228 tree ref = TREE_OPERAND (val, 0);
5229 tree tem = maybe_fold_reference (ref, false);
5230 if (tem)
5231 {
5232 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5233 gimple_debug_bind_set_value (stmt, tem);
5234 changed = true;
5235 }
5236 }
5237 }
5238 break;
5239
5240 case GIMPLE_RETURN:
5241 {
5242 greturn *ret_stmt = as_a<greturn *> (stmt);
5243 tree ret = gimple_return_retval(ret_stmt);
5244
5245 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5246 {
5247 tree val = valueize (ret);
5248 if (val && val != ret
5249 && may_propagate_copy (ret, val))
5250 {
5251 gimple_return_set_retval (ret_stmt, val);
5252 changed = true;
5253 }
5254 }
5255 }
5256 break;
5257
5258 default:;
5259 }
5260
5261 stmt = gsi_stmt (*gsi);
5262
5263 /* Fold *& on the lhs. */
5264 if (gimple_has_lhs (stmt))
5265 {
5266 tree lhs = gimple_get_lhs (stmt);
5267 if (lhs && REFERENCE_CLASS_P (lhs))
5268 {
5269 tree new_lhs = maybe_fold_reference (lhs, true);
5270 if (new_lhs)
5271 {
5272 gimple_set_lhs (stmt, new_lhs);
5273 changed = true;
5274 }
5275 }
5276 }
5277
5278 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5279 return changed;
5280 }
5281
5282 /* Valueization callback that ends up not following SSA edges. */
5283
5284 tree
5285 no_follow_ssa_edges (tree)
5286 {
5287 return NULL_TREE;
5288 }
5289
5290 /* Valueization callback that ends up following single-use SSA edges only. */
5291
5292 tree
5293 follow_single_use_edges (tree val)
5294 {
5295 if (TREE_CODE (val) == SSA_NAME
5296 && !has_single_use (val))
5297 return NULL_TREE;
5298 return val;
5299 }
5300
5301 /* Valueization callback that follows all SSA edges. */
5302
5303 tree
5304 follow_all_ssa_edges (tree val)
5305 {
5306 return val;
5307 }
5308
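/* A typical use (illustrative): a pass with its own lattice could call

     fold_stmt (&gsi, get_lattice_value);

   where get_lattice_value is a hypothetical callback that maps an SSA
   name to its known constant, or returns NULL_TREE to stop the fold
   from looking through that SSA edge; the three callbacks above cover
   the common fixed policies. */
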
5309 /* Fold the statement pointed to by GSI. In some cases, this function may
5310 replace the whole statement with a new one. Returns true iff folding
5311 makes any changes.
5312 The statement pointed to by GSI should be in valid gimple form but may
5313 be in unfolded state as resulting from for example constant propagation
5314 which can produce *&x = 0. */
5315
5316 bool
5317 fold_stmt (gimple_stmt_iterator *gsi)
5318 {
5319 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5320 }
5321
5322 bool
5323 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5324 {
5325 return fold_stmt_1 (gsi, false, valueize);
5326 }
5327
5328 /* Perform the minimal folding on statement *GSI. Only operations like
5329 *&x created by constant propagation are handled. The statement cannot
5330 be replaced with a new one. Return true if the statement was
5331 changed, false otherwise.
5332 The statement *GSI should be in valid gimple form but may
5333 be in an unfolded state resulting from, for example, constant
5334 propagation, which can produce *&x = 0. */
5335
5336 bool
5337 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5338 {
5339 gimple *stmt = gsi_stmt (*gsi);
5340 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5341 gcc_assert (gsi_stmt (*gsi) == stmt);
5342 return changed;
5343 }
5344
5345 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5346 if EXPR is null or we don't know how.
5347 If non-null, the result always has boolean type. */
5348
5349 static tree
5350 canonicalize_bool (tree expr, bool invert)
5351 {
5352 if (!expr)
5353 return NULL_TREE;
5354 else if (invert)
5355 {
5356 if (integer_nonzerop (expr))
5357 return boolean_false_node;
5358 else if (integer_zerop (expr))
5359 return boolean_true_node;
5360 else if (TREE_CODE (expr) == SSA_NAME)
5361 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5362 build_int_cst (TREE_TYPE (expr), 0));
5363 else if (COMPARISON_CLASS_P (expr))
5364 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5365 boolean_type_node,
5366 TREE_OPERAND (expr, 0),
5367 TREE_OPERAND (expr, 1));
5368 else
5369 return NULL_TREE;
5370 }
5371 else
5372 {
5373 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5374 return expr;
5375 if (integer_nonzerop (expr))
5376 return boolean_true_node;
5377 else if (integer_zerop (expr))
5378 return boolean_false_node;
5379 else if (TREE_CODE (expr) == SSA_NAME)
5380 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5381 build_int_cst (TREE_TYPE (expr), 0));
5382 else if (COMPARISON_CLASS_P (expr))
5383 return fold_build2 (TREE_CODE (expr),
5384 boolean_type_node,
5385 TREE_OPERAND (expr, 0),
5386 TREE_OPERAND (expr, 1));
5387 else
5388 return NULL_TREE;
5389 }
5390 }
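
/* Some illustrative cases (not from the sources), assuming b_1 is a
   boolean SSA name, i_4 a non-boolean SSA name, and integer operands:
     canonicalize_bool (b_1, false)       => b_1 (already boolean)
     canonicalize_bool (b_1, true)        => b_1 == 0
     canonicalize_bool (i_4, false)       => i_4 != 0
     canonicalize_bool (a_2 < b_3, true)  => a_2 >= b_3
   The last case uses invert_tree_comparison with honor_nans == false,
   so it is valid only when NaNs need not be honored.  */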
5391
5392 /* Check to see if a boolean expression EXPR is logically equivalent to the
5393 comparison (OP1 CODE OP2). Check for various identities involving
5394 SSA_NAMEs. */
5395
5396 static bool
5397 same_bool_comparison_p (const_tree expr, enum tree_code code,
5398 const_tree op1, const_tree op2)
5399 {
5400 gimple *s;
5401
5402 /* The obvious case. */
5403 if (TREE_CODE (expr) == code
5404 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5405 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5406 return true;
5407
5408 /* Check for comparing (name, name != 0) and the case where expr
5409 is an SSA_NAME with a definition matching the comparison. */
5410 if (TREE_CODE (expr) == SSA_NAME
5411 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5412 {
5413 if (operand_equal_p (expr, op1, 0))
5414 return ((code == NE_EXPR && integer_zerop (op2))
5415 || (code == EQ_EXPR && integer_nonzerop (op2)));
5416 s = SSA_NAME_DEF_STMT (expr);
5417 if (is_gimple_assign (s)
5418 && gimple_assign_rhs_code (s) == code
5419 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5420 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5421 return true;
5422 }
5423
5424 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5425 of name is a comparison, recurse. */
5426 if (TREE_CODE (op1) == SSA_NAME
5427 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5428 {
5429 s = SSA_NAME_DEF_STMT (op1);
5430 if (is_gimple_assign (s)
5431 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5432 {
5433 enum tree_code c = gimple_assign_rhs_code (s);
5434 if ((code == NE_EXPR && integer_zerop (op2))
5435 || (code == EQ_EXPR && integer_nonzerop (op2)))
5436 return same_bool_comparison_p (expr, c,
5437 gimple_assign_rhs1 (s),
5438 gimple_assign_rhs2 (s));
5439 if ((code == EQ_EXPR && integer_zerop (op2))
5440 || (code == NE_EXPR && integer_nonzerop (op2)))
5441 return same_bool_comparison_p (expr,
5442 invert_tree_comparison (c, false),
5443 gimple_assign_rhs1 (s),
5444 gimple_assign_rhs2 (s));
5445 }
5446 }
5447 return false;
5448 }
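
/* Illustrative example (not from the sources): given
     x_1 = a_2 < b_3;
   same_bool_comparison_p (x_1, LT_EXPR, a_2, b_3) returns true via the
   SSA definition of x_1, while
   same_bool_comparison_p (a_2 < b_3, NE_EXPR, x_1, 0) returns true via
   the (name != 0) recursion above.  */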
5449
5450 /* Check to see if two boolean expressions OP1 and OP2 are logically
5451 equivalent. */
5452
5453 static bool
5454 same_bool_result_p (const_tree op1, const_tree op2)
5455 {
5456 /* Simple cases first. */
5457 if (operand_equal_p (op1, op2, 0))
5458 return true;
5459
5460 /* Check the cases where at least one of the operands is a comparison.
5461 These are a bit smarter than operand_equal_p in that they apply some
5462 identities on SSA_NAMEs. */
5463 if (COMPARISON_CLASS_P (op2)
5464 && same_bool_comparison_p (op1, TREE_CODE (op2),
5465 TREE_OPERAND (op2, 0),
5466 TREE_OPERAND (op2, 1)))
5467 return true;
5468 if (COMPARISON_CLASS_P (op1)
5469 && same_bool_comparison_p (op2, TREE_CODE (op1),
5470 TREE_OPERAND (op1, 0),
5471 TREE_OPERAND (op1, 1)))
5472 return true;
5473
5474 /* Default case. */
5475 return false;
5476 }
5477
5478 /* Forward declarations for some mutually recursive functions. */
5479
5480 static tree
5481 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5482 enum tree_code code2, tree op2a, tree op2b);
5483 static tree
5484 and_var_with_comparison (tree type, tree var, bool invert,
5485 enum tree_code code2, tree op2a, tree op2b);
5486 static tree
5487 and_var_with_comparison_1 (tree type, gimple *stmt,
5488 enum tree_code code2, tree op2a, tree op2b);
5489 static tree
5490 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5491 enum tree_code code2, tree op2a, tree op2b);
5492 static tree
5493 or_var_with_comparison (tree, tree var, bool invert,
5494 enum tree_code code2, tree op2a, tree op2b);
5495 static tree
5496 or_var_with_comparison_1 (tree, gimple *stmt,
5497 enum tree_code code2, tree op2a, tree op2b);
5498
5499 /* Helper function for and_comparisons_1: try to simplify the AND of the
5500 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5501 If INVERT is true, invert the value of the VAR before doing the AND.
5502 Return NULL_TREE if we can't simplify this to a single expression. */
5503
5504 static tree
5505 and_var_with_comparison (tree type, tree var, bool invert,
5506 enum tree_code code2, tree op2a, tree op2b)
5507 {
5508 tree t;
5509 gimple *stmt = SSA_NAME_DEF_STMT (var);
5510
5511 /* We can only deal with variables whose definitions are assignments. */
5512 if (!is_gimple_assign (stmt))
5513 return NULL_TREE;
5514
5515 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5516 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5517 Then we only have to consider the simpler non-inverted cases. */
5518 if (invert)
5519 t = or_var_with_comparison_1 (type, stmt,
5520 invert_tree_comparison (code2, false),
5521 op2a, op2b);
5522 else
5523 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5524 return canonicalize_bool (t, invert);
5525 }
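
/* For instance (illustrative, integer operands): with
     var_1 = a_2 < b_3;
   and INVERT true, the DeMorgan rewrite above turns the query
     !var_1 AND (a_2 < b_3)
   into the negation of (var_1 OR (a_2 >= b_3)); that inner OR resolves
   to true, so canonicalize_bool inverts it to boolean_false_node,
   i.e. the whole conjunction is false.  */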
5526
5527 /* Try to simplify the AND of the ssa variable defined by the assignment
5528 STMT with the comparison specified by (OP2A CODE2 OP2B).
5529 Return NULL_TREE if we can't simplify this to a single expression. */
5530
5531 static tree
5532 and_var_with_comparison_1 (tree type, gimple *stmt,
5533 enum tree_code code2, tree op2a, tree op2b)
5534 {
5535 tree var = gimple_assign_lhs (stmt);
5536 tree true_test_var = NULL_TREE;
5537 tree false_test_var = NULL_TREE;
5538 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5539
5540 /* Check for identities like (var AND (var == 0)) => false. */
5541 if (TREE_CODE (op2a) == SSA_NAME
5542 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5543 {
5544 if ((code2 == NE_EXPR && integer_zerop (op2b))
5545 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5546 {
5547 true_test_var = op2a;
5548 if (var == true_test_var)
5549 return var;
5550 }
5551 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5552 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5553 {
5554 false_test_var = op2a;
5555 if (var == false_test_var)
5556 return boolean_false_node;
5557 }
5558 }
5559
5560 /* If the definition is a comparison, recurse on it. */
5561 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5562 {
5563 tree t = and_comparisons_1 (type, innercode,
5564 gimple_assign_rhs1 (stmt),
5565 gimple_assign_rhs2 (stmt),
5566 code2,
5567 op2a,
5568 op2b);
5569 if (t)
5570 return t;
5571 }
5572
5573 /* If the definition is an AND or OR expression, we may be able to
5574 simplify by reassociating. */
5575 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5576 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5577 {
5578 tree inner1 = gimple_assign_rhs1 (stmt);
5579 tree inner2 = gimple_assign_rhs2 (stmt);
5580 gimple *s;
5581 tree t;
5582 tree partial = NULL_TREE;
5583 bool is_and = (innercode == BIT_AND_EXPR);
5584
5585 /* Check for boolean identities that don't require recursive examination
5586 of inner1/inner2:
5587 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5588 inner1 AND (inner1 OR inner2) => inner1
5589 !inner1 AND (inner1 AND inner2) => false
5590 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5591 Likewise for similar cases involving inner2. */
5592 if (inner1 == true_test_var)
5593 return (is_and ? var : inner1);
5594 else if (inner2 == true_test_var)
5595 return (is_and ? var : inner2);
5596 else if (inner1 == false_test_var)
5597 return (is_and
5598 ? boolean_false_node
5599 : and_var_with_comparison (type, inner2, false, code2, op2a,
5600 op2b));
5601 else if (inner2 == false_test_var)
5602 return (is_and
5603 ? boolean_false_node
5604 : and_var_with_comparison (type, inner1, false, code2, op2a,
5605 op2b));
5606
5607 /* Next, redistribute/reassociate the AND across the inner tests.
5608 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5609 if (TREE_CODE (inner1) == SSA_NAME
5610 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5611 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5612 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5613 gimple_assign_rhs1 (s),
5614 gimple_assign_rhs2 (s),
5615 code2, op2a, op2b)))
5616 {
5617 /* Handle the AND case, where we are reassociating:
5618 (inner1 AND inner2) AND (op2a code2 op2b)
5619 => (t AND inner2)
5620 If the partial result t is a constant, we win. Otherwise
5621 continue on to try reassociating with the other inner test. */
5622 if (is_and)
5623 {
5624 if (integer_onep (t))
5625 return inner2;
5626 else if (integer_zerop (t))
5627 return boolean_false_node;
5628 }
5629
5630 /* Handle the OR case, where we are redistributing:
5631 (inner1 OR inner2) AND (op2a code2 op2b)
5632 => (t OR (inner2 AND (op2a code2 op2b))) */
5633 else if (integer_onep (t))
5634 return boolean_true_node;
5635
5636 /* Save partial result for later. */
5637 partial = t;
5638 }
5639
5640 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5641 if (TREE_CODE (inner2) == SSA_NAME
5642 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5643 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5644 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5645 gimple_assign_rhs1 (s),
5646 gimple_assign_rhs2 (s),
5647 code2, op2a, op2b)))
5648 {
5649 /* Handle the AND case, where we are reassociating:
5650 (inner1 AND inner2) AND (op2a code2 op2b)
5651 => (inner1 AND t) */
5652 if (is_and)
5653 {
5654 if (integer_onep (t))
5655 return inner1;
5656 else if (integer_zerop (t))
5657 return boolean_false_node;
5658 /* If both are the same, we can apply the identity
5659 (x AND x) == x. */
5660 else if (partial && same_bool_result_p (t, partial))
5661 return t;
5662 }
5663
5664 /* Handle the OR case, where we are redistributing:
5665 (inner1 OR inner2) AND (op2a code2 op2b)
5666 => (t OR (inner1 AND (op2a code2 op2b)))
5667 => (t OR partial) */
5668 else
5669 {
5670 if (integer_onep (t))
5671 return boolean_true_node;
5672 else if (partial)
5673 {
5674 /* We already got a simplification for the other
5675 operand to the redistributed OR expression. The
5676 interesting case is when at least one is false.
5677 Or, if both are the same, we can apply the identity
5678 (x OR x) == x. */
5679 if (integer_zerop (partial))
5680 return t;
5681 else if (integer_zerop (t))
5682 return partial;
5683 else if (same_bool_result_p (t, partial))
5684 return t;
5685 }
5686 }
5687 }
5688 }
5689 return NULL_TREE;
5690 }
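
/* Illustrative example (not from the sources): for boolean SSA names
     var_1 = b_2 & c_3;
   the query var_1 AND (b_2 != 0) hits the inner1 == true_test_var
   identity above (b_2 AND (b_2 AND c_3) => b_2 AND c_3) and simplifies
   to var_1 itself.  */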
5691
5692 /* Try to simplify the AND of two comparisons defined by
5693 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5694 If this can be done without constructing an intermediate value,
5695 return the resulting tree; otherwise NULL_TREE is returned.
5696 This function is deliberately asymmetric as it recurses on SSA_DEFs
5697 in the first comparison but not the second. */
5698
5699 static tree
5700 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5701 enum tree_code code2, tree op2a, tree op2b)
5702 {
5703 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5704
5705 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5706 if (operand_equal_p (op1a, op2a, 0)
5707 && operand_equal_p (op1b, op2b, 0))
5708 {
5709 /* Result will be either NULL_TREE, or a combined comparison. */
5710 tree t = combine_comparisons (UNKNOWN_LOCATION,
5711 TRUTH_ANDIF_EXPR, code1, code2,
5712 truth_type, op1a, op1b);
5713 if (t)
5714 return t;
5715 }
5716
5717 /* Likewise the swapped case of the above. */
5718 if (operand_equal_p (op1a, op2b, 0)
5719 && operand_equal_p (op1b, op2a, 0))
5720 {
5721 /* Result will be either NULL_TREE, or a combined comparison. */
5722 tree t = combine_comparisons (UNKNOWN_LOCATION,
5723 TRUTH_ANDIF_EXPR, code1,
5724 swap_tree_comparison (code2),
5725 truth_type, op1a, op1b);
5726 if (t)
5727 return t;
5728 }
5729
5730 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5731 NAME's definition is a truth value. See if there are any simplifications
5732 that can be done against the NAME's definition. */
5733 if (TREE_CODE (op1a) == SSA_NAME
5734 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5735 && (integer_zerop (op1b) || integer_onep (op1b)))
5736 {
5737 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5738 || (code1 == NE_EXPR && integer_onep (op1b)));
5739 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5740 switch (gimple_code (stmt))
5741 {
5742 case GIMPLE_ASSIGN:
5743 /* Try to simplify by copy-propagating the definition. */
5744 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5745 op2b);
5746
5747 case GIMPLE_PHI:
5748 /* If every argument to the PHI produces the same result when
5749 ANDed with the second comparison, we win.
5750 Do not do this unless the type is bool since we need a bool
5751 result here anyway. */
5752 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5753 {
5754 tree result = NULL_TREE;
5755 unsigned i;
5756 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5757 {
5758 tree arg = gimple_phi_arg_def (stmt, i);
5759
5760 /* If this PHI has itself as an argument, ignore it.
5761 If all the other args produce the same result,
5762 we're still OK. */
5763 if (arg == gimple_phi_result (stmt))
5764 continue;
5765 else if (TREE_CODE (arg) == INTEGER_CST)
5766 {
5767 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5768 {
5769 if (!result)
5770 result = boolean_false_node;
5771 else if (!integer_zerop (result))
5772 return NULL_TREE;
5773 }
5774 else if (!result)
5775 result = fold_build2 (code2, boolean_type_node,
5776 op2a, op2b);
5777 else if (!same_bool_comparison_p (result,
5778 code2, op2a, op2b))
5779 return NULL_TREE;
5780 }
5781 else if (TREE_CODE (arg) == SSA_NAME
5782 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5783 {
5784 tree temp;
5785 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5786 /* In simple cases we can look through PHI nodes,
5787 but we have to be careful with loops.
5788 See PR49073. */
5789 if (! dom_info_available_p (CDI_DOMINATORS)
5790 || gimple_bb (def_stmt) == gimple_bb (stmt)
5791 || dominated_by_p (CDI_DOMINATORS,
5792 gimple_bb (def_stmt),
5793 gimple_bb (stmt)))
5794 return NULL_TREE;
5795 temp = and_var_with_comparison (type, arg, invert, code2,
5796 op2a, op2b);
5797 if (!temp)
5798 return NULL_TREE;
5799 else if (!result)
5800 result = temp;
5801 else if (!same_bool_result_p (result, temp))
5802 return NULL_TREE;
5803 }
5804 else
5805 return NULL_TREE;
5806 }
5807 return result;
5808 }
5809
5810 default:
5811 break;
5812 }
5813 }
5814 return NULL_TREE;
5815 }
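
/* For example (illustrative, integer operands), and_comparisons_1
   combines
     (x_1 <= y_2) AND (x_1 >= y_2)  =>  x_1 == y_2
     (x_1 < y_2) AND (x_1 > y_2)    =>  false
   via combine_comparisons in the first two checks above.  */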
5816
5817 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
5818 try to simplify the AND/OR (per CODE) of the two comparisons
5819 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd. Return NULL_TREE
5820 if we can't simplify this to a single expression. To keep the cost of
5821 building SSA names / gimple stmts low, we allocate them on the stack.
5822 This makes the code a bit ugly. */
5823
5824 static tree
5825 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5826 enum tree_code code1,
5827 tree op1a, tree op1b,
5828 enum tree_code code2, tree op2a,
5829 tree op2b)
5830 {
5831 /* Allocate gimple stmt1 on the stack. */
5832 gassign *stmt1
5833 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5834 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5835 gimple_assign_set_rhs_code (stmt1, code1);
5836 gimple_assign_set_rhs1 (stmt1, op1a);
5837 gimple_assign_set_rhs2 (stmt1, op1b);
5838
5839 /* Allocate gimple stmt2 on the stack. */
5840 gassign *stmt2
5841 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5842 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5843 gimple_assign_set_rhs_code (stmt2, code2);
5844 gimple_assign_set_rhs1 (stmt2, op2a);
5845 gimple_assign_set_rhs2 (stmt2, op2b);
5846
5847 /* Allocate the SSA name lhs1 on the stack. */
5848 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5849 memset (lhs1, 0, sizeof (tree_ssa_name));
5850 TREE_SET_CODE (lhs1, SSA_NAME);
5851 TREE_TYPE (lhs1) = type;
5852 init_ssa_name_imm_use (lhs1);
5853
5854 /* Allocate the SSA name lhs2 on the stack. */
5855 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5856 memset (lhs2, 0, sizeof (tree_ssa_name));
5857 TREE_SET_CODE (lhs2, SSA_NAME);
5858 TREE_TYPE (lhs2) = type;
5859 init_ssa_name_imm_use (lhs2);
5860
5861 gimple_assign_set_lhs (stmt1, lhs1);
5862 gimple_assign_set_lhs (stmt2, lhs2);
5863
5864 gimple_match_op op (gimple_match_cond::UNCOND, code,
5865 type, gimple_assign_lhs (stmt1),
5866 gimple_assign_lhs (stmt2));
5867 if (op.resimplify (NULL, follow_all_ssa_edges))
5868 {
5869 if (gimple_simplified_result_is_gimple_val (&op))
5870 {
5871 tree res = op.ops[0];
5872 if (res == lhs1)
5873 return build2 (code1, type, op1a, op1b);
5874 else if (res == lhs2)
5875 return build2 (code2, type, op2a, op2b);
5876 else
5877 return res;
5878 }
5879 else if (op.code.is_tree_code ()
5880 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5881 {
5882 tree op0 = op.ops[0];
5883 tree op1 = op.ops[1];
5884 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5885 return NULL_TREE; /* not simple */
5886
5887 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5888 }
5889 }
5890
5891 return NULL_TREE;
5892 }
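
/* Sketch of the effect (illustrative, not from the sources): for
     maybe_fold_comparisons_from_match_pd (boolean_type_node, BIT_AND_EXPR,
                                           LT_EXPR, a_1, b_2,
                                           GE_EXPR, a_1, b_2)
   the two comparisons are materialized as throwaway stack statements,
   match.pd simplifies lhs1 & lhs2 to a constant zero, and that constant
   is returned; no SSA names or statements leak into the IL.  */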
5893
5894 /* Try to simplify the AND of two comparisons, specified by
5895 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5896 If this can be simplified to a single expression (without requiring
5897 introducing more SSA variables to hold intermediate values),
5898 return the resulting tree. Otherwise return NULL_TREE.
5899 If the result expression is non-null, it has boolean type. */
5900
5901 tree
5902 maybe_fold_and_comparisons (tree type,
5903 enum tree_code code1, tree op1a, tree op1b,
5904 enum tree_code code2, tree op2a, tree op2b)
5905 {
5906 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5907 return t;
5908
5909 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5910 return t;
5911
5912 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5913 op1a, op1b, code2, op2a,
5914 op2b))
5915 return t;
5916
5917 return NULL_TREE;
5918 }
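
/* Usage sketch (illustrative): a pass combining two guards might do

     tree t = maybe_fold_and_comparisons (boolean_type_node,
                                          LE_EXPR, x_1, y_2,
                                          GE_EXPR, x_1, y_2);

   after which t is x_1 == y_2, or NULL_TREE whenever no
   single-expression simplification exists.  */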
5919
5920 /* Helper function for or_comparisons_1: try to simplify the OR of the
5921 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5922 If INVERT is true, invert the value of VAR before doing the OR.
5923 Return NULL_TREE if we can't simplify this to a single expression. */
5924
5925 static tree
5926 or_var_with_comparison (tree type, tree var, bool invert,
5927 enum tree_code code2, tree op2a, tree op2b)
5928 {
5929 tree t;
5930 gimple *stmt = SSA_NAME_DEF_STMT (var);
5931
5932 /* We can only deal with variables whose definitions are assignments. */
5933 if (!is_gimple_assign (stmt))
5934 return NULL_TREE;
5935
5936 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5937 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5938 Then we only have to consider the simpler non-inverted cases. */
5939 if (invert)
5940 t = and_var_with_comparison_1 (type, stmt,
5941 invert_tree_comparison (code2, false),
5942 op2a, op2b);
5943 else
5944 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5945 return canonicalize_bool (t, invert);
5946 }
5947
5948 /* Try to simplify the OR of the ssa variable defined by the assignment
5949 STMT with the comparison specified by (OP2A CODE2 OP2B).
5950 Return NULL_TREE if we can't simplify this to a single expression. */
5951
5952 static tree
5953 or_var_with_comparison_1 (tree type, gimple *stmt,
5954 enum tree_code code2, tree op2a, tree op2b)
5955 {
5956 tree var = gimple_assign_lhs (stmt);
5957 tree true_test_var = NULL_TREE;
5958 tree false_test_var = NULL_TREE;
5959 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5960
5961 /* Check for identities like (var OR (var != 0)) => true. */
5962 if (TREE_CODE (op2a) == SSA_NAME
5963 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5964 {
5965 if ((code2 == NE_EXPR && integer_zerop (op2b))
5966 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5967 {
5968 true_test_var = op2a;
5969 if (var == true_test_var)
5970 return var;
5971 }
5972 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5973 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5974 {
5975 false_test_var = op2a;
5976 if (var == false_test_var)
5977 return boolean_true_node;
5978 }
5979 }
5980
5981 /* If the definition is a comparison, recurse on it. */
5982 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5983 {
5984 tree t = or_comparisons_1 (type, innercode,
5985 gimple_assign_rhs1 (stmt),
5986 gimple_assign_rhs2 (stmt),
5987 code2,
5988 op2a,
5989 op2b);
5990 if (t)
5991 return t;
5992 }
5993
5994 /* If the definition is an AND or OR expression, we may be able to
5995 simplify by reassociating. */
5996 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5997 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5998 {
5999 tree inner1 = gimple_assign_rhs1 (stmt);
6000 tree inner2 = gimple_assign_rhs2 (stmt);
6001 gimple *s;
6002 tree t;
6003 tree partial = NULL_TREE;
6004 bool is_or = (innercode == BIT_IOR_EXPR);
6005
6006 /* Check for boolean identities that don't require recursive examination
6007 of inner1/inner2:
6008 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6009 inner1 OR (inner1 AND inner2) => inner1
6010 !inner1 OR (inner1 OR inner2) => true
6011 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6012 */
6013 if (inner1 == true_test_var)
6014 return (is_or ? var : inner1);
6015 else if (inner2 == true_test_var)
6016 return (is_or ? var : inner2);
6017 else if (inner1 == false_test_var)
6018 return (is_or
6019 ? boolean_true_node
6020 : or_var_with_comparison (type, inner2, false, code2, op2a,
6021 op2b));
6022 else if (inner2 == false_test_var)
6023 return (is_or
6024 ? boolean_true_node
6025 : or_var_with_comparison (type, inner1, false, code2, op2a,
6026 op2b));
6027
6028 /* Next, redistribute/reassociate the OR across the inner tests.
6029 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6030 if (TREE_CODE (inner1) == SSA_NAME
6031 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6032 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6033 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6034 gimple_assign_rhs1 (s),
6035 gimple_assign_rhs2 (s),
6036 code2, op2a, op2b)))
6037 {
6038 /* Handle the OR case, where we are reassociating:
6039 (inner1 OR inner2) OR (op2a code2 op2b)
6040 => (t OR inner2)
6041 If the partial result t is a constant, we win. Otherwise
6042 continue on to try reassociating with the other inner test. */
6043 if (is_or)
6044 {
6045 if (integer_onep (t))
6046 return boolean_true_node;
6047 else if (integer_zerop (t))
6048 return inner2;
6049 }
6050
6051 /* Handle the AND case, where we are redistributing:
6052 (inner1 AND inner2) OR (op2a code2 op2b)
6053 => (t AND (inner2 OR (op2a code op2b))) */
6054 else if (integer_zerop (t))
6055 return boolean_false_node;
6056
6057 /* Save partial result for later. */
6058 partial = t;
6059 }
6060
6061 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6062 if (TREE_CODE (inner2) == SSA_NAME
6063 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6064 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6065 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6066 gimple_assign_rhs1 (s),
6067 gimple_assign_rhs2 (s),
6068 code2, op2a, op2b)))
6069 {
6070 /* Handle the OR case, where we are reassociating:
6071 (inner1 OR inner2) OR (op2a code2 op2b)
6072 => (inner1 OR t)
6073 => (t OR partial) */
6074 if (is_or)
6075 {
6076 if (integer_zerop (t))
6077 return inner1;
6078 else if (integer_onep (t))
6079 return boolean_true_node;
6080 /* If both are the same, we can apply the identity
6081 (x OR x) == x. */
6082 else if (partial && same_bool_result_p (t, partial))
6083 return t;
6084 }
6085
6086 /* Handle the AND case, where we are redistributing:
6087 (inner1 AND inner2) OR (op2a code2 op2b)
6088 => (t AND (inner1 OR (op2a code2 op2b)))
6089 => (t AND partial) */
6090 else
6091 {
6092 if (integer_zerop (t))
6093 return boolean_false_node;
6094 else if (partial)
6095 {
6096 /* We already got a simplification for the other
6097 operand to the redistributed AND expression. The
6098 interesting case is when at least one is true.
6099 Or, if both are the same, we can apply the identity
6100 (x AND x) == x. */
6101 if (integer_onep (partial))
6102 return t;
6103 else if (integer_onep (t))
6104 return partial;
6105 else if (same_bool_result_p (t, partial))
6106 return t;
6107 }
6108 }
6109 }
6110 }
6111 return NULL_TREE;
6112 }
6113
6114 /* Try to simplify the OR of two comparisons defined by
6115 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6116 If this can be done without constructing an intermediate value,
6117 return the resulting tree; otherwise NULL_TREE is returned.
6118 This function is deliberately asymmetric as it recurses on SSA_DEFs
6119 in the first comparison but not the second. */
6120
6121 static tree
6122 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6123 enum tree_code code2, tree op2a, tree op2b)
6124 {
6125 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6126
6127 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6128 if (operand_equal_p (op1a, op2a, 0)
6129 && operand_equal_p (op1b, op2b, 0))
6130 {
6131 /* Result will be either NULL_TREE, or a combined comparison. */
6132 tree t = combine_comparisons (UNKNOWN_LOCATION,
6133 TRUTH_ORIF_EXPR, code1, code2,
6134 truth_type, op1a, op1b);
6135 if (t)
6136 return t;
6137 }
6138
6139 /* Likewise the swapped case of the above. */
6140 if (operand_equal_p (op1a, op2b, 0)
6141 && operand_equal_p (op1b, op2a, 0))
6142 {
6143 /* Result will be either NULL_TREE, or a combined comparison. */
6144 tree t = combine_comparisons (UNKNOWN_LOCATION,
6145 TRUTH_ORIF_EXPR, code1,
6146 swap_tree_comparison (code2),
6147 truth_type, op1a, op1b);
6148 if (t)
6149 return t;
6150 }
6151
6152 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6153 NAME's definition is a truth value. See if there are any simplifications
6154 that can be done against the NAME's definition. */
6155 if (TREE_CODE (op1a) == SSA_NAME
6156 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6157 && (integer_zerop (op1b) || integer_onep (op1b)))
6158 {
6159 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6160 || (code1 == NE_EXPR && integer_onep (op1b)));
6161 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6162 switch (gimple_code (stmt))
6163 {
6164 case GIMPLE_ASSIGN:
6165 /* Try to simplify by copy-propagating the definition. */
6166 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6167 op2b);
6168
6169 case GIMPLE_PHI:
6170 /* If every argument to the PHI produces the same result when
6171 ORed with the second comparison, we win.
6172 Do not do this unless the type is bool since we need a bool
6173 result here anyway. */
6174 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6175 {
6176 tree result = NULL_TREE;
6177 unsigned i;
6178 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6179 {
6180 tree arg = gimple_phi_arg_def (stmt, i);
6181
6182 /* If this PHI has itself as an argument, ignore it.
6183 If all the other args produce the same result,
6184 we're still OK. */
6185 if (arg == gimple_phi_result (stmt))
6186 continue;
6187 else if (TREE_CODE (arg) == INTEGER_CST)
6188 {
6189 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6190 {
6191 if (!result)
6192 result = boolean_true_node;
6193 else if (!integer_onep (result))
6194 return NULL_TREE;
6195 }
6196 else if (!result)
6197 result = fold_build2 (code2, boolean_type_node,
6198 op2a, op2b);
6199 else if (!same_bool_comparison_p (result,
6200 code2, op2a, op2b))
6201 return NULL_TREE;
6202 }
6203 else if (TREE_CODE (arg) == SSA_NAME
6204 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6205 {
6206 tree temp;
6207 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6208 /* In simple cases we can look through PHI nodes,
6209 but we have to be careful with loops.
6210 See PR49073. */
6211 if (! dom_info_available_p (CDI_DOMINATORS)
6212 || gimple_bb (def_stmt) == gimple_bb (stmt)
6213 || dominated_by_p (CDI_DOMINATORS,
6214 gimple_bb (def_stmt),
6215 gimple_bb (stmt)))
6216 return NULL_TREE;
6217 temp = or_var_with_comparison (type, arg, invert, code2,
6218 op2a, op2b);
6219 if (!temp)
6220 return NULL_TREE;
6221 else if (!result)
6222 result = temp;
6223 else if (!same_bool_result_p (result, temp))
6224 return NULL_TREE;
6225 }
6226 else
6227 return NULL_TREE;
6228 }
6229 return result;
6230 }
6231
6232 default:
6233 break;
6234 }
6235 }
6236 return NULL_TREE;
6237 }
6238
6239 /* Try to simplify the OR of two comparisons, specified by
6240 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6241 If this can be simplified to a single expression (without requiring
6242 introducing more SSA variables to hold intermediate values),
6243 return the resulting tree. Otherwise return NULL_TREE.
6244 If the result expression is non-null, it has boolean type. */
6245
6246 tree
6247 maybe_fold_or_comparisons (tree type,
6248 enum tree_code code1, tree op1a, tree op1b,
6249 enum tree_code code2, tree op2a, tree op2b)
6250 {
6251 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6252 return t;
6253
6254 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6255 return t;
6256
6257 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6258 op1a, op1b, code2, op2a,
6259 op2b))
6260 return t;
6261
6262 return NULL_TREE;
6263 }
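
/* Mirror example (illustrative, integer operands):
     maybe_fold_or_comparisons (boolean_type_node,
                                LT_EXPR, x_1, y_2,
                                GE_EXPR, x_1, y_2)
   yields boolean_true_node, since the two ranges together cover every
   value.  */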
6264
6265 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6266
6267 Either NULL_TREE, a simplified but non-constant or a constant
6268 is returned.
6269
6270 ??? This should go into a gimple-fold-inline.h file to be eventually
6271 privatized with the single valueize function used in the various TUs
6272 to avoid the indirect function call overhead. */
6273
6274 tree
6275 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6276 tree (*gvalueize) (tree))
6277 {
6278 gimple_match_op res_op;
6279 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6280 edges if there are intermediate VARYING defs. For this reason
6281 do not follow SSA edges here even though SCCVN can technically
6282 just deal fine with that. */
6283 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6284 {
6285 tree res = NULL_TREE;
6286 if (gimple_simplified_result_is_gimple_val (&res_op))
6287 res = res_op.ops[0];
6288 else if (mprts_hook)
6289 res = mprts_hook (&res_op);
6290 if (res)
6291 {
6292 if (dump_file && dump_flags & TDF_DETAILS)
6293 {
6294 fprintf (dump_file, "Match-and-simplified ");
6295 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6296 fprintf (dump_file, " to ");
6297 print_generic_expr (dump_file, res);
6298 fprintf (dump_file, "\n");
6299 }
6300 return res;
6301 }
6302 }
6303
6304 location_t loc = gimple_location (stmt);
6305 switch (gimple_code (stmt))
6306 {
6307 case GIMPLE_ASSIGN:
6308 {
6309 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6310
6311 switch (get_gimple_rhs_class (subcode))
6312 {
6313 case GIMPLE_SINGLE_RHS:
6314 {
6315 tree rhs = gimple_assign_rhs1 (stmt);
6316 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6317
6318 if (TREE_CODE (rhs) == SSA_NAME)
6319 {
6320 /* If the RHS is an SSA_NAME, return its known constant value,
6321 if any. */
6322 return (*valueize) (rhs);
6323 }
6324 /* Handle propagating invariant addresses into address
6325 operations. */
6326 else if (TREE_CODE (rhs) == ADDR_EXPR
6327 && !is_gimple_min_invariant (rhs))
6328 {
6329 poly_int64 offset = 0;
6330 tree base;
6331 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6332 &offset,
6333 valueize);
6334 if (base
6335 && (CONSTANT_CLASS_P (base)
6336 || decl_address_invariant_p (base)))
6337 return build_invariant_address (TREE_TYPE (rhs),
6338 base, offset);
6339 }
6340 else if (TREE_CODE (rhs) == CONSTRUCTOR
6341 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6342 && known_eq (CONSTRUCTOR_NELTS (rhs),
6343 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6344 {
6345 unsigned i, nelts;
6346 tree val;
6347
6348 nelts = CONSTRUCTOR_NELTS (rhs);
6349 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6350 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6351 {
6352 val = (*valueize) (val);
6353 if (TREE_CODE (val) == INTEGER_CST
6354 || TREE_CODE (val) == REAL_CST
6355 || TREE_CODE (val) == FIXED_CST)
6356 vec.quick_push (val);
6357 else
6358 return NULL_TREE;
6359 }
6360
6361 return vec.build ();
6362 }
6363 if (subcode == OBJ_TYPE_REF)
6364 {
6365 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6366 /* If callee is constant, we can fold away the wrapper. */
6367 if (is_gimple_min_invariant (val))
6368 return val;
6369 }
6370
6371 if (kind == tcc_reference)
6372 {
6373 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6374 || TREE_CODE (rhs) == REALPART_EXPR
6375 || TREE_CODE (rhs) == IMAGPART_EXPR)
6376 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6377 {
6378 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6379 return fold_unary_loc (EXPR_LOCATION (rhs),
6380 TREE_CODE (rhs),
6381 TREE_TYPE (rhs), val);
6382 }
6383 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6384 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6385 {
6386 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6387 return fold_ternary_loc (EXPR_LOCATION (rhs),
6388 TREE_CODE (rhs),
6389 TREE_TYPE (rhs), val,
6390 TREE_OPERAND (rhs, 1),
6391 TREE_OPERAND (rhs, 2));
6392 }
6393 else if (TREE_CODE (rhs) == MEM_REF
6394 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6395 {
6396 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6397 if (TREE_CODE (val) == ADDR_EXPR
6398 && is_gimple_min_invariant (val))
6399 {
6400 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6401 unshare_expr (val),
6402 TREE_OPERAND (rhs, 1));
6403 if (tem)
6404 rhs = tem;
6405 }
6406 }
6407 return fold_const_aggregate_ref_1 (rhs, valueize);
6408 }
6409 else if (kind == tcc_declaration)
6410 return get_symbol_constant_value (rhs);
6411 return rhs;
6412 }
6413
6414 case GIMPLE_UNARY_RHS:
6415 return NULL_TREE;
6416
6417 case GIMPLE_BINARY_RHS:
6418 /* Translate &x + CST into an invariant form suitable for
6419 further propagation. */
6420 if (subcode == POINTER_PLUS_EXPR)
6421 {
6422 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6423 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6424 if (TREE_CODE (op0) == ADDR_EXPR
6425 && TREE_CODE (op1) == INTEGER_CST)
6426 {
6427 tree off = fold_convert (ptr_type_node, op1);
6428 return build1_loc
6429 (loc, ADDR_EXPR, TREE_TYPE (op0),
6430 fold_build2 (MEM_REF,
6431 TREE_TYPE (TREE_TYPE (op0)),
6432 unshare_expr (op0), off));
6433 }
6434 }
6435 /* Canonicalize bool != 0 and bool == 0 appearing after
6436 valueization. While gimple_simplify handles this,
6437 it can get confused by the ~X == 1 -> X == 0 transform,
6438 which we can't reduce to an SSA name or a constant
6439 (and we have no way to tell gimple_simplify not to
6440 consider those transforms in the first place). */
6441 else if (subcode == EQ_EXPR
6442 || subcode == NE_EXPR)
6443 {
6444 tree lhs = gimple_assign_lhs (stmt);
6445 tree op0 = gimple_assign_rhs1 (stmt);
6446 if (useless_type_conversion_p (TREE_TYPE (lhs),
6447 TREE_TYPE (op0)))
6448 {
6449 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6450 op0 = (*valueize) (op0);
6451 if (TREE_CODE (op0) == INTEGER_CST)
6452 std::swap (op0, op1);
6453 if (TREE_CODE (op1) == INTEGER_CST
6454 && ((subcode == NE_EXPR && integer_zerop (op1))
6455 || (subcode == EQ_EXPR && integer_onep (op1))))
6456 return op0;
6457 }
6458 }
6459 return NULL_TREE;
6460
6461 case GIMPLE_TERNARY_RHS:
6462 {
6463 /* Handle ternary operators that can appear in GIMPLE form. */
6464 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6465 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6466 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6467 return fold_ternary_loc (loc, subcode,
6468 gimple_expr_type (stmt), op0, op1, op2);
6469 }
6470
6471 default:
6472 gcc_unreachable ();
6473 }
6474 }
6475
6476 case GIMPLE_CALL:
6477 {
6478 tree fn;
6479 gcall *call_stmt = as_a <gcall *> (stmt);
6480
6481 if (gimple_call_internal_p (stmt))
6482 {
6483 enum tree_code subcode = ERROR_MARK;
6484 switch (gimple_call_internal_fn (stmt))
6485 {
6486 case IFN_UBSAN_CHECK_ADD:
6487 subcode = PLUS_EXPR;
6488 break;
6489 case IFN_UBSAN_CHECK_SUB:
6490 subcode = MINUS_EXPR;
6491 break;
6492 case IFN_UBSAN_CHECK_MUL:
6493 subcode = MULT_EXPR;
6494 break;
6495 case IFN_BUILTIN_EXPECT:
6496 {
6497 tree arg0 = gimple_call_arg (stmt, 0);
6498 tree op0 = (*valueize) (arg0);
6499 if (TREE_CODE (op0) == INTEGER_CST)
6500 return op0;
6501 return NULL_TREE;
6502 }
6503 default:
6504 return NULL_TREE;
6505 }
6506 tree arg0 = gimple_call_arg (stmt, 0);
6507 tree arg1 = gimple_call_arg (stmt, 1);
6508 tree op0 = (*valueize) (arg0);
6509 tree op1 = (*valueize) (arg1);
6510
6511 if (TREE_CODE (op0) != INTEGER_CST
6512 || TREE_CODE (op1) != INTEGER_CST)
6513 {
6514 switch (subcode)
6515 {
6516 case MULT_EXPR:
6517 /* x * 0 = 0 * x = 0 without overflow. */
6518 if (integer_zerop (op0) || integer_zerop (op1))
6519 return build_zero_cst (TREE_TYPE (arg0));
6520 break;
6521 case MINUS_EXPR:
6522 /* y - y = 0 without overflow. */
6523 if (operand_equal_p (op0, op1, 0))
6524 return build_zero_cst (TREE_TYPE (arg0));
6525 break;
6526 default:
6527 break;
6528 }
6529 }
6530 tree res
6531 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6532 if (res
6533 && TREE_CODE (res) == INTEGER_CST
6534 && !TREE_OVERFLOW (res))
6535 return res;
6536 return NULL_TREE;
6537 }
6538
6539 fn = (*valueize) (gimple_call_fn (stmt));
6540 if (TREE_CODE (fn) == ADDR_EXPR
6541 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6542 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6543 && gimple_builtin_call_types_compatible_p (stmt,
6544 TREE_OPERAND (fn, 0)))
6545 {
6546 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6547 tree retval;
6548 unsigned i;
6549 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6550 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6551 retval = fold_builtin_call_array (loc,
6552 gimple_call_return_type (call_stmt),
6553 fn, gimple_call_num_args (stmt), args);
6554 if (retval)
6555 {
6556 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6557 STRIP_NOPS (retval);
6558 retval = fold_convert (gimple_call_return_type (call_stmt),
6559 retval);
6560 }
6561 return retval;
6562 }
6563 return NULL_TREE;
6564 }
6565
6566 default:
6567 return NULL_TREE;
6568 }
6569 }
6570
6571 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6572 Returns NULL_TREE if folding to a constant is not possible, otherwise
6573 returns a constant according to is_gimple_min_invariant. */
6574
6575 tree
6576 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6577 {
6578 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6579 if (res && is_gimple_min_invariant (res))
6580 return res;
6581 return NULL_TREE;
6582 }
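
/* Illustrative use (not from the sources): for the statement
     tmp_1 = x_2 + 3;
   and a VALUEIZE callback that maps x_2 to 4, this returns the
   INTEGER_CST 7; if x_2 has no known constant value, NULL_TREE is
   returned because the simplified result would not satisfy
   is_gimple_min_invariant.  */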
6583
6584
6585 /* The following set of functions are supposed to fold references using
6586 their constant initializers. */
6587
6588 /* See if we can find a constructor defining the value of BASE.
6589 When we know the constructor with a constant offset (such as when
6590 BASE is array[40] and we know the constructor of the array), then
6591 BIT_OFFSET is adjusted accordingly.
6592
6593 As a special case, return error_mark_node when constructor
6594 is not explicitly available, but it is known to be zero
6595 such as 'static const int a;'. */
6596 static tree
6597 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6598 tree (*valueize)(tree))
6599 {
6600 poly_int64 bit_offset2, size, max_size;
6601 bool reverse;
6602
6603 if (TREE_CODE (base) == MEM_REF)
6604 {
6605 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6606 if (!boff.to_shwi (bit_offset))
6607 return NULL_TREE;
6608
6609 if (valueize
6610 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6611 base = valueize (TREE_OPERAND (base, 0));
6612 if (!base || TREE_CODE (base) != ADDR_EXPR)
6613 return NULL_TREE;
6614 base = TREE_OPERAND (base, 0);
6615 }
6616 else if (valueize
6617 && TREE_CODE (base) == SSA_NAME)
6618 base = valueize (base);
6619
6620 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6621 DECL_INITIAL. If BASE is a nested reference into another
6622 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6623 the inner reference. */
6624 switch (TREE_CODE (base))
6625 {
6626 case VAR_DECL:
6627 case CONST_DECL:
6628 {
6629 tree init = ctor_for_folding (base);
6630
6631 /* Our semantics are the exact opposite of ctor_for_folding's:
6632 NULL means unknown, while error_mark_node means 0. */
6633 if (init == error_mark_node)
6634 return NULL_TREE;
6635 if (!init)
6636 return error_mark_node;
6637 return init;
6638 }
6639
6640 case VIEW_CONVERT_EXPR:
6641 return get_base_constructor (TREE_OPERAND (base, 0),
6642 bit_offset, valueize);
6643
6644 case ARRAY_REF:
6645 case COMPONENT_REF:
6646 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6647 &reverse);
6648 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6649 return NULL_TREE;
6650 *bit_offset += bit_offset2;
6651 return get_base_constructor (base, bit_offset, valueize);
6652
6653 case CONSTRUCTOR:
6654 return base;
6655
6656 default:
6657 if (CONSTANT_CLASS_P (base))
6658 return base;
6659
6660 return NULL_TREE;
6661 }
6662 }
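
/* For example (illustrative): given
     static const int a[40] = { ... };
   and BASE the reference a[10], this returns the CONSTRUCTOR of 'a'
   with *BIT_OFFSET advanced by 10 * sizeof (int) * BITS_PER_UNIT,
   whereas for 'static const int b;' (implicitly zero) it returns
   error_mark_node as described above.  */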
6663
6664 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6665 to the memory at bit OFFSET. When non-null, TYPE is the expected
6666 type of the reference; otherwise the type of the referenced element
6667 is used instead. When SIZE is zero, attempt to fold a reference to
6668 the entire element which OFFSET refers to. Increment *SUBOFF by
6669 the bit offset of the accessed element. */
6670
6671 static tree
6672 fold_array_ctor_reference (tree type, tree ctor,
6673 unsigned HOST_WIDE_INT offset,
6674 unsigned HOST_WIDE_INT size,
6675 tree from_decl,
6676 unsigned HOST_WIDE_INT *suboff)
6677 {
6678 offset_int low_bound;
6679 offset_int elt_size;
6680 offset_int access_index;
6681 tree domain_type = NULL_TREE;
6682 HOST_WIDE_INT inner_offset;
6683
6684 /* Compute low bound and elt size. */
6685 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6686 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6687 if (domain_type && TYPE_MIN_VALUE (domain_type))
6688 {
6689 /* Static constructors for variably sized objects make no sense. */
6690 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6691 return NULL_TREE;
6692 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6693 }
6694 else
6695 low_bound = 0;
6696 /* Static constructors for variably sized objects make no sense. */
6697 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6698 return NULL_TREE;
6699 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6700
6701 /* When TYPE is non-null, verify that it specifies a constant-sized
6702 access of a multiple of the array element size. Avoid division
6703 by zero below when ELT_SIZE is zero, such as with the result of
6704 an initializer for a zero-length array or an empty struct. */
6705 if (elt_size == 0
6706 || (type
6707 && (!TYPE_SIZE_UNIT (type)
6708 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6709 return NULL_TREE;
6710
6711 /* Compute the array index we look for. */
6712 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6713 elt_size);
6714 access_index += low_bound;
6715
6716 /* And offset within the access. */
6717 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6718
6719 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
6720 if (size > elt_sz * BITS_PER_UNIT)
6721 {
6722 /* native_encode_expr constraints. */
6723 if (size > MAX_BITSIZE_MODE_ANY_MODE
6724 || size % BITS_PER_UNIT != 0
6725 || inner_offset % BITS_PER_UNIT != 0
6726 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
6727 return NULL_TREE;
6728
6729 unsigned ctor_idx;
6730 tree val = get_array_ctor_element_at_index (ctor, access_index,
6731 &ctor_idx);
6732 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6733 return build_zero_cst (type);
6734
6735 /* native-encode adjacent ctor elements. */
6736 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6737 unsigned bufoff = 0;
6738 offset_int index = 0;
6739 offset_int max_index = access_index;
6740 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6741 if (!val)
6742 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6743 else if (!CONSTANT_CLASS_P (val))
6744 return NULL_TREE;
6745 if (!elt->index)
6746 ;
6747 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6748 {
6749 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6750 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6751 }
6752 else
6753 index = max_index = wi::to_offset (elt->index);
6754 index = wi::umax (index, access_index);
6755 do
6756 {
6757 if (bufoff + elt_sz > sizeof (buf))
6758 elt_sz = sizeof (buf) - bufoff;
6759 int len = native_encode_expr (val, buf + bufoff, elt_sz,
6760 inner_offset / BITS_PER_UNIT);
6761 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
6762 return NULL_TREE;
6763 inner_offset = 0;
6764 bufoff += len;
6765
6766 access_index += 1;
6767 if (wi::cmpu (access_index, index) == 0)
6768 val = elt->value;
6769 else if (wi::cmpu (access_index, max_index) > 0)
6770 {
6771 ctor_idx++;
6772 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6773 {
6774 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6775 ++max_index;
6776 }
6777 else
6778 {
6779 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6780 index = 0;
6781 max_index = access_index;
6782 if (!elt->index)
6783 ;
6784 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6785 {
6786 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6787 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6788 }
6789 else
6790 index = max_index = wi::to_offset (elt->index);
6791 index = wi::umax (index, access_index);
6792 if (wi::cmpu (access_index, index) == 0)
6793 val = elt->value;
6794 else
6795 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6796 }
6797 }
6798 }
6799 while (bufoff < size / BITS_PER_UNIT);
6800 *suboff += size;
6801 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6802 }
6803
6804 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6805 {
6806 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6807 {
6808 /* For the final reference to the entire accessed element
6809 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6810 may be null) in favor of the type of the element, and set
6811 SIZE to the size of the accessed element. */
6812 inner_offset = 0;
6813 type = TREE_TYPE (val);
6814 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6815 }
6816
6817 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6818 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6819 suboff);
6820 }
6821
6822 /* Memory not explicitly mentioned in constructor is 0 (or
6823 the reference is out of range). */
6824 return type ? build_zero_cst (type) : NULL_TREE;
6825 }
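
/* Illustrative example: for
     static const int a[4] = { 1, 2, 3, 4 };
   a 32-bit read at bit offset 64 (i.e. a[2], assuming 32-bit int)
   computes access_index 2 and folds to the INTEGER_CST 3; elements not
   explicitly present in the constructor fold to zero.  */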
6826
6827 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6828 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6829 is the expected type of the reference; otherwise the type of
6830 the referenced member is used instead. When SIZE is zero,
6831 attempt to fold a reference to the entire member which OFFSET
6832 refers to. Increment *SUBOFF by the bit offset
6833 of the accessed member. */
6834
6835 static tree
6836 fold_nonarray_ctor_reference (tree type, tree ctor,
6837 unsigned HOST_WIDE_INT offset,
6838 unsigned HOST_WIDE_INT size,
6839 tree from_decl,
6840 unsigned HOST_WIDE_INT *suboff)
6841 {
6842 unsigned HOST_WIDE_INT cnt;
6843 tree cfield, cval;
6844
6845 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6846 cval)
6847 {
6848 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6849 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6850 tree field_size = DECL_SIZE (cfield);
6851
6852 if (!field_size)
6853 {
6854 /* Determine the size of the flexible array member from
6855 the size of the initializer provided for it. */
6856 field_size = TYPE_SIZE (TREE_TYPE (cval));
6857 }
6858
6859 /* Variable-sized objects in static constructors make no sense,
6860 but field_size can be NULL for flexible array members. */
6861 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6862 && TREE_CODE (byte_offset) == INTEGER_CST
6863 && (field_size != NULL_TREE
6864 ? TREE_CODE (field_size) == INTEGER_CST
6865 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6866
6867 /* Compute bit offset of the field. */
6868 offset_int bitoffset
6869 = (wi::to_offset (field_offset)
6870 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6871 /* Compute bit offset where the field ends. */
6872 offset_int bitoffset_end;
6873 if (field_size != NULL_TREE)
6874 bitoffset_end = bitoffset + wi::to_offset (field_size);
6875 else
6876 bitoffset_end = 0;
6877
6878 /* Compute the bit offset of the end of the desired access.
6879 As a special case, if the size of the desired access is
6880 zero, assume the access is to the entire field (and let
6881 the caller make any necessary adjustments by storing
6882 the actual bounds of the field in FIELDBOUNDS). */
6883 offset_int access_end = offset_int (offset);
6884 if (size)
6885 access_end += size;
6886 else
6887 access_end = bitoffset_end;
6888
6889 /* Is there any overlap between the desired access at
6890 [OFFSET, OFFSET+SIZE) and the offset of the field within
6891 the object at [BITOFFSET, BITOFFSET_END)? */
6892 if (wi::cmps (access_end, bitoffset) > 0
6893 && (field_size == NULL_TREE
6894 || wi::lts_p (offset, bitoffset_end)))
6895 {
6896 *suboff += bitoffset.to_uhwi ();
6897
6898 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6899 {
6900 /* For the final reference to the entire accessed member
6901 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6902 be null) in favor of the type of the member, and set
6903 SIZE to the size of the accessed member. */
6904 offset = bitoffset.to_uhwi ();
6905 type = TREE_TYPE (cval);
6906 size = (bitoffset_end - bitoffset).to_uhwi ();
6907 }
6908
6909 /* We do have overlap. Now see if the field is large enough
6910 to cover the access. Give up for accesses that extend
6911 beyond the end of the object or that span multiple fields. */
6912 if (wi::cmps (access_end, bitoffset_end) > 0)
6913 return NULL_TREE;
6914 if (offset < bitoffset)
6915 return NULL_TREE;
6916
6917 offset_int inner_offset = offset_int (offset) - bitoffset;
6918 return fold_ctor_reference (type, cval,
6919 inner_offset.to_uhwi (), size,
6920 from_decl, suboff);
6921 }
6922 }
6923
6924 if (!type)
6925 return NULL_TREE;
6926
6927 return build_zero_cst (type);
6928 }
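
/* Illustrative example: for
     struct S { int i; int j; };
     static const struct S s = { 1, 2 };
   a 32-bit read at the bit offset of 'j' finds the overlapping field,
   recurses via fold_ctor_reference and yields the INTEGER_CST 2, while
   an access straddling both fields returns NULL_TREE.  */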
6929
6930 /* CTOR is a value initializing memory. Fold a reference of TYPE and
6931 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6932 is zero, attempt to fold a reference to the entire subobject
6933 which POLY_OFFSET refers to. This is used when folding accesses to
6934 string members of aggregates. When non-null, set *SUBOFF to
6935 the bit offset of the accessed subobject. */
6936
6937 tree
6938 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6939 const poly_uint64 &poly_size, tree from_decl,
6940 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6941 {
6942 tree ret;
6943
6944 /* We found the field with exact match. */
6945 if (type
6946 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6947 && known_eq (poly_offset, 0U))
6948 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6949
6950 /* The remaining optimizations need a constant size and offset. */
6951 unsigned HOST_WIDE_INT size, offset;
6952 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6953 return NULL_TREE;
6954
6955 /* We are at the end of the walk; see if we can view-convert the
6956 result. */
6957 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6958 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6959 && !compare_tree_int (TYPE_SIZE (type), size)
6960 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6961 {
6962 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6963 if (ret)
6964 {
6965 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6966 if (ret)
6967 STRIP_USELESS_TYPE_CONVERSION (ret);
6968 }
6969 return ret;
6970 }
6971 /* For constants and byte-aligned/sized reads try to go through
6972 native_encode/interpret. */
6973 if (CONSTANT_CLASS_P (ctor)
6974 && BITS_PER_UNIT == 8
6975 && offset % BITS_PER_UNIT == 0
6976 && offset / BITS_PER_UNIT <= INT_MAX
6977 && size % BITS_PER_UNIT == 0
6978 && size <= MAX_BITSIZE_MODE_ANY_MODE
6979 && can_native_interpret_type_p (type))
6980 {
6981 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6982 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6983 offset / BITS_PER_UNIT);
6984 if (len > 0)
6985 return native_interpret_expr (type, buf, len);
6986 }
6987 if (TREE_CODE (ctor) == CONSTRUCTOR)
6988 {
6989 unsigned HOST_WIDE_INT dummy = 0;
6990 if (!suboff)
6991 suboff = &dummy;
6992
6993 tree ret;
6994 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6995 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6996 ret = fold_array_ctor_reference (type, ctor, offset, size,
6997 from_decl, suboff);
6998 else
6999 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7000 from_decl, suboff);
7001
7002 /* Fall back to native_encode_initializer. Needs to be done
7003 only in the outermost fold_ctor_reference call (because it itself
7004 recurses into CONSTRUCTORs) and doesn't update suboff. */
7005 if (ret == NULL_TREE
7006 && suboff == &dummy
7007 && BITS_PER_UNIT == 8
7008 && offset % BITS_PER_UNIT == 0
7009 && offset / BITS_PER_UNIT <= INT_MAX
7010 && size % BITS_PER_UNIT == 0
7011 && size <= MAX_BITSIZE_MODE_ANY_MODE
7012 && can_native_interpret_type_p (type))
7013 {
7014 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7015 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7016 offset / BITS_PER_UNIT);
7017 if (len > 0)
7018 return native_interpret_expr (type, buf, len);
7019 }
7020
7021 return ret;
7022 }
7023
7024 return NULL_TREE;
7025 }
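
/* Illustrative example of the native_encode/interpret path above:
   reading the first four bytes of
     static const char msg[] = "abc";
   as a 32-bit integer encodes the STRING_CST into a byte buffer and
   reinterprets it, so byte-aligned type-punning reads of constants can
   fold without walking a CONSTRUCTOR.  */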
7026
7027 /* Return the tree representing the element referenced by T if T is an
7028 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7029 names using VALUEIZE. Return NULL_TREE otherwise. */
7030
7031 tree
7032 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7033 {
7034 tree ctor, idx, base;
7035 poly_int64 offset, size, max_size;
7036 tree tem;
7037 bool reverse;
7038
7039 if (TREE_THIS_VOLATILE (t))
7040 return NULL_TREE;
7041
7042 if (DECL_P (t))
7043 return get_symbol_constant_value (t);
7044
7045 tem = fold_read_from_constant_string (t);
7046 if (tem)
7047 return tem;
7048
7049 switch (TREE_CODE (t))
7050 {
7051 case ARRAY_REF:
7052 case ARRAY_RANGE_REF:
7053 /* Constant indexes are handled well by get_base_constructor.
7054 Only special-case variable offsets.
7055 FIXME: This code can't handle nested references with variable indexes
7056 (they will be handled only by iteration of CCP). Perhaps we can bring
7057 get_ref_base_and_extent here and make it use a valueize callback. */
7058 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7059 && valueize
7060 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7061 && poly_int_tree_p (idx))
7062 {
7063 tree low_bound, unit_size;
7064
7065 /* If the resulting bit-offset is constant, track it. */
7066 if ((low_bound = array_ref_low_bound (t),
7067 poly_int_tree_p (low_bound))
7068 && (unit_size = array_ref_element_size (t),
7069 tree_fits_uhwi_p (unit_size)))
7070 {
7071 poly_offset_int woffset
7072 = wi::sext (wi::to_poly_offset (idx)
7073 - wi::to_poly_offset (low_bound),
7074 TYPE_PRECISION (TREE_TYPE (idx)));
7075 woffset *= tree_to_uhwi (unit_size);
7076 woffset *= BITS_PER_UNIT;
7077 if (woffset.to_shwi (&offset))
7078 {
7079 base = TREE_OPERAND (t, 0);
7080 ctor = get_base_constructor (base, &offset, valueize);
7081 /* Empty constructor. Always fold to 0. */
7082 if (ctor == error_mark_node)
7083 return build_zero_cst (TREE_TYPE (t));
7084 /* Out-of-bound array access. Value is undefined,
7085 but don't fold. */
7086 if (maybe_lt (offset, 0))
7087 return NULL_TREE;
7088 /* We cannot determine ctor. */
7089 if (!ctor)
7090 return NULL_TREE;
7091 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7092 tree_to_uhwi (unit_size)
7093 * BITS_PER_UNIT,
7094 base);
7095 }
7096 }
7097 }
7098 /* Fallthru. */
7099
7100 case COMPONENT_REF:
7101 case BIT_FIELD_REF:
7102 case TARGET_MEM_REF:
7103 case MEM_REF:
7104 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7105 ctor = get_base_constructor (base, &offset, valueize);
7106
7107 /* Empty constructor. Always fold to 0. */
7108 if (ctor == error_mark_node)
7109 return build_zero_cst (TREE_TYPE (t));
7110 /* We do not know the precise address. */
7111 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7112 return NULL_TREE;
7113 /* We cannot determine ctor. */
7114 if (!ctor)
7115 return NULL_TREE;
7116
7117 /* Out-of-bound array access. Value is undefined, but don't fold. */
7118 if (maybe_lt (offset, 0))
7119 return NULL_TREE;
7120
7121 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7122 base);
7123
7124 case REALPART_EXPR:
7125 case IMAGPART_EXPR:
7126 {
7127 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7128 if (c && TREE_CODE (c) == COMPLEX_CST)
7129 return fold_build1_loc (EXPR_LOCATION (t),
7130 TREE_CODE (t), TREE_TYPE (t), c);
7131 break;
7132 }
7133
7134 default:
7135 break;
7136 }
7137
7138 return NULL_TREE;
7139 }
7140
7141 tree
7142 fold_const_aggregate_ref (tree t)
7143 {
7144 return fold_const_aggregate_ref_1 (t, NULL);
7145 }
7146
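/* Editor's illustrative sketch (hypothetical declarations).  Given

     static const struct { int x, y; } p = { 1, 2 };

   a COMPONENT_REF reading p.y folds via

     tree val = fold_const_aggregate_ref (ref);

   to the INTEGER_CST 2, whereas volatile references, or references whose
   variable index cannot be valueized, yield NULL_TREE.  */
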
7147 /* Look up the virtual method with index TOKEN in a virtual table V
7148 at OFFSET.
7149 If CAN_REFER is non-NULL, set it to false if the method
7150 is not referable or if the virtual table is ill-formed (such as rewritten
7151 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
7152
7153 tree
7154 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7155 tree v,
7156 unsigned HOST_WIDE_INT offset,
7157 bool *can_refer)
7158 {
7159 tree vtable = v, init, fn;
7160 unsigned HOST_WIDE_INT size;
7161 unsigned HOST_WIDE_INT elt_size, access_index;
7162 tree domain_type;
7163
7164 if (can_refer)
7165 *can_refer = true;
7166
7167 /* First of all, double-check that we have a virtual table. */
7168 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7169 {
7170 /* Pass down that we lost track of the target. */
7171 if (can_refer)
7172 *can_refer = false;
7173 return NULL_TREE;
7174 }
7175
7176 init = ctor_for_folding (v);
7177
7178 /* The virtual tables should always be born with constructors
7179 and we should always assume that they are available for
7180 folding. At the moment we do not stream them in all cases,
7181 but it should never happen that the ctor seems unreachable. */
7182 gcc_assert (init);
7183 if (init == error_mark_node)
7184 {
7185 /* Pass down that we lost track of the target. */
7186 if (can_refer)
7187 *can_refer = false;
7188 return NULL_TREE;
7189 }
7190 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7191 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7192 offset *= BITS_PER_UNIT;
7193 offset += token * size;
7194
7195 /* Look up the value in the constructor, which is assumed to be an array.
7196 This is equivalent to
7197 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7198 offset, size, NULL);
7199 but in constant time. We expect that the frontend produced a simple
7200 array without indexed initializers. */
7201
7202 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7203 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7204 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7205 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7206
7207 access_index = offset / BITS_PER_UNIT / elt_size;
7208 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7209
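  /* Editor's worked example of the arithmetic above, assuming a 64-bit
     target with BITS_PER_UNIT == 8 and 8-byte vtable entries
     (elt_size == 8, size == 64): a byte OFFSET of 16 and TOKEN 2 give
       offset = 16 * 8 + 2 * 64 = 256 bits,
     hence access_index = 256 / 8 / 8 = 4.  */
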
7210 /* The C++ FE can now produce indexed fields, and we check whether the
7211 indexes match. */
7212 if (access_index < CONSTRUCTOR_NELTS (init))
7213 {
7214 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7215 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7216 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7217 STRIP_NOPS (fn);
7218 }
7219 else
7220 fn = NULL;
7221
7222 /* For a type-inconsistent program we may end up looking up a virtual method
7223 in a virtual table that does not contain TOKEN entries. We may overrun
7224 the virtual table and pick up a constant or RTTI info pointer.
7225 In any case the call is undefined. */
7226 if (!fn
7227 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7228 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7229 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7230 else
7231 {
7232 fn = TREE_OPERAND (fn, 0);
7233
7234 /* When cgraph node is missing and function is not public, we cannot
7235 devirtualize. This can happen in WHOPR when the actual method
7236 ends up in other partition, because we found devirtualization
7237 possibility too late. */
7238 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7239 {
7240 if (can_refer)
7241 {
7242 *can_refer = false;
7243 return fn;
7244 }
7245 return NULL_TREE;
7246 }
7247 }
7248
7249 /* Make sure we create a cgraph node for functions we'll reference.
7250 They can be non-existent if the reference comes from an entry
7251 of an external vtable, for example. */
7252 cgraph_node::get_create (fn);
7253
7254 return fn;
7255 }
7256
7257 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7258 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7259 KNOWN_BINFO carries the binfo describing the true type of
7260 OBJ_TYPE_REF_OBJECT(REF).
7261 If CAN_REFER is non-NULL, set it to false if the method
7262 is not referable or if the virtual table is ill-formed (such as rewritten
7263 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
7264
7265 tree
7266 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7267 bool *can_refer)
7268 {
7269 unsigned HOST_WIDE_INT offset;
7270 tree v;
7271
7272 v = BINFO_VTABLE (known_binfo);
7273 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
7274 if (!v)
7275 return NULL_TREE;
7276
7277 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7278 {
7279 if (can_refer)
7280 *can_refer = false;
7281 return NULL_TREE;
7282 }
7283 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7284 }
7285
7286 /* Given a pointer value T, return a simplified version of an
7287 indirection through T, or NULL_TREE if no simplification is
7288 possible. Note that the resulting type may be different from
7289 the type pointed to, in the sense that it is still compatible
7290 from the langhooks point of view. */
7291
7292 tree
7293 gimple_fold_indirect_ref (tree t)
7294 {
7295 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7296 tree sub = t;
7297 tree subtype;
7298
7299 STRIP_NOPS (sub);
7300 subtype = TREE_TYPE (sub);
7301 if (!POINTER_TYPE_P (subtype)
7302 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7303 return NULL_TREE;
7304
7305 if (TREE_CODE (sub) == ADDR_EXPR)
7306 {
7307 tree op = TREE_OPERAND (sub, 0);
7308 tree optype = TREE_TYPE (op);
7309 /* *&p => p */
7310 if (useless_type_conversion_p (type, optype))
7311 return op;
7312
7313 /* *(foo *)&fooarray => fooarray[0] */
7314 if (TREE_CODE (optype) == ARRAY_TYPE
7315 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7316 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7317 {
7318 tree type_domain = TYPE_DOMAIN (optype);
7319 tree min_val = size_zero_node;
7320 if (type_domain && TYPE_MIN_VALUE (type_domain))
7321 min_val = TYPE_MIN_VALUE (type_domain);
7322 if (TREE_CODE (min_val) == INTEGER_CST)
7323 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7324 }
7325 /* *(foo *)&complexfoo => __real__ complexfoo */
7326 else if (TREE_CODE (optype) == COMPLEX_TYPE
7327 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7328 return fold_build1 (REALPART_EXPR, type, op);
7329 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7330 else if (TREE_CODE (optype) == VECTOR_TYPE
7331 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7332 {
7333 tree part_width = TYPE_SIZE (type);
7334 tree index = bitsize_int (0);
7335 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7336 }
7337 }
7338
7339 /* *(p + CST) -> ... */
7340 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7341 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7342 {
7343 tree addr = TREE_OPERAND (sub, 0);
7344 tree off = TREE_OPERAND (sub, 1);
7345 tree addrtype;
7346
7347 STRIP_NOPS (addr);
7348 addrtype = TREE_TYPE (addr);
7349
7350 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7351 if (TREE_CODE (addr) == ADDR_EXPR
7352 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7353 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7354 && tree_fits_uhwi_p (off))
7355 {
7356 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7357 tree part_width = TYPE_SIZE (type);
7358 unsigned HOST_WIDE_INT part_widthi
7359 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7360 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7361 tree index = bitsize_int (indexi);
7362 if (known_lt (offset / part_widthi,
7363 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7364 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7365 part_width, index);
7366 }
7367
7368 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7369 if (TREE_CODE (addr) == ADDR_EXPR
7370 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7371 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7372 {
7373 tree size = TYPE_SIZE_UNIT (type);
7374 if (tree_int_cst_equal (size, off))
7375 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7376 }
7377
7378 /* *(p + CST) -> MEM_REF <p, CST>. */
7379 if (TREE_CODE (addr) != ADDR_EXPR
7380 || DECL_P (TREE_OPERAND (addr, 0)))
7381 return fold_build2 (MEM_REF, type,
7382 addr,
7383 wide_int_to_tree (ptype, wi::to_wide (off)));
7384 }
7385
7386 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7387 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7388 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7389 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7390 {
7391 tree type_domain;
7392 tree min_val = size_zero_node;
7393 tree osub = sub;
7394 sub = gimple_fold_indirect_ref (sub);
7395 if (! sub)
7396 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7397 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7398 if (type_domain && TYPE_MIN_VALUE (type_domain))
7399 min_val = TYPE_MIN_VALUE (type_domain);
7400 if (TREE_CODE (min_val) == INTEGER_CST)
7401 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7402 }
7403
7404 return NULL_TREE;
7405 }
7406
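/* Editor's usage sketch (hypothetical trees): for T = &a[0] with
   int a[4], the ADDR_EXPR case above simplifies the indirection to the
   ARRAY_REF a[0]; for an arbitrary non-ADDR_EXPR pointer the function
   returns NULL_TREE and the caller must keep the MEM_REF form.  */
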
7407 /* Return true if CODE is an operation that, when operating on signed
7408 integer types, involves undefined behavior on overflow and that
7409 can be expressed with unsigned arithmetic. */
7410
7411 bool
7412 arith_code_with_undefined_signed_overflow (tree_code code)
7413 {
7414 switch (code)
7415 {
7416 case ABS_EXPR:
7417 case PLUS_EXPR:
7418 case MINUS_EXPR:
7419 case MULT_EXPR:
7420 case NEGATE_EXPR:
7421 case POINTER_PLUS_EXPR:
7422 return true;
7423 default:
7424 return false;
7425 }
7426 }
7427
7428 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7429 operation that can be transformed to unsigned arithmetic by converting
7430 its operand, carrying out the operation in the corresponding unsigned
7431 type and converting the result back to the original type.
7432
7433 Returns a sequence of statements that replace STMT and also contain
7434 a modified form of STMT itself. */
7435
7436 gimple_seq
7437 rewrite_to_defined_overflow (gimple *stmt)
7438 {
7439 if (dump_file && (dump_flags & TDF_DETAILS))
7440 {
7441 fprintf (dump_file, "rewriting stmt with undefined signed "
7442 "overflow ");
7443 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7444 }
7445
7446 tree lhs = gimple_assign_lhs (stmt);
7447 tree type = unsigned_type_for (TREE_TYPE (lhs));
7448 gimple_seq stmts = NULL;
7449 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7450 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7451 else
7452 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7453 {
7454 tree op = gimple_op (stmt, i);
7455 op = gimple_convert (&stmts, type, op);
7456 gimple_set_op (stmt, i, op);
7457 }
7458 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7459 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7460 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7461 gimple_set_modified (stmt, true);
7462 gimple_seq_add_stmt (&stmts, stmt);
7463 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7464 gimple_seq_add_stmt (&stmts, cvt);
7465
7466 return stmts;
7467 }
7468
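/* Editor's illustrative sketch of the rewrite above for a signed
   addition x = a + b (SSA names hypothetical).  The returned sequence
   corresponds to

     _1 = (unsigned int) a_2;
     _3 = (unsigned int) b_4;
     _5 = _1 + _3;
     x_6 = (int) _5;

   where the addition now wraps rather than invoking undefined
   behavior.  */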
7469
7470 /* The valueization hook we use for the gimple_build API simplification.
7471 This makes us match fold_buildN behavior by only combining with
7472 statements in the sequence(s) we are currently building. */
7473
7474 static tree
7475 gimple_build_valueize (tree op)
7476 {
7477 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7478 return op;
7479 return NULL_TREE;
7480 }
7481
7482 /* Build the expression CODE OP0 of type TYPE with location LOC,
7483 simplifying it first if possible. Returns the built
7484 expression value and appends statements possibly defining it
7485 to SEQ. */
7486
7487 tree
7488 gimple_build (gimple_seq *seq, location_t loc,
7489 enum tree_code code, tree type, tree op0)
7490 {
7491 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7492 if (!res)
7493 {
7494 res = create_tmp_reg_or_ssa_name (type);
7495 gimple *stmt;
7496 if (code == REALPART_EXPR
7497 || code == IMAGPART_EXPR
7498 || code == VIEW_CONVERT_EXPR)
7499 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7500 else
7501 stmt = gimple_build_assign (res, code, op0);
7502 gimple_set_location (stmt, loc);
7503 gimple_seq_add_stmt_without_update (seq, stmt);
7504 }
7505 return res;
7506 }
7507
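/* Editor's usage sketch for the gimple_build overloads (hypothetical
   operands).  Building -X of type TYPE into SEQ:

     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, x);

   If gimple_simplify can fold the expression (e.g. X is itself a
   negation), no statement is emitted and the simplified value is
   returned instead.  */
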
7508 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7509 simplifying it first if possible. Returns the built
7510 expression value and appends statements possibly defining it
7511 to SEQ. */
7512
7513 tree
7514 gimple_build (gimple_seq *seq, location_t loc,
7515 enum tree_code code, tree type, tree op0, tree op1)
7516 {
7517 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7518 if (!res)
7519 {
7520 res = create_tmp_reg_or_ssa_name (type);
7521 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7522 gimple_set_location (stmt, loc);
7523 gimple_seq_add_stmt_without_update (seq, stmt);
7524 }
7525 return res;
7526 }
7527
7528 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7529 simplifying it first if possible. Returns the built
7530 expression value and appends statements possibly defining it
7531 to SEQ. */
7532
7533 tree
7534 gimple_build (gimple_seq *seq, location_t loc,
7535 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7536 {
7537 tree res = gimple_simplify (code, type, op0, op1, op2,
7538 seq, gimple_build_valueize);
7539 if (!res)
7540 {
7541 res = create_tmp_reg_or_ssa_name (type);
7542 gimple *stmt;
7543 if (code == BIT_FIELD_REF)
7544 stmt = gimple_build_assign (res, code,
7545 build3 (code, type, op0, op1, op2));
7546 else
7547 stmt = gimple_build_assign (res, code, op0, op1, op2);
7548 gimple_set_location (stmt, loc);
7549 gimple_seq_add_stmt_without_update (seq, stmt);
7550 }
7551 return res;
7552 }
7553
7554 /* Build the call FN (ARG0) with a result of type TYPE
7555 (or no result if TYPE is void) with location LOC,
7556 simplifying it first if possible. Returns the built
7557 expression value (or NULL_TREE if TYPE is void) and appends
7558 statements possibly defining it to SEQ. */
7559
7560 tree
7561 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7562 tree type, tree arg0)
7563 {
7564 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7565 if (!res)
7566 {
7567 gcall *stmt;
7568 if (internal_fn_p (fn))
7569 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7570 else
7571 {
7572 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7573 stmt = gimple_build_call (decl, 1, arg0);
7574 }
7575 if (!VOID_TYPE_P (type))
7576 {
7577 res = create_tmp_reg_or_ssa_name (type);
7578 gimple_call_set_lhs (stmt, res);
7579 }
7580 gimple_set_location (stmt, loc);
7581 gimple_seq_add_stmt_without_update (seq, stmt);
7582 }
7583 return res;
7584 }
7585
7586 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7587 (or no result if TYPE is void) with location LOC,
7588 simplifying it first if possible. Returns the built
7589 expression value (or NULL_TREE if TYPE is void) and appends
7590 statements possibly defining it to SEQ. */
7591
7592 tree
7593 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7594 tree type, tree arg0, tree arg1)
7595 {
7596 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7597 if (!res)
7598 {
7599 gcall *stmt;
7600 if (internal_fn_p (fn))
7601 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7602 else
7603 {
7604 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7605 stmt = gimple_build_call (decl, 2, arg0, arg1);
7606 }
7607 if (!VOID_TYPE_P (type))
7608 {
7609 res = create_tmp_reg_or_ssa_name (type);
7610 gimple_call_set_lhs (stmt, res);
7611 }
7612 gimple_set_location (stmt, loc);
7613 gimple_seq_add_stmt_without_update (seq, stmt);
7614 }
7615 return res;
7616 }
7617
7618 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7619 (or no result if TYPE is void) with location LOC,
7620 simplifying it first if possible. Returns the built
7621 expression value (or NULL_TREE if TYPE is void) and appends
7622 statements possibly defining it to SEQ. */
7623
7624 tree
7625 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7626 tree type, tree arg0, tree arg1, tree arg2)
7627 {
7628 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7629 seq, gimple_build_valueize);
7630 if (!res)
7631 {
7632 gcall *stmt;
7633 if (internal_fn_p (fn))
7634 stmt = gimple_build_call_internal (as_internal_fn (fn),
7635 3, arg0, arg1, arg2);
7636 else
7637 {
7638 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7639 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7640 }
7641 if (!VOID_TYPE_P (type))
7642 {
7643 res = create_tmp_reg_or_ssa_name (type);
7644 gimple_call_set_lhs (stmt, res);
7645 }
7646 gimple_set_location (stmt, loc);
7647 gimple_seq_add_stmt_without_update (seq, stmt);
7648 }
7649 return res;
7650 }
7651
7652 /* Build the conversion (TYPE) OP with a result of type TYPE
7653 with location LOC if such a conversion is necessary in GIMPLE,
7654 simplifying it first.
7655 Returns the built expression value and appends
7656 statements possibly defining it to SEQ. */
7657
7658 tree
7659 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7660 {
7661 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7662 return op;
7663 return gimple_build (seq, loc, NOP_EXPR, type, op);
7664 }
7665
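/* Editor's usage sketch (hypothetical operand): converting OP to an
   unsigned type only when a conversion is actually needed:

     op = gimple_convert (&seq, loc, unsigned_type_node, op);

   If OP already has a compatible type, it is returned unchanged and SEQ
   is left untouched.  */
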
7666 /* Build the conversion (ptrofftype) OP with a result of a type
7667 compatible with ptrofftype with location LOC if such conversion
7668 is neccesary in GIMPLE, simplifying it first.
7669 Returns the built expression value and appends
7670 statements possibly defining it to SEQ. */
7671
7672 tree
7673 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7674 {
7675 if (ptrofftype_p (TREE_TYPE (op)))
7676 return op;
7677 return gimple_convert (seq, loc, sizetype, op);
7678 }
7679
7680 /* Build a vector of type TYPE in which each element has the value OP.
7681 Return a gimple value for the result, appending any new statements
7682 to SEQ. */
7683
7684 tree
7685 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7686 tree op)
7687 {
7688 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7689 && !CONSTANT_CLASS_P (op))
7690 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7691
7692 tree res, vec = build_vector_from_val (type, op);
7693 if (is_gimple_val (vec))
7694 return vec;
7695 if (gimple_in_ssa_p (cfun))
7696 res = make_ssa_name (type);
7697 else
7698 res = create_tmp_reg (type);
7699 gimple *stmt = gimple_build_assign (res, vec);
7700 gimple_set_location (stmt, loc);
7701 gimple_seq_add_stmt_without_update (seq, stmt);
7702 return res;
7703 }
7704
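/* Editor's usage sketch (hypothetical operands): splatting a scalar X
   across vector type VECTYPE:

     tree vec = gimple_build_vector_from_val (&seq, loc, vectype, x);

   For a constant X this yields a VECTOR_CST directly; otherwise a
   VEC_DUPLICATE_EXPR or constructor assignment is appended to SEQ.  */
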
7705 /* Build a vector from BUILDER, handling the case in which some elements
7706 are non-constant. Return a gimple value for the result, appending any
7707 new instructions to SEQ.
7708
7709 BUILDER must not have a stepped encoding on entry. This is because
7710 the function is not geared up to handle the arithmetic that would
7711 be needed in the variable case, and any code building a vector that
7712 is known to be constant should use BUILDER->build () directly. */
7713
7714 tree
7715 gimple_build_vector (gimple_seq *seq, location_t loc,
7716 tree_vector_builder *builder)
7717 {
7718 gcc_assert (builder->nelts_per_pattern () <= 2);
7719 unsigned int encoded_nelts = builder->encoded_nelts ();
7720 for (unsigned int i = 0; i < encoded_nelts; ++i)
7721 if (!TREE_CONSTANT ((*builder)[i]))
7722 {
7723 tree type = builder->type ();
7724 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7725 vec<constructor_elt, va_gc> *v;
7726 vec_alloc (v, nelts);
7727 for (i = 0; i < nelts; ++i)
7728 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7729
7730 tree res;
7731 if (gimple_in_ssa_p (cfun))
7732 res = make_ssa_name (type);
7733 else
7734 res = create_tmp_reg (type);
7735 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7736 gimple_set_location (stmt, loc);
7737 gimple_seq_add_stmt_without_update (seq, stmt);
7738 return res;
7739 }
7740 return builder->build ();
7741 }
7742
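/* Editor's usage sketch (hypothetical operands; assumes the
   tree_vector_builder constructor taking the number of patterns and
   elements per pattern).  Building a 4-element vector whose first
   element is a variable:

     tree_vector_builder builder (vectype, 4, 1);
     builder.quick_push (x);
     builder.quick_push (build_int_cst (elttype, 1));
     builder.quick_push (build_int_cst (elttype, 2));
     builder.quick_push (build_int_cst (elttype, 3));
     tree vec = gimple_build_vector (&seq, loc, &builder);

   Since X is not constant, a CONSTRUCTOR assignment is emitted into SEQ
   and its result returned.  */
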
7743 /* Return true if the result of assignment STMT is known to be non-negative.
7744 If the return value is based on the assumption that signed overflow is
7745 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7746 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7747
7748 static bool
7749 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7750 int depth)
7751 {
7752 enum tree_code code = gimple_assign_rhs_code (stmt);
7753 switch (get_gimple_rhs_class (code))
7754 {
7755 case GIMPLE_UNARY_RHS:
7756 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7757 gimple_expr_type (stmt),
7758 gimple_assign_rhs1 (stmt),
7759 strict_overflow_p, depth);
7760 case GIMPLE_BINARY_RHS:
7761 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7762 gimple_expr_type (stmt),
7763 gimple_assign_rhs1 (stmt),
7764 gimple_assign_rhs2 (stmt),
7765 strict_overflow_p, depth);
7766 case GIMPLE_TERNARY_RHS:
7767 return false;
7768 case GIMPLE_SINGLE_RHS:
7769 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7770 strict_overflow_p, depth);
7771 case GIMPLE_INVALID_RHS:
7772 break;
7773 }
7774 gcc_unreachable ();
7775 }
7776
7777 /* Return true if the return value of call STMT is known to be non-negative.
7778 If the return value is based on the assumption that signed overflow is
7779 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7780 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7781
7782 static bool
7783 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7784 int depth)
7785 {
7786 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7787 gimple_call_arg (stmt, 0) : NULL_TREE;
7788 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7789 gimple_call_arg (stmt, 1) : NULL_TREE;
7790
7791 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7792 gimple_call_combined_fn (stmt),
7793 arg0,
7794 arg1,
7795 strict_overflow_p, depth);
7796 }
7797
7798 /* Return true if the result of PHI STMT is known to be non-negative.
7799 If the return value is based on the assumption that signed overflow is
7800 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7801 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7802
7803 static bool
7804 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7805 int depth)
7806 {
7807 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7808 {
7809 tree arg = gimple_phi_arg_def (stmt, i);
7810 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7811 return false;
7812 }
7813 return true;
7814 }
7815
7816 /* Return true if STMT is known to compute a non-negative value.
7817 If the return value is based on the assumption that signed overflow is
7818 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7819 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7820
7821 bool
7822 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7823 int depth)
7824 {
7825 switch (gimple_code (stmt))
7826 {
7827 case GIMPLE_ASSIGN:
7828 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7829 depth);
7830 case GIMPLE_CALL:
7831 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7832 depth);
7833 case GIMPLE_PHI:
7834 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7835 depth);
7836 default:
7837 return false;
7838 }
7839 }
7840
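/* Editor's illustrative note: for a signed multiplication
   x_1 = y_2 * y_2, the GIMPLE_ASSIGN case above is expected to return
   true while setting *STRICT_OVERFLOW_P, since the result is known to
   be non-negative only under the assumption that signed overflow is
   undefined.  */
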
7841 /* Return true if the floating-point value computed by assignment STMT
7842 is known to have an integer value. We also allow +Inf, -Inf and NaN
7843 to be considered integer values. Return false for signaling NaN.
7844
7845 DEPTH is the current nesting depth of the query. */
7846
7847 static bool
7848 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7849 {
7850 enum tree_code code = gimple_assign_rhs_code (stmt);
7851 switch (get_gimple_rhs_class (code))
7852 {
7853 case GIMPLE_UNARY_RHS:
7854 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7855 gimple_assign_rhs1 (stmt), depth);
7856 case GIMPLE_BINARY_RHS:
7857 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7858 gimple_assign_rhs1 (stmt),
7859 gimple_assign_rhs2 (stmt), depth);
7860 case GIMPLE_TERNARY_RHS:
7861 return false;
7862 case GIMPLE_SINGLE_RHS:
7863 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7864 case GIMPLE_INVALID_RHS:
7865 break;
7866 }
7867 gcc_unreachable ();
7868 }
7869
7870 /* Return true if the floating-point value computed by call STMT is known
7871 to have an integer value. We also allow +Inf, -Inf and NaN to be
7872 considered integer values. Return false for signaling NaN.
7873
7874 DEPTH is the current nesting depth of the query. */
7875
7876 static bool
7877 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7878 {
7879 tree arg0 = (gimple_call_num_args (stmt) > 0
7880 ? gimple_call_arg (stmt, 0)
7881 : NULL_TREE);
7882 tree arg1 = (gimple_call_num_args (stmt) > 1
7883 ? gimple_call_arg (stmt, 1)
7884 : NULL_TREE);
7885 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7886 arg0, arg1, depth);
7887 }
7888
7889 /* Return true if the floating-point result of phi STMT is known to have
7890 an integer value. We also allow +Inf, -Inf and NaN to be considered
7891 integer values. Return false for signaling NaN.
7892
7893 DEPTH is the current nesting depth of the query. */
7894
7895 static bool
7896 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7897 {
7898 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7899 {
7900 tree arg = gimple_phi_arg_def (stmt, i);
7901 if (!integer_valued_real_single_p (arg, depth + 1))
7902 return false;
7903 }
7904 return true;
7905 }
7906
7907 /* Return true if the floating-point value computed by STMT is known
7908 to have an integer value. We also allow +Inf, -Inf and NaN to be
7909 considered integer values. Return false for signaling NaN.
7910
7911 DEPTH is the current nesting depth of the query. */
7912
7913 bool
7914 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7915 {
7916 switch (gimple_code (stmt))
7917 {
7918 case GIMPLE_ASSIGN:
7919 return gimple_assign_integer_valued_real_p (stmt, depth);
7920 case GIMPLE_CALL:
7921 return gimple_call_integer_valued_real_p (stmt, depth);
7922 case GIMPLE_PHI:
7923 return gimple_phi_integer_valued_real_p (stmt, depth);
7924 default:
7925 return false;
7926 }
7927 }
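
/* Editor's illustrative note: under the dispatch above, a call
   x_1 = __builtin_floor (y_2) is recognized as integer-valued via
   integer_valued_real_call_p, while a PHI is integer-valued only if
   every incoming argument (e.g. a REAL_CST such as 2.0) is.  */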