Update error message prefix in libgcov profiling.
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68
/* The kind of string-length computation requested of get_range_strlen
   and its callees.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
83
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
86
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) decls never have a body to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body fully inlined everywhere cannot be referenced.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
186
187 /* Create a temporary for TYPE for a statement STMT. If the current function
188 is in SSA form, a SSA name is created. Otherwise a temporary register
189 is made. */
190
191 tree
192 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
193 {
194 if (gimple_in_ssa_p (cfun))
195 return make_ssa_name (type, stmt);
196 else
197 return create_tmp_reg (type);
198 }
199
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  Returns the canonicalized value, or NULL_TREE when
   the value cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn &PTR p+ CST into the ADDR_EXPR of a MEM_REF at offset CST so
     that the result has a chance of being min-invariant.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl
	     (mutates CVAL in place).  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* An address of a decl we cannot reference from this unit (for
	 instance a COMDAT optimized out elsewhere) is not usable.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants carrying TREE_OVERFLOW are not min-invariant; strip the
     overflow flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
261
262 /* If SYM is a constant variable with known value, return the value.
263 NULL_TREE is returned otherwise. */
264
265 tree
266 get_symbol_constant_value (tree sym)
267 {
268 tree val = ctor_for_folding (sym);
269 if (val != error_mark_node)
270 {
271 if (val)
272 {
273 val = canonicalize_constructor_val (unshare_expr (val), sym);
274 if (val && is_gimple_min_invariant (val))
275 return val;
276 else
277 return NULL_TREE;
278 }
279 /* Variables declared 'const' without an initializer
280 have zero as the initializer if they may not be
281 overridden at link or run time. */
282 if (!val
283 && is_gimple_reg_type (TREE_TYPE (sym)))
284 return build_zero_cst (TREE_TYPE (sym));
285 }
286
287 return NULL_TREE;
288 }
289
290
291
292 /* Subroutine of fold_stmt. We perform several simplifications of the
293 memory reference tree EXPR and make sure to re-gimplify them properly
294 after propagation of constant addresses. IS_LHS is true if the
295 reference is supposed to be an lvalue. */
296
297 static tree
298 maybe_fold_reference (tree expr, bool is_lhs)
299 {
300 tree result;
301
302 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
303 || TREE_CODE (expr) == REALPART_EXPR
304 || TREE_CODE (expr) == IMAGPART_EXPR)
305 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
306 return fold_unary_loc (EXPR_LOCATION (expr),
307 TREE_CODE (expr),
308 TREE_TYPE (expr),
309 TREE_OPERAND (expr, 0));
310 else if (TREE_CODE (expr) == BIT_FIELD_REF
311 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
312 return fold_ternary_loc (EXPR_LOCATION (expr),
313 TREE_CODE (expr),
314 TREE_TYPE (expr),
315 TREE_OPERAND (expr, 0),
316 TREE_OPERAND (expr, 1),
317 TREE_OPERAND (expr, 2));
318
319 if (!is_lhs
320 && (result = fold_const_aggregate_ref (expr))
321 && is_gimple_min_invariant (result))
322 return result;
323
324 return NULL_TREE;
325 }
326
327
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobber marks are not amenable to folding.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	/* Memory references: delegate to the reference folder.  */
	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if the set of possible targets of the
	       virtual call is final and has at most one element, the
	       OBJ_TYPE_REF can be replaced by a direct address.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] folds to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	/* A bare decl with a known constant value folds to that value.  */
	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
466
467
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it stores to memory or is a call
	 that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original stmt's
	     VDEF so downstream uses stay valid; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
539
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Context must be pushed before gimplify_and_add/force_gimple_operand
     and popped again on every exit path below.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      /* Nothing replaces the call, so drop its virtual defs.  */
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a value and assign it to the call's lhs as
	 the last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
595
596
597 /* Replace the call at *GSI with the gimple value VAL. */
598
599 void
600 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
601 {
602 gimple *stmt = gsi_stmt (*gsi);
603 tree lhs = gimple_call_lhs (stmt);
604 gimple *repl;
605 if (lhs)
606 {
607 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
608 val = fold_convert (TREE_TYPE (lhs), val);
609 repl = gimple_build_assign (lhs, val);
610 }
611 else
612 repl = gimple_build_nop ();
613 tree vdef = gimple_vdef (stmt);
614 if (vdef && TREE_CODE (vdef) == SSA_NAME)
615 {
616 unlink_stmt_vdef (stmt);
617 release_ssa_name (vdef);
618 }
619 gsi_replace (gsi, repl, false);
620 }
621
622 /* Replace the call at *GSI with the new call REPL and fold that
623 again. */
624
625 static void
626 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
627 {
628 gimple *stmt = gsi_stmt (*gsi);
629 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
630 gimple_set_location (repl, gimple_location (stmt));
631 if (gimple_vdef (stmt)
632 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
633 {
634 gimple_set_vdef (repl, gimple_vdef (stmt));
635 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
636 }
637 if (gimple_vuse (stmt))
638 gimple_set_vuse (repl, gimple_vuse (stmt));
639 gsi_replace (gsi, repl, false);
640 fold_stmt (gsi);
641 }
642
643 /* Return true if VAR is a VAR_DECL or a component thereof. */
644
645 static bool
646 var_decl_component_p (tree var)
647 {
648 tree inner = var;
649 while (handled_component_p (inner))
650 inner = TREE_OPERAND (inner, 0);
651 return (DECL_P (inner)
652 || (TREE_CODE (inner) == MEM_REF
653 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
654 }
655
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Without an SSA name there is no recorded range information to
     consult.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Only sizes in [0, SSIZE_MAX] are valid object sizes; intersect the
     recorded range of SIZE with that interval and test whether zero is
     the only value left.  */
  value_range valid_range (VR_RANGE,
			   build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
682
683 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
684 diagnose (otherwise undefined) overlapping copies without preventing
685 folding. When folded, GCC guarantees that overlapping memcpy has
686 the same semantics as memmove. Call to the library memcpy need not
687 provide the same guarantee. Return false if no simplification can
688 be made. */
689
690 static bool
691 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
692 tree dest, tree src, int endp)
693 {
694 gimple *stmt = gsi_stmt (*gsi);
695 tree lhs = gimple_call_lhs (stmt);
696 tree len = gimple_call_arg (stmt, 2);
697 tree destvar, srcvar;
698 location_t loc = gimple_location (stmt);
699
700 /* If the LEN parameter is a constant zero or in range where
701 the only valid value is zero, return DEST. */
702 if (size_must_be_zero_p (len))
703 {
704 gimple *repl;
705 if (gimple_call_lhs (stmt))
706 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
707 else
708 repl = gimple_build_nop ();
709 tree vdef = gimple_vdef (stmt);
710 if (vdef && TREE_CODE (vdef) == SSA_NAME)
711 {
712 unlink_stmt_vdef (stmt);
713 release_ssa_name (vdef);
714 }
715 gsi_replace (gsi, repl, false);
716 return true;
717 }
718
719 /* If SRC and DEST are the same (and not volatile), return
720 DEST{,+LEN,+LEN-1}. */
721 if (operand_equal_p (src, dest, 0))
722 {
723 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
724 It's safe and may even be emitted by GCC itself (see bug
725 32667). */
726 unlink_stmt_vdef (stmt);
727 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
728 release_ssa_name (gimple_vdef (stmt));
729 if (!lhs)
730 {
731 gsi_replace (gsi, gimple_build_nop (), false);
732 return true;
733 }
734 goto done;
735 }
736 else
737 {
738 tree srctype, desttype;
739 unsigned int src_align, dest_align;
740 tree off0;
741 const char *tmp_str;
742 unsigned HOST_WIDE_INT tmp_len;
743
744 /* Build accesses at offset zero with a ref-all character type. */
745 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
746 ptr_mode, true), 0);
747
748 /* If we can perform the copy efficiently with first doing all loads
749 and then all stores inline it that way. Currently efficiently
750 means that we can load all the memory into a single integer
751 register which is what MOVE_MAX gives us. */
752 src_align = get_pointer_alignment (src);
753 dest_align = get_pointer_alignment (dest);
754 if (tree_fits_uhwi_p (len)
755 && compare_tree_int (len, MOVE_MAX) <= 0
756 /* ??? Don't transform copies from strings with known length this
757 confuses the tree-ssa-strlen.c. This doesn't handle
758 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
759 reason. */
760 && !c_strlen (src, 2)
761 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
762 && memchr (tmp_str, 0, tmp_len) == NULL))
763 {
764 unsigned ilen = tree_to_uhwi (len);
765 if (pow2p_hwi (ilen))
766 {
767 /* Detect out-of-bounds accesses without issuing warnings.
768 Avoid folding out-of-bounds copies but to avoid false
769 positives for unreachable code defer warning until after
770 DCE has worked its magic.
771 -Wrestrict is still diagnosed. */
772 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
773 dest, src, len, len,
774 false, false))
775 if (warning != OPT_Wrestrict)
776 return false;
777
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
789 {
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
804 {
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
807 {
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
815 }
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
828 {
829 gsi_replace (gsi, new_stmt, false);
830 return true;
831 }
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
834 }
835 }
836 }
837 }
838
839 if (endp == 3)
840 {
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
844
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
852 {
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
861 }
862
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
866 {
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
870
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
884 {
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
889 }
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
892 {
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
903 }
904 else
905 return false;
906
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
923 {
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
928 {
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
938 }
939 }
940
941 return false;
942 }
943
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
966
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
983
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
989
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
995
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 {
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1006 {
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1010 }
1011 }
1012
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1015
1016 if (srcvar == NULL_TREE)
1017 {
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1021 {
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1027 }
1028 }
1029 else if (destvar == NULL_TREE)
1030 {
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
1043 /* Same as above, detect out-of-bounds accesses without issuing
1044 warnings. Avoid folding out-of-bounds copies but to avoid
1045 false positives for unreachable code defer warning until
1046 after DCE has worked its magic.
1047 -Wrestrict is still diagnosed. */
1048 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1049 dest, src, len, len,
1050 false, false))
1051 if (warning != OPT_Wrestrict)
1052 return false;
1053
1054 gimple *new_stmt;
1055 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1056 {
1057 tree tem = fold_const_aggregate_ref (srcvar);
1058 if (tem)
1059 srcvar = tem;
1060 if (! is_gimple_min_invariant (srcvar))
1061 {
1062 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1063 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1064 new_stmt);
1065 gimple_assign_set_lhs (new_stmt, srcvar);
1066 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1067 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1068 }
1069 new_stmt = gimple_build_assign (destvar, srcvar);
1070 goto set_vop_and_replace;
1071 }
1072
1073 /* We get an aggregate copy. Use an unsigned char[] type to
1074 perform the copying to preserve padding and to avoid any issues
1075 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1076 desttype = build_array_type_nelts (unsigned_char_type_node,
1077 tree_to_uhwi (len));
1078 srctype = desttype;
1079 if (src_align > TYPE_ALIGN (srctype))
1080 srctype = build_aligned_type (srctype, src_align);
1081 if (dest_align > TYPE_ALIGN (desttype))
1082 desttype = build_aligned_type (desttype, dest_align);
1083 new_stmt
1084 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1085 fold_build2 (MEM_REF, srctype, src, off0));
1086 set_vop_and_replace:
1087 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1088 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1089 if (gimple_vdef (new_stmt)
1090 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1091 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1092 if (!lhs)
1093 {
1094 gsi_replace (gsi, new_stmt, false);
1095 return true;
1096 }
1097 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 }
1099
1100 done:
1101 gimple_seq stmts = NULL;
1102 if (endp == 0 || endp == 3)
1103 len = NULL_TREE;
1104 else if (endp == 2)
1105 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1106 ssize_int (1));
1107 if (endp == 2 || endp == 1)
1108 {
1109 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1110 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1111 TREE_TYPE (dest), dest, len);
1112 }
1113
1114 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1115 gimple *repl = gimple_build_assign (lhs, dest);
1116 gsi_replace (gsi, repl, false);
1117 return true;
1118 }
1119
1120 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1121 to built-in memcmp (a, b, len). */
1122
1123 static bool
1124 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1125 {
1126 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1127
1128 if (!fn)
1129 return false;
1130
1131 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1132
1133 gimple *stmt = gsi_stmt (*gsi);
1134 tree a = gimple_call_arg (stmt, 0);
1135 tree b = gimple_call_arg (stmt, 1);
1136 tree len = gimple_call_arg (stmt, 2);
1137
1138 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1139 replace_call_with_call_and_fold (gsi, repl);
1140
1141 return true;
1142 }
1143
1144 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1145 to built-in memmove (dest, src, len). */
1146
1147 static bool
1148 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1149 {
1150 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1151
1152 if (!fn)
1153 return false;
1154
1155 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1156 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1157 len) into memmove (dest, src, len). */
1158
1159 gimple *stmt = gsi_stmt (*gsi);
1160 tree src = gimple_call_arg (stmt, 0);
1161 tree dest = gimple_call_arg (stmt, 1);
1162 tree len = gimple_call_arg (stmt, 2);
1163
1164 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1165 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1166 replace_call_with_call_and_fold (gsi, repl);
1167
1168 return true;
1169 }
1170
1171 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1172 to built-in memset (dest, 0, len). */
1173
1174 static bool
1175 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1176 {
1177 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1178
1179 if (!fn)
1180 return false;
1181
1182 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1183
1184 gimple *stmt = gsi_stmt (*gsi);
1185 tree dest = gimple_call_arg (stmt, 0);
1186 tree len = gimple_call_arg (stmt, 1);
1187
1188 gimple_seq seq = NULL;
1189 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1190 gimple_seq_add_stmt_without_update (&seq, repl);
1191 gsi_replace_with_seq_vops (gsi, seq);
1192 fold_stmt (gsi);
1193
1194 return true;
1195 }
1196
1197 /* Fold function call to builtin memset or bzero at *GSI setting the
1198 memory of size LEN to VAL. Return whether a simplification was made. */
1199
1200 static bool
1201 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1202 {
1203 gimple *stmt = gsi_stmt (*gsi);
1204 tree etype;
1205 unsigned HOST_WIDE_INT length, cval;
1206
1207 /* If the LEN parameter is zero, return DEST. */
1208 if (integer_zerop (len))
1209 {
1210 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1211 return true;
1212 }
1213
1214 if (! tree_fits_uhwi_p (len))
1215 return false;
1216
1217 if (TREE_CODE (c) != INTEGER_CST)
1218 return false;
1219
1220 tree dest = gimple_call_arg (stmt, 0);
1221 tree var = dest;
1222 if (TREE_CODE (var) != ADDR_EXPR)
1223 return false;
1224
1225 var = TREE_OPERAND (var, 0);
1226 if (TREE_THIS_VOLATILE (var))
1227 return false;
1228
1229 etype = TREE_TYPE (var);
1230 if (TREE_CODE (etype) == ARRAY_TYPE)
1231 etype = TREE_TYPE (etype);
1232
1233 if (!INTEGRAL_TYPE_P (etype)
1234 && !POINTER_TYPE_P (etype))
1235 return NULL_TREE;
1236
1237 if (! var_decl_component_p (var))
1238 return NULL_TREE;
1239
1240 length = tree_to_uhwi (len);
1241 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1242 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1243 return NULL_TREE;
1244
1245 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1246 return NULL_TREE;
1247
1248 if (integer_zerop (c))
1249 cval = 0;
1250 else
1251 {
1252 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1253 return NULL_TREE;
1254
1255 cval = TREE_INT_CST_LOW (c);
1256 cval &= 0xff;
1257 cval |= cval << 8;
1258 cval |= cval << 16;
1259 cval |= (cval << 31) << 1;
1260 }
1261
1262 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1263 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1264 gimple_set_vuse (store, gimple_vuse (stmt));
1265 tree vdef = gimple_vdef (stmt);
1266 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1267 {
1268 gimple_set_vdef (store, gimple_vdef (stmt));
1269 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1270 }
1271 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1272 if (gimple_call_lhs (stmt))
1273 {
1274 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1275 gsi_replace (gsi, asgn, false);
1276 }
1277 else
1278 {
1279 gimple_stmt_iterator gsi2 = *gsi;
1280 gsi_prev (gsi);
1281 gsi_remove (&gsi2, true);
1282 }
1283
1284 return true;
1285 }
1286
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Updates
   the bounds in *PDATA (MINLEN, MAXLEN, MAXBOUND and possibly DECL)
   from the string or array ARG refers to.  VISITED, RKIND and ELTSIZE
   are as for get_range_strlen.  Returns true when a usable bound was
   recorded.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0] is equivalent to p: recurse into the pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL with size LENDATA.LEN; fall back on the
	     minimum length and record the array for the caller.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* No constant length was found; for SRK_LENRANGE fall back on bounds
     implied by the type of the referenced object.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is its size minus
	     one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (pdata->maxbound, val))
		pdata->maxbound = val;
	    }
	  else
	    pdata->maxbound = build_all_ones_cst (size_type_node);
	}
      else
	pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  /* For SRK_STRLEN an all-ones VAL means the length is unbounded and
     thus not usable; for SRK_LENRANGE an unbounded maximum is fine.  */
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1549
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single-byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA names (constants, decls, ADDR_EXPRs, ...) are handled by
     the tree-level helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Recurse into both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
1662
1663 /* Determine the minimum and maximum value or string length that ARG
1664 refers to and store each in the first two elements of MINMAXLEN.
1665 For expressions that point to strings of unknown lengths that are
1666 character arrays, use the upper bound of the array as the maximum
1667 length. For example, given an expression like 'x ? array : "xyz"'
1668 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1669 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1670 stored in array.
1671 Return true if the range of the string lengths has been obtained
1672 from the upper bound of an array at the end of a struct. Such
1673 an array may hold a string that's longer than its upper bound
1674 due to it being used as a poor-man's flexible array member.
1675
1676 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1677 and false if PHIs and COND_EXPRs are to be handled optimistically,
1678 if we can determine string length minimum and maximum; it will use
1679 the minimum from the ones where it can be determined.
1680 STRICT false should be only used for warning code.
1681 When non-null, clear *NONSTR if ARG refers to a constant array
1682 that is known not be nul-terminated. Otherwise set it to
1683 the declaration of the constant non-terminated array.
1684
1685 ELTSIZE is 1 for normal single byte character strings, and 2 or
1686 4 for wide characer strings. ELTSIZE is by default 1. */
1687
1688 bool
1689 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1690 {
1691 bitmap visited = NULL;
1692
1693 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1694 {
1695 /* On failure extend the length range to an impossible maximum
1696 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1697 members can stay unchanged regardless. */
1698 pdata->minlen = ssize_int (0);
1699 pdata->maxlen = build_all_ones_cst (size_type_node);
1700 }
1701 else if (!pdata->minlen)
1702 pdata->minlen = ssize_int (0);
1703
1704 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1705 if (!pdata->maxbound)
1706 pdata->maxbound = pdata->maxlen;
1707
1708 if (visited)
1709 BITMAP_FREE (visited);
1710
1711 return !integer_all_onesp (pdata->maxlen);
1712 }
1713
1714 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1715 For ARG of pointer types, NONSTR indicates if the caller is prepared
1716 to handle unterminated strings. For integer ARG and when RKIND ==
1717 SRK_INT_VALUE, NONSTR must be null.
1718
1719 If an unterminated array is discovered and our caller handles
1720 unterminated arrays, then bubble up the offending DECL and
1721 return the maximum size. Otherwise return NULL. */
1722
1723 static tree
1724 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1725 {
1726 /* A non-null NONSTR is meaningless when determining the maximum
1727 value of an integer ARG. */
1728 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1729 /* ARG must have an integral type when RKIND says so. */
1730 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1731
1732 bitmap visited = NULL;
1733
1734 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1735 is unbounded. */
1736 c_strlen_data lendata = { };
1737 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1738 lendata.maxlen = NULL_TREE;
1739 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1740 lendata.maxlen = NULL_TREE;
1741
1742 if (visited)
1743 BITMAP_FREE (visited);
1744
1745 if (nonstr)
1746 {
1747 /* For callers prepared to handle unterminated arrays set
1748 *NONSTR to point to the declaration of the array and return
1749 the maximum length/size. */
1750 *nonstr = lendata.decl;
1751 return lendata.maxlen;
1752 }
1753
1754 /* Fail if the constant array isn't nul-terminated. */
1755 return lendata.decl ? NULL_TREE : lendata.maxlen;
1756 }
1757
1758
1759 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1760 If LEN is not NULL, it represents the length of the string to be
1761 copied. Return NULL_TREE if no simplification can be made. */
1762
1763 static bool
1764 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1765 tree dest, tree src)
1766 {
1767 gimple *stmt = gsi_stmt (*gsi);
1768 location_t loc = gimple_location (stmt);
1769 tree fn;
1770
1771 /* If SRC and DEST are the same (and not volatile), return DEST. */
1772 if (operand_equal_p (src, dest, 0))
1773 {
1774 /* Issue -Wrestrict unless the pointers are null (those do
1775 not point to objects and so do not indicate an overlap;
1776 such calls could be the result of sanitization and jump
1777 threading). */
1778 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1779 {
1780 tree func = gimple_call_fndecl (stmt);
1781
1782 warning_at (loc, OPT_Wrestrict,
1783 "%qD source argument is the same as destination",
1784 func);
1785 }
1786
1787 replace_call_with_value (gsi, dest);
1788 return true;
1789 }
1790
1791 if (optimize_function_for_size_p (cfun))
1792 return false;
1793
1794 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1795 if (!fn)
1796 return false;
1797
1798 /* Set to non-null if ARG refers to an unterminated array. */
1799 tree nonstr = NULL;
1800 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1801
1802 if (nonstr)
1803 {
1804 /* Avoid folding calls with unterminated arrays. */
1805 if (!gimple_no_warning_p (stmt))
1806 warn_string_no_nul (loc, "strcpy", src, nonstr);
1807 gimple_set_no_warning (stmt, true);
1808 return false;
1809 }
1810
1811 if (!len)
1812 return false;
1813
1814 len = fold_convert_loc (loc, size_type_node, len);
1815 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1816 len = force_gimple_operand_gsi (gsi, len, true,
1817 NULL_TREE, true, GSI_SAME_STMT);
1818 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1819 replace_call_with_call_and_fold (gsi, repl);
1820 return true;
1821 }
1822
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring, i.e. the
     result need not be nul-terminated.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy: the whole source (including the
     nul) fits within LEN, so the copy is equivalent.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1897
/* Fold function call to builtin strchr or strrchr (IS_STRRCHR selects
   which) at *GSI.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if the call was
   transformed.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* If the result is unused there is nothing to simplify into.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments known at compile time: evaluate on the host.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: fold to STR plus the offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The transformations below only apply to a search for the
     terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1981
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').
   Return true iff the statement at *GSI was replaced.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  /* All folds below produce a value; punt when the result is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* The needle must be a known constant string for any fold here.  */
  q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings constant: search at compile time on the host.  */
  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  /* Needle not present: result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Needle found at offset R - P: fold to HAYSTACK p+ offset.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2043
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call at *GSI.

   Return true and replace the statement if a simplification was
   performed, false otherwise.  The transformations are:
   strcat (DST, "") -> DST, and, when optimizing for speed and the
   length of SRC is known,
   strcat (DST, SRC) -> memcpy (DST + strlen (DST), SRC, strlen (SRC) + 1).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is larger; keep strcat for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the original lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2135
2136 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2137 are the arguments to the call. */
2138
2139 static bool
2140 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2141 {
2142 gimple *stmt = gsi_stmt (*gsi);
2143 tree dest = gimple_call_arg (stmt, 0);
2144 tree src = gimple_call_arg (stmt, 1);
2145 tree size = gimple_call_arg (stmt, 2);
2146 tree fn;
2147 const char *p;
2148
2149
2150 p = c_getstr (src);
2151 /* If the SRC parameter is "", return DEST. */
2152 if (p && *p == '\0')
2153 {
2154 replace_call_with_value (gsi, dest);
2155 return true;
2156 }
2157
2158 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2159 return false;
2160
2161 /* If __builtin_strcat_chk is used, assume strcat is available. */
2162 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2163 if (!fn)
2164 return false;
2165
2166 gimple *repl = gimple_build_call (fn, 2, dest, src);
2167 replace_call_with_call_and_fold (gsi, repl);
2168 return true;
2169 }
2170
/* Simplify a call to the strncat builtin at *GSI.
   Return true iff the statement was replaced.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* A constant bound and a known source string are needed below.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    /* Suppress any follow-on diagnostics for this statement.  */
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  The bound is known to cover the
     whole source string (cmpsrc >= 0), so strcat is equivalent.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2259
2260 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2261 LEN, and SIZE. */
2262
2263 static bool
2264 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2265 {
2266 gimple *stmt = gsi_stmt (*gsi);
2267 tree dest = gimple_call_arg (stmt, 0);
2268 tree src = gimple_call_arg (stmt, 1);
2269 tree len = gimple_call_arg (stmt, 2);
2270 tree size = gimple_call_arg (stmt, 3);
2271 tree fn;
2272 const char *p;
2273
2274 p = c_getstr (src);
2275 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2276 if ((p && *p == '\0')
2277 || integer_zerop (len))
2278 {
2279 replace_call_with_value (gsi, dest);
2280 return true;
2281 }
2282
2283 if (! tree_fits_uhwi_p (size))
2284 return false;
2285
2286 if (! integer_all_onesp (size))
2287 {
2288 tree src_len = c_strlen (src, 1);
2289 if (src_len
2290 && tree_fits_uhwi_p (src_len)
2291 && tree_fits_uhwi_p (len)
2292 && ! tree_int_cst_lt (len, src_len))
2293 {
2294 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2295 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2296 if (!fn)
2297 return false;
2298
2299 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2300 replace_call_with_call_and_fold (gsi, repl);
2301 return true;
2302 }
2303 return false;
2304 }
2305
2306 /* If __builtin_strncat_chk is used, assume strncat is available. */
2307 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2308 if (!fn)
2309 return false;
2310
2311 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2312 replace_call_with_call_and_fold (gsi, repl);
2313 return true;
2314 }
2315
2316 /* Build and append gimple statements to STMTS that would load a first
2317 character of a memory location identified by STR. LOC is location
2318 of the statement. */
2319
2320 static tree
2321 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2322 {
2323 tree var;
2324
2325 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2326 tree cst_uchar_ptr_node
2327 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2328 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2329
2330 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2331 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2332 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2333
2334 gimple_assign_set_lhs (stmt, var);
2335 gimple_seq_add_stmt_without_update (stmts, stmt);
2336
2337 return var;
2338 }
2339
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is the name of the builtin.  Return true iff the statement
   was replaced.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Bound for the strn* variants; -1 means no (known) bound.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* strncmp equality implies case-insensitive equality; a
	       nonzero strncmp result tells us nothing here.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is known to be compared.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen to int before negating to avoid unsigned wraparound.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2514
/* Fold a call to the memchr pointed by GSI iterator.
   Return true iff the statement was replaced.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constant.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only search the part of the string constant that is actually
	 covered by LEN.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* Not found — but only conclusive if LEN did not reach past
	     the known string constant.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found at a known offset: fold to ARG1 p+ offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No lhs: the call degenerates to a no-op.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2576
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true iff the statement at *GSI was replaced,
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2653
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true iff the
   statement at *GSI was replaced.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* All-ones SIZE means the object size is unknown; then the check is
     a no-op and the call can be folded unconditionally.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Give up if the copy may be larger than the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2754
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true iff the
   statement at *GSI was replaced.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* All-ones SIZE means the object size is unknown; then the check is
     a no-op and the call can be folded unconditionally.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      /* Copy strlen (SRC) + 1 bytes to include the NUL.  */
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Give up unless the copy provably fits in the object.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2859
2860 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2861 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2862 length passed as third argument. IGNORE is true if return value can be
2863 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2864
2865 static bool
2866 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2867 tree dest, tree src,
2868 tree len, tree size,
2869 enum built_in_function fcode)
2870 {
2871 gimple *stmt = gsi_stmt (*gsi);
2872 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2873 tree fn;
2874
2875 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2876 {
2877 /* If return value of __stpncpy_chk is ignored,
2878 optimize into __strncpy_chk. */
2879 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2880 if (fn)
2881 {
2882 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2883 replace_call_with_call_and_fold (gsi, repl);
2884 return true;
2885 }
2886 }
2887
2888 if (! tree_fits_uhwi_p (size))
2889 return false;
2890
2891 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2892 if (! integer_all_onesp (size))
2893 {
2894 if (! tree_fits_uhwi_p (len))
2895 {
2896 /* If LEN is not constant, try MAXLEN too.
2897 For MAXLEN only allow optimizing into non-_ocs function
2898 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2899 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2900 return false;
2901 }
2902 else
2903 maxlen = len;
2904
2905 if (tree_int_cst_lt (size, maxlen))
2906 return false;
2907 }
2908
2909 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2910 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2911 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2912 if (!fn)
2913 return false;
2914
2915 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2916 replace_call_with_call_and_fold (gsi, repl);
2917 return true;
2918 }
2919
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true iff the statement at *GSI was replaced, false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  /* The memcpy expansion below is larger than the call; skip it when
     optimizing for size.  */
  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  /* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call by hand since
     the memcpy is inserted rather than substituted via the GSI.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2998
/* Fold a call at *GSI to __{,v}snprintf_chk into a plain {,v}snprintf
   call when the object-size check can be proven to succeed.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true
   if the call was simplified in place, false if a normal (checked)
   call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the object size is unknown; otherwise
     require SIZE >= LEN (or its known maximum) so the runtime check
     is provably redundant.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining any trailing varargs (FLAG and SIZE are dropped).  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3079
/* Fold a call at *GSI to __{,v}sprintf_chk into a plain {,v}sprintf
   call when the output length can be bounded below the object size.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   Return true if the call was simplified in place, false if a normal
   (checked) call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown; otherwise the
     computed output length must be strictly smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining any trailing varargs (FLAG and SIZE are dropped).  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3175
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Two shapes are handled: 'sprintf (dest, fmt)' with no '%' in FMT
   folds to strcpy, and 'sprintf (dest, "%s", orig)' folds to strcpy
   with the return value (if used) replaced by strlen (orig).

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is the literal length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", fold to strcpy; when the result is used we
     also need a known constant length for ORIG.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3310
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Folding only happens when the copied length is provably smaller than
   DESTSIZE, so the snprintf truncation semantics cannot trigger and a
   plain strcpy is equivalent.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; with no truncation that is LEN.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", fold to strcpy when ORIG's length is a
     known constant smaller than the destination size.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3447
/* Fold a call at *GSI to one of the {,v}fprintf{,_unlocked} and
   __{,v}fprintf_chk builtins into fputs/fputc when possible.  FP, FMT,
   and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3548
/* Fold a call at *GSI to one of the {,v}printf{,_unlocked} and
   __{,v}printf_chk builtins into putchar/puts when possible.  FMT and
   ARG are the arguments to the call; we don't fold cases with more
   than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", arg) and printf with a %-free format; in
     both cases STR below is the string actually being printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3700
3701
3702
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument is known exactly, replace the call with that constant.
   Otherwise, when a nontrivial upper bound is known, record the
   [0, MAXLEN] range on the call's lhs.  Return true iff the call
   was replaced.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range; fall back to [0, max_object_size () - 2]
	 (presumably -2 accounts for the terminating nul -- confirm).
	 This also guarantees MINLEN != MAXLEN below.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  This branch is only
	 reachable via the get_range_strlen path above, so
	 LENDATA.MINLEN is non-null here.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, maxlen);

  return false;
}
3753
3754 /* Fold a call to __builtin_acc_on_device. */
3755
3756 static bool
3757 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3758 {
3759 /* Defer folding until we know which compiler we're in. */
3760 if (symtab->state != EXPANSION)
3761 return false;
3762
3763 unsigned val_host = GOMP_DEVICE_HOST;
3764 unsigned val_dev = GOMP_DEVICE_NONE;
3765
3766 #ifdef ACCEL_COMPILER
3767 val_host = GOMP_DEVICE_NOT_HOST;
3768 val_dev = ACCEL_COMPILER_acc_device;
3769 #endif
3770
3771 location_t loc = gimple_location (gsi_stmt (*gsi));
3772
3773 tree host_eq = make_ssa_name (boolean_type_node);
3774 gimple *host_ass = gimple_build_assign
3775 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3776 gimple_set_location (host_ass, loc);
3777 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3778
3779 tree dev_eq = make_ssa_name (boolean_type_node);
3780 gimple *dev_ass = gimple_build_assign
3781 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3782 gimple_set_location (dev_ass, loc);
3783 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3784
3785 tree result = make_ssa_name (boolean_type_node);
3786 gimple *result_ass = gimple_build_assign
3787 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3788 gimple_set_location (result_ass, loc);
3789 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3790
3791 replace_call_with_value (gsi, result);
3792
3793 return true;
3794 }
3795
3796 /* Fold realloc (0, n) -> malloc (n). */
3797
3798 static bool
3799 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3800 {
3801 gimple *stmt = gsi_stmt (*gsi);
3802 tree arg = gimple_call_arg (stmt, 0);
3803 tree size = gimple_call_arg (stmt, 1);
3804
3805 if (operand_equal_p (arg, null_pointer_node, 0))
3806 {
3807 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3808 if (fn_malloc)
3809 {
3810 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3811 replace_call_with_call_and_fold (gsi, repl);
3812 return true;
3813 }
3814 }
3815 return false;
3816 }
3817
/* Fold the non-target builtin call at *GSI and return whether any
   simplification was made.  Dispatches to the per-builtin folders
   above, then falls back to the generic tree-level folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The trailing integer distinguishes the memory-op flavors
       (0 memcpy, 1 mempcpy, 3 memmove) -- see
       gimple_fold_builtin_memory_op.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD names for strchr/strrchr.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf-family folders only handle the documented arities;
       any other arity falls through to the generic folder below.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3998
3999 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4000 function calls to constants, where possible. */
4001
4002 static tree
4003 fold_internal_goacc_dim (const gimple *call)
4004 {
4005 int axis = oacc_get_ifn_dim_arg (call);
4006 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4007 tree result = NULL_TREE;
4008 tree type = TREE_TYPE (gimple_call_lhs (call));
4009
4010 switch (gimple_call_internal_fn (call))
4011 {
4012 case IFN_GOACC_DIM_POS:
4013 /* If the size is 1, we know the answer. */
4014 if (size == 1)
4015 result = build_int_cst (type, 0);
4016 break;
4017 case IFN_GOACC_DIM_SIZE:
4018 /* If the size is not dynamic, we know the answer. */
4019 if (size)
4020 result = build_int_cst (type, size);
4021 break;
4022 default:
4023 break;
4024 }
4025
4026 return result;
4027 }
4028
4029 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4030 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4031 &var where var is only addressable because of such calls. */
4032
4033 bool
4034 optimize_atomic_compare_exchange_p (gimple *stmt)
4035 {
4036 if (gimple_call_num_args (stmt) != 6
4037 || !flag_inline_atomics
4038 || !optimize
4039 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4040 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4041 || !gimple_vdef (stmt)
4042 || !gimple_vuse (stmt))
4043 return false;
4044
4045 tree fndecl = gimple_call_fndecl (stmt);
4046 switch (DECL_FUNCTION_CODE (fndecl))
4047 {
4048 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4049 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4050 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4051 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4052 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4053 break;
4054 default:
4055 return false;
4056 }
4057
4058 tree expected = gimple_call_arg (stmt, 1);
4059 if (TREE_CODE (expected) != ADDR_EXPR
4060 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4061 return false;
4062
4063 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4064 if (!is_gimple_reg_type (etype)
4065 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4066 || TREE_THIS_VOLATILE (etype)
4067 || VECTOR_TYPE_P (etype)
4068 || TREE_CODE (etype) == COMPLEX_TYPE
4069 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4070 might not preserve all the bits. See PR71716. */
4071 || SCALAR_FLOAT_TYPE_P (etype)
4072 || maybe_ne (TYPE_PRECISION (etype),
4073 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4074 return false;
4075
4076 tree weak = gimple_call_arg (stmt, 3);
4077 if (!integer_zerop (weak) && !integer_onep (weak))
4078 return false;
4079
4080 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4081 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4082 machine_mode mode = TYPE_MODE (itype);
4083
4084 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4085 == CODE_FOR_nothing
4086 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4087 return false;
4088
4089 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4090 return false;
4091
4092 return true;
4093 }
4094
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;

   This turns the by-address "expected" argument E into a by-value operand,
   which allows E to stop being addressable.  The caller is expected to have
   checked the statement with optimize_atomic_compare_exchange_p.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the underlying uintN_t type, taken from the third parameter
     type of the builtin's prototype.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The variable whose address was passed as the expected value.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to leave the iterator on return.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the expected bits as uintN_t if the types differ.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak-ness (bit 8) and the access size in bytes (low bits) into
     a single constant operand, as IFN_ATOMIC_COMPARE_EXCHANGE expects.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  /* The internal call returns a complex value: the old memory contents in
     the real part and the success flag in the imaginary part.  */
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Carry over the virtual operands from the original call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* With non-call exceptions the call ends its BB; post-processing
	 statements must go on the fallthru edge instead of after it.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      /* Only needed here if the IMAGPART extraction above did not already
	 move the iterator onto the fallthru edge.  */
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the old memory contents back to the expected var's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the first statement we inserted.  */
  *gsi = gsiret;
}
4184
4185 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4186 doesn't fit into TYPE. The test for overflow should be regardless of
4187 -fwrapv, and even for unsigned types. */
4188
4189 bool
4190 arith_overflowed_p (enum tree_code code, const_tree type,
4191 const_tree arg0, const_tree arg1)
4192 {
4193 widest2_int warg0 = widest2_int_cst (arg0);
4194 widest2_int warg1 = widest2_int_cst (arg1);
4195 widest2_int wres;
4196 switch (code)
4197 {
4198 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4199 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4200 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4201 default: gcc_unreachable ();
4202 }
4203 signop sign = TYPE_SIGN (type);
4204 if (sign == UNSIGNED && wi::neg_p (wres))
4205 return true;
4206 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4207 }
4208
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.

   With INPLACE true only changes that keep the statement in place are
   performed; folding that would replace the call or insert new statements
   is deferred.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Diagnose targets that the type inheritance
	     graph considers impossible for this call.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if the set of possible
	     targets is final and has at most one element, we can fold
	     the indirect call to a direct one (or to unreachable).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call can never execute.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement or insert new ones.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: SUBCODE is the arithmetic operation for
	 the UBSAN_CHECK_*/*_OVERFLOW families, CPLX_RESULT says whether
	 the result is a complex value pairing value and overflow flag.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* The check is dead if the size is unknown (all-ones) or the
	       constant offset provably fits in the constant object size.  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the constant index is provably
	       within the constant bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW the value type is the element type of the
		 complex lhs; without a lhs there is nothing to fold to.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Both operands constant: compute the result and detect
		 overflow explicitly.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* UBSAN checks must stay when they would trigger.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Re-check representability of the constant in the
			 lhs element type.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* Conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into the complex lhs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4536
4537
4538 /* Return true whether NAME has a use on STMT. */
4539
4540 static bool
4541 has_use_on_stmt (tree name, gimple *stmt)
4542 {
4543 imm_use_iterator iter;
4544 use_operand_p use_p;
4545 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4546 if (USE_STMT (use_p) == stmt)
4547 return true;
4548 return false;
4549 }
4550
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* A comparison as first operand embeds two more SSA operands that need
     the same abnormal-PHI treatment.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only carry a comparison; anything else must be
	 reduced to "res != 0" or a constant truth value.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* The condition folded to a constant: force the edge taken.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result in SEQ and compare it against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments, rewrite the RHS in place when allowed; with
	 INPLACE the statement must not grow more operands.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, simplified arguments: update the call in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Last resort: build a fresh sequence computing the result into the
	 statement's lhs and splice it in, preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4691
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree (or address operand) to canonicalize; it is
   rewritten in place.  Returns true if anything changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* For an ADDR_EXPR canonicalize the object whose address is taken.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references down to the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4812
4813 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4814 distinguishes both cases. */
4815
4816 static bool
4817 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4818 {
4819 bool changed = false;
4820 gimple *stmt = gsi_stmt (*gsi);
4821 bool nowarning = gimple_no_warning_p (stmt);
4822 unsigned i;
4823 fold_defer_overflow_warnings ();
4824
4825 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4826 after propagation.
4827 ??? This shouldn't be done in generic folding but in the
4828 propagation helpers which also know whether an address was
4829 propagated.
4830 Also canonicalize operand order. */
4831 switch (gimple_code (stmt))
4832 {
4833 case GIMPLE_ASSIGN:
4834 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4835 {
4836 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4837 if ((REFERENCE_CLASS_P (*rhs)
4838 || TREE_CODE (*rhs) == ADDR_EXPR)
4839 && maybe_canonicalize_mem_ref_addr (rhs))
4840 changed = true;
4841 tree *lhs = gimple_assign_lhs_ptr (stmt);
4842 if (REFERENCE_CLASS_P (*lhs)
4843 && maybe_canonicalize_mem_ref_addr (lhs))
4844 changed = true;
4845 }
4846 else
4847 {
4848 /* Canonicalize operand order. */
4849 enum tree_code code = gimple_assign_rhs_code (stmt);
4850 if (TREE_CODE_CLASS (code) == tcc_comparison
4851 || commutative_tree_code (code)
4852 || commutative_ternary_tree_code (code))
4853 {
4854 tree rhs1 = gimple_assign_rhs1 (stmt);
4855 tree rhs2 = gimple_assign_rhs2 (stmt);
4856 if (tree_swap_operands_p (rhs1, rhs2))
4857 {
4858 gimple_assign_set_rhs1 (stmt, rhs2);
4859 gimple_assign_set_rhs2 (stmt, rhs1);
4860 if (TREE_CODE_CLASS (code) == tcc_comparison)
4861 gimple_assign_set_rhs_code (stmt,
4862 swap_tree_comparison (code));
4863 changed = true;
4864 }
4865 }
4866 }
4867 break;
4868 case GIMPLE_CALL:
4869 {
4870 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4871 {
4872 tree *arg = gimple_call_arg_ptr (stmt, i);
4873 if (REFERENCE_CLASS_P (*arg)
4874 && maybe_canonicalize_mem_ref_addr (arg))
4875 changed = true;
4876 }
4877 tree *lhs = gimple_call_lhs_ptr (stmt);
4878 if (*lhs
4879 && REFERENCE_CLASS_P (*lhs)
4880 && maybe_canonicalize_mem_ref_addr (lhs))
4881 changed = true;
4882 break;
4883 }
4884 case GIMPLE_ASM:
4885 {
4886 gasm *asm_stmt = as_a <gasm *> (stmt);
4887 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4888 {
4889 tree link = gimple_asm_output_op (asm_stmt, i);
4890 tree op = TREE_VALUE (link);
4891 if (REFERENCE_CLASS_P (op)
4892 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4893 changed = true;
4894 }
4895 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4896 {
4897 tree link = gimple_asm_input_op (asm_stmt, i);
4898 tree op = TREE_VALUE (link);
4899 if ((REFERENCE_CLASS_P (op)
4900 || TREE_CODE (op) == ADDR_EXPR)
4901 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4902 changed = true;
4903 }
4904 }
4905 break;
4906 case GIMPLE_DEBUG:
4907 if (gimple_debug_bind_p (stmt))
4908 {
4909 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4910 if (*val
4911 && (REFERENCE_CLASS_P (*val)
4912 || TREE_CODE (*val) == ADDR_EXPR)
4913 && maybe_canonicalize_mem_ref_addr (val))
4914 changed = true;
4915 }
4916 break;
4917 case GIMPLE_COND:
4918 {
4919 /* Canonicalize operand order. */
4920 tree lhs = gimple_cond_lhs (stmt);
4921 tree rhs = gimple_cond_rhs (stmt);
4922 if (tree_swap_operands_p (lhs, rhs))
4923 {
4924 gcond *gc = as_a <gcond *> (stmt);
4925 gimple_cond_set_lhs (gc, rhs);
4926 gimple_cond_set_rhs (gc, lhs);
4927 gimple_cond_set_code (gc,
4928 swap_tree_comparison (gimple_cond_code (gc)));
4929 changed = true;
4930 }
4931 }
4932 default:;
4933 }
4934
4935 /* Dispatch to pattern-based folding. */
4936 if (!inplace
4937 || is_gimple_assign (stmt)
4938 || gimple_code (stmt) == GIMPLE_COND)
4939 {
4940 gimple_seq seq = NULL;
4941 gimple_match_op res_op;
4942 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4943 valueize, valueize))
4944 {
4945 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4946 changed = true;
4947 else
4948 gimple_seq_discard (seq);
4949 }
4950 }
4951
4952 stmt = gsi_stmt (*gsi);
4953
4954 /* Fold the main computation performed by the statement. */
4955 switch (gimple_code (stmt))
4956 {
4957 case GIMPLE_ASSIGN:
4958 {
4959 /* Try to canonicalize for boolean-typed X the comparisons
4960 X == 0, X == 1, X != 0, and X != 1. */
4961 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4962 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4963 {
4964 tree lhs = gimple_assign_lhs (stmt);
4965 tree op1 = gimple_assign_rhs1 (stmt);
4966 tree op2 = gimple_assign_rhs2 (stmt);
4967 tree type = TREE_TYPE (op1);
4968
4969 /* Check whether the comparison operands are of the same boolean
4970 type as the result type is.
4971 Check that second operand is an integer-constant with value
4972 one or zero. */
4973 if (TREE_CODE (op2) == INTEGER_CST
4974 && (integer_zerop (op2) || integer_onep (op2))
4975 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4976 {
4977 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4978 bool is_logical_not = false;
4979
4980 /* X == 0 and X != 1 is a logical-not.of X
4981 X == 1 and X != 0 is X */
4982 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4983 || (cmp_code == NE_EXPR && integer_onep (op2)))
4984 is_logical_not = true;
4985
4986 if (is_logical_not == false)
4987 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4988 /* Only for one-bit precision typed X the transformation
4989 !X -> ~X is valied. */
4990 else if (TYPE_PRECISION (type) == 1)
4991 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4992 /* Otherwise we use !X -> X ^ 1. */
4993 else
4994 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4995 build_int_cst (type, 1));
4996 changed = true;
4997 break;
4998 }
4999 }
5000
5001 unsigned old_num_ops = gimple_num_ops (stmt);
5002 tree lhs = gimple_assign_lhs (stmt);
5003 tree new_rhs = fold_gimple_assign (gsi);
5004 if (new_rhs
5005 && !useless_type_conversion_p (TREE_TYPE (lhs),
5006 TREE_TYPE (new_rhs)))
5007 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5008 if (new_rhs
5009 && (!inplace
5010 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5011 {
5012 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5013 changed = true;
5014 }
5015 break;
5016 }
5017
5018 case GIMPLE_CALL:
5019 changed |= gimple_fold_call (gsi, inplace);
5020 break;
5021
5022 case GIMPLE_ASM:
5023 /* Fold *& in asm operands. */
5024 {
5025 gasm *asm_stmt = as_a <gasm *> (stmt);
5026 size_t noutputs;
5027 const char **oconstraints;
5028 const char *constraint;
5029 bool allows_mem, allows_reg;
5030
5031 noutputs = gimple_asm_noutputs (asm_stmt);
5032 oconstraints = XALLOCAVEC (const char *, noutputs);
5033
5034 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5035 {
5036 tree link = gimple_asm_output_op (asm_stmt, i);
5037 tree op = TREE_VALUE (link);
5038 oconstraints[i]
5039 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5040 if (REFERENCE_CLASS_P (op)
5041 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5042 {
5043 TREE_VALUE (link) = op;
5044 changed = true;
5045 }
5046 }
5047 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5048 {
5049 tree link = gimple_asm_input_op (asm_stmt, i);
5050 tree op = TREE_VALUE (link);
5051 constraint
5052 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5053 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5054 oconstraints, &allows_mem, &allows_reg);
5055 if (REFERENCE_CLASS_P (op)
5056 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5057 != NULL_TREE)
5058 {
5059 TREE_VALUE (link) = op;
5060 changed = true;
5061 }
5062 }
5063 }
5064 break;
5065
5066 case GIMPLE_DEBUG:
5067 if (gimple_debug_bind_p (stmt))
5068 {
5069 tree val = gimple_debug_bind_get_value (stmt);
5070 if (val
5071 && REFERENCE_CLASS_P (val))
5072 {
5073 tree tem = maybe_fold_reference (val, false);
5074 if (tem)
5075 {
5076 gimple_debug_bind_set_value (stmt, tem);
5077 changed = true;
5078 }
5079 }
5080 else if (val
5081 && TREE_CODE (val) == ADDR_EXPR)
5082 {
5083 tree ref = TREE_OPERAND (val, 0);
5084 tree tem = maybe_fold_reference (ref, false);
5085 if (tem)
5086 {
5087 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5088 gimple_debug_bind_set_value (stmt, tem);
5089 changed = true;
5090 }
5091 }
5092 }
5093 break;
5094
5095 case GIMPLE_RETURN:
5096 {
5097 greturn *ret_stmt = as_a<greturn *> (stmt);
5098 tree ret = gimple_return_retval(ret_stmt);
5099
5100 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5101 {
5102 tree val = valueize (ret);
5103 if (val && val != ret
5104 && may_propagate_copy (ret, val))
5105 {
5106 gimple_return_set_retval (ret_stmt, val);
5107 changed = true;
5108 }
5109 }
5110 }
5111 break;
5112
5113 default:;
5114 }
5115
5116 stmt = gsi_stmt (*gsi);
5117
5118 /* Fold *& on the lhs. */
5119 if (gimple_has_lhs (stmt))
5120 {
5121 tree lhs = gimple_get_lhs (stmt);
5122 if (lhs && REFERENCE_CLASS_P (lhs))
5123 {
5124 tree new_lhs = maybe_fold_reference (lhs, true);
5125 if (new_lhs)
5126 {
5127 gimple_set_lhs (stmt, new_lhs);
5128 changed = true;
5129 }
5130 }
5131 }
5132
5133 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5134 return changed;
5135 }
5136
5137 /* Valueziation callback that ends up not following SSA edges. */
5138
5139 tree
5140 no_follow_ssa_edges (tree)
5141 {
5142 return NULL_TREE;
5143 }
5144
5145 /* Valueization callback that ends up following single-use SSA edges only. */
5146
5147 tree
5148 follow_single_use_edges (tree val)
5149 {
5150 if (TREE_CODE (val) == SSA_NAME
5151 && !has_single_use (val))
5152 return NULL_TREE;
5153 return val;
5154 }
5155
5156 /* Valueization callback that follows all SSA edges. */
5157
5158 tree
5159 follow_all_ssa_edges (tree val)
5160 {
5161 return val;
5162 }
5163
5164 /* Fold the statement pointed to by GSI. In some cases, this function may
5165 replace the whole statement with a new one. Returns true iff folding
5166 makes any changes.
5167 The statement pointed to by GSI should be in valid gimple form but may
5168 be in unfolded state as resulting from for example constant propagation
5169 which can produce *&x = 0. */
5170
5171 bool
5172 fold_stmt (gimple_stmt_iterator *gsi)
5173 {
5174 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5175 }
5176
5177 bool
5178 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5179 {
5180 return fold_stmt_1 (gsi, false, valueize);
5181 }
5182
5183 /* Perform the minimal folding on statement *GSI. Only operations like
5184 *&x created by constant propagation are handled. The statement cannot
5185 be replaced with a new one. Return true if the statement was
5186 changed, false otherwise.
5187 The statement *GSI should be in valid gimple form but may
5188 be in unfolded state as resulting from for example constant propagation
5189 which can produce *&x = 0. */
5190
5191 bool
5192 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5193 {
5194 gimple *stmt = gsi_stmt (*gsi);
5195 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5196 gcc_assert (gsi_stmt (*gsi) == stmt);
5197 return changed;
5198 }
5199
5200 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5201 if EXPR is null or we don't know how.
5202 If non-null, the result always has boolean type. */
5203
5204 static tree
5205 canonicalize_bool (tree expr, bool invert)
5206 {
5207 if (!expr)
5208 return NULL_TREE;
5209 else if (invert)
5210 {
5211 if (integer_nonzerop (expr))
5212 return boolean_false_node;
5213 else if (integer_zerop (expr))
5214 return boolean_true_node;
5215 else if (TREE_CODE (expr) == SSA_NAME)
5216 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5217 build_int_cst (TREE_TYPE (expr), 0));
5218 else if (COMPARISON_CLASS_P (expr))
5219 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5220 boolean_type_node,
5221 TREE_OPERAND (expr, 0),
5222 TREE_OPERAND (expr, 1));
5223 else
5224 return NULL_TREE;
5225 }
5226 else
5227 {
5228 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5229 return expr;
5230 if (integer_nonzerop (expr))
5231 return boolean_true_node;
5232 else if (integer_zerop (expr))
5233 return boolean_false_node;
5234 else if (TREE_CODE (expr) == SSA_NAME)
5235 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5236 build_int_cst (TREE_TYPE (expr), 0));
5237 else if (COMPARISON_CLASS_P (expr))
5238 return fold_build2 (TREE_CODE (expr),
5239 boolean_type_node,
5240 TREE_OPERAND (expr, 0),
5241 TREE_OPERAND (expr, 1));
5242 else
5243 return NULL_TREE;
5244 }
5245 }
5246
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally the same comparison.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the statement defining EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same truth value as the
	     defining comparison, so recurse with its code unchanged.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted truth value, so
	     recurse with the inverted comparison code.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5304
5305 /* Check to see if two boolean expressions OP1 and OP2 are logically
5306 equivalent. */
5307
5308 static bool
5309 same_bool_result_p (const_tree op1, const_tree op2)
5310 {
5311 /* Simple cases first. */
5312 if (operand_equal_p (op1, op2, 0))
5313 return true;
5314
5315 /* Check the cases where at least one of the operands is a comparison.
5316 These are a bit smarter than operand_equal_p in that they apply some
5317 identifies on SSA_NAMEs. */
5318 if (COMPARISON_CLASS_P (op2)
5319 && same_bool_comparison_p (op1, TREE_CODE (op2),
5320 TREE_OPERAND (op2, 0),
5321 TREE_OPERAND (op2, 1)))
5322 return true;
5323 if (COMPARISON_CLASS_P (op1)
5324 && same_bool_comparison_p (op2, TREE_CODE (op1),
5325 TREE_OPERAND (op1, 0),
5326 TREE_OPERAND (op1, 1)))
5327 return true;
5328
5329 /* Default case. */
5330 return false;
5331 }
5332
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via DeMorgan
   rewrites, so all six prototypes are needed up front.  */

/* AND family.  */
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
/* OR family.  */
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5353
5354 /* Helper function for and_comparisons_1: try to simplify the AND of the
5355 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5356 If INVERT is true, invert the value of the VAR before doing the AND.
5357 Return NULL_EXPR if we can't simplify this to a single expression. */
5358
5359 static tree
5360 and_var_with_comparison (tree var, bool invert,
5361 enum tree_code code2, tree op2a, tree op2b)
5362 {
5363 tree t;
5364 gimple *stmt = SSA_NAME_DEF_STMT (var);
5365
5366 /* We can only deal with variables whose definitions are assignments. */
5367 if (!is_gimple_assign (stmt))
5368 return NULL_TREE;
5369
5370 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5371 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5372 Then we only have to consider the simpler non-inverted cases. */
5373 if (invert)
5374 t = or_var_with_comparison_1 (stmt,
5375 invert_tree_comparison (code2, false),
5376 op2a, op2b);
5377 else
5378 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5379 return canonicalize_bool (t, invert);
5380 }
5381
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* When (op2a code2 op2b) tests VAR itself for truth/falsehood,
     these record op2a so the identities below can fire.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == nonzero) is a truth test of op2a.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != nonzero) is a falsehood test of op2a.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of the first inner operand ANDed with the
	 comparison, kept until the second operand is examined.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5544
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0, == 0, > 0 as op1b compares to op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.
	 Note the comparison senses are mirrored here because CMP
	 still describes op1b relative to op2b.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the != test is implied by the other comparison,
	     the other comparison alone is the result.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges: the conjunction is unsatisfiable.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverted value.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A false argument makes the AND false for that
			 path; all paths must then agree on false.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      /* A true argument reduces the AND to the second
			 comparison for that path.  */
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5798
5799 /* Try to simplify the AND of two comparisons, specified by
5800 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5801 If this can be simplified to a single expression (without requiring
5802 introducing more SSA variables to hold intermediate values),
5803 return the resulting tree. Otherwise return NULL_TREE.
5804 If the result expression is non-null, it has boolean type. */
5805
5806 tree
5807 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5808 enum tree_code code2, tree op2a, tree op2b)
5809 {
5810 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5811 if (t)
5812 return t;
5813 else
5814 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5815 }
5816
5817 /* Helper function for or_comparisons_1: try to simplify the OR of the
5818 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5819 If INVERT is true, invert the value of VAR before doing the OR.
5820 Return NULL_EXPR if we can't simplify this to a single expression. */
5821
5822 static tree
5823 or_var_with_comparison (tree var, bool invert,
5824 enum tree_code code2, tree op2a, tree op2b)
5825 {
5826 tree t;
5827 gimple *stmt = SSA_NAME_DEF_STMT (var);
5828
5829 /* We can only deal with variables whose definitions are assignments. */
5830 if (!is_gimple_assign (stmt))
5831 return NULL_TREE;
5832
5833 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5834 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5835 Then we only have to consider the simpler non-inverted cases. */
5836 if (invert)
5837 t = and_var_with_comparison_1 (stmt,
5838 invert_tree_comparison (code2, false),
5839 op2a, op2b);
5840 else
5841 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5842 return canonicalize_bool (t, invert);
5843 }
5844
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* When (op2a code2 op2b) tests VAR itself for truth/falsehood,
     these record op2a so the identities below can fire.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == nonzero) is a truth test of op2a.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != nonzero) is a falsehood test of op2a.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of the first inner operand ORed with the
	 comparison, kept until the second operand is examined.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6008
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than,
	 equal to, or greater than OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* Note the sense of CMP is inverted relative to the switch
	     above because OP1B and OP2B exchange roles here.  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  /* As above, the sense of CMP is inverted here.  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) OR (x > C) becomes x != C.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for
	 falsehood rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6262
6263 /* Try to simplify the OR of two comparisons, specified by
6264 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6265 If this can be simplified to a single expression (without requiring
6266 introducing more SSA variables to hold intermediate values),
6267 return the resulting tree. Otherwise return NULL_TREE.
6268 If the result expression is non-null, it has boolean type. */
6269
6270 tree
6271 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6272 enum tree_code code2, tree op2a, tree op2b)
6273 {
6274 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6275 if (t)
6276 return t;
6277 else
6278 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6279 }
6280
6281
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to hand-written folding per statement / RHS kind.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* Fold a fully-initialized vector CONSTRUCTOR to a
		 VECTOR_CST when every valueized element is constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Rewrite *&x into an invariant MEM_REF before
			 handing off to the aggregate folder.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    /* Unary cases are expected to be covered by gimple_simplify
	       above; nothing more to try here.  */
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put a possible constant into the second operand.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that argument is constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Simplifications valid even when one operand is
		   non-constant, and known not to overflow.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Only fold the UBSAN check away when the result provably
	       does not overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6586
6587 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6588 Returns NULL_TREE if folding to a constant is not possible, otherwise
6589 returns a constant according to is_gimple_min_invariant. */
6590
6591 tree
6592 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6593 {
6594 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6595 if (res && is_gimple_min_invariant (res))
6596 return res;
6597 return NULL_TREE;
6598 }
6599
6600
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET, then
	 strip the dereference so we look at the pointed-to object.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Give up on variable-sized accesses.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6679
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access not larger than the size of an array element.  */
  if (type
      && (!TYPE_SIZE_UNIT (type)
	  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	  || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
	  || elt_size == 0))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6760
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to; in this case.  Increment *SUBOFF by the bit offset
   of the accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the constructor elements looking for the field that
     overlaps the requested access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6860
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Otherwise recurse into the CONSTRUCTOR, dispatching on whether it
     initializes an array/vector or a record/union.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
6933
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold a volatile reference; the load must stay.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literals can be folded directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Compute IDX - LOW_BOUND, sign-extended to the precision
		 of the index type.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
7051
/* Fold a constant aggregate reference T without SSA name valueization;
   convenience wrapper around fold_const_aggregate_ref_1.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
7057
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Turn the byte OFFSET into a bit offset and add the slot TOKEN's
     bit offset within the table.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7167
7168 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7169 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7170 KNOWN_BINFO carries the binfo describing the true type of
7171 OBJ_TYPE_REF_OBJECT(REF).
7172 Set CAN_REFER if non-NULL to false if method
7173 is not referable or if the virtual table is ill-formed (such as rewriten
7174 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7175
7176 tree
7177 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7178 bool *can_refer)
7179 {
7180 unsigned HOST_WIDE_INT offset;
7181 tree v;
7182
7183 v = BINFO_VTABLE (known_binfo);
7184 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7185 if (!v)
7186 return NULL_TREE;
7187
7188 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7189 {
7190 if (can_refer)
7191 *can_refer = false;
7192 return NULL_TREE;
7193 }
7194 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7195 }
7196
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only fold genuine pointer dereferences; for ref-all pointers a
     folded reference would lose the may-alias-everything property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only fold when the lower bound is a constant index.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that do not run off the end of the
	     vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must address exactly the imaginary part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner dereference if possible.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7317
7318 /* Return true if CODE is an operation that when operating on signed
7319 integer types involves undefined behavior on overflow and the
7320 operation can be expressed with unsigned arithmetic. */
7321
7322 bool
7323 arith_code_with_undefined_signed_overflow (tree_code code)
7324 {
7325 switch (code)
7326 {
7327 case PLUS_EXPR:
7328 case MINUS_EXPR:
7329 case MULT_EXPR:
7330 case NEGATE_EXPR:
7331 case POINTER_PLUS_EXPR:
7332 return true;
7333 default:
7334 return false;
7335 }
7336 }
7337
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the LHS type; the operation is carried
     out in this type where overflow wraps.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1..n-1; op 0 is the LHS) to TYPE,
     accumulating the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7374
7375
7376 /* The valueization hook we use for the gimple_build API simplification.
7377 This makes us match fold_buildN behavior by only combining with
7378 statements in the sequence(s) we are currently building. */
7379
7380 static tree
7381 gimple_build_valueize (tree op)
7382 {
7383 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7384 return op;
7385 return NULL_TREE;
7386 }
7387
7388 /* Build the expression CODE OP0 of type TYPE with location LOC,
7389 simplifying it first if possible. Returns the built
7390 expression value and appends statements possibly defining it
7391 to SEQ. */
7392
7393 tree
7394 gimple_build (gimple_seq *seq, location_t loc,
7395 enum tree_code code, tree type, tree op0)
7396 {
7397 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7398 if (!res)
7399 {
7400 res = create_tmp_reg_or_ssa_name (type);
7401 gimple *stmt;
7402 if (code == REALPART_EXPR
7403 || code == IMAGPART_EXPR
7404 || code == VIEW_CONVERT_EXPR)
7405 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7406 else
7407 stmt = gimple_build_assign (res, code, op0);
7408 gimple_set_location (stmt, loc);
7409 gimple_seq_add_stmt_without_update (seq, stmt);
7410 }
7411 return res;
7412 }
7413
7414 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7415 simplifying it first if possible. Returns the built
7416 expression value and appends statements possibly defining it
7417 to SEQ. */
7418
7419 tree
7420 gimple_build (gimple_seq *seq, location_t loc,
7421 enum tree_code code, tree type, tree op0, tree op1)
7422 {
7423 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7424 if (!res)
7425 {
7426 res = create_tmp_reg_or_ssa_name (type);
7427 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7428 gimple_set_location (stmt, loc);
7429 gimple_seq_add_stmt_without_update (seq, stmt);
7430 }
7431 return res;
7432 }
7433
7434 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7435 simplifying it first if possible. Returns the built
7436 expression value and appends statements possibly defining it
7437 to SEQ. */
7438
7439 tree
7440 gimple_build (gimple_seq *seq, location_t loc,
7441 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7442 {
7443 tree res = gimple_simplify (code, type, op0, op1, op2,
7444 seq, gimple_build_valueize);
7445 if (!res)
7446 {
7447 res = create_tmp_reg_or_ssa_name (type);
7448 gimple *stmt;
7449 if (code == BIT_FIELD_REF)
7450 stmt = gimple_build_assign (res, code,
7451 build3 (code, type, op0, op1, op2));
7452 else
7453 stmt = gimple_build_assign (res, code, op0, op1, op2);
7454 gimple_set_location (stmt, loc);
7455 gimple_seq_add_stmt_without_update (seq, stmt);
7456 }
7457 return res;
7458 }
7459
7460 /* Build the call FN (ARG0) with a result of type TYPE
7461 (or no result if TYPE is void) with location LOC,
7462 simplifying it first if possible. Returns the built
7463 expression value (or NULL_TREE if TYPE is void) and appends
7464 statements possibly defining it to SEQ. */
7465
7466 tree
7467 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7468 tree type, tree arg0)
7469 {
7470 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7471 if (!res)
7472 {
7473 gcall *stmt;
7474 if (internal_fn_p (fn))
7475 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7476 else
7477 {
7478 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7479 stmt = gimple_build_call (decl, 1, arg0);
7480 }
7481 if (!VOID_TYPE_P (type))
7482 {
7483 res = create_tmp_reg_or_ssa_name (type);
7484 gimple_call_set_lhs (stmt, res);
7485 }
7486 gimple_set_location (stmt, loc);
7487 gimple_seq_add_stmt_without_update (seq, stmt);
7488 }
7489 return res;
7490 }
7491
7492 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7493 (or no result if TYPE is void) with location LOC,
7494 simplifying it first if possible. Returns the built
7495 expression value (or NULL_TREE if TYPE is void) and appends
7496 statements possibly defining it to SEQ. */
7497
7498 tree
7499 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7500 tree type, tree arg0, tree arg1)
7501 {
7502 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7503 if (!res)
7504 {
7505 gcall *stmt;
7506 if (internal_fn_p (fn))
7507 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7508 else
7509 {
7510 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7511 stmt = gimple_build_call (decl, 2, arg0, arg1);
7512 }
7513 if (!VOID_TYPE_P (type))
7514 {
7515 res = create_tmp_reg_or_ssa_name (type);
7516 gimple_call_set_lhs (stmt, res);
7517 }
7518 gimple_set_location (stmt, loc);
7519 gimple_seq_add_stmt_without_update (seq, stmt);
7520 }
7521 return res;
7522 }
7523
7524 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7525 (or no result if TYPE is void) with location LOC,
7526 simplifying it first if possible. Returns the built
7527 expression value (or NULL_TREE if TYPE is void) and appends
7528 statements possibly defining it to SEQ. */
7529
7530 tree
7531 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7532 tree type, tree arg0, tree arg1, tree arg2)
7533 {
7534 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7535 seq, gimple_build_valueize);
7536 if (!res)
7537 {
7538 gcall *stmt;
7539 if (internal_fn_p (fn))
7540 stmt = gimple_build_call_internal (as_internal_fn (fn),
7541 3, arg0, arg1, arg2);
7542 else
7543 {
7544 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7545 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7546 }
7547 if (!VOID_TYPE_P (type))
7548 {
7549 res = create_tmp_reg_or_ssa_name (type);
7550 gimple_call_set_lhs (stmt, res);
7551 }
7552 gimple_set_location (stmt, loc);
7553 gimple_seq_add_stmt_without_update (seq, stmt);
7554 }
7555 return res;
7556 }
7557
7558 /* Build the conversion (TYPE) OP with a result of type TYPE
7559 with location LOC if such conversion is neccesary in GIMPLE,
7560 simplifying it first.
7561 Returns the built expression value and appends
7562 statements possibly defining it to SEQ. */
7563
7564 tree
7565 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7566 {
7567 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7568 return op;
7569 return gimple_build (seq, loc, NOP_EXPR, type, op);
7570 }
7571
7572 /* Build the conversion (ptrofftype) OP with a result of a type
7573 compatible with ptrofftype with location LOC if such conversion
7574 is neccesary in GIMPLE, simplifying it first.
7575 Returns the built expression value and appends
7576 statements possibly defining it to SEQ. */
7577
7578 tree
7579 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7580 {
7581 if (ptrofftype_p (TREE_TYPE (op)))
7582 return op;
7583 return gimple_convert (seq, loc, sizetype, op);
7584 }
7585
7586 /* Build a vector of type TYPE in which each element has the value OP.
7587 Return a gimple value for the result, appending any new statements
7588 to SEQ. */
7589
7590 tree
7591 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7592 tree op)
7593 {
7594 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7595 && !CONSTANT_CLASS_P (op))
7596 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7597
7598 tree res, vec = build_vector_from_val (type, op);
7599 if (is_gimple_val (vec))
7600 return vec;
7601 if (gimple_in_ssa_p (cfun))
7602 res = make_ssa_name (type);
7603 else
7604 res = create_tmp_reg (type);
7605 gimple *stmt = gimple_build_assign (res, vec);
7606 gimple_set_location (stmt, loc);
7607 gimple_seq_add_stmt_without_update (seq, stmt);
7608 return res;
7609 }
7610
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* If any encoded element is non-constant, fall back to emitting a
     CONSTRUCTOR assignment for the full vector.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: I is deliberately reused here to walk all NELTS
	   elements, not just the encoded ones.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant; build the VECTOR_CST directly.  */
  return builder->build ();
}
7648
7649 /* Return true if the result of assignment STMT is known to be non-negative.
7650 If the return value is based on the assumption that signed overflow is
7651 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7652 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7653
7654 static bool
7655 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7656 int depth)
7657 {
7658 enum tree_code code = gimple_assign_rhs_code (stmt);
7659 switch (get_gimple_rhs_class (code))
7660 {
7661 case GIMPLE_UNARY_RHS:
7662 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7663 gimple_expr_type (stmt),
7664 gimple_assign_rhs1 (stmt),
7665 strict_overflow_p, depth);
7666 case GIMPLE_BINARY_RHS:
7667 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7668 gimple_expr_type (stmt),
7669 gimple_assign_rhs1 (stmt),
7670 gimple_assign_rhs2 (stmt),
7671 strict_overflow_p, depth);
7672 case GIMPLE_TERNARY_RHS:
7673 return false;
7674 case GIMPLE_SINGLE_RHS:
7675 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7676 strict_overflow_p, depth);
7677 case GIMPLE_INVALID_RHS:
7678 break;
7679 }
7680 gcc_unreachable ();
7681 }
7682
7683 /* Return true if return value of call STMT is known to be non-negative.
7684 If the return value is based on the assumption that signed overflow is
7685 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7686 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7687
7688 static bool
7689 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7690 int depth)
7691 {
7692 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7693 gimple_call_arg (stmt, 0) : NULL_TREE;
7694 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7695 gimple_call_arg (stmt, 1) : NULL_TREE;
7696
7697 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7698 gimple_call_combined_fn (stmt),
7699 arg0,
7700 arg1,
7701 strict_overflow_p, depth);
7702 }
7703
7704 /* Return true if return value of call STMT is known to be non-negative.
7705 If the return value is based on the assumption that signed overflow is
7706 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7707 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7708
7709 static bool
7710 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7711 int depth)
7712 {
7713 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7714 {
7715 tree arg = gimple_phi_arg_def (stmt, i);
7716 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7717 return false;
7718 }
7719 return true;
7720 }
7721
7722 /* Return true if STMT is known to compute a non-negative value.
7723 If the return value is based on the assumption that signed overflow is
7724 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7725 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7726
7727 bool
7728 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7729 int depth)
7730 {
7731 switch (gimple_code (stmt))
7732 {
7733 case GIMPLE_ASSIGN:
7734 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7735 depth);
7736 case GIMPLE_CALL:
7737 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7738 depth);
7739 case GIMPLE_PHI:
7740 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7741 depth);
7742 default:
7743 return false;
7744 }
7745 }
7746
7747 /* Return true if the floating-point value computed by assignment STMT
7748 is known to have an integer value. We also allow +Inf, -Inf and NaN
7749 to be considered integer values. Return false for signaling NaN.
7750
7751 DEPTH is the current nesting depth of the query. */
7752
7753 static bool
7754 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7755 {
7756 enum tree_code code = gimple_assign_rhs_code (stmt);
7757 switch (get_gimple_rhs_class (code))
7758 {
7759 case GIMPLE_UNARY_RHS:
7760 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7761 gimple_assign_rhs1 (stmt), depth);
7762 case GIMPLE_BINARY_RHS:
7763 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7764 gimple_assign_rhs1 (stmt),
7765 gimple_assign_rhs2 (stmt), depth);
7766 case GIMPLE_TERNARY_RHS:
7767 return false;
7768 case GIMPLE_SINGLE_RHS:
7769 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7770 case GIMPLE_INVALID_RHS:
7771 break;
7772 }
7773 gcc_unreachable ();
7774 }
7775
7776 /* Return true if the floating-point value computed by call STMT is known
7777 to have an integer value. We also allow +Inf, -Inf and NaN to be
7778 considered integer values. Return false for signaling NaN.
7779
7780 DEPTH is the current nesting depth of the query. */
7781
7782 static bool
7783 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7784 {
7785 tree arg0 = (gimple_call_num_args (stmt) > 0
7786 ? gimple_call_arg (stmt, 0)
7787 : NULL_TREE);
7788 tree arg1 = (gimple_call_num_args (stmt) > 1
7789 ? gimple_call_arg (stmt, 1)
7790 : NULL_TREE);
7791 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7792 arg0, arg1, depth);
7793 }
7794
7795 /* Return true if the floating-point result of phi STMT is known to have
7796 an integer value. We also allow +Inf, -Inf and NaN to be considered
7797 integer values. Return false for signaling NaN.
7798
7799 DEPTH is the current nesting depth of the query. */
7800
7801 static bool
7802 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7803 {
7804 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7805 {
7806 tree arg = gimple_phi_arg_def (stmt, i);
7807 if (!integer_valued_real_single_p (arg, depth + 1))
7808 return false;
7809 }
7810 return true;
7811 }
7812
7813 /* Return true if the floating-point value computed by STMT is known
7814 to have an integer value. We also allow +Inf, -Inf and NaN to be
7815 considered integer values. Return false for signaling NaN.
7816
7817 DEPTH is the current nesting depth of the query. */
7818
7819 bool
7820 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7821 {
7822 switch (gimple_code (stmt))
7823 {
7824 case GIMPLE_ASSIGN:
7825 return gimple_assign_integer_valued_real_p (stmt, depth);
7826 case GIMPLE_CALL:
7827 return gimple_call_integer_valued_real_p (stmt, depth);
7828 case GIMPLE_PHI:
7829 return gimple_phi_integer_valued_real_p (stmt, depth);
7830 default:
7831 return false;
7832 }
7833 }