1836927e28903b613bce3b3626eceac319667c69
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58
59
60 /* Return true if T is a constant and the value cast to a target char
61 can be represented by a host char.
62 Store the casted char constant in *P if so. */
63
64 static bool
65 target_char_cst_p (tree t, char *p)
66 {
67 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
68 return false;
69
70 *p = (char)tree_to_uhwi (t);
71 return true;
72 }
73
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. cloned-away) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      /* A function inlined into another has no standalone body to refer to.  */
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
173
174 /* Create a temporary for TYPE for a statement STMT. If the current function
175 is in SSA form, a SSA name is created. Otherwise a temporary register
176 is made. */
177
178 static tree
179 create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
180 {
181 if (gimple_in_ssa_p (cfun))
182 return make_ssa_name (type, stmt);
183 else
184 return create_tmp_reg (type);
185 }
186
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if the referenced decl
   cannot be used from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite invariant PTR p+ CST into &MEM[ptr + CST] so the result is
     a single ADDR_EXPR, which is what min-invariant checking expects.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      /* Replace a compound literal by the anonymous decl backing it.  */
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced symbol cannot be used from here
	 (see can_refer_decl_in_current_unit_p for the reasons).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address; mark the variable addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid min-invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
248
249 /* If SYM is a constant variable with known value, return the value.
250 NULL_TREE is returned otherwise. */
251
252 tree
253 get_symbol_constant_value (tree sym)
254 {
255 tree val = ctor_for_folding (sym);
256 if (val != error_mark_node)
257 {
258 if (val)
259 {
260 val = canonicalize_constructor_val (unshare_expr (val), sym);
261 if (val && is_gimple_min_invariant (val))
262 return val;
263 else
264 return NULL_TREE;
265 }
266 /* Variables declared 'const' without an initializer
267 have zero as the initializer if they may not be
268 overridden at link or run time. */
269 if (!val
270 && is_gimple_reg_type (TREE_TYPE (sym)))
271 return build_zero_cst (TREE_TYPE (sym));
272 }
273
274 return NULL_TREE;
275 }
276
277
278
279 /* Subroutine of fold_stmt. We perform several simplifications of the
280 memory reference tree EXPR and make sure to re-gimplify them properly
281 after propagation of constant addresses. IS_LHS is true if the
282 reference is supposed to be an lvalue. */
283
284 static tree
285 maybe_fold_reference (tree expr, bool is_lhs)
286 {
287 tree result;
288
289 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
290 || TREE_CODE (expr) == REALPART_EXPR
291 || TREE_CODE (expr) == IMAGPART_EXPR)
292 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
293 return fold_unary_loc (EXPR_LOCATION (expr),
294 TREE_CODE (expr),
295 TREE_TYPE (expr),
296 TREE_OPERAND (expr, 0));
297 else if (TREE_CODE (expr) == BIT_FIELD_REF
298 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
299 return fold_ternary_loc (EXPR_LOCATION (expr),
300 TREE_CODE (expr),
301 TREE_TYPE (expr),
302 TREE_OPERAND (expr, 0),
303 TREE_OPERAND (expr, 1),
304 TREE_OPERAND (expr, 2));
305
306 if (!is_lhs
307 && (result = fold_const_aggregate_ref (expr))
308 && is_gimple_min_invariant (result))
309 return result;
310
311 return NULL_TREE;
312 }
313
314
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to resolve a virtual method address to a direct
	       function address via type inheritance analysis.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] folds to just p (with a conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
454
455
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Statements that may store need a VDEF: aggregate assignments
	 and calls that are not const/pure/novops/noreturn.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the replaced
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
527
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplification below needs a context; pop_gimplify_context is
     called on every exit path.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR to a value and assign it to the call's LHS as the
	 last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
583
584
585 /* Replace the call at *GSI with the gimple value VAL. */
586
587 static void
588 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
589 {
590 gimple *stmt = gsi_stmt (*gsi);
591 tree lhs = gimple_call_lhs (stmt);
592 gimple *repl;
593 if (lhs)
594 {
595 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
596 val = fold_convert (TREE_TYPE (lhs), val);
597 repl = gimple_build_assign (lhs, val);
598 }
599 else
600 repl = gimple_build_nop ();
601 tree vdef = gimple_vdef (stmt);
602 if (vdef && TREE_CODE (vdef) == SSA_NAME)
603 {
604 unlink_stmt_vdef (stmt);
605 release_ssa_name (vdef);
606 }
607 gsi_replace (gsi, repl, false);
608 }
609
610 /* Replace the call at *GSI with the new call REPL and fold that
611 again. */
612
613 static void
614 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
615 {
616 gimple *stmt = gsi_stmt (*gsi);
617 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
618 gimple_set_location (repl, gimple_location (stmt));
619 if (gimple_vdef (stmt)
620 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
621 {
622 gimple_set_vdef (repl, gimple_vdef (stmt));
623 gimple_set_vuse (repl, gimple_vuse (stmt));
624 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
625 }
626 gsi_replace (gsi, repl, false);
627 fold_stmt (gsi);
628 }
629
630 /* Return true if VAR is a VAR_DECL or a component thereof. */
631
632 static bool
633 var_decl_component_p (tree var)
634 {
635 tree inner = var;
636 while (handled_component_p (inner))
637 inner = TREE_OPERAND (inner, 0);
638 return SSA_VAR_P (inner);
639 }
640
641 /* Fold function call to builtin mem{{,p}cpy,move}. Return
642 false if no simplification can be made.
643 If ENDP is 0, return DEST (like memcpy).
644 If ENDP is 1, return DEST+LEN (like mempcpy).
645 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
646 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
647 (memmove). */
648
649 static bool
650 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
651 tree dest, tree src, int endp)
652 {
653 gimple *stmt = gsi_stmt (*gsi);
654 tree lhs = gimple_call_lhs (stmt);
655 tree len = gimple_call_arg (stmt, 2);
656 tree destvar, srcvar;
657 location_t loc = gimple_location (stmt);
658
659 /* If the LEN parameter is zero, return DEST. */
660 if (integer_zerop (len))
661 {
662 gimple *repl;
663 if (gimple_call_lhs (stmt))
664 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
665 else
666 repl = gimple_build_nop ();
667 tree vdef = gimple_vdef (stmt);
668 if (vdef && TREE_CODE (vdef) == SSA_NAME)
669 {
670 unlink_stmt_vdef (stmt);
671 release_ssa_name (vdef);
672 }
673 gsi_replace (gsi, repl, false);
674 return true;
675 }
676
677 /* If SRC and DEST are the same (and not volatile), return
678 DEST{,+LEN,+LEN-1}. */
679 if (operand_equal_p (src, dest, 0))
680 {
681 unlink_stmt_vdef (stmt);
682 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
683 release_ssa_name (gimple_vdef (stmt));
684 if (!lhs)
685 {
686 gsi_replace (gsi, gimple_build_nop (), false);
687 return true;
688 }
689 goto done;
690 }
691 else
692 {
693 tree srctype, desttype;
694 unsigned int src_align, dest_align;
695 tree off0;
696
697 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
698 pointers as wide integer) and also may result in huge function
699 size because of inlined bounds copy. Thus don't inline for
700 functions we want to instrument. */
701 if (flag_check_pointer_bounds
702 && chkp_instrumentable_p (cfun->decl)
703 /* Even if data may contain pointers we can inline if copy
704 less than a pointer size. */
705 && (!tree_fits_uhwi_p (len)
706 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
707 return false;
708
709 /* Build accesses at offset zero with a ref-all character type. */
710 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
711 ptr_mode, true), 0);
712
713 /* If we can perform the copy efficiently with first doing all loads
714 and then all stores inline it that way. Currently efficiently
715 means that we can load all the memory into a single integer
716 register which is what MOVE_MAX gives us. */
717 src_align = get_pointer_alignment (src);
718 dest_align = get_pointer_alignment (dest);
719 if (tree_fits_uhwi_p (len)
720 && compare_tree_int (len, MOVE_MAX) <= 0
721 /* ??? Don't transform copies from strings with known length this
722 confuses the tree-ssa-strlen.c. This doesn't handle
723 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
724 reason. */
725 && !c_strlen (src, 2))
726 {
727 unsigned ilen = tree_to_uhwi (len);
728 if (pow2p_hwi (ilen))
729 {
730 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
731 if (type
732 && TYPE_MODE (type) != BLKmode
733 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
734 == ilen * 8)
735 /* If the destination pointer is not aligned we must be able
736 to emit an unaligned store. */
737 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
738 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
739 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
740 != CODE_FOR_nothing)))
741 {
742 tree srctype = type;
743 tree desttype = type;
744 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
745 srctype = build_aligned_type (type, src_align);
746 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
747 tree tem = fold_const_aggregate_ref (srcmem);
748 if (tem)
749 srcmem = tem;
750 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
751 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
752 src_align)
753 && (optab_handler (movmisalign_optab,
754 TYPE_MODE (type))
755 == CODE_FOR_nothing))
756 srcmem = NULL_TREE;
757 if (srcmem)
758 {
759 gimple *new_stmt;
760 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
761 {
762 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
763 srcmem
764 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
765 new_stmt);
766 gimple_assign_set_lhs (new_stmt, srcmem);
767 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
768 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
769 }
770 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
771 desttype = build_aligned_type (type, dest_align);
772 new_stmt
773 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
774 dest, off0),
775 srcmem);
776 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
777 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
778 if (gimple_vdef (new_stmt)
779 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
780 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
781 if (!lhs)
782 {
783 gsi_replace (gsi, new_stmt, false);
784 return true;
785 }
786 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
787 goto done;
788 }
789 }
790 }
791 }
792
793 if (endp == 3)
794 {
795 /* Both DEST and SRC must be pointer types.
796 ??? This is what old code did. Is the testing for pointer types
797 really mandatory?
798
799 If either SRC is readonly or length is 1, we can use memcpy. */
800 if (!dest_align || !src_align)
801 return false;
802 if (readonly_data_expr (src)
803 || (tree_fits_uhwi_p (len)
804 && (MIN (src_align, dest_align) / BITS_PER_UNIT
805 >= tree_to_uhwi (len))))
806 {
807 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
808 if (!fn)
809 return false;
810 gimple_call_set_fndecl (stmt, fn);
811 gimple_call_set_arg (stmt, 0, dest);
812 gimple_call_set_arg (stmt, 1, src);
813 fold_stmt (gsi);
814 return true;
815 }
816
817 /* If *src and *dest can't overlap, optimize into memcpy as well. */
818 if (TREE_CODE (src) == ADDR_EXPR
819 && TREE_CODE (dest) == ADDR_EXPR)
820 {
821 tree src_base, dest_base, fn;
822 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
823 HOST_WIDE_INT maxsize;
824
825 srcvar = TREE_OPERAND (src, 0);
826 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
827 if (src_base == NULL)
828 src_base = srcvar;
829 destvar = TREE_OPERAND (dest, 0);
830 dest_base = get_addr_base_and_unit_offset (destvar,
831 &dest_offset);
832 if (dest_base == NULL)
833 dest_base = destvar;
834 if (tree_fits_uhwi_p (len))
835 maxsize = tree_to_uhwi (len);
836 else
837 maxsize = -1;
838 if (SSA_VAR_P (src_base)
839 && SSA_VAR_P (dest_base))
840 {
841 if (operand_equal_p (src_base, dest_base, 0)
842 && ranges_overlap_p (src_offset, maxsize,
843 dest_offset, maxsize))
844 return false;
845 }
846 else if (TREE_CODE (src_base) == MEM_REF
847 && TREE_CODE (dest_base) == MEM_REF)
848 {
849 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
850 TREE_OPERAND (dest_base, 0), 0))
851 return false;
852 offset_int off = mem_ref_offset (src_base) + src_offset;
853 if (!wi::fits_shwi_p (off))
854 return false;
855 src_offset = off.to_shwi ();
856
857 off = mem_ref_offset (dest_base) + dest_offset;
858 if (!wi::fits_shwi_p (off))
859 return false;
860 dest_offset = off.to_shwi ();
861 if (ranges_overlap_p (src_offset, maxsize,
862 dest_offset, maxsize))
863 return false;
864 }
865 else
866 return false;
867
868 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
869 if (!fn)
870 return false;
871 gimple_call_set_fndecl (stmt, fn);
872 gimple_call_set_arg (stmt, 0, dest);
873 gimple_call_set_arg (stmt, 1, src);
874 fold_stmt (gsi);
875 return true;
876 }
877
878 /* If the destination and source do not alias optimize into
879 memcpy as well. */
880 if ((is_gimple_min_invariant (dest)
881 || TREE_CODE (dest) == SSA_NAME)
882 && (is_gimple_min_invariant (src)
883 || TREE_CODE (src) == SSA_NAME))
884 {
885 ao_ref destr, srcr;
886 ao_ref_init_from_ptr_and_size (&destr, dest, len);
887 ao_ref_init_from_ptr_and_size (&srcr, src, len);
888 if (!refs_may_alias_p_1 (&destr, &srcr, false))
889 {
890 tree fn;
891 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
892 if (!fn)
893 return false;
894 gimple_call_set_fndecl (stmt, fn);
895 gimple_call_set_arg (stmt, 0, dest);
896 gimple_call_set_arg (stmt, 1, src);
897 fold_stmt (gsi);
898 return true;
899 }
900 }
901
902 return false;
903 }
904
905 if (!tree_fits_shwi_p (len))
906 return false;
907 /* FIXME:
908 This logic lose for arguments like (type *)malloc (sizeof (type)),
909 since we strip the casts of up to VOID return value from malloc.
910 Perhaps we ought to inherit type from non-VOID argument here? */
911 STRIP_NOPS (src);
912 STRIP_NOPS (dest);
913 if (!POINTER_TYPE_P (TREE_TYPE (src))
914 || !POINTER_TYPE_P (TREE_TYPE (dest)))
915 return false;
916 /* In the following try to find a type that is most natural to be
917 used for the memcpy source and destination and that allows
918 the most optimization when memcpy is turned into a plain assignment
919 using that type. In theory we could always use a char[len] type
920 but that only gains us that the destination and source possibly
921 no longer will have their address taken. */
922 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
923 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
924 {
925 tree tem = TREE_OPERAND (src, 0);
926 STRIP_NOPS (tem);
927 if (tem != TREE_OPERAND (src, 0))
928 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
929 }
930 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
931 {
932 tree tem = TREE_OPERAND (dest, 0);
933 STRIP_NOPS (tem);
934 if (tem != TREE_OPERAND (dest, 0))
935 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
936 }
937 srctype = TREE_TYPE (TREE_TYPE (src));
938 if (TREE_CODE (srctype) == ARRAY_TYPE
939 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
940 {
941 srctype = TREE_TYPE (srctype);
942 STRIP_NOPS (src);
943 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
944 }
945 desttype = TREE_TYPE (TREE_TYPE (dest));
946 if (TREE_CODE (desttype) == ARRAY_TYPE
947 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
948 {
949 desttype = TREE_TYPE (desttype);
950 STRIP_NOPS (dest);
951 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
952 }
953 if (TREE_ADDRESSABLE (srctype)
954 || TREE_ADDRESSABLE (desttype))
955 return false;
956
957 /* Make sure we are not copying using a floating-point mode or
958 a type whose size possibly does not match its precision. */
959 if (FLOAT_MODE_P (TYPE_MODE (desttype))
960 || TREE_CODE (desttype) == BOOLEAN_TYPE
961 || TREE_CODE (desttype) == ENUMERAL_TYPE)
962 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
963 if (FLOAT_MODE_P (TYPE_MODE (srctype))
964 || TREE_CODE (srctype) == BOOLEAN_TYPE
965 || TREE_CODE (srctype) == ENUMERAL_TYPE)
966 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
967 if (!srctype)
968 srctype = desttype;
969 if (!desttype)
970 desttype = srctype;
971 if (!srctype)
972 return false;
973
974 src_align = get_pointer_alignment (src);
975 dest_align = get_pointer_alignment (dest);
976 if (dest_align < TYPE_ALIGN (desttype)
977 || src_align < TYPE_ALIGN (srctype))
978 return false;
979
980 destvar = dest;
981 STRIP_NOPS (destvar);
982 if (TREE_CODE (destvar) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (destvar, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
985 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
986 else
987 destvar = NULL_TREE;
988
989 srcvar = src;
990 STRIP_NOPS (srcvar);
991 if (TREE_CODE (srcvar) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
994 {
995 if (!destvar
996 || src_align >= TYPE_ALIGN (desttype))
997 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
998 srcvar, off0);
999 else if (!STRICT_ALIGNMENT)
1000 {
1001 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1002 src_align);
1003 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1004 }
1005 else
1006 srcvar = NULL_TREE;
1007 }
1008 else
1009 srcvar = NULL_TREE;
1010
1011 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1012 return false;
1013
1014 if (srcvar == NULL_TREE)
1015 {
1016 STRIP_NOPS (src);
1017 if (src_align >= TYPE_ALIGN (desttype))
1018 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1019 else
1020 {
1021 if (STRICT_ALIGNMENT)
1022 return false;
1023 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1024 src_align);
1025 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1026 }
1027 }
1028 else if (destvar == NULL_TREE)
1029 {
1030 STRIP_NOPS (dest);
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
1043 gimple *new_stmt;
1044 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1045 {
1046 tree tem = fold_const_aggregate_ref (srcvar);
1047 if (tem)
1048 srcvar = tem;
1049 if (! is_gimple_min_invariant (srcvar))
1050 {
1051 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1052 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1053 new_stmt);
1054 gimple_assign_set_lhs (new_stmt, srcvar);
1055 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1057 }
1058 }
1059 new_stmt = gimple_build_assign (destvar, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1065 if (!lhs)
1066 {
1067 gsi_replace (gsi, new_stmt, false);
1068 return true;
1069 }
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1071 }
1072
1073 done:
1074 gimple_seq stmts = NULL;
1075 if (endp == 0 || endp == 3)
1076 len = NULL_TREE;
1077 else if (endp == 2)
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1079 ssize_int (1));
1080 if (endp == 2 || endp == 1)
1081 {
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1085 }
1086
1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1088 gimple *repl = gimple_build_assign (lhs, dest);
1089 gsi_replace (gsi, repl, false);
1090 return true;
1091 }
1092
1093 /* Fold function call to builtin memset or bzero at *GSI setting the
1094 memory of size LEN to VAL. Return whether a simplification was made. */
1095
1096 static bool
1097 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1098 {
1099 gimple *stmt = gsi_stmt (*gsi);
1100 tree etype;
1101 unsigned HOST_WIDE_INT length, cval;
1102
1103 /* If the LEN parameter is zero, return DEST. */
1104 if (integer_zerop (len))
1105 {
1106 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1107 return true;
1108 }
1109
1110 if (! tree_fits_uhwi_p (len))
1111 return false;
1112
1113 if (TREE_CODE (c) != INTEGER_CST)
1114 return false;
1115
1116 tree dest = gimple_call_arg (stmt, 0);
1117 tree var = dest;
1118 if (TREE_CODE (var) != ADDR_EXPR)
1119 return false;
1120
1121 var = TREE_OPERAND (var, 0);
1122 if (TREE_THIS_VOLATILE (var))
1123 return false;
1124
1125 etype = TREE_TYPE (var);
1126 if (TREE_CODE (etype) == ARRAY_TYPE)
1127 etype = TREE_TYPE (etype);
1128
1129 if (!INTEGRAL_TYPE_P (etype)
1130 && !POINTER_TYPE_P (etype))
1131 return NULL_TREE;
1132
1133 if (! var_decl_component_p (var))
1134 return NULL_TREE;
1135
1136 length = tree_to_uhwi (len);
1137 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1138 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1139 return NULL_TREE;
1140
1141 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1142 return NULL_TREE;
1143
1144 if (integer_zerop (c))
1145 cval = 0;
1146 else
1147 {
1148 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1149 return NULL_TREE;
1150
1151 cval = TREE_INT_CST_LOW (c);
1152 cval &= 0xff;
1153 cval |= cval << 8;
1154 cval |= cval << 16;
1155 cval |= (cval << 31) << 1;
1156 }
1157
1158 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1159 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1160 gimple_set_vuse (store, gimple_vuse (stmt));
1161 tree vdef = gimple_vdef (stmt);
1162 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1163 {
1164 gimple_set_vdef (store, gimple_vdef (stmt));
1165 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1166 }
1167 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1168 if (gimple_call_lhs (stmt))
1169 {
1170 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1171 gsi_replace (gsi, asgn, false);
1172 }
1173 else
1174 {
1175 gimple_stmt_iterator gsi2 = *gsi;
1176 gsi_prev (gsi);
1177 gsi_remove (&gsi2, true);
1178 }
1179
1180 return true;
1181 }
1182
1183
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Return true if some range could be recorded, false otherwise.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy);
	}

      if (type == 2)
	{
	  /* TYPE 2 asks for the maximum value, so ARG itself must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.  */
	      arg = TREE_OPERAND (arg, 1);
	      /* Longest possible string is the array size minus the
		 terminating NUL.  */
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Avoid using the array size as the minimum.  */
	      minlen = NULL;
	    }
	}

      if (!val)
	return false;

      /* Record VAL as the new minimum if it is smaller than any
	 previously seen minimum (or if none has been seen yet).  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* For maximum queries, widen the recorded maximum; both
		 values must be constants to compare.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* For TYPE == 0 all reaching values must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must yield a usable range.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy)
	    && get_range_strlen (op3, length, visited, type, fuzzy);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy))
	      {
		/* In fuzzy mode an undetermined arm makes the maximum
		   unbounded rather than failing outright.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1357
1358 /* Determine the minimum and maximum value or string length that ARG
1359 refers to and store each in the first two elements of MINMAXLEN.
1360 For expressions that point to strings of unknown lengths that are
1361 character arrays, use the upper bound of the array as the maximum
1362 length. For example, given an expression like 'x ? array : "xyz"'
1363 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1364 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1365 stored in array.
1366 */
1367
1368 void get_range_strlen (tree arg, tree minmaxlen[2])
1369 {
1370 bitmap visited = NULL;
1371
1372 minmaxlen[0] = NULL_TREE;
1373 minmaxlen[1] = NULL_TREE;
1374
1375 get_range_strlen (arg, minmaxlen, &visited, 1, true);
1376
1377 if (visited)
1378 BITMAP_FREE (visited);
1379 }
1380
1381 tree
1382 get_maxval_strlen (tree arg, int type)
1383 {
1384 bitmap visited = NULL;
1385 tree len[2] = { NULL_TREE, NULL_TREE };
1386 if (!get_range_strlen (arg, len, &visited, type, false))
1387 len[1] = NULL_TREE;
1388 if (visited)
1389 BITMAP_FREE (visited);
1390
1391 return len[1];
1392 }
1393
1394
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* Expanding into memcpy may enlarge the code; punt when optimizing
     for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Transform strcpy (DEST, SRC) into memcpy (DEST, SRC, LEN + 1)
     when the length of SRC is known.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* The +1 copies the terminating NUL as well.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1432
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* SLEN + 1 accounts for the terminating NUL of SRC.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  LEN >= SLEN guarantees the NUL
     is copied, so the strncpy zero-padding semantics are preserved
     only up to the copied NUL; the tail bytes were already written by
     memcpy of LEN bytes.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1481
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* All transforms below produce a value, so an unused result means
     there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate the search at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset P1 - P: result is STR + offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply to searching for the
     terminating NUL.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (is_strrchr && strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1565
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was applied, false otherwise.

   When the length of SRC is a known constant and the block is optimized
   for speed, strcat (DST, SRC) is expanded into a strlen (DST) followed
   by a memcpy of SRC (including its terminating NUL) to
   DST + strlen (DST).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is larger than the call; only do it
     when optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* LEN + 1 copies the terminating NUL as well.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* The result of strcat is DST.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1657
1658 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1659 are the arguments to the call. */
1660
1661 static bool
1662 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1663 {
1664 gimple *stmt = gsi_stmt (*gsi);
1665 tree dest = gimple_call_arg (stmt, 0);
1666 tree src = gimple_call_arg (stmt, 1);
1667 tree size = gimple_call_arg (stmt, 2);
1668 tree fn;
1669 const char *p;
1670
1671
1672 p = c_getstr (src);
1673 /* If the SRC parameter is "", return DEST. */
1674 if (p && *p == '\0')
1675 {
1676 replace_call_with_value (gsi, dest);
1677 return true;
1678 }
1679
1680 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1681 return false;
1682
1683 /* If __builtin_strcat_chk is used, assume strcat is available. */
1684 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1685 if (!fn)
1686 return false;
1687
1688 gimple *repl = gimple_build_call (fn, 2, dest, src);
1689 replace_call_with_call_and_fold (gsi, repl);
1690 return true;
1691 }
1692
1693 /* Simplify a call to the strncat builtin. */
1694
1695 static bool
1696 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1697 {
1698 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1699 tree dst = gimple_call_arg (stmt, 0);
1700 tree src = gimple_call_arg (stmt, 1);
1701 tree len = gimple_call_arg (stmt, 2);
1702
1703 const char *p = c_getstr (src);
1704
1705 /* If the requested length is zero, or the src parameter string
1706 length is zero, return the dst parameter. */
1707 if (integer_zerop (len) || (p && *p == '\0'))
1708 {
1709 replace_call_with_value (gsi, dst);
1710 return true;
1711 }
1712
1713 /* If the requested len is greater than or equal to the string
1714 length, call strcat. */
1715 if (TREE_CODE (len) == INTEGER_CST && p
1716 && compare_tree_int (len, strlen (p)) >= 0)
1717 {
1718 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1719
1720 /* If the replacement _DECL isn't initialized, don't do the
1721 transformation. */
1722 if (!fn)
1723 return false;
1724
1725 gcall *repl = gimple_build_call (fn, 2, dst, src);
1726 replace_call_with_call_and_fold (gsi, repl);
1727 return true;
1728 }
1729
1730 return false;
1731 }
1732
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Return true if a simplification was applied.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE that is not all-ones is a known object size; the check
     must be preserved in some form.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1788
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if a simplification was applied,
   false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      /* compare_tree_int only returns -1, 0 or 1.  */
      gcc_unreachable ();
    }
  return false;
}
1865
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if a
   simplification was applied, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE that is not all-ones is a known object size; prove
     LEN <= SIZE before dropping the check.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1966
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if a
   simplification was applied, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE that is not all-ones is a known object size; prove the
     copied length fits before dropping the check.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* LEN + 1 also copies the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2057
2058 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2059 are the arguments to the call and SIZE is the object size of DEST as
2060 determined by __builtin_object_size.  FCODE distinguishes
2061 __strncpy_chk from __stpncpy_chk.  Returns true if the call was simplified. */
2062
2063 static bool
2064 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2065 tree dest, tree src,
2066 tree len, tree size,
2067 enum built_in_function fcode)
2068 {
2069 gimple *stmt = gsi_stmt (*gsi);
2070 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2071 tree fn;
2072
2073 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2074 {
2075 /* If return value of __stpncpy_chk is ignored,
2076 optimize into __strncpy_chk. */
2077 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2078 if (fn)
2079 {
2080 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2081 replace_call_with_call_and_fold (gsi, repl);
2082 return true;
2083 }
2084 }
2085
2086 if (! tree_fits_uhwi_p (size))
2087 return false;
2088
 /* MAXLEN is an upper bound on the copy count LEN from value tracking;
    NULL_TREE when no bound is known.  */
2089 tree maxlen = get_maxval_strlen (len, 2);
 /* An all-ones SIZE means the destination object size is unknown; the
    range check below is then neither possible nor needed.  */
2090 if (! integer_all_onesp (size))
2091 {
2092 if (! tree_fits_uhwi_p (len))
2093 {
2094 /* If LEN is not constant, try MAXLEN too.
2095 For MAXLEN only allow optimizing into non-_ocs function
2096 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2097 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2098 return false;
2099 }
2100 else
2101 maxlen = len;
2102
 /* Give up unless SIZE >= the (maximum) number of bytes copied.  */
2103 if (tree_int_cst_lt (size, maxlen))
2104 return false;
2105 }
2106
2107 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2108 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2109 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2110 if (!fn)
2111 return false;
2112
2113 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2114 replace_call_with_call_and_fold (gsi, repl);
2115 return true;
2116 }
2117
2118 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2119 Returns true if the call was simplified, false otherwise. */
2120
2121 static bool
2122 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2123 {
2124 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2125 location_t loc = gimple_location (stmt);
2126 tree dest = gimple_call_arg (stmt, 0);
2127 tree src = gimple_call_arg (stmt, 1);
2128 tree fn, len, lenp1;
2129
2130 /* If the result is unused, replace stpcpy with strcpy. */
2131 if (gimple_call_lhs (stmt) == NULL_TREE)
2132 {
2133 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2134 if (!fn)
2135 return false;
2136 gimple_call_set_fndecl (stmt, fn);
2137 fold_stmt (gsi);
2138 return true;
2139 }
2140
2141 len = c_strlen (src, 1);
2142 if (!len
2143 || TREE_CODE (len) != INTEGER_CST)
2144 return false;
2145
2146 if (optimize_function_for_size_p (cfun)
2147 /* If length is zero it's small enough. */
2148 && !integer_zerop (len))
2149 return false;
2150
2151 /* If the source has a known length replace stpcpy with memcpy. */
2152 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2153 if (!fn)
2154 return false;
2155
 /* LENP1 = LEN + 1 so that memcpy also copies the terminating NUL.  */
2156 gimple_seq stmts = NULL;
2157 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2158 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2159 tem, build_int_cst (size_type_node, 1));
2160 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2161 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
 /* The memcpy takes over the original call's virtual operands: the call
    itself is replaced below by a plain pointer assignment, which carries
    no memory side effects.  */
2162 gimple_set_vuse (repl, gimple_vuse (stmt));
2163 gimple_set_vdef (repl, gimple_vdef (stmt));
2164 if (gimple_vdef (repl)
2165 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2166 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2167 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2168 /* Replace the result with dest + len. */
2169 stmts = NULL;
2170 tem = gimple_convert (&stmts, loc, sizetype, len);
2171 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2172 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2173 POINTER_PLUS_EXPR, dest, tem);
2174 gsi_replace (gsi, ret, false);
2175 /* Finally fold the memcpy call. */
2176 gimple_stmt_iterator gsi2 = *gsi;
2177 gsi_prev (&gsi2);
2178 fold_stmt (&gsi2);
2179 return true;
2180 }
2181
2182 /* Fold a call to __snprintf_chk or __vsnprintf_chk at *GSI into a plain
2183 {,v}snprintf call when the length/size arguments prove the checked
2184 variant is unnecessary.  FCODE is either BUILT_IN_SNPRINTF_CHK or
2185 BUILT_IN_VSNPRINTF_CHK.  Returns true if the call was simplified,
2186 false if a normal call should be emitted instead. */
2187
2188 static bool
2189 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2190 enum built_in_function fcode)
2191 {
2192 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2193 tree dest, size, len, fn, fmt, flag;
2194 const char *fmt_str;
2195
2196 /* Verify the required arguments in the original call. */
2197 if (gimple_call_num_args (stmt) < 5)
2198 return false;
2199
 /* Argument layout of __{,v}snprintf_chk:
    (dest, len, flag, object-size, fmt, ...).  */
2200 dest = gimple_call_arg (stmt, 0);
2201 len = gimple_call_arg (stmt, 1);
2202 flag = gimple_call_arg (stmt, 2);
2203 size = gimple_call_arg (stmt, 3);
2204 fmt = gimple_call_arg (stmt, 4);
2205
2206 if (! tree_fits_uhwi_p (size))
2207 return false;
2208
 /* An all-ones SIZE means the destination object size is unknown.  */
2209 if (! integer_all_onesp (size))
2210 {
2211 tree maxlen = get_maxval_strlen (len, 2);
2212 if (! tree_fits_uhwi_p (len))
2213 {
2214 /* If LEN is not constant, try MAXLEN too.
2215 For MAXLEN only allow optimizing into non-_ocs function
2216 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2217 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2218 return false;
2219 }
2220 else
2221 maxlen = len;
2222
2223 if (tree_int_cst_lt (size, maxlen))
2224 return false;
2225 }
2226
2227 if (!init_target_chars ())
2228 return false;
2229
2230 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2231 or if format doesn't contain % chars or is "%s". */
2232 if (! integer_zerop (flag))
2233 {
2234 fmt_str = c_getstr (fmt);
2235 if (fmt_str == NULL)
2236 return false;
2237 if (strchr (fmt_str, target_percent) != NULL
2238 && strcmp (fmt_str, target_percent_s))
2239 return false;
2240 }
2241
2242 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2243 available. */
2244 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2245 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2246 if (!fn)
2247 return false;
2248
2249 /* Replace the called function and the first 5 argument by 3 retaining
2250 trailing varargs. */
2251 gimple_call_set_fndecl (stmt, fn);
2252 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2253 gimple_call_set_arg (stmt, 0, dest);
2254 gimple_call_set_arg (stmt, 1, len);
2255 gimple_call_set_arg (stmt, 2, fmt);
 /* Shift the variadic arguments down over the dropped FLAG and SIZE
    slots, then shrink the statement by those two operands.  */
2256 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2257 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2258 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2259 fold_stmt (gsi);
2260 return true;
2261 }
2262
2263 /* Fold a call to __sprintf_chk or __vsprintf_chk at *GSI into a plain
2264 {,v}sprintf call when the format and object size prove the check
2265 unnecessary.  FCODE is either BUILT_IN_SPRINTF_CHK or
2266 BUILT_IN_VSPRINTF_CHK.  Returns true if the call was simplified. */
2267
2268 static bool
2269 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2270 enum built_in_function fcode)
2271 {
2272 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2273 tree dest, size, len, fn, fmt, flag;
2274 const char *fmt_str;
2275 unsigned nargs = gimple_call_num_args (stmt);
2276
2277 /* Verify the required arguments in the original call. */
2278 if (nargs < 4)
2279 return false;
 /* Argument layout of __{,v}sprintf_chk:
    (dest, flag, object-size, fmt, ...).  */
2280 dest = gimple_call_arg (stmt, 0);
2281 flag = gimple_call_arg (stmt, 1);
2282 size = gimple_call_arg (stmt, 2);
2283 fmt = gimple_call_arg (stmt, 3);
2284
2285 if (! tree_fits_uhwi_p (size))
2286 return false;
2287
2288 len = NULL_TREE;
2289
2290 if (!init_target_chars ())
2291 return false;
2292
2293 /* Check whether the format is a literal string constant. */
2294 fmt_str = c_getstr (fmt);
2295 if (fmt_str != NULL)
2296 {
2297 /* If the format doesn't contain % args or %%, we know the size. */
2298 if (strchr (fmt_str, target_percent) == 0)
2299 {
2300 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2301 len = build_int_cstu (size_type_node, strlen (fmt_str));
2302 }
2303 /* If the format is "%s" and first ... argument is a string literal,
2304 we know the size too. */
2305 else if (fcode == BUILT_IN_SPRINTF_CHK
2306 && strcmp (fmt_str, target_percent_s) == 0)
2307 {
2308 tree arg;
2309
2310 if (nargs == 5)
2311 {
2312 arg = gimple_call_arg (stmt, 4);
2313 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2314 {
2315 len = c_strlen (arg, 1);
2316 if (! len || ! tree_fits_uhwi_p (len))
2317 len = NULL_TREE;
2318 }
2319 }
2320 }
2321 }
2322
 /* An all-ones SIZE means the destination object size is unknown;
    otherwise require the known output length to fit strictly (leaving
    room for the terminating NUL).  */
2323 if (! integer_all_onesp (size))
2324 {
2325 if (! len || ! tree_int_cst_lt (len, size))
2326 return false;
2327 }
2328
2329 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2330 or if format doesn't contain % chars or is "%s". */
2331 if (! integer_zerop (flag))
2332 {
2333 if (fmt_str == NULL)
2334 return false;
2335 if (strchr (fmt_str, target_percent) != NULL
2336 && strcmp (fmt_str, target_percent_s))
2337 return false;
2338 }
2339
2340 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2341 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2342 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2343 if (!fn)
2344 return false;
2345
2346 /* Replace the called function and the first 4 argument by 2 retaining
2347 trailing varargs. */
2348 gimple_call_set_fndecl (stmt, fn);
2349 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2350 gimple_call_set_arg (stmt, 0, dest);
2351 gimple_call_set_arg (stmt, 1, fmt);
 /* Shift the variadic arguments down over the dropped FLAG and SIZE
    slots, then shrink the statement by those two operands.  */
2352 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2353 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2354 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2355 fold_stmt (gsi);
2356 return true;
2357 }
2358
2359 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2360 ORIG may be null if this is a 2-argument call. We don't attempt to
2361 simplify calls with more than 3 arguments.
2362
2363 Returns true if the call was replaced (by a strcpy plus, when the
2364 result is used, an assignment of the known output length to the lhs),
2365 false if no simplification was possible. */
2366
2367 static bool
2368 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2369 {
2370 gimple *stmt = gsi_stmt (*gsi);
2371 tree dest = gimple_call_arg (stmt, 0);
2372 tree fmt = gimple_call_arg (stmt, 1);
2373 tree orig = NULL_TREE;
2374 const char *fmt_str = NULL;
2375
2376 /* Verify the required arguments in the original call. We deal with two
2377 types of sprintf() calls: 'sprintf (str, fmt)' and
2378 'sprintf (dest, "%s", orig)'. */
2379 if (gimple_call_num_args (stmt) > 3)
2380 return false;
2381
2382 if (gimple_call_num_args (stmt) == 3)
2383 orig = gimple_call_arg (stmt, 2);
2384
2385 /* Check whether the format is a literal string constant. */
2386 fmt_str = c_getstr (fmt);
2387 if (fmt_str == NULL)
2388 return false;
2389
2390 if (!init_target_chars ())
2391 return false;
2392
2393 /* If the format doesn't contain % args or %%, use strcpy. */
2394 if (strchr (fmt_str, target_percent) == NULL)
2395 {
2396 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2397
2398 if (!fn)
2399 return false;
2400
2401 /* Don't optimize sprintf (buf, "abc", ptr++). */
2402 if (orig)
2403 return false;
2404
2405 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2406 'format' is known to contain no % formats. */
2407 gimple_seq stmts = NULL;
2408 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2409 gimple_seq_add_stmt_without_update (&stmts, repl);
2410 if (gimple_call_lhs (stmt))
2411 {
 /* sprintf returns the number of characters written, which here is
    just the format string's length.  */
2412 repl = gimple_build_assign (gimple_call_lhs (stmt),
2413 build_int_cst (integer_type_node,
2414 strlen (fmt_str)));
2415 gimple_seq_add_stmt_without_update (&stmts, repl);
2416 gsi_replace_with_seq_vops (gsi, stmts);
2417 /* gsi now points at the assignment to the lhs, get a
2418 stmt iterator to the memcpy call.
2419 ??? We can't use gsi_for_stmt as that doesn't work when the
2420 CFG isn't built yet. */
2421 gimple_stmt_iterator gsi2 = *gsi;
2422 gsi_prev (&gsi2);
2423 fold_stmt (&gsi2);
2424 }
2425 else
2426 {
2427 gsi_replace_with_seq_vops (gsi, stmts);
2428 fold_stmt (gsi);
2429 }
2430 return true;
2431 }
2432
2433 /* If the format is "%s", use strcpy if the result isn't used. */
2434 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2435 {
2436 tree fn;
2437 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2438
2439 if (!fn)
2440 return false;
2441
2442 /* Don't crash on sprintf (str1, "%s"). */
2443 if (!orig)
2444 return false;
2445
 /* The length of ORIG is needed only when the result is used.  */
2446 tree orig_len = NULL_TREE;
2447 if (gimple_call_lhs (stmt))
2448 {
2449 orig_len = get_maxval_strlen (orig, 0);
2450 if (!orig_len)
2451 return false;
2452 }
2453
2454 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2455 gimple_seq stmts = NULL;
2456 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2457 gimple_seq_add_stmt_without_update (&stmts, repl);
2458 if (gimple_call_lhs (stmt))
2459 {
2460 if (!useless_type_conversion_p (integer_type_node,
2461 TREE_TYPE (orig_len)))
2462 orig_len = fold_convert (integer_type_node, orig_len);
2463 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2464 gimple_seq_add_stmt_without_update (&stmts, repl);
2465 gsi_replace_with_seq_vops (gsi, stmts);
2466 /* gsi now points at the assignment to the lhs, get a
2467 stmt iterator to the memcpy call.
2468 ??? We can't use gsi_for_stmt as that doesn't work when the
2469 CFG isn't built yet. */
2470 gimple_stmt_iterator gsi2 = *gsi;
2471 gsi_prev (&gsi2);
2472 fold_stmt (&gsi2);
2473 }
2474 else
2475 {
2476 gsi_replace_with_seq_vops (gsi, stmts);
2477 fold_stmt (gsi);
2478 }
2479 return true;
2480 }
2481 return false;
2482 }
2483
2484 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2485 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2486 attempt to simplify calls with more than 4 arguments.
2487
2488 Returns true if the call was replaced (by a strcpy plus, when the
2489 result is used, an assignment of the known output length to the lhs),
2490 false if no simplification was possible. */
2491
2492 static bool
2493 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2494 {
2495 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2496 tree dest = gimple_call_arg (stmt, 0);
2497 tree destsize = gimple_call_arg (stmt, 1);
2498 tree fmt = gimple_call_arg (stmt, 2);
2499 tree orig = NULL_TREE;
2500 const char *fmt_str = NULL;
2501
2502 if (gimple_call_num_args (stmt) > 4)
2503 return false;
2504
2505 if (gimple_call_num_args (stmt) == 4)
2506 orig = gimple_call_arg (stmt, 3);
2507
 /* We need a constant destination size to prove no truncation occurs.  */
2508 if (!tree_fits_uhwi_p (destsize))
2509 return false;
2510 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2511
2512 /* Check whether the format is a literal string constant. */
2513 fmt_str = c_getstr (fmt);
2514 if (fmt_str == NULL)
2515 return false;
2516
2517 if (!init_target_chars ())
2518 return false;
2519
2520 /* If the format doesn't contain % args or %%, use strcpy. */
2521 if (strchr (fmt_str, target_percent) == NULL)
2522 {
2523 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2524 if (!fn)
2525 return false;
2526
2527 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2528 if (orig)
2529 return false;
2530
2531 /* We could expand this as
2532 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2533 or to
2534 memcpy (str, fmt_with_nul_at_cstm1, cst);
2535 but in the former case that might increase code size
2536 and in the latter case grow .rodata section too much.
2537 So punt for now. */
2538 size_t len = strlen (fmt_str);
2539 if (len >= destlen)
2540 return false;
2541
2542 gimple_seq stmts = NULL;
2543 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2544 gimple_seq_add_stmt_without_update (&stmts, repl);
2545 if (gimple_call_lhs (stmt))
2546 {
 /* snprintf returns the number of characters that would have been
    written; with no truncation that is simply LEN.  */
2547 repl = gimple_build_assign (gimple_call_lhs (stmt),
2548 build_int_cst (integer_type_node, len));
2549 gimple_seq_add_stmt_without_update (&stmts, repl);
2550 gsi_replace_with_seq_vops (gsi, stmts);
2551 /* gsi now points at the assignment to the lhs, get a
2552 stmt iterator to the memcpy call.
2553 ??? We can't use gsi_for_stmt as that doesn't work when the
2554 CFG isn't built yet. */
2555 gimple_stmt_iterator gsi2 = *gsi;
2556 gsi_prev (&gsi2);
2557 fold_stmt (&gsi2);
2558 }
2559 else
2560 {
2561 gsi_replace_with_seq_vops (gsi, stmts);
2562 fold_stmt (gsi);
2563 }
2564 return true;
2565 }
2566
2567 /* If the format is "%s", use strcpy if the result isn't used. */
2568 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2569 {
2570 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2571 if (!fn)
2572 return false;
2573
2574 /* Don't crash on snprintf (str1, cst, "%s"). */
2575 if (!orig)
2576 return false;
2577
2578 tree orig_len = get_maxval_strlen (orig, 0);
2579 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2580 return false;
2581
2582 /* We could expand this as
2583 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2584 or to
2585 memcpy (str1, str2_with_nul_at_cstm1, cst);
2586 but in the former case that might increase code size
2587 and in the latter case grow .rodata section too much.
2588 So punt for now. */
2589 if (compare_tree_int (orig_len, destlen) >= 0)
2590 return false;
2591
2592 /* Convert snprintf (str1, cst, "%s", str2) into
2593 strcpy (str1, str2) if strlen (str2) < cst. */
2594 gimple_seq stmts = NULL;
2595 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2596 gimple_seq_add_stmt_without_update (&stmts, repl);
2597 if (gimple_call_lhs (stmt))
2598 {
2599 if (!useless_type_conversion_p (integer_type_node,
2600 TREE_TYPE (orig_len)))
2601 orig_len = fold_convert (integer_type_node, orig_len);
2602 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2603 gimple_seq_add_stmt_without_update (&stmts, repl);
2604 gsi_replace_with_seq_vops (gsi, stmts);
2605 /* gsi now points at the assignment to the lhs, get a
2606 stmt iterator to the memcpy call.
2607 ??? We can't use gsi_for_stmt as that doesn't work when the
2608 CFG isn't built yet. */
2609 gimple_stmt_iterator gsi2 = *gsi;
2610 gsi_prev (&gsi2);
2611 fold_stmt (&gsi2);
2612 }
2613 else
2614 {
2615 gsi_replace_with_seq_vops (gsi, stmts);
2616 fold_stmt (gsi);
2617 }
2618 return true;
2619 }
2620 return false;
2621 }
2622
2623 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2624 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2625 more than 3 arguments, and ARG may be null in the 2-argument case.
2626
2627 Returns true if the call was replaced by a cheaper fputs/fputc call,
2628 false if no simplification was possible. FCODE is the BUILT_IN_*
2629 code of the function to be simplified. */
2630
2631 static bool
2632 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2633 tree fp, tree fmt, tree arg,
2634 enum built_in_function fcode)
2635 {
2636 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2637 tree fn_fputc, fn_fputs;
2638 const char *fmt_str = NULL;
2639
 /* The replacements return different values than fprintf, so they are
    valid only when the result is unused.  */
2640 /* If the return value is used, don't do the transformation. */
2641 if (gimple_call_lhs (stmt) != NULL_TREE)
2642 return false;
2643
2644 /* Check whether the format is a literal string constant. */
2645 fmt_str = c_getstr (fmt);
2646 if (fmt_str == NULL)
2647 return false;
2648
2649 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2650 {
2651 /* If we're using an unlocked function, assume the other
2652 unlocked functions exist explicitly. */
2653 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2654 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2655 }
2656 else
2657 {
2658 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2659 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2660 }
2661
2662 if (!init_target_chars ())
2663 return false;
2664
2665 /* If the format doesn't contain % args or %%, use strcpy. */
2666 if (strchr (fmt_str, target_percent) == NULL)
2667 {
2668 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2669 && arg)
2670 return false;
2671
2672 /* If the format specifier was "", fprintf does nothing. */
2673 if (fmt_str[0] == '\0')
2674 {
2675 replace_call_with_value (gsi, NULL_TREE);
2676 return true;
2677 }
2678
2679 /* When "string" doesn't contain %, replace all cases of
2680 fprintf (fp, string) with fputs (string, fp). The fputs
2681 builtin will take care of special cases like length == 1. */
2682 if (fn_fputs)
2683 {
2684 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2685 replace_call_with_call_and_fold (gsi, repl);
2686 return true;
2687 }
2688 }
2689
2690 /* The other optimizations can be done only on the non-va_list variants. */
2691 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2692 return false;
2693
2694 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2695 else if (strcmp (fmt_str, target_percent_s) == 0)
2696 {
2697 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2698 return false;
2699 if (fn_fputs)
2700 {
2701 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2702 replace_call_with_call_and_fold (gsi, repl);
2703 return true;
2704 }
2705 }
2706
2707 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2708 else if (strcmp (fmt_str, target_percent_c) == 0)
2709 {
2710 if (!arg
2711 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2712 return false;
2713 if (fn_fputc)
2714 {
2715 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2716 replace_call_with_call_and_fold (gsi, repl);
2717 return true;
2718 }
2719 }
2720
2721 return false;
2722 }
2723
2724 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2725 FMT and ARG are the arguments to the call; we don't fold cases with
2726 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2727
2728 Returns true if the call was replaced by a cheaper puts/putchar call,
2729 false if no simplification was possible. FCODE is the BUILT_IN_*
2730 code of the function to be simplified. */
2731
2732 static bool
2733 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2734 tree arg, enum built_in_function fcode)
2735 {
2736 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2737 tree fn_putchar, fn_puts, newarg;
2738 const char *fmt_str = NULL;
2739
 /* The replacements return different values than printf, so they are
    valid only when the result is unused.  */
2740 /* If the return value is used, don't do the transformation. */
2741 if (gimple_call_lhs (stmt) != NULL_TREE)
2742 return false;
2743
2744 /* Check whether the format is a literal string constant. */
2745 fmt_str = c_getstr (fmt);
2746 if (fmt_str == NULL)
2747 return false;
2748
2749 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2750 {
2751 /* If we're using an unlocked function, assume the other
2752 unlocked functions exist explicitly. */
2753 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2754 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2755 }
2756 else
2757 {
2758 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2759 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2760 }
2761
2762 if (!init_target_chars ())
2763 return false;
2764
 /* Handle printf ("%s", arg) and printf ("<literal with no %>", ...)
    uniformly: STR is the string that will actually be printed.  */
2765 if (strcmp (fmt_str, target_percent_s) == 0
2766 || strchr (fmt_str, target_percent) == NULL)
2767 {
2768 const char *str;
2769
2770 if (strcmp (fmt_str, target_percent_s) == 0)
2771 {
2772 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2773 return false;
2774
2775 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2776 return false;
2777
2778 str = c_getstr (arg);
2779 if (str == NULL)
2780 return false;
2781 }
2782 else
2783 {
2784 /* The format specifier doesn't contain any '%' characters. */
2785 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2786 && arg)
2787 return false;
2788 str = fmt_str;
2789 }
2790
2791 /* If the string was "", printf does nothing. */
2792 if (str[0] == '\0')
2793 {
2794 replace_call_with_value (gsi, NULL_TREE);
2795 return true;
2796 }
2797
2798 /* If the string has length of 1, call putchar. */
2799 if (str[1] == '\0')
2800 {
2801 /* Given printf("c"), (where c is any one character,)
2802 convert "c"[0] to an int and pass that to the replacement
2803 function. */
2804 newarg = build_int_cst (integer_type_node, str[0]);
2805 if (fn_putchar)
2806 {
2807 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2808 replace_call_with_call_and_fold (gsi, repl);
2809 return true;
2810 }
2811 }
2812 else
2813 {
2814 /* If the string was "string\n", call puts("string"). */
2815 size_t len = strlen (str);
2816 if ((unsigned char)str[len - 1] == target_newline
2817 && (size_t) (int) len == len
2818 && (int) len > 0)
2819 {
2820 char *newstr;
2821 tree offset_node, string_cst;
2822
2823 /* Create a NUL-terminated string that's one char shorter
2824 than the original, stripping off the trailing '\n'. */
2825 newarg = build_string_literal (len, str);
2826 string_cst = string_constant (newarg, &offset_node);
2827 gcc_checking_assert (string_cst
2828 && (TREE_STRING_LENGTH (string_cst)
2829 == (int) len)
2830 && integer_zerop (offset_node)
2831 && (unsigned char)
2832 TREE_STRING_POINTER (string_cst)[len - 1]
2833 == target_newline);
2834 /* build_string_literal creates a new STRING_CST,
2835 modify it in place to avoid double copying. */
2836 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2837 newstr[len - 1] = '\0';
2838 if (fn_puts)
2839 {
2840 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2841 replace_call_with_call_and_fold (gsi, repl);
2842 return true;
2843 }
2844 }
2845 else
2846 /* We'd like to arrange to call fputs(string,stdout) here,
2847 but we need stdout and don't have a way to get it yet. */
2848 return false;
2849 }
2850 }
2851
2852 /* The other optimizations can be done only on the non-va_list variants. */
2853 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2854 return false;
2855
2856 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2857 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2858 {
2859 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2860 return false;
2861 if (fn_puts)
2862 {
2863 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2864 replace_call_with_call_and_fold (gsi, repl);
2865 return true;
2866 }
2867 }
2868
2869 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2870 else if (strcmp (fmt_str, target_percent_c) == 0)
2871 {
2872 if (!arg || ! useless_type_conversion_p (integer_type_node,
2873 TREE_TYPE (arg)))
2874 return false;
2875 if (fn_putchar)
2876 {
2877 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2878 replace_call_with_call_and_fold (gsi, repl);
2879 return true;
2880 }
2881 }
2882
2883 return false;
2884 }
2885
2886
2887
2888 /* Fold a call to __builtin_strlen with known length LEN. */
2889
2890 static bool
2891 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2892 {
2893 gimple *stmt = gsi_stmt (*gsi);
2894 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2895 if (!len)
2896 return false;
2897 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2898 replace_call_with_value (gsi, len);
2899 return true;
2900 }
2901
2902 /* Fold a call to __builtin_acc_on_device. */
2903
2904 static bool
2905 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2906 {
2907 /* Defer folding until we know which compiler we're in. */
2908 if (symtab->state != EXPANSION)
2909 return false;
2910
2911 unsigned val_host = GOMP_DEVICE_HOST;
2912 unsigned val_dev = GOMP_DEVICE_NONE;
2913
2914 #ifdef ACCEL_COMPILER
2915 val_host = GOMP_DEVICE_NOT_HOST;
2916 val_dev = ACCEL_COMPILER_acc_device;
2917 #endif
2918
2919 location_t loc = gimple_location (gsi_stmt (*gsi));
2920
2921 tree host_eq = make_ssa_name (boolean_type_node);
2922 gimple *host_ass = gimple_build_assign
2923 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2924 gimple_set_location (host_ass, loc);
2925 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2926
2927 tree dev_eq = make_ssa_name (boolean_type_node);
2928 gimple *dev_ass = gimple_build_assign
2929 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2930 gimple_set_location (dev_ass, loc);
2931 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2932
2933 tree result = make_ssa_name (boolean_type_node);
2934 gimple *result_ass = gimple_build_assign
2935 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2936 gimple_set_location (result_ass, loc);
2937 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2938
2939 replace_call_with_value (gsi, result);
2940
2941 return true;
2942 }
2943
2944 /* Fold the non-target builtin at *GSI and return whether any simplification
2945 was made. */
2946
2947 static bool
2948 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2949 {
2950 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2951 tree callee = gimple_call_fndecl (stmt);
2952
2953 /* Give up for always_inline inline builtins until they are
2954 inlined. */
2955 if (avoid_folding_inline_builtin (callee))
2956 return false;
2957
2958 unsigned n = gimple_call_num_args (stmt);
2959 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2960 switch (fcode)
2961 {
2962 case BUILT_IN_BZERO:
2963 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2964 gimple_call_arg (stmt, 1));
2965 case BUILT_IN_MEMSET:
2966 return gimple_fold_builtin_memset (gsi,
2967 gimple_call_arg (stmt, 1),
2968 gimple_call_arg (stmt, 2));
2969 case BUILT_IN_BCOPY:
2970 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2971 gimple_call_arg (stmt, 0), 3);
2972 case BUILT_IN_MEMCPY:
2973 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2974 gimple_call_arg (stmt, 1), 0);
2975 case BUILT_IN_MEMPCPY:
2976 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2977 gimple_call_arg (stmt, 1), 1);
2978 case BUILT_IN_MEMMOVE:
2979 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2980 gimple_call_arg (stmt, 1), 3);
2981 case BUILT_IN_SPRINTF_CHK:
2982 case BUILT_IN_VSPRINTF_CHK:
2983 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2984 case BUILT_IN_STRCAT_CHK:
2985 return gimple_fold_builtin_strcat_chk (gsi);
2986 case BUILT_IN_STRNCAT_CHK:
2987 return gimple_fold_builtin_strncat_chk (gsi);
2988 case BUILT_IN_STRLEN:
2989 return gimple_fold_builtin_strlen (gsi);
2990 case BUILT_IN_STRCPY:
2991 return gimple_fold_builtin_strcpy (gsi,
2992 gimple_call_arg (stmt, 0),
2993 gimple_call_arg (stmt, 1));
2994 case BUILT_IN_STRNCPY:
2995 return gimple_fold_builtin_strncpy (gsi,
2996 gimple_call_arg (stmt, 0),
2997 gimple_call_arg (stmt, 1),
2998 gimple_call_arg (stmt, 2));
2999 case BUILT_IN_STRCAT:
3000 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3001 gimple_call_arg (stmt, 1));
3002 case BUILT_IN_STRNCAT:
3003 return gimple_fold_builtin_strncat (gsi);
3004 case BUILT_IN_INDEX:
3005 case BUILT_IN_STRCHR:
3006 return gimple_fold_builtin_strchr (gsi, false);
3007 case BUILT_IN_RINDEX:
3008 case BUILT_IN_STRRCHR:
3009 return gimple_fold_builtin_strchr (gsi, true);
3010 case BUILT_IN_FPUTS:
3011 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3012 gimple_call_arg (stmt, 1), false);
3013 case BUILT_IN_FPUTS_UNLOCKED:
3014 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3015 gimple_call_arg (stmt, 1), true);
3016 case BUILT_IN_MEMCPY_CHK:
3017 case BUILT_IN_MEMPCPY_CHK:
3018 case BUILT_IN_MEMMOVE_CHK:
3019 case BUILT_IN_MEMSET_CHK:
3020 return gimple_fold_builtin_memory_chk (gsi,
3021 gimple_call_arg (stmt, 0),
3022 gimple_call_arg (stmt, 1),
3023 gimple_call_arg (stmt, 2),
3024 gimple_call_arg (stmt, 3),
3025 fcode);
3026 case BUILT_IN_STPCPY:
3027 return gimple_fold_builtin_stpcpy (gsi);
3028 case BUILT_IN_STRCPY_CHK:
3029 case BUILT_IN_STPCPY_CHK:
3030 return gimple_fold_builtin_stxcpy_chk (gsi,
3031 gimple_call_arg (stmt, 0),
3032 gimple_call_arg (stmt, 1),
3033 gimple_call_arg (stmt, 2),
3034 fcode);
3035 case BUILT_IN_STRNCPY_CHK:
3036 case BUILT_IN_STPNCPY_CHK:
3037 return gimple_fold_builtin_stxncpy_chk (gsi,
3038 gimple_call_arg (stmt, 0),
3039 gimple_call_arg (stmt, 1),
3040 gimple_call_arg (stmt, 2),
3041 gimple_call_arg (stmt, 3),
3042 fcode);
3043 case BUILT_IN_SNPRINTF_CHK:
3044 case BUILT_IN_VSNPRINTF_CHK:
3045 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3046 case BUILT_IN_SNPRINTF:
3047 return gimple_fold_builtin_snprintf (gsi);
3048 case BUILT_IN_SPRINTF:
3049 return gimple_fold_builtin_sprintf (gsi);
3050 case BUILT_IN_FPRINTF:
3051 case BUILT_IN_FPRINTF_UNLOCKED:
3052 case BUILT_IN_VFPRINTF:
3053 if (n == 2 || n == 3)
3054 return gimple_fold_builtin_fprintf (gsi,
3055 gimple_call_arg (stmt, 0),
3056 gimple_call_arg (stmt, 1),
3057 n == 3
3058 ? gimple_call_arg (stmt, 2)
3059 : NULL_TREE,
3060 fcode);
3061 break;
3062 case BUILT_IN_FPRINTF_CHK:
3063 case BUILT_IN_VFPRINTF_CHK:
3064 if (n == 3 || n == 4)
3065 return gimple_fold_builtin_fprintf (gsi,
3066 gimple_call_arg (stmt, 0),
3067 gimple_call_arg (stmt, 2),
3068 n == 4
3069 ? gimple_call_arg (stmt, 3)
3070 : NULL_TREE,
3071 fcode);
3072 break;
3073 case BUILT_IN_PRINTF:
3074 case BUILT_IN_PRINTF_UNLOCKED:
3075 case BUILT_IN_VPRINTF:
3076 if (n == 1 || n == 2)
3077 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3078 n == 2
3079 ? gimple_call_arg (stmt, 1)
3080 : NULL_TREE, fcode);
3081 break;
3082 case BUILT_IN_PRINTF_CHK:
3083 case BUILT_IN_VPRINTF_CHK:
3084 if (n == 2 || n == 3)
3085 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3086 n == 3
3087 ? gimple_call_arg (stmt, 2)
3088 : NULL_TREE, fcode);
3089 break;
3090 case BUILT_IN_ACC_ON_DEVICE:
3091 return gimple_fold_builtin_acc_on_device (gsi,
3092 gimple_call_arg (stmt, 0));
3093 default:;
3094 }
3095
3096 /* Try the generic builtin folder. */
3097 bool ignore = (gimple_call_lhs (stmt) == NULL);
3098 tree result = fold_call_stmt (stmt, ignore);
3099 if (result)
3100 {
3101 if (ignore)
3102 STRIP_NOPS (result);
3103 else
3104 result = fold_convert (gimple_call_return_type (stmt), result);
3105 if (!update_call_from_tree (gsi, result))
3106 gimplify_and_update_call_from_tree (gsi, result);
3107 return true;
3108 }
3109
3110 return false;
3111 }
3112
3113 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3114 function calls to constants, where possible. */
3115
3116 static tree
3117 fold_internal_goacc_dim (const gimple *call)
3118 {
3119 int axis = get_oacc_ifn_dim_arg (call);
3120 int size = get_oacc_fn_dim_size (current_function_decl, axis);
3121 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3122 tree result = NULL_TREE;
3123
3124 /* If the size is 1, or we only want the size and it is not dynamic,
3125 we know the answer. */
3126 if (size == 1 || (!is_pos && size))
3127 {
3128 tree type = TREE_TYPE (gimple_call_lhs (call));
3129 result = build_int_cst (type, size - is_pos);
3130 }
3131
3132 return result;
3133 }
3134
3135 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3136 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3137 &var where var is only addressable because of such calls. */
3138
3139 bool
3140 optimize_atomic_compare_exchange_p (gimple *stmt)
3141 {
3142 if (gimple_call_num_args (stmt) != 6
3143 || !flag_inline_atomics
3144 || !optimize
3145 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3146 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3147 || !gimple_vdef (stmt)
3148 || !gimple_vuse (stmt))
3149 return false;
3150
3151 tree fndecl = gimple_call_fndecl (stmt);
3152 switch (DECL_FUNCTION_CODE (fndecl))
3153 {
3154 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3155 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3156 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3157 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3158 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3159 break;
3160 default:
3161 return false;
3162 }
3163
3164 tree expected = gimple_call_arg (stmt, 1);
3165 if (TREE_CODE (expected) != ADDR_EXPR
3166 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3167 return false;
3168
3169 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3170 if (!is_gimple_reg_type (etype)
3171 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3172 || TREE_THIS_VOLATILE (etype)
3173 || VECTOR_TYPE_P (etype)
3174 || TREE_CODE (etype) == COMPLEX_TYPE
3175 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3176 might not preserve all the bits. See PR71716. */
3177 || SCALAR_FLOAT_TYPE_P (etype)
3178 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3179 return false;
3180
3181 tree weak = gimple_call_arg (stmt, 3);
3182 if (!integer_zerop (weak) && !integer_onep (weak))
3183 return false;
3184
3185 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3186 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3187 machine_mode mode = TYPE_MODE (itype);
3188
3189 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3190 == CODE_FOR_nothing
3191 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3192 return false;
3193
3194 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3195 return false;
3196
3197 return true;
3198 }
3199
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the uintN_t type taken from the builtin's third parameter;
     the internal function returns a _Complex of it carrying both the
     old value and the success flag.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  /* Load the expected value into an SSA name so it can be passed by
     value to the internal function.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember the first emitted statement; the iterator is restored to
     it at the end so the caller refolds from there.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode the weak flag and access size into one constant: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the original call to the
     internal-function call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  if (gimple_call_lhs (stmt))
    {
      /* The original result is the success flag: the imaginary part of
	 the complex return value, converted to the lhs type.  */
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
			       gimple_assign_lhs (g));
    }
  /* Replace the original call with the last statement built above
     (either the internal call itself, or the flag conversion).  */
  gsi_replace (gsi, g, true);
  /* Store the real part back into the expected variable,
     view-converting if the types differ.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
3267
3268 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3269 doesn't fit into TYPE. The test for overflow should be regardless of
3270 -fwrapv, and even for unsigned types. */
3271
3272 bool
3273 arith_overflowed_p (enum tree_code code, const_tree type,
3274 const_tree arg0, const_tree arg1)
3275 {
3276 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3277 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3278 widest2_int_cst;
3279 widest2_int warg0 = widest2_int_cst (arg0);
3280 widest2_int warg1 = widest2_int_cst (arg1);
3281 widest2_int wres;
3282 switch (code)
3283 {
3284 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3285 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3286 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3287 default: gcc_unreachable ();
3288 }
3289 signop sign = TYPE_SIGN (type);
3290 if (sign == UNSIGNED && wi::neg_p (wres))
3291 return true;
3292 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3293 }
3294
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, only transformations that keep the existing
   statement in place are performed.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Warn in the dump when the devirtualized target contradicts
	     the type inheritance analysis.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  /* Strip the OBJ_TYPE_REF wrapper, making the call direct.  */
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  /* Exactly one possible target: devirtualize to it.  */
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it a
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call site is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement or insert new ones,
     which is not allowed when folding in-place.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE is the arithmetic operation for the UBSAN_CHECK_* and
	 *_OVERFLOW families; CPLX_RESULT marks the latter, whose result
	 is a complex value pairing the result with an overflow flag.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the object size is all-ones (presumably
	     "unknown" — confirm against the sanitizer lowering) or the
	     access provably fits, replacing the call with a no-op.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* TYPE is the scalar type the operation is checked in; for the
	     complex-returning IFNs it is the element type of the lhs.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* A UBSAN check that does overflow must be kept.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package the value and the overflow flag into the complex
		 result expected by the *_OVERFLOW lhs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3603
3604
3605 /* Return true whether NAME has a use on STMT. */
3606
3607 static bool
3608 has_use_on_stmt (tree name, gimple *stmt)
3609 {
3610 imm_use_iterator iter;
3611 use_operand_p use_p;
3612 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3613 if (USE_STMT (use_p) == stmt)
3614 return true;
3615 return false;
3616 }
3617
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      /* A comparison result can be put directly into the GIMPLE_COND,
	 unless the condition could throw and GIMPLE_CONDs may not.  */
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* An SSA name result becomes "name != 0".  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant result makes the condition trivially true/false.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result into SEQ and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For assignments rewrite the RHS, but when folding in-place only
	 if the new RHS does not need more operand slots than present.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, simplified arguments: update them in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Anything else: push the result to SEQ targeting the old lhs and
	 replace the whole statement by the sequence.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3768
3769 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3770
3771 static bool
3772 maybe_canonicalize_mem_ref_addr (tree *t)
3773 {
3774 bool res = false;
3775
3776 if (TREE_CODE (*t) == ADDR_EXPR)
3777 t = &TREE_OPERAND (*t, 0);
3778
3779 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3780 generic vector extension. The actual vector referenced is
3781 view-converted to an array type for this purpose. If the index
3782 is constant the canonical representation in the middle-end is a
3783 BIT_FIELD_REF so re-write the former to the latter here. */
3784 if (TREE_CODE (*t) == ARRAY_REF
3785 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3786 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3787 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3788 {
3789 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3790 if (VECTOR_TYPE_P (vtype))
3791 {
3792 tree low = array_ref_low_bound (*t);
3793 if (TREE_CODE (low) == INTEGER_CST)
3794 {
3795 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3796 {
3797 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3798 wi::to_widest (low));
3799 idx = wi::mul (idx, wi::to_widest
3800 (TYPE_SIZE (TREE_TYPE (*t))));
3801 widest_int ext
3802 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3803 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3804 {
3805 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3806 TREE_TYPE (*t),
3807 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3808 TYPE_SIZE (TREE_TYPE (*t)),
3809 wide_int_to_tree (sizetype, idx));
3810 res = true;
3811 }
3812 }
3813 }
3814 }
3815 }
3816
3817 while (handled_component_p (*t))
3818 t = &TREE_OPERAND (*t, 0);
3819
3820 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3821 of invariant addresses into a SSA name MEM_REF address. */
3822 if (TREE_CODE (*t) == MEM_REF
3823 || TREE_CODE (*t) == TARGET_MEM_REF)
3824 {
3825 tree addr = TREE_OPERAND (*t, 0);
3826 if (TREE_CODE (addr) == ADDR_EXPR
3827 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3828 || handled_component_p (TREE_OPERAND (addr, 0))))
3829 {
3830 tree base;
3831 HOST_WIDE_INT coffset;
3832 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3833 &coffset);
3834 if (!base)
3835 gcc_unreachable ();
3836
3837 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3838 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3839 TREE_OPERAND (*t, 1),
3840 size_int (coffset));
3841 res = true;
3842 }
3843 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3844 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3845 }
3846
3847 /* Canonicalize back MEM_REFs to plain reference trees if the object
3848 accessed is a decl that has the same access semantics as the MEM_REF. */
3849 if (TREE_CODE (*t) == MEM_REF
3850 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3851 && integer_zerop (TREE_OPERAND (*t, 1))
3852 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3853 {
3854 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3855 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3856 if (/* Same volatile qualification. */
3857 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3858 /* Same TBAA behavior with -fstrict-aliasing. */
3859 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3860 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3861 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3862 /* Same alignment. */
3863 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3864 /* We have to look out here to not drop a required conversion
3865 from the rhs to the lhs if *t appears on the lhs or vice-versa
3866 if it appears on the rhs. Thus require strict type
3867 compatibility. */
3868 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3869 {
3870 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3871 res = true;
3872 }
3873 }
3874
3875 /* Canonicalize TARGET_MEM_REF in particular with respect to
3876 the indexes becoming constant. */
3877 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3878 {
3879 tree tem = maybe_fold_tmr (*t);
3880 if (tem)
3881 {
3882 *t = tem;
3883 res = true;
3884 }
3885 }
3886
3887 return res;
3888 }
3889
3890 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3891 distinguishes both cases. */
3892
3893 static bool
3894 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3895 {
3896 bool changed = false;
3897 gimple *stmt = gsi_stmt (*gsi);
3898 bool nowarning = gimple_no_warning_p (stmt);
3899 unsigned i;
3900 fold_defer_overflow_warnings ();
3901
3902 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3903 after propagation.
3904 ??? This shouldn't be done in generic folding but in the
3905 propagation helpers which also know whether an address was
3906 propagated.
3907 Also canonicalize operand order. */
3908 switch (gimple_code (stmt))
3909 {
3910 case GIMPLE_ASSIGN:
3911 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3912 {
3913 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3914 if ((REFERENCE_CLASS_P (*rhs)
3915 || TREE_CODE (*rhs) == ADDR_EXPR)
3916 && maybe_canonicalize_mem_ref_addr (rhs))
3917 changed = true;
3918 tree *lhs = gimple_assign_lhs_ptr (stmt);
3919 if (REFERENCE_CLASS_P (*lhs)
3920 && maybe_canonicalize_mem_ref_addr (lhs))
3921 changed = true;
3922 }
3923 else
3924 {
3925 /* Canonicalize operand order. */
3926 enum tree_code code = gimple_assign_rhs_code (stmt);
3927 if (TREE_CODE_CLASS (code) == tcc_comparison
3928 || commutative_tree_code (code)
3929 || commutative_ternary_tree_code (code))
3930 {
3931 tree rhs1 = gimple_assign_rhs1 (stmt);
3932 tree rhs2 = gimple_assign_rhs2 (stmt);
3933 if (tree_swap_operands_p (rhs1, rhs2, false))
3934 {
3935 gimple_assign_set_rhs1 (stmt, rhs2);
3936 gimple_assign_set_rhs2 (stmt, rhs1);
3937 if (TREE_CODE_CLASS (code) == tcc_comparison)
3938 gimple_assign_set_rhs_code (stmt,
3939 swap_tree_comparison (code));
3940 changed = true;
3941 }
3942 }
3943 }
3944 break;
3945 case GIMPLE_CALL:
3946 {
3947 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3948 {
3949 tree *arg = gimple_call_arg_ptr (stmt, i);
3950 if (REFERENCE_CLASS_P (*arg)
3951 && maybe_canonicalize_mem_ref_addr (arg))
3952 changed = true;
3953 }
3954 tree *lhs = gimple_call_lhs_ptr (stmt);
3955 if (*lhs
3956 && REFERENCE_CLASS_P (*lhs)
3957 && maybe_canonicalize_mem_ref_addr (lhs))
3958 changed = true;
3959 break;
3960 }
3961 case GIMPLE_ASM:
3962 {
3963 gasm *asm_stmt = as_a <gasm *> (stmt);
3964 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3965 {
3966 tree link = gimple_asm_output_op (asm_stmt, i);
3967 tree op = TREE_VALUE (link);
3968 if (REFERENCE_CLASS_P (op)
3969 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3970 changed = true;
3971 }
3972 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3973 {
3974 tree link = gimple_asm_input_op (asm_stmt, i);
3975 tree op = TREE_VALUE (link);
3976 if ((REFERENCE_CLASS_P (op)
3977 || TREE_CODE (op) == ADDR_EXPR)
3978 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3979 changed = true;
3980 }
3981 }
3982 break;
3983 case GIMPLE_DEBUG:
3984 if (gimple_debug_bind_p (stmt))
3985 {
3986 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3987 if (*val
3988 && (REFERENCE_CLASS_P (*val)
3989 || TREE_CODE (*val) == ADDR_EXPR)
3990 && maybe_canonicalize_mem_ref_addr (val))
3991 changed = true;
3992 }
3993 break;
3994 case GIMPLE_COND:
3995 {
3996 /* Canonicalize operand order. */
3997 tree lhs = gimple_cond_lhs (stmt);
3998 tree rhs = gimple_cond_rhs (stmt);
3999 if (tree_swap_operands_p (lhs, rhs, false))
4000 {
4001 gcond *gc = as_a <gcond *> (stmt);
4002 gimple_cond_set_lhs (gc, rhs);
4003 gimple_cond_set_rhs (gc, lhs);
4004 gimple_cond_set_code (gc,
4005 swap_tree_comparison (gimple_cond_code (gc)));
4006 changed = true;
4007 }
4008 }
4009 default:;
4010 }
4011
4012 /* Dispatch to pattern-based folding. */
4013 if (!inplace
4014 || is_gimple_assign (stmt)
4015 || gimple_code (stmt) == GIMPLE_COND)
4016 {
4017 gimple_seq seq = NULL;
4018 code_helper rcode;
4019 tree ops[3] = {};
4020 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4021 valueize, valueize))
4022 {
4023 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4024 changed = true;
4025 else
4026 gimple_seq_discard (seq);
4027 }
4028 }
4029
4030 stmt = gsi_stmt (*gsi);
4031
4032 /* Fold the main computation performed by the statement. */
4033 switch (gimple_code (stmt))
4034 {
4035 case GIMPLE_ASSIGN:
4036 {
4037 /* Try to canonicalize for boolean-typed X the comparisons
4038 X == 0, X == 1, X != 0, and X != 1. */
4039 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4040 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4041 {
4042 tree lhs = gimple_assign_lhs (stmt);
4043 tree op1 = gimple_assign_rhs1 (stmt);
4044 tree op2 = gimple_assign_rhs2 (stmt);
4045 tree type = TREE_TYPE (op1);
4046
4047 /* Check whether the comparison operands are of the same boolean
4048 type as the result type is.
4049 Check that second operand is an integer-constant with value
4050 one or zero. */
4051 if (TREE_CODE (op2) == INTEGER_CST
4052 && (integer_zerop (op2) || integer_onep (op2))
4053 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4054 {
4055 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4056 bool is_logical_not = false;
4057
4058 /* X == 0 and X != 1 is a logical-not.of X
4059 X == 1 and X != 0 is X */
4060 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4061 || (cmp_code == NE_EXPR && integer_onep (op2)))
4062 is_logical_not = true;
4063
4064 if (is_logical_not == false)
4065 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4066 /* Only for one-bit precision typed X the transformation
4067 !X -> ~X is valied. */
4068 else if (TYPE_PRECISION (type) == 1)
4069 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4070 /* Otherwise we use !X -> X ^ 1. */
4071 else
4072 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4073 build_int_cst (type, 1));
4074 changed = true;
4075 break;
4076 }
4077 }
4078
4079 unsigned old_num_ops = gimple_num_ops (stmt);
4080 tree lhs = gimple_assign_lhs (stmt);
4081 tree new_rhs = fold_gimple_assign (gsi);
4082 if (new_rhs
4083 && !useless_type_conversion_p (TREE_TYPE (lhs),
4084 TREE_TYPE (new_rhs)))
4085 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4086 if (new_rhs
4087 && (!inplace
4088 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4089 {
4090 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4091 changed = true;
4092 }
4093 break;
4094 }
4095
4096 case GIMPLE_CALL:
4097 changed |= gimple_fold_call (gsi, inplace);
4098 break;
4099
4100 case GIMPLE_ASM:
4101 /* Fold *& in asm operands. */
4102 {
4103 gasm *asm_stmt = as_a <gasm *> (stmt);
4104 size_t noutputs;
4105 const char **oconstraints;
4106 const char *constraint;
4107 bool allows_mem, allows_reg;
4108
4109 noutputs = gimple_asm_noutputs (asm_stmt);
4110 oconstraints = XALLOCAVEC (const char *, noutputs);
4111
4112 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4113 {
4114 tree link = gimple_asm_output_op (asm_stmt, i);
4115 tree op = TREE_VALUE (link);
4116 oconstraints[i]
4117 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4118 if (REFERENCE_CLASS_P (op)
4119 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4120 {
4121 TREE_VALUE (link) = op;
4122 changed = true;
4123 }
4124 }
4125 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4126 {
4127 tree link = gimple_asm_input_op (asm_stmt, i);
4128 tree op = TREE_VALUE (link);
4129 constraint
4130 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4131 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4132 oconstraints, &allows_mem, &allows_reg);
4133 if (REFERENCE_CLASS_P (op)
4134 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4135 != NULL_TREE)
4136 {
4137 TREE_VALUE (link) = op;
4138 changed = true;
4139 }
4140 }
4141 }
4142 break;
4143
4144 case GIMPLE_DEBUG:
4145 if (gimple_debug_bind_p (stmt))
4146 {
4147 tree val = gimple_debug_bind_get_value (stmt);
4148 if (val
4149 && REFERENCE_CLASS_P (val))
4150 {
4151 tree tem = maybe_fold_reference (val, false);
4152 if (tem)
4153 {
4154 gimple_debug_bind_set_value (stmt, tem);
4155 changed = true;
4156 }
4157 }
4158 else if (val
4159 && TREE_CODE (val) == ADDR_EXPR)
4160 {
4161 tree ref = TREE_OPERAND (val, 0);
4162 tree tem = maybe_fold_reference (ref, false);
4163 if (tem)
4164 {
4165 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4166 gimple_debug_bind_set_value (stmt, tem);
4167 changed = true;
4168 }
4169 }
4170 }
4171 break;
4172
4173 default:;
4174 }
4175
4176 stmt = gsi_stmt (*gsi);
4177
4178 /* Fold *& on the lhs. */
4179 if (gimple_has_lhs (stmt))
4180 {
4181 tree lhs = gimple_get_lhs (stmt);
4182 if (lhs && REFERENCE_CLASS_P (lhs))
4183 {
4184 tree new_lhs = maybe_fold_reference (lhs, true);
4185 if (new_lhs)
4186 {
4187 gimple_set_lhs (stmt, new_lhs);
4188 changed = true;
4189 }
4190 }
4191 }
4192
4193 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4194 return changed;
4195 }
4196
/* Valueization callback that ends up not following SSA edges.  */
4198
tree
no_follow_ssa_edges (tree)
{
  /* Unconditionally refuse to valueize, so callers never look through
     SSA_NAME definitions.  */
  return NULL_TREE;
}
4204
4205 /* Valueization callback that ends up following single-use SSA edges only. */
4206
4207 tree
4208 follow_single_use_edges (tree val)
4209 {
4210 if (TREE_CODE (val) == SSA_NAME
4211 && !has_single_use (val))
4212 return NULL_TREE;
4213 return val;
4214 }
4215
4216 /* Fold the statement pointed to by GSI. In some cases, this function may
4217 replace the whole statement with a new one. Returns true iff folding
4218 makes any changes.
4219 The statement pointed to by GSI should be in valid gimple form but may
4220 be in unfolded state as resulting from for example constant propagation
4221 which can produce *&x = 0. */
4222
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Allow full statement replacement (inplace == false) but do not
     follow SSA edges when valueizing operands.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4228
/* Like the overload above, but valueize operands through the
   caller-provided VALUEIZE callback.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4234
4235 /* Perform the minimal folding on statement *GSI. Only operations like
4236 *&x created by constant propagation are handled. The statement cannot
4237 be replaced with a new one. Return true if the statement was
4238 changed, false otherwise.
4239 The statement *GSI should be in valid gimple form but may
4240 be in unfolded state as resulting from for example constant propagation
4241 which can produce *&x = 0. */
4242
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4251
4252 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4253 if EXPR is null or we don't know how.
4254 If non-null, the result always has boolean type. */
4255
4256 static tree
4257 canonicalize_bool (tree expr, bool invert)
4258 {
4259 if (!expr)
4260 return NULL_TREE;
4261 else if (invert)
4262 {
4263 if (integer_nonzerop (expr))
4264 return boolean_false_node;
4265 else if (integer_zerop (expr))
4266 return boolean_true_node;
4267 else if (TREE_CODE (expr) == SSA_NAME)
4268 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4269 build_int_cst (TREE_TYPE (expr), 0));
4270 else if (COMPARISON_CLASS_P (expr))
4271 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4272 boolean_type_node,
4273 TREE_OPERAND (expr, 0),
4274 TREE_OPERAND (expr, 1));
4275 else
4276 return NULL_TREE;
4277 }
4278 else
4279 {
4280 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4281 return expr;
4282 if (integer_nonzerop (expr))
4283 return boolean_true_node;
4284 else if (integer_zerop (expr))
4285 return boolean_false_node;
4286 else if (TREE_CODE (expr) == SSA_NAME)
4287 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4288 build_int_cst (TREE_TYPE (expr), 0));
4289 else if (COMPARISON_CLASS_P (expr))
4290 return fold_build2 (TREE_CODE (expr),
4291 boolean_type_node,
4292 TREE_OPERAND (expr, 0),
4293 TREE_OPERAND (expr, 1));
4294 else
4295 return NULL_TREE;
4296 }
4297 }
4298
4299 /* Check to see if a boolean expression EXPR is logically equivalent to the
4300 comparison (OP1 CODE OP2). Check for various identities involving
4301 SSA_NAMEs. */
4302
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): NAME has the same truth value
	     as the comparison that defines it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): the inverted truth value.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4356
4357 /* Check to see if two boolean expressions OP1 and OP2 are logically
4358 equivalent. */
4359
4360 static bool
4361 same_bool_result_p (const_tree op1, const_tree op2)
4362 {
4363 /* Simple cases first. */
4364 if (operand_equal_p (op1, op2, 0))
4365 return true;
4366
4367 /* Check the cases where at least one of the operands is a comparison.
4368 These are a bit smarter than operand_equal_p in that they apply some
4369 identifies on SSA_NAMEs. */
4370 if (COMPARISON_CLASS_P (op2)
4371 && same_bool_comparison_p (op1, TREE_CODE (op2),
4372 TREE_OPERAND (op2, 0),
4373 TREE_OPERAND (op2, 1)))
4374 return true;
4375 if (COMPARISON_CLASS_P (op1)
4376 && same_bool_comparison_p (op2, TREE_CODE (op1),
4377 TREE_OPERAND (op1, 0),
4378 TREE_OPERAND (op1, 1)))
4379 return true;
4380
4381 /* Default case. */
4382 return false;
4383 }
4384
4385 /* Forward declarations for some mutually recursive functions. */
4386
4387 static tree
4388 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4389 enum tree_code code2, tree op2a, tree op2b);
4390 static tree
4391 and_var_with_comparison (tree var, bool invert,
4392 enum tree_code code2, tree op2a, tree op2b);
4393 static tree
4394 and_var_with_comparison_1 (gimple *stmt,
4395 enum tree_code code2, tree op2a, tree op2b);
4396 static tree
4397 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4398 enum tree_code code2, tree op2a, tree op2b);
4399 static tree
4400 or_var_with_comparison (tree var, bool invert,
4401 enum tree_code code2, tree op2a, tree op2b);
4402 static tree
4403 or_var_with_comparison_1 (gimple *stmt,
4404 enum tree_code code2, tree op2a, tree op2b);
4405
4406 /* Helper function for and_comparisons_1: try to simplify the AND of the
4407 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4408 If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */
4410
4411 static tree
4412 and_var_with_comparison (tree var, bool invert,
4413 enum tree_code code2, tree op2a, tree op2b)
4414 {
4415 tree t;
4416 gimple *stmt = SSA_NAME_DEF_STMT (var);
4417
4418 /* We can only deal with variables whose definitions are assignments. */
4419 if (!is_gimple_assign (stmt))
4420 return NULL_TREE;
4421
4422 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4423 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4424 Then we only have to consider the simpler non-inverted cases. */
4425 if (invert)
4426 t = or_var_with_comparison_1 (stmt,
4427 invert_tree_comparison (code2, false),
4428 op2a, op2b);
4429 else
4430 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4431 return canonicalize_bool (t, invert);
4432 }
4433
4434 /* Try to simplify the AND of the ssa variable defined by the assignment
4435 STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
4437
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests OP2A for truth.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests OP2A for falsehood.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4596
4597 /* Try to simplify the AND of two comparisons defined by
4598 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4599 If this can be done without constructing an intermediate value,
4600 return the resulting tree; otherwise NULL_TREE is returned.
4601 This function is deliberately asymmetric as it recurses on SSA_DEFs
4602 in the first comparison but not the second. */
4603
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the inequality, it is
	     the more restrictive one; return it.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4850
4851 /* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4853 If this can be simplified to a single expression (without requiring
4854 introducing more SSA variables to hold intermediate values),
4855 return the resulting tree. Otherwise return NULL_TREE.
4856 If the result expression is non-null, it has boolean type. */
4857
4858 tree
4859 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4860 enum tree_code code2, tree op2a, tree op2b)
4861 {
4862 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4863 if (t)
4864 return t;
4865 else
4866 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4867 }
4868
4869 /* Helper function for or_comparisons_1: try to simplify the OR of the
4870 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4871 If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */
4873
4874 static tree
4875 or_var_with_comparison (tree var, bool invert,
4876 enum tree_code code2, tree op2a, tree op2b)
4877 {
4878 tree t;
4879 gimple *stmt = SSA_NAME_DEF_STMT (var);
4880
4881 /* We can only deal with variables whose definitions are assignments. */
4882 if (!is_gimple_assign (stmt))
4883 return NULL_TREE;
4884
4885 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4886 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4887 Then we only have to consider the simpler non-inverted cases. */
4888 if (invert)
4889 t = and_var_with_comparison_1 (stmt,
4890 invert_tree_comparison (code2, false),
4891 op2a, op2b);
4892 else
4893 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4894 return canonicalize_bool (t, invert);
4895 }
4896
4897 /* Try to simplify the OR of the ssa variable defined by the assignment
4898 STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
4900
4901 static tree
4902 or_var_with_comparison_1 (gimple *stmt,
4903 enum tree_code code2, tree op2a, tree op2b)
4904 {
4905 tree var = gimple_assign_lhs (stmt);
4906 tree true_test_var = NULL_TREE;
4907 tree false_test_var = NULL_TREE;
4908 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4909
4910 /* Check for identities like (var OR (var != 0)) => true . */
4911 if (TREE_CODE (op2a) == SSA_NAME
4912 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4913 {
4914 if ((code2 == NE_EXPR && integer_zerop (op2b))
4915 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4916 {
4917 true_test_var = op2a;
4918 if (var == true_test_var)
4919 return var;
4920 }
4921 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4922 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4923 {
4924 false_test_var = op2a;
4925 if (var == false_test_var)
4926 return boolean_true_node;
4927 }
4928 }
4929
4930 /* If the definition is a comparison, recurse on it. */
4931 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4932 {
4933 tree t = or_comparisons_1 (innercode,
4934 gimple_assign_rhs1 (stmt),
4935 gimple_assign_rhs2 (stmt),
4936 code2,
4937 op2a,
4938 op2b);
4939 if (t)
4940 return t;
4941 }
4942
4943 /* If the definition is an AND or OR expression, we may be able to
4944 simplify by reassociating. */
4945 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4946 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4947 {
4948 tree inner1 = gimple_assign_rhs1 (stmt);
4949 tree inner2 = gimple_assign_rhs2 (stmt);
4950 gimple *s;
4951 tree t;
4952 tree partial = NULL_TREE;
4953 bool is_or = (innercode == BIT_IOR_EXPR);
4954
4955 /* Check for boolean identities that don't require recursive examination
4956 of inner1/inner2:
4957 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4958 inner1 OR (inner1 AND inner2) => inner1
4959 !inner1 OR (inner1 OR inner2) => true
4960 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4961 */
4962 if (inner1 == true_test_var)
4963 return (is_or ? var : inner1);
4964 else if (inner2 == true_test_var)
4965 return (is_or ? var : inner2);
4966 else if (inner1 == false_test_var)
4967 return (is_or
4968 ? boolean_true_node
4969 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4970 else if (inner2 == false_test_var)
4971 return (is_or
4972 ? boolean_true_node
4973 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4974
4975 /* Next, redistribute/reassociate the OR across the inner tests.
4976 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4977 if (TREE_CODE (inner1) == SSA_NAME
4978 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4979 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4980 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4981 gimple_assign_rhs1 (s),
4982 gimple_assign_rhs2 (s),
4983 code2, op2a, op2b)))
4984 {
4985 /* Handle the OR case, where we are reassociating:
4986 (inner1 OR inner2) OR (op2a code2 op2b)
4987 => (t OR inner2)
4988 If the partial result t is a constant, we win. Otherwise
4989 continue on to try reassociating with the other inner test. */
4990 if (is_or)
4991 {
4992 if (integer_onep (t))
4993 return boolean_true_node;
4994 else if (integer_zerop (t))
4995 return inner2;
4996 }
4997
4998 /* Handle the AND case, where we are redistributing:
4999 (inner1 AND inner2) OR (op2a code2 op2b)
5000 => (t AND (inner2 OR (op2a code op2b))) */
5001 else if (integer_zerop (t))
5002 return boolean_false_node;
5003
5004 /* Save partial result for later. */
5005 partial = t;
5006 }
5007
5008 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5009 if (TREE_CODE (inner2) == SSA_NAME
5010 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5011 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5012 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5013 gimple_assign_rhs1 (s),
5014 gimple_assign_rhs2 (s),
5015 code2, op2a, op2b)))
5016 {
5017 /* Handle the OR case, where we are reassociating:
5018 (inner1 OR inner2) OR (op2a code2 op2b)
5019 => (inner1 OR t)
5020 => (t OR partial) */
5021 if (is_or)
5022 {
5023 if (integer_zerop (t))
5024 return inner1;
5025 else if (integer_onep (t))
5026 return boolean_true_node;
5027 /* If both are the same, we can apply the identity
5028 (x OR x) == x. */
5029 else if (partial && same_bool_result_p (t, partial))
5030 return t;
5031 }
5032
5033 /* Handle the AND case, where we are redistributing:
5034 (inner1 AND inner2) OR (op2a code2 op2b)
5035 => (t AND (inner1 OR (op2a code2 op2b)))
5036 => (t AND partial) */
5037 else
5038 {
5039 if (integer_zerop (t))
5040 return boolean_false_node;
5041 else if (partial)
5042 {
5043 /* We already got a simplification for the other
5044 operand to the redistributed AND expression. The
5045 interesting case is when at least one is true.
5046 Or, if both are the same, we can apply the identity
5047 (x AND x) == x. */
5048 if (integer_onep (partial))
5049 return t;
5050 else if (integer_onep (t))
5051 return partial;
5052 else if (same_bool_result_p (t, partial))
5053 return t;
5054 }
5055 }
5056 }
5057 }
5058 return NULL_TREE;
5059 }
5060
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.
	 Swapping CODE2 compensates for the swapped operand order.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* VAL is whether op1b itself satisfies (x CODE2 op2b); if so
	     the union of the two ranges is everything.  */
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  /* Here the comparison direction is reversed because we test
	     whether op2b satisfies (x CODE1 op1b).  */
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If op1b already satisfies CODE2, the equality adds nothing.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  (x < c) OR (x > c) excludes only c,
	 i.e. it is (x != c).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests for NAME being
	 false, i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* A true argument makes the whole OR true; every
			     other argument must agree on that.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5314
5315 /* Try to simplify the OR of two comparisons, specified by
5316 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5317 If this can be simplified to a single expression (without requiring
5318 introducing more SSA variables to hold intermediate values),
5319 return the resulting tree. Otherwise return NULL_TREE.
5320 If the result expression is non-null, it has boolean type. */
5321
5322 tree
5323 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5324 enum tree_code code2, tree op2a, tree op2b)
5325 {
5326 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5327 if (t)
5328 return t;
5329 else
5330 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5331 }
5332
5333
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	/* Let the caller materialize a non-gimple-value result.  */
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to the statement-kind specific folders below.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-populated vector CONSTRUCTOR of constants folds
		 to a VECTOR_CST.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  /* Try to read the value from a constant initializer.  */
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Canonicalize the constant to operand position 1.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Not both constant; fold the identities that cannot
		   overflow regardless of the other operand.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Only fold the UBSAN check when the constant result provably
	       does not overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5640
5641 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5642 Returns NULL_TREE if folding to a constant is not possible, otherwise
5643 returns a constant according to is_gimple_min_invariant. */
5644
5645 tree
5646 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5647 {
5648 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5649 if (res && is_gimple_min_invariant (res))
5650 return res;
5651 return NULL_TREE;
5652 }
5653
5654
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the constant MEM_REF offset into *BIT_OFFSET before
	 stripping the address.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Only handle references with a known constant position.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
5737
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    /* Recurse into the element's initializer for the inner bits.  */
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5797
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	/* Flexible array member: extends to the end of the object.  */
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5860
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the aggregate kind; vectors are indexed like
	 arrays.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5916
5917 /* Return the tree representing the element referenced by T if T is an
5918 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5919 names using VALUEIZE. Return NULL_TREE otherwise. */
5920
5921 tree
5922 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5923 {
5924 tree ctor, idx, base;
5925 HOST_WIDE_INT offset, size, max_size;
5926 tree tem;
5927 bool reverse;
5928
5929 if (TREE_THIS_VOLATILE (t))
5930 return NULL_TREE;
5931
5932 if (DECL_P (t))
5933 return get_symbol_constant_value (t);
5934
5935 tem = fold_read_from_constant_string (t);
5936 if (tem)
5937 return tem;
5938
5939 switch (TREE_CODE (t))
5940 {
5941 case ARRAY_REF:
5942 case ARRAY_RANGE_REF:
5943 /* Constant indexes are handled well by get_base_constructor.
5944 Only special case variable offsets.
5945 FIXME: This code can't handle nested references with variable indexes
5946 (they will be handled only by iteration of ccp). Perhaps we can bring
5947 get_ref_base_and_extent here and make it use a valueize callback. */
5948 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5949 && valueize
5950 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5951 && TREE_CODE (idx) == INTEGER_CST)
5952 {
5953 tree low_bound, unit_size;
5954
5955 /* If the resulting bit-offset is constant, track it. */
5956 if ((low_bound = array_ref_low_bound (t),
5957 TREE_CODE (low_bound) == INTEGER_CST)
5958 && (unit_size = array_ref_element_size (t),
5959 tree_fits_uhwi_p (unit_size)))
5960 {
5961 offset_int woffset
5962 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5963 TYPE_PRECISION (TREE_TYPE (idx)));
5964
5965 if (wi::fits_shwi_p (woffset))
5966 {
5967 offset = woffset.to_shwi ();
5968 /* TODO: This code seems wrong, multiply then check
5969 to see if it fits. */
5970 offset *= tree_to_uhwi (unit_size);
5971 offset *= BITS_PER_UNIT;
5972
5973 base = TREE_OPERAND (t, 0);
5974 ctor = get_base_constructor (base, &offset, valueize);
5975 /* Empty constructor. Always fold to 0. */
5976 if (ctor == error_mark_node)
5977 return build_zero_cst (TREE_TYPE (t));
5978 /* Out of bound array access. Value is undefined,
5979 but don't fold. */
5980 if (offset < 0)
5981 return NULL_TREE;
5982 /* We can not determine ctor. */
5983 if (!ctor)
5984 return NULL_TREE;
5985 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5986 tree_to_uhwi (unit_size)
5987 * BITS_PER_UNIT,
5988 base);
5989 }
5990 }
5991 }
5992 /* Fallthru. */
5993
5994 case COMPONENT_REF:
5995 case BIT_FIELD_REF:
5996 case TARGET_MEM_REF:
5997 case MEM_REF:
5998 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5999 ctor = get_base_constructor (base, &offset, valueize);
6000
6001 /* Empty constructor. Always fold to 0. */
6002 if (ctor == error_mark_node)
6003 return build_zero_cst (TREE_TYPE (t));
6004 /* We do not know precise address. */
6005 if (max_size == -1 || max_size != size)
6006 return NULL_TREE;
6007 /* We can not determine ctor. */
6008 if (!ctor)
6009 return NULL_TREE;
6010
6011 /* Out of bound array access. Value is undefined, but don't fold. */
6012 if (offset < 0)
6013 return NULL_TREE;
6014
6015 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6016 base);
6017
6018 case REALPART_EXPR:
6019 case IMAGPART_EXPR:
6020 {
6021 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6022 if (c && TREE_CODE (c) == COMPLEX_CST)
6023 return fold_build1_loc (EXPR_LOCATION (t),
6024 TREE_CODE (t), TREE_TYPE (t), c);
6025 break;
6026 }
6027
6028 default:
6029 break;
6030 }
6031
6032 return NULL_TREE;
6033 }
6034
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return NULL_TREE
   otherwise.  Convenience wrapper around fold_const_aggregate_ref_1 that
   performs no SSA name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
6040
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* error_mark_node here means the ctor was not streamed in;
	 that can only legitimately happen in LTO mode.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Compute the bit offset of entry TOKEN within the vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6150
6151 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6152 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6153 KNOWN_BINFO carries the binfo describing the true type of
6154 OBJ_TYPE_REF_OBJECT(REF).
6155 Set CAN_REFER if non-NULL to false if method
6156 is not referable or if the virtual table is ill-formed (such as rewriten
6157 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6158
6159 tree
6160 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6161 bool *can_refer)
6162 {
6163 unsigned HOST_WIDE_INT offset;
6164 tree v;
6165
6166 v = BINFO_VTABLE (known_binfo);
6167 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6168 if (!v)
6169 return NULL_TREE;
6170
6171 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6172 {
6173 if (can_refer)
6174 *can_refer = false;
6175 return NULL_TREE;
6176 }
6177 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6178 }
6179
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions of the pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
	 /* Use the array's lower bound as the index when it is known.  */
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access is within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must point exactly at the imaginary part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6299
6300 /* Return true if CODE is an operation that when operating on signed
6301 integer types involves undefined behavior on overflow and the
6302 operation can be expressed with unsigned arithmetic. */
6303
6304 bool
6305 arith_code_with_undefined_signed_overflow (tree_code code)
6306 {
6307 switch (code)
6308 {
6309 case PLUS_EXPR:
6310 case MINUS_EXPR:
6311 case MULT_EXPR:
6312 case NEGATE_EXPR:
6313 case POINTER_PLUS_EXPR:
6314 return true;
6315 default:
6316 return false;
6317 }
6318 }
6319
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand to the unsigned counterpart type; conversion
     statements accumulate in STMTS ahead of the rewritten STMT.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is assigned from it
     by the trailing conversion statement below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* After converting the pointer operand to an unsigned integer,
     pointer addition becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6356
6357
6358 /* The valueization hook we use for the gimple_build API simplification.
6359 This makes us match fold_buildN behavior by only combining with
6360 statements in the sequence(s) we are currently building. */
6361
6362 static tree
6363 gimple_build_valueize (tree op)
6364 {
6365 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6366 return op;
6367 return NULL_TREE;
6368 }
6369
6370 /* Build the expression CODE OP0 of type TYPE with location LOC,
6371 simplifying it first if possible. Returns the built
6372 expression value and appends statements possibly defining it
6373 to SEQ. */
6374
6375 tree
6376 gimple_build (gimple_seq *seq, location_t loc,
6377 enum tree_code code, tree type, tree op0)
6378 {
6379 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6380 if (!res)
6381 {
6382 res = create_tmp_reg_or_ssa_name (type);
6383 gimple *stmt;
6384 if (code == REALPART_EXPR
6385 || code == IMAGPART_EXPR
6386 || code == VIEW_CONVERT_EXPR)
6387 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6388 else
6389 stmt = gimple_build_assign (res, code, op0);
6390 gimple_set_location (stmt, loc);
6391 gimple_seq_add_stmt_without_update (seq, stmt);
6392 }
6393 return res;
6394 }
6395
6396 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6397 simplifying it first if possible. Returns the built
6398 expression value and appends statements possibly defining it
6399 to SEQ. */
6400
6401 tree
6402 gimple_build (gimple_seq *seq, location_t loc,
6403 enum tree_code code, tree type, tree op0, tree op1)
6404 {
6405 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6406 if (!res)
6407 {
6408 res = create_tmp_reg_or_ssa_name (type);
6409 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6410 gimple_set_location (stmt, loc);
6411 gimple_seq_add_stmt_without_update (seq, stmt);
6412 }
6413 return res;
6414 }
6415
6416 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6417 simplifying it first if possible. Returns the built
6418 expression value and appends statements possibly defining it
6419 to SEQ. */
6420
6421 tree
6422 gimple_build (gimple_seq *seq, location_t loc,
6423 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6424 {
6425 tree res = gimple_simplify (code, type, op0, op1, op2,
6426 seq, gimple_build_valueize);
6427 if (!res)
6428 {
6429 res = create_tmp_reg_or_ssa_name (type);
6430 gimple *stmt;
6431 if (code == BIT_FIELD_REF)
6432 stmt = gimple_build_assign (res, code,
6433 build3 (code, type, op0, op1, op2));
6434 else
6435 stmt = gimple_build_assign (res, code, op0, op1, op2);
6436 gimple_set_location (stmt, loc);
6437 gimple_seq_add_stmt_without_update (seq, stmt);
6438 }
6439 return res;
6440 }
6441
6442 /* Build the call FN (ARG0) with a result of type TYPE
6443 (or no result if TYPE is void) with location LOC,
6444 simplifying it first if possible. Returns the built
6445 expression value (or NULL_TREE if TYPE is void) and appends
6446 statements possibly defining it to SEQ. */
6447
6448 tree
6449 gimple_build (gimple_seq *seq, location_t loc,
6450 enum built_in_function fn, tree type, tree arg0)
6451 {
6452 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6453 if (!res)
6454 {
6455 tree decl = builtin_decl_implicit (fn);
6456 gimple *stmt = gimple_build_call (decl, 1, arg0);
6457 if (!VOID_TYPE_P (type))
6458 {
6459 res = create_tmp_reg_or_ssa_name (type);
6460 gimple_call_set_lhs (stmt, res);
6461 }
6462 gimple_set_location (stmt, loc);
6463 gimple_seq_add_stmt_without_update (seq, stmt);
6464 }
6465 return res;
6466 }
6467
6468 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6469 (or no result if TYPE is void) with location LOC,
6470 simplifying it first if possible. Returns the built
6471 expression value (or NULL_TREE if TYPE is void) and appends
6472 statements possibly defining it to SEQ. */
6473
6474 tree
6475 gimple_build (gimple_seq *seq, location_t loc,
6476 enum built_in_function fn, tree type, tree arg0, tree arg1)
6477 {
6478 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6479 if (!res)
6480 {
6481 tree decl = builtin_decl_implicit (fn);
6482 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6483 if (!VOID_TYPE_P (type))
6484 {
6485 res = create_tmp_reg_or_ssa_name (type);
6486 gimple_call_set_lhs (stmt, res);
6487 }
6488 gimple_set_location (stmt, loc);
6489 gimple_seq_add_stmt_without_update (seq, stmt);
6490 }
6491 return res;
6492 }
6493
6494 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6495 (or no result if TYPE is void) with location LOC,
6496 simplifying it first if possible. Returns the built
6497 expression value (or NULL_TREE if TYPE is void) and appends
6498 statements possibly defining it to SEQ. */
6499
6500 tree
6501 gimple_build (gimple_seq *seq, location_t loc,
6502 enum built_in_function fn, tree type,
6503 tree arg0, tree arg1, tree arg2)
6504 {
6505 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6506 seq, gimple_build_valueize);
6507 if (!res)
6508 {
6509 tree decl = builtin_decl_implicit (fn);
6510 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6511 if (!VOID_TYPE_P (type))
6512 {
6513 res = create_tmp_reg_or_ssa_name (type);
6514 gimple_call_set_lhs (stmt, res);
6515 }
6516 gimple_set_location (stmt, loc);
6517 gimple_seq_add_stmt_without_update (seq, stmt);
6518 }
6519 return res;
6520 }
6521
6522 /* Build the conversion (TYPE) OP with a result of type TYPE
6523 with location LOC if such conversion is neccesary in GIMPLE,
6524 simplifying it first.
6525 Returns the built expression value and appends
6526 statements possibly defining it to SEQ. */
6527
6528 tree
6529 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6530 {
6531 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6532 return op;
6533 return gimple_build (seq, loc, NOP_EXPR, type, op);
6534 }
6535
6536 /* Build the conversion (ptrofftype) OP with a result of a type
6537 compatible with ptrofftype with location LOC if such conversion
6538 is neccesary in GIMPLE, simplifying it first.
6539 Returns the built expression value and appends
6540 statements possibly defining it to SEQ. */
6541
6542 tree
6543 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6544 {
6545 if (ptrofftype_p (TREE_TYPE (op)))
6546 return op;
6547 return gimple_convert (seq, loc, sizetype, op);
6548 }
6549
6550 /* Return true if the result of assignment STMT is known to be non-negative.
6551 If the return value is based on the assumption that signed overflow is
6552 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6553 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6554
6555 static bool
6556 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6557 int depth)
6558 {
6559 enum tree_code code = gimple_assign_rhs_code (stmt);
6560 switch (get_gimple_rhs_class (code))
6561 {
6562 case GIMPLE_UNARY_RHS:
6563 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6564 gimple_expr_type (stmt),
6565 gimple_assign_rhs1 (stmt),
6566 strict_overflow_p, depth);
6567 case GIMPLE_BINARY_RHS:
6568 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6569 gimple_expr_type (stmt),
6570 gimple_assign_rhs1 (stmt),
6571 gimple_assign_rhs2 (stmt),
6572 strict_overflow_p, depth);
6573 case GIMPLE_TERNARY_RHS:
6574 return false;
6575 case GIMPLE_SINGLE_RHS:
6576 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6577 strict_overflow_p, depth);
6578 case GIMPLE_INVALID_RHS:
6579 break;
6580 }
6581 gcc_unreachable ();
6582 }
6583
6584 /* Return true if return value of call STMT is known to be non-negative.
6585 If the return value is based on the assumption that signed overflow is
6586 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6587 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6588
6589 static bool
6590 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6591 int depth)
6592 {
6593 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6594 gimple_call_arg (stmt, 0) : NULL_TREE;
6595 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6596 gimple_call_arg (stmt, 1) : NULL_TREE;
6597
6598 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6599 gimple_call_combined_fn (stmt),
6600 arg0,
6601 arg1,
6602 strict_overflow_p, depth);
6603 }
6604
6605 /* Return true if return value of call STMT is known to be non-negative.
6606 If the return value is based on the assumption that signed overflow is
6607 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6608 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6609
6610 static bool
6611 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6612 int depth)
6613 {
6614 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6615 {
6616 tree arg = gimple_phi_arg_def (stmt, i);
6617 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6618 return false;
6619 }
6620 return true;
6621 }
6622
6623 /* Return true if STMT is known to compute a non-negative value.
6624 If the return value is based on the assumption that signed overflow is
6625 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6626 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6627
6628 bool
6629 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6630 int depth)
6631 {
6632 switch (gimple_code (stmt))
6633 {
6634 case GIMPLE_ASSIGN:
6635 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6636 depth);
6637 case GIMPLE_CALL:
6638 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6639 depth);
6640 case GIMPLE_PHI:
6641 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6642 depth);
6643 default:
6644 return false;
6645 }
6646 }
6647
6648 /* Return true if the floating-point value computed by assignment STMT
6649 is known to have an integer value. We also allow +Inf, -Inf and NaN
6650 to be considered integer values. Return false for signaling NaN.
6651
6652 DEPTH is the current nesting depth of the query. */
6653
6654 static bool
6655 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6656 {
6657 enum tree_code code = gimple_assign_rhs_code (stmt);
6658 switch (get_gimple_rhs_class (code))
6659 {
6660 case GIMPLE_UNARY_RHS:
6661 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6662 gimple_assign_rhs1 (stmt), depth);
6663 case GIMPLE_BINARY_RHS:
6664 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6665 gimple_assign_rhs1 (stmt),
6666 gimple_assign_rhs2 (stmt), depth);
6667 case GIMPLE_TERNARY_RHS:
6668 return false;
6669 case GIMPLE_SINGLE_RHS:
6670 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6671 case GIMPLE_INVALID_RHS:
6672 break;
6673 }
6674 gcc_unreachable ();
6675 }
6676
6677 /* Return true if the floating-point value computed by call STMT is known
6678 to have an integer value. We also allow +Inf, -Inf and NaN to be
6679 considered integer values. Return false for signaling NaN.
6680
6681 DEPTH is the current nesting depth of the query. */
6682
6683 static bool
6684 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6685 {
6686 tree arg0 = (gimple_call_num_args (stmt) > 0
6687 ? gimple_call_arg (stmt, 0)
6688 : NULL_TREE);
6689 tree arg1 = (gimple_call_num_args (stmt) > 1
6690 ? gimple_call_arg (stmt, 1)
6691 : NULL_TREE);
6692 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6693 arg0, arg1, depth);
6694 }
6695
6696 /* Return true if the floating-point result of phi STMT is known to have
6697 an integer value. We also allow +Inf, -Inf and NaN to be considered
6698 integer values. Return false for signaling NaN.
6699
6700 DEPTH is the current nesting depth of the query. */
6701
6702 static bool
6703 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6704 {
6705 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6706 {
6707 tree arg = gimple_phi_arg_def (stmt, i);
6708 if (!integer_valued_real_single_p (arg, depth + 1))
6709 return false;
6710 }
6711 return true;
6712 }
6713
6714 /* Return true if the floating-point value computed by STMT is known
6715 to have an integer value. We also allow +Inf, -Inf and NaN to be
6716 considered integer values. Return false for signaling NaN.
6717
6718 DEPTH is the current nesting depth of the query. */
6719
6720 bool
6721 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6722 {
6723 switch (gimple_code (stmt))
6724 {
6725 case GIMPLE_ASSIGN:
6726 return gimple_assign_integer_valued_real_p (stmt, depth);
6727 case GIMPLE_CALL:
6728 return gimple_call_integer_valued_real_p (stmt, depth);
6729 case GIMPLE_PHI:
6730 return gimple_phi_integer_valued_real_p (stmt, depth);
6731 default:
6732 return false;
6733 }
6734 }