Optimize strchr (s, 0) to s + strlen (s).
gcc/gimple-fold.c
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-low.h"
#include "ipa-chkp.h"
#include "tree-cfg.h"


/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation units.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies a need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is, however, not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public while
     the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

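/* Illustrative sketch only (names are hypothetical): a constructor
   value of the form (char *) &v + 4 has its POINTER_PLUS_EXPR rewritten
   below into the invariant address &MEM[(char *)&v + 4].  */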
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
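/* Illustrative sketch only: with ENDP == 1 the call
     p = mempcpy (d, s, n);
   can be rewritten into the equivalent of
     memcpy (d, s, n);
     p = d + n;  */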

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we
         copy pointers as a wide integer) and may also result in huge
         function size because of the inlined bounds copies.  Thus don't
         inline for functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if the data may contain pointers we can inline if we
             copy less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
                      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align)
                           && (optab_handler (movmisalign_optab,
                                              TYPE_MODE (type))
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem));
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              if (gimple_in_ssa_p (cfun))
                srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
              else
                srcvar = create_tmp_reg (TREE_TYPE (srcvar));
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */
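/* Illustrative sketch only: a 4-byte memset of a suitably typed and
   aligned destination such as
     memset (&u32, 0x42, 4);
   is turned into the single store
     u32 = 0x42424242;  */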

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be obtained, 1 for the maximum
   string length, and 2 for the maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  */
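/* Illustrative sketch only (types and names are hypothetical): with
   FUZZY set, for
     struct S { char a[4]; char b; } *s;
   a query on s->a with unknown contents falls back to the member array
   bound, so the maximum length is taken to be sizeof (s->a) - 1,
   i.e. 3.  */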

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  bool fuzzy)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0),
                                     length, visited, type, fuzzy);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy);

          if (TREE_CODE (arg) == COMPONENT_REF
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL.  */
              arg = TREE_OPERAND (arg, 1);
              val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Avoid using the array size as the minimum.  */
              minlen = NULL;
            }
        }

      if (!val)
        return false;

      if (minlen
          && (!*minlen
              || (type > 0
                  && TREE_CODE (*minlen) == INTEGER_CST
                  && TREE_CODE (val) == INTEGER_CST
                  && tree_int_cst_lt (val, *minlen))))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, length, visited, type, fuzzy);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_range_strlen (op2, length, visited, type, fuzzy)
                 && get_range_strlen (op3, length, visited, type, fuzzy);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_range_strlen (arg, length, visited, type, fuzzy))
              {
                if (fuzzy)
                  *maxlen = build_all_ones_cst (size_type_node);
                else
                  return false;
              }
          }
      }
      return true;

    default:
      return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.  */

void
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  get_range_strlen (arg, minmaxlen, &visited, 1, true);

  if (visited)
    BITMAP_FREE (visited);
}

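/* Return the maximum string length (or, for TYPE == 2, the maximum
   value) of ARG as determined by the non-fuzzy get_range_strlen walk
   above, or NULL_TREE if it cannot be determined.  */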
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };
  if (!get_range_strlen (arg, len, &visited, type, false))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}


/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */
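/* Illustrative sketch only: when the source length is known, e.g.
     strcpy (d, "abc");
   the call is rewritten into
     memcpy (d, "abc", 4);
   copying the terminating NUL as well.  */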

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */
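/* Illustrative sketch only: with a constant LEN no larger than the
   source length plus one, e.g.
     strncpy (d, "abc", 3);
   the call is rewritten into
     memcpy (d, "abc", 3);  */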

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify strchr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
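/* Illustrative sketch only: in GIMPLE the call
     q = strchr (s, 0);
   becomes the pair
     len = strlen (s);
     q = s + len;  */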
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);

  if (optimize_function_for_size_p (cfun))
    return false;

  if (!integer_zerop (c) || !gimple_call_lhs (stmt))
    return false;

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create len = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  if (gimple_in_ssa_p (cfun))
    len = make_ssa_name (size_type_node);
  else
    len = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise simplify
   the call in place and return true.

   The simplified form may be a constant or another expression which
   computes the same value, but in a more efficient manner (possibly
   using calls to other builtin functions).  */
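/* Illustrative sketch only: with a known source length,
     strcat (d, "ab");
   is split into
     tmp = strlen (d);
     memcpy (d + tmp, "ab", 3);
   copying the terminating NUL as well.  */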

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen (dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable, don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */
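/* Illustrative sketch only: when the bound is at least the source
   length, e.g.
     strncat (d, "ab", 5);
   the call behaves like and is rewritten into
     strcat (d, "ab");  */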

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */
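/* Illustrative sketch only: depending on the known length of the
   string argument,
     fputs ("", f);     // removed entirely
     fputs ("a", f);    // becomes fputc ('a', f)
     fputs ("abc", f);  // becomes fwrite ("abc", 1, 3, f)  */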

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl = gimple_build_call (fn_fputc, 2,
                                              build_int_cst
                                                (integer_type_node, p[0]),
                                              arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size, keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs (string, stream) to
           fwrite (string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
                                          size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Return true if the call was
   simplified.  */
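
/* For example, a call such as
     __builtin_memcpy_chk (d, s, 32, 64);
   can be folded to
     memcpy (d, s, 32);
   since LEN (32) never exceeds the object size SIZE (64).  (A sketch;
   D and S are hypothetical pointers used only for illustration.)  */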
1817
1818 static bool
1819 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1820 tree dest, tree src, tree len, tree size,
1821 enum built_in_function fcode)
1822 {
1823 gimple *stmt = gsi_stmt (*gsi);
1824 location_t loc = gimple_location (stmt);
1825 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1826 tree fn;
1827
1828 /* If SRC and DEST are the same (and not volatile), return DEST
1829 (resp. DEST+LEN for __mempcpy_chk). */
1830 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1831 {
1832 if (fcode != BUILT_IN_MEMPCPY_CHK)
1833 {
1834 replace_call_with_value (gsi, dest);
1835 return true;
1836 }
1837 else
1838 {
1839 gimple_seq stmts = NULL;
1840 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1841 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1842 TREE_TYPE (dest), dest, len);
1843 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1844 replace_call_with_value (gsi, temp);
1845 return true;
1846 }
1847 }
1848
1849 if (! tree_fits_uhwi_p (size))
1850 return false;
1851
1852 tree maxlen = get_maxval_strlen (len, 2);
1853 if (! integer_all_onesp (size))
1854 {
1855 if (! tree_fits_uhwi_p (len))
1856 {
1857 /* If LEN is not constant, try MAXLEN too.
1858 For MAXLEN only allow optimizing into non-_ocs function
1859 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1860 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1861 {
1862 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1863 {
1864 /* (void) __mempcpy_chk () can be optimized into
1865 (void) __memcpy_chk (). */
1866 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1867 if (!fn)
1868 return false;
1869
1870 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1871 replace_call_with_call_and_fold (gsi, repl);
1872 return true;
1873 }
1874 return false;
1875 }
1876 }
1877 else
1878 maxlen = len;
1879
1880 if (tree_int_cst_lt (size, maxlen))
1881 return false;
1882 }
1883
1884 fn = NULL_TREE;
1885 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1886 mem{cpy,pcpy,move,set} is available. */
1887 switch (fcode)
1888 {
1889 case BUILT_IN_MEMCPY_CHK:
1890 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1891 break;
1892 case BUILT_IN_MEMPCPY_CHK:
1893 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1894 break;
1895 case BUILT_IN_MEMMOVE_CHK:
1896 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1897 break;
1898 case BUILT_IN_MEMSET_CHK:
1899 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1900 break;
1901 default:
1902 break;
1903 }
1904
1905 if (!fn)
1906 return false;
1907
1908 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1909 replace_call_with_call_and_fold (gsi, repl);
1910 return true;
1911 }
1912
/* Fold a call to the __st{r,p}cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Return true if the call was
   simplified.  */
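
/* For example, with a known object size of 64 bytes, a call such as
     __builtin_strcpy_chk (d, "abc", 64);
   can be folded to
     strcpy (d, "abc");
   since strlen ("abc") is smaller than SIZE.  (A sketch; D is a
   hypothetical destination buffer used only for illustration.)  */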
1918
1919 static bool
1920 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1921 tree dest,
1922 tree src, tree size,
1923 enum built_in_function fcode)
1924 {
1925 gimple *stmt = gsi_stmt (*gsi);
1926 location_t loc = gimple_location (stmt);
1927 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1928 tree len, fn;
1929
1930 /* If SRC and DEST are the same (and not volatile), return DEST. */
1931 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1932 {
1933 replace_call_with_value (gsi, dest);
1934 return true;
1935 }
1936
1937 if (! tree_fits_uhwi_p (size))
1938 return false;
1939
1940 tree maxlen = get_maxval_strlen (src, 1);
1941 if (! integer_all_onesp (size))
1942 {
1943 len = c_strlen (src, 1);
1944 if (! len || ! tree_fits_uhwi_p (len))
1945 {
1946 /* If LEN is not constant, try MAXLEN too.
1947 For MAXLEN only allow optimizing into non-_ocs function
1948 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1949 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1950 {
1951 if (fcode == BUILT_IN_STPCPY_CHK)
1952 {
1953 if (! ignore)
1954 return false;
1955
1956 /* If return value of __stpcpy_chk is ignored,
1957 optimize into __strcpy_chk. */
1958 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1959 if (!fn)
1960 return false;
1961
1962 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1963 replace_call_with_call_and_fold (gsi, repl);
1964 return true;
1965 }
1966
1967 if (! len || TREE_SIDE_EFFECTS (len))
1968 return false;
1969
1970 /* If c_strlen returned something, but not a constant,
1971 transform __strcpy_chk into __memcpy_chk. */
1972 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1973 if (!fn)
1974 return false;
1975
1976 gimple_seq stmts = NULL;
1977 len = gimple_convert (&stmts, loc, size_type_node, len);
1978 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1979 build_int_cst (size_type_node, 1));
1980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1981 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1982 replace_call_with_call_and_fold (gsi, repl);
1983 return true;
1984 }
1985 }
1986 else
1987 maxlen = len;
1988
1989 if (! tree_int_cst_lt (maxlen, size))
1990 return false;
1991 }
1992
1993 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1994 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1995 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1996 if (!fn)
1997 return false;
1998
1999 gimple *repl = gimple_build_call (fn, 2, dest, src);
2000 replace_call_with_call_and_fold (gsi, repl);
2001 return true;
2002 }
2003
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Return true if the call was simplified.  */
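
/* For example, a call such as
     __builtin_strncpy_chk (d, s, 8, 16);
   can be folded to
     strncpy (d, s, 8);
   since LEN (8) never exceeds SIZE (16), and a __stpncpy_chk call whose
   result is unused can first be turned into __strncpy_chk.  (A sketch;
   D and S are hypothetical names used only for illustration.)  */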
2008
2009 static bool
2010 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2011 tree dest, tree src,
2012 tree len, tree size,
2013 enum built_in_function fcode)
2014 {
2015 gimple *stmt = gsi_stmt (*gsi);
2016 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2017 tree fn;
2018
2019 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2020 {
2021 /* If return value of __stpncpy_chk is ignored,
2022 optimize into __strncpy_chk. */
2023 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2024 if (fn)
2025 {
2026 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2027 replace_call_with_call_and_fold (gsi, repl);
2028 return true;
2029 }
2030 }
2031
2032 if (! tree_fits_uhwi_p (size))
2033 return false;
2034
2035 tree maxlen = get_maxval_strlen (len, 2);
2036 if (! integer_all_onesp (size))
2037 {
2038 if (! tree_fits_uhwi_p (len))
2039 {
2040 /* If LEN is not constant, try MAXLEN too.
2041 For MAXLEN only allow optimizing into non-_ocs function
2042 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2043 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2044 return false;
2045 }
2046 else
2047 maxlen = len;
2048
2049 if (tree_int_cst_lt (size, maxlen))
2050 return false;
2051 }
2052
2053 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2054 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2055 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2056 if (!fn)
2057 return false;
2058
2059 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2060 replace_call_with_call_and_fold (gsi, repl);
2061 return true;
2062 }
2063
/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */
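
/* For example, when the result is used and the source length is known,
     r = stpcpy (d, "abc");
   can be rewritten as
     memcpy (d, "abc", 4);
     r = d + 3;
   while a call whose result is unused is simply replaced by strcpy.
   (A sketch; D and R are hypothetical names used only for
   illustration.)  */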
2066
2067 static bool
2068 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2069 {
2070 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2071 location_t loc = gimple_location (stmt);
2072 tree dest = gimple_call_arg (stmt, 0);
2073 tree src = gimple_call_arg (stmt, 1);
2074 tree fn, len, lenp1;
2075
2076 /* If the result is unused, replace stpcpy with strcpy. */
2077 if (gimple_call_lhs (stmt) == NULL_TREE)
2078 {
2079 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2080 if (!fn)
2081 return false;
2082 gimple_call_set_fndecl (stmt, fn);
2083 fold_stmt (gsi);
2084 return true;
2085 }
2086
2087 len = c_strlen (src, 1);
2088 if (!len
2089 || TREE_CODE (len) != INTEGER_CST)
2090 return false;
2091
2092 if (optimize_function_for_size_p (cfun)
2093 /* If length is zero it's small enough. */
2094 && !integer_zerop (len))
2095 return false;
2096
2097 /* If the source has a known length replace stpcpy with memcpy. */
2098 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2099 if (!fn)
2100 return false;
2101
2102 gimple_seq stmts = NULL;
2103 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2104 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2105 tem, build_int_cst (size_type_node, 1));
2106 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2107 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2108 gimple_set_vuse (repl, gimple_vuse (stmt));
2109 gimple_set_vdef (repl, gimple_vdef (stmt));
2110 if (gimple_vdef (repl)
2111 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2112 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2113 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2114 /* Replace the result with dest + len. */
2115 stmts = NULL;
2116 tem = gimple_convert (&stmts, loc, sizetype, len);
2117 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2118 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2119 POINTER_PLUS_EXPR, dest, tem);
2120 gsi_replace (gsi, ret, false);
2121 /* Finally fold the memcpy call. */
2122 gimple_stmt_iterator gsi2 = *gsi;
2123 gsi_prev (&gsi2);
2124 fold_stmt (&gsi2);
2125 return true;
2126 }
2127
/* Fold a call to __{,v}snprintf_chk.  Return false if a normal call
   should be emitted rather than simplifying the call.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */
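
/* For example, a call such as
     __builtin_snprintf_chk (d, 32, 0, 64, "%d", x);
   can be folded to
     snprintf (d, 32, "%d", x);
   since the known LEN (32) never exceeds the object size (64).
   (A sketch; D and X are hypothetical names used only for
   illustration.)  */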
2133
2134 static bool
2135 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2136 enum built_in_function fcode)
2137 {
2138 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2139 tree dest, size, len, fn, fmt, flag;
2140 const char *fmt_str;
2141
2142 /* Verify the required arguments in the original call. */
2143 if (gimple_call_num_args (stmt) < 5)
2144 return false;
2145
2146 dest = gimple_call_arg (stmt, 0);
2147 len = gimple_call_arg (stmt, 1);
2148 flag = gimple_call_arg (stmt, 2);
2149 size = gimple_call_arg (stmt, 3);
2150 fmt = gimple_call_arg (stmt, 4);
2151
2152 if (! tree_fits_uhwi_p (size))
2153 return false;
2154
2155 if (! integer_all_onesp (size))
2156 {
2157 tree maxlen = get_maxval_strlen (len, 2);
2158 if (! tree_fits_uhwi_p (len))
2159 {
2160 /* If LEN is not constant, try MAXLEN too.
2161 For MAXLEN only allow optimizing into non-_ocs function
2162 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2163 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2164 return false;
2165 }
2166 else
2167 maxlen = len;
2168
2169 if (tree_int_cst_lt (size, maxlen))
2170 return false;
2171 }
2172
2173 if (!init_target_chars ())
2174 return false;
2175
2176 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2177 or if format doesn't contain % chars or is "%s". */
2178 if (! integer_zerop (flag))
2179 {
2180 fmt_str = c_getstr (fmt);
2181 if (fmt_str == NULL)
2182 return false;
2183 if (strchr (fmt_str, target_percent) != NULL
2184 && strcmp (fmt_str, target_percent_s))
2185 return false;
2186 }
2187
2188 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2189 available. */
2190 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2191 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2192 if (!fn)
2193 return false;
2194
  /* Replace the called function and the first 5 arguments by 3,
     retaining the trailing varargs.  */
2197 gimple_call_set_fndecl (stmt, fn);
2198 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2199 gimple_call_set_arg (stmt, 0, dest);
2200 gimple_call_set_arg (stmt, 1, len);
2201 gimple_call_set_arg (stmt, 2, fmt);
2202 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2203 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2204 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2205 fold_stmt (gsi);
2206 return true;
2207 }
2208
/* Fold a call to __{,v}sprintf_chk.  Return false if a normal call
   should be emitted rather than simplifying the call.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
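
/* For example, a call such as
     __builtin_sprintf_chk (d, 0, 64, "abc");
   can be folded to
     sprintf (d, "abc");
   since strlen ("abc") is smaller than the object size (64).
   (A sketch; D is a hypothetical buffer used only for illustration.)  */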
2213
2214 static bool
2215 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2216 enum built_in_function fcode)
2217 {
2218 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2219 tree dest, size, len, fn, fmt, flag;
2220 const char *fmt_str;
2221 unsigned nargs = gimple_call_num_args (stmt);
2222
2223 /* Verify the required arguments in the original call. */
2224 if (nargs < 4)
2225 return false;
2226 dest = gimple_call_arg (stmt, 0);
2227 flag = gimple_call_arg (stmt, 1);
2228 size = gimple_call_arg (stmt, 2);
2229 fmt = gimple_call_arg (stmt, 3);
2230
2231 if (! tree_fits_uhwi_p (size))
2232 return false;
2233
2234 len = NULL_TREE;
2235
2236 if (!init_target_chars ())
2237 return false;
2238
2239 /* Check whether the format is a literal string constant. */
2240 fmt_str = c_getstr (fmt);
2241 if (fmt_str != NULL)
2242 {
2243 /* If the format doesn't contain % args or %%, we know the size. */
2244 if (strchr (fmt_str, target_percent) == 0)
2245 {
2246 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2247 len = build_int_cstu (size_type_node, strlen (fmt_str));
2248 }
2249 /* If the format is "%s" and first ... argument is a string literal,
2250 we know the size too. */
2251 else if (fcode == BUILT_IN_SPRINTF_CHK
2252 && strcmp (fmt_str, target_percent_s) == 0)
2253 {
2254 tree arg;
2255
2256 if (nargs == 5)
2257 {
2258 arg = gimple_call_arg (stmt, 4);
2259 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2260 {
2261 len = c_strlen (arg, 1);
2262 if (! len || ! tree_fits_uhwi_p (len))
2263 len = NULL_TREE;
2264 }
2265 }
2266 }
2267 }
2268
2269 if (! integer_all_onesp (size))
2270 {
2271 if (! len || ! tree_int_cst_lt (len, size))
2272 return false;
2273 }
2274
2275 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2276 or if format doesn't contain % chars or is "%s". */
2277 if (! integer_zerop (flag))
2278 {
2279 if (fmt_str == NULL)
2280 return false;
2281 if (strchr (fmt_str, target_percent) != NULL
2282 && strcmp (fmt_str, target_percent_s))
2283 return false;
2284 }
2285
2286 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2287 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2288 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2289 if (!fn)
2290 return false;
2291
  /* Replace the called function and the first 4 arguments by 2,
     retaining the trailing varargs.  */
2294 gimple_call_set_fndecl (stmt, fn);
2295 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2296 gimple_call_set_arg (stmt, 0, dest);
2297 gimple_call_set_arg (stmt, 1, fmt);
2298 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2299 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2300 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2301 fold_stmt (gsi);
2302 return true;
2303 }
2304
2305 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2306 ORIG may be null if this is a 2-argument call. We don't attempt to
2307 simplify calls with more than 3 arguments.
2308
   Return true if simplification was possible, otherwise false.  */
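
/* For example,
     sprintf (d, "hello");
   can become
     strcpy (d, "hello");
   with any used result replaced by the constant 5, and
     sprintf (d, "%s", s);
   can become
     strcpy (d, s);
   when the result is unused or the length of S is known.  (A sketch;
   D and S are hypothetical names used only for illustration.)  */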
2312
2313 static bool
2314 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2315 {
2316 gimple *stmt = gsi_stmt (*gsi);
2317 tree dest = gimple_call_arg (stmt, 0);
2318 tree fmt = gimple_call_arg (stmt, 1);
2319 tree orig = NULL_TREE;
2320 const char *fmt_str = NULL;
2321
2322 /* Verify the required arguments in the original call. We deal with two
2323 types of sprintf() calls: 'sprintf (str, fmt)' and
2324 'sprintf (dest, "%s", orig)'. */
2325 if (gimple_call_num_args (stmt) > 3)
2326 return false;
2327
2328 if (gimple_call_num_args (stmt) == 3)
2329 orig = gimple_call_arg (stmt, 2);
2330
2331 /* Check whether the format is a literal string constant. */
2332 fmt_str = c_getstr (fmt);
2333 if (fmt_str == NULL)
2334 return false;
2335
2336 if (!init_target_chars ())
2337 return false;
2338
2339 /* If the format doesn't contain % args or %%, use strcpy. */
2340 if (strchr (fmt_str, target_percent) == NULL)
2341 {
2342 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2343
2344 if (!fn)
2345 return false;
2346
2347 /* Don't optimize sprintf (buf, "abc", ptr++). */
2348 if (orig)
2349 return false;
2350
2351 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2352 'format' is known to contain no % formats. */
2353 gimple_seq stmts = NULL;
2354 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2355 gimple_seq_add_stmt_without_update (&stmts, repl);
2356 if (gimple_call_lhs (stmt))
2357 {
2358 repl = gimple_build_assign (gimple_call_lhs (stmt),
2359 build_int_cst (integer_type_node,
2360 strlen (fmt_str)));
2361 gimple_seq_add_stmt_without_update (&stmts, repl);
2362 gsi_replace_with_seq_vops (gsi, stmts);
2363 /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
2365 ??? We can't use gsi_for_stmt as that doesn't work when the
2366 CFG isn't built yet. */
2367 gimple_stmt_iterator gsi2 = *gsi;
2368 gsi_prev (&gsi2);
2369 fold_stmt (&gsi2);
2370 }
2371 else
2372 {
2373 gsi_replace_with_seq_vops (gsi, stmts);
2374 fold_stmt (gsi);
2375 }
2376 return true;
2377 }
2378
  /* If the format is "%s", use strcpy if the result isn't used or the
     length of ORIG is known.  */
2380 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2381 {
2382 tree fn;
2383 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2384
2385 if (!fn)
2386 return false;
2387
2388 /* Don't crash on sprintf (str1, "%s"). */
2389 if (!orig)
2390 return false;
2391
2392 tree orig_len = NULL_TREE;
2393 if (gimple_call_lhs (stmt))
2394 {
2395 orig_len = get_maxval_strlen (orig, 0);
2396 if (!orig_len)
2397 return false;
2398 }
2399
2400 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2401 gimple_seq stmts = NULL;
2402 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2403 gimple_seq_add_stmt_without_update (&stmts, repl);
2404 if (gimple_call_lhs (stmt))
2405 {
2406 if (!useless_type_conversion_p (integer_type_node,
2407 TREE_TYPE (orig_len)))
2408 orig_len = fold_convert (integer_type_node, orig_len);
2409 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2410 gimple_seq_add_stmt_without_update (&stmts, repl);
2411 gsi_replace_with_seq_vops (gsi, stmts);
2412 /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
2414 ??? We can't use gsi_for_stmt as that doesn't work when the
2415 CFG isn't built yet. */
2416 gimple_stmt_iterator gsi2 = *gsi;
2417 gsi_prev (&gsi2);
2418 fold_stmt (&gsi2);
2419 }
2420 else
2421 {
2422 gsi_replace_with_seq_vops (gsi, stmts);
2423 fold_stmt (gsi);
2424 }
2425 return true;
2426 }
2427 return false;
2428 }
2429
2430 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2431 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2432 attempt to simplify calls with more than 4 arguments.
2433
   Return true if simplification was possible, otherwise false.  */
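
/* For example, when the length of S is known to be smaller than 32,
     snprintf (d, 32, "%s", s);
   can become
     strcpy (d, s);
   with any used result replaced by that known length.  (A sketch; D
   and S are hypothetical names used only for illustration.)  */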
2437
2438 static bool
2439 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2440 {
2441 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2442 tree dest = gimple_call_arg (stmt, 0);
2443 tree destsize = gimple_call_arg (stmt, 1);
2444 tree fmt = gimple_call_arg (stmt, 2);
2445 tree orig = NULL_TREE;
2446 const char *fmt_str = NULL;
2447
2448 if (gimple_call_num_args (stmt) > 4)
2449 return false;
2450
2451 if (gimple_call_num_args (stmt) == 4)
2452 orig = gimple_call_arg (stmt, 3);
2453
2454 if (!tree_fits_uhwi_p (destsize))
2455 return false;
2456 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2457
2458 /* Check whether the format is a literal string constant. */
2459 fmt_str = c_getstr (fmt);
2460 if (fmt_str == NULL)
2461 return false;
2462
2463 if (!init_target_chars ())
2464 return false;
2465
2466 /* If the format doesn't contain % args or %%, use strcpy. */
2467 if (strchr (fmt_str, target_percent) == NULL)
2468 {
2469 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2470 if (!fn)
2471 return false;
2472
2473 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2474 if (orig)
2475 return false;
2476
2477 /* We could expand this as
2478 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2479 or to
2480 memcpy (str, fmt_with_nul_at_cstm1, cst);
2481 but in the former case that might increase code size
2482 and in the latter case grow .rodata section too much.
2483 So punt for now. */
2484 size_t len = strlen (fmt_str);
2485 if (len >= destlen)
2486 return false;
2487
2488 gimple_seq stmts = NULL;
2489 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2490 gimple_seq_add_stmt_without_update (&stmts, repl);
2491 if (gimple_call_lhs (stmt))
2492 {
2493 repl = gimple_build_assign (gimple_call_lhs (stmt),
2494 build_int_cst (integer_type_node, len));
2495 gimple_seq_add_stmt_without_update (&stmts, repl);
2496 gsi_replace_with_seq_vops (gsi, stmts);
2497 /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
2499 ??? We can't use gsi_for_stmt as that doesn't work when the
2500 CFG isn't built yet. */
2501 gimple_stmt_iterator gsi2 = *gsi;
2502 gsi_prev (&gsi2);
2503 fold_stmt (&gsi2);
2504 }
2505 else
2506 {
2507 gsi_replace_with_seq_vops (gsi, stmts);
2508 fold_stmt (gsi);
2509 }
2510 return true;
2511 }
2512
  /* If the format is "%s", use strcpy if the length of ORIG is known
     to be smaller than DESTSIZE.  */
2514 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2515 {
2516 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2517 if (!fn)
2518 return false;
2519
2520 /* Don't crash on snprintf (str1, cst, "%s"). */
2521 if (!orig)
2522 return false;
2523
2524 tree orig_len = get_maxval_strlen (orig, 0);
2525 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2526 return false;
2527
2528 /* We could expand this as
2529 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2530 or to
2531 memcpy (str1, str2_with_nul_at_cstm1, cst);
2532 but in the former case that might increase code size
2533 and in the latter case grow .rodata section too much.
2534 So punt for now. */
2535 if (compare_tree_int (orig_len, destlen) >= 0)
2536 return false;
2537
2538 /* Convert snprintf (str1, cst, "%s", str2) into
2539 strcpy (str1, str2) if strlen (str2) < cst. */
2540 gimple_seq stmts = NULL;
2541 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2542 gimple_seq_add_stmt_without_update (&stmts, repl);
2543 if (gimple_call_lhs (stmt))
2544 {
2545 if (!useless_type_conversion_p (integer_type_node,
2546 TREE_TYPE (orig_len)))
2547 orig_len = fold_convert (integer_type_node, orig_len);
2548 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2549 gimple_seq_add_stmt_without_update (&stmts, repl);
2550 gsi_replace_with_seq_vops (gsi, stmts);
2551 /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
2553 ??? We can't use gsi_for_stmt as that doesn't work when the
2554 CFG isn't built yet. */
2555 gimple_stmt_iterator gsi2 = *gsi;
2556 gsi_prev (&gsi2);
2557 fold_stmt (&gsi2);
2558 }
2559 else
2560 {
2561 gsi_replace_with_seq_vops (gsi, stmts);
2562 fold_stmt (gsi);
2563 }
2564 return true;
2565 }
2566 return false;
2567 }
2568
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins.  FP, FMT, and ARG are the arguments to the call.  We don't
   fold calls with more than 3 arguments, and ARG may be null in the
   2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */
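
/* For example, when the result is unused,
     fprintf (fp, "abc");
   can become
     fputs ("abc", fp);
   while
     fprintf (fp, "%s", s);
   can become
     fputs (s, fp);
   and
     fprintf (fp, "%c", c);
   can become
     fputc (c, fp);
   (a sketch; the variable names are hypothetical).  */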
2576
2577 static bool
2578 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2579 tree fp, tree fmt, tree arg,
2580 enum built_in_function fcode)
2581 {
2582 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2583 tree fn_fputc, fn_fputs;
2584 const char *fmt_str = NULL;
2585
2586 /* If the return value is used, don't do the transformation. */
2587 if (gimple_call_lhs (stmt) != NULL_TREE)
2588 return false;
2589
2590 /* Check whether the format is a literal string constant. */
2591 fmt_str = c_getstr (fmt);
2592 if (fmt_str == NULL)
2593 return false;
2594
2595 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2596 {
2597 /* If we're using an unlocked function, assume the other
2598 unlocked functions exist explicitly. */
2599 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2600 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2601 }
2602 else
2603 {
2604 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2605 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2606 }
2607
2608 if (!init_target_chars ())
2609 return false;
2610
  /* If the format doesn't contain % args or %%, use fputs.  */
2612 if (strchr (fmt_str, target_percent) == NULL)
2613 {
2614 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2615 && arg)
2616 return false;
2617
2618 /* If the format specifier was "", fprintf does nothing. */
2619 if (fmt_str[0] == '\0')
2620 {
2621 replace_call_with_value (gsi, NULL_TREE);
2622 return true;
2623 }
2624
2625 /* When "string" doesn't contain %, replace all cases of
2626 fprintf (fp, string) with fputs (string, fp). The fputs
2627 builtin will take care of special cases like length == 1. */
2628 if (fn_fputs)
2629 {
2630 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2631 replace_call_with_call_and_fold (gsi, repl);
2632 return true;
2633 }
2634 }
2635
2636 /* The other optimizations can be done only on the non-va_list variants. */
2637 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2638 return false;
2639
2640 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2641 else if (strcmp (fmt_str, target_percent_s) == 0)
2642 {
2643 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2644 return false;
2645 if (fn_fputs)
2646 {
2647 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2648 replace_call_with_call_and_fold (gsi, repl);
2649 return true;
2650 }
2651 }
2652
2653 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2654 else if (strcmp (fmt_str, target_percent_c) == 0)
2655 {
2656 if (!arg
2657 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2658 return false;
2659 if (fn_fputc)
2660 {
2661 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2662 replace_call_with_call_and_fold (gsi, repl);
2663 return true;
2664 }
2665 }
2666
2667 return false;
2668 }
2669
2670 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2671 FMT and ARG are the arguments to the call; we don't fold cases with
2672 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2673
   Return true if simplification was possible, otherwise false.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */
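
/* For example, when the result is unused,
     printf ("x");
   can become
     putchar ('x');
   while
     printf ("hello\n");
   can become
     puts ("hello");
   and
     printf ("%s\n", s);
   can become
     puts (s);
   (a sketch; S is a hypothetical string used only for illustration).  */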
2677
2678 static bool
2679 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2680 tree arg, enum built_in_function fcode)
2681 {
2682 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2683 tree fn_putchar, fn_puts, newarg;
2684 const char *fmt_str = NULL;
2685
2686 /* If the return value is used, don't do the transformation. */
2687 if (gimple_call_lhs (stmt) != NULL_TREE)
2688 return false;
2689
2690 /* Check whether the format is a literal string constant. */
2691 fmt_str = c_getstr (fmt);
2692 if (fmt_str == NULL)
2693 return false;
2694
2695 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2696 {
2697 /* If we're using an unlocked function, assume the other
2698 unlocked functions exist explicitly. */
2699 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2700 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2701 }
2702 else
2703 {
2704 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2705 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2706 }
2707
2708 if (!init_target_chars ())
2709 return false;
2710
2711 if (strcmp (fmt_str, target_percent_s) == 0
2712 || strchr (fmt_str, target_percent) == NULL)
2713 {
2714 const char *str;
2715
2716 if (strcmp (fmt_str, target_percent_s) == 0)
2717 {
2718 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2719 return false;
2720
2721 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2722 return false;
2723
2724 str = c_getstr (arg);
2725 if (str == NULL)
2726 return false;
2727 }
2728 else
2729 {
2730 /* The format specifier doesn't contain any '%' characters. */
2731 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2732 && arg)
2733 return false;
2734 str = fmt_str;
2735 }
2736
2737 /* If the string was "", printf does nothing. */
2738 if (str[0] == '\0')
2739 {
2740 replace_call_with_value (gsi, NULL_TREE);
2741 return true;
2742 }
2743
      /* If the string has length 1, call putchar.  */
2745 if (str[1] == '\0')
2746 {
	  /* Given printf ("c"), where c is any single character,
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
2750 newarg = build_int_cst (integer_type_node, str[0]);
2751 if (fn_putchar)
2752 {
2753 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2754 replace_call_with_call_and_fold (gsi, repl);
2755 return true;
2756 }
2757 }
2758 else
2759 {
2760 /* If the string was "string\n", call puts("string"). */
2761 size_t len = strlen (str);
2762 if ((unsigned char)str[len - 1] == target_newline
2763 && (size_t) (int) len == len
2764 && (int) len > 0)
2765 {
2766 char *newstr;
2767 tree offset_node, string_cst;
2768
2769 /* Create a NUL-terminated string that's one char shorter
2770 than the original, stripping off the trailing '\n'. */
2771 newarg = build_string_literal (len, str);
2772 string_cst = string_constant (newarg, &offset_node);
2773 gcc_checking_assert (string_cst
2774 && (TREE_STRING_LENGTH (string_cst)
2775 == (int) len)
2776 && integer_zerop (offset_node)
2777 && (unsigned char)
2778 TREE_STRING_POINTER (string_cst)[len - 1]
2779 == target_newline);
2780 /* build_string_literal creates a new STRING_CST,
2781 modify it in place to avoid double copying. */
2782 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2783 newstr[len - 1] = '\0';
2784 if (fn_puts)
2785 {
2786 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2787 replace_call_with_call_and_fold (gsi, repl);
2788 return true;
2789 }
2790 }
2791 else
2792 /* We'd like to arrange to call fputs(string,stdout) here,
2793 but we need stdout and don't have a way to get it yet. */
2794 return false;
2795 }
2796 }
2797
2798 /* The other optimizations can be done only on the non-va_list variants. */
2799 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2800 return false;
2801
2802 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2803 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2804 {
2805 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2806 return false;
2807 if (fn_puts)
2808 {
2809 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2810 replace_call_with_call_and_fold (gsi, repl);
2811 return true;
2812 }
2813 }
2814
2815 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2816 else if (strcmp (fmt_str, target_percent_c) == 0)
2817 {
2818 if (!arg || ! useless_type_conversion_p (integer_type_node,
2819 TREE_TYPE (arg)))
2820 return false;
2821 if (fn_putchar)
2822 {
2823 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2824 replace_call_with_call_and_fold (gsi, repl);
2825 return true;
2826 }
2827 }
2828
2829 return false;
2830 }
2831
2832
2833
/* Fold a call to __builtin_strlen when the length of the argument is
   known.  */
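
/* For example, if P is known to point to the constant string "abc",
     n = __builtin_strlen (p);
   can be folded to
     n = 3;
   (a sketch; P and N are hypothetical names used only for
   illustration).  */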
2835
2836 static bool
2837 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2838 {
2839 gimple *stmt = gsi_stmt (*gsi);
2840 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2841 if (!len)
2842 return false;
2843 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2844 replace_call_with_value (gsi, len);
2845 return true;
2846 }
2847
2848 /* Fold a call to __builtin_acc_on_device. */
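
/* A sketch of the expansion below, with D a hypothetical argument:
     r = __builtin_acc_on_device (d);
   becomes
     _1 = d == <host device number>;
     _2 = d == <offload device number>;
     r = _1 | _2;
   where the two constants compared against depend on whether this is
   the host or the accelerator compiler.  */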
2849
2850 static bool
2851 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2852 {
2853 /* Defer folding until we know which compiler we're in. */
2854 if (symtab->state != EXPANSION)
2855 return false;
2856
2857 unsigned val_host = GOMP_DEVICE_HOST;
2858 unsigned val_dev = GOMP_DEVICE_NONE;
2859
2860 #ifdef ACCEL_COMPILER
2861 val_host = GOMP_DEVICE_NOT_HOST;
2862 val_dev = ACCEL_COMPILER_acc_device;
2863 #endif
2864
2865 location_t loc = gimple_location (gsi_stmt (*gsi));
2866
2867 tree host_eq = make_ssa_name (boolean_type_node);
2868 gimple *host_ass = gimple_build_assign
2869 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2870 gimple_set_location (host_ass, loc);
2871 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2872
2873 tree dev_eq = make_ssa_name (boolean_type_node);
2874 gimple *dev_ass = gimple_build_assign
2875 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2876 gimple_set_location (dev_ass, loc);
2877 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2878
2879 tree result = make_ssa_name (boolean_type_node);
2880 gimple *result_ass = gimple_build_assign
2881 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2882 gimple_set_location (result_ass, loc);
2883 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2884
2885 replace_call_with_value (gsi, result);
2886
2887 return true;
2888 }
2889
2890 /* Fold the non-target builtin at *GSI and return whether any simplification
2891 was made. */
2892
2893 static bool
2894 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2895 {
2896 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2897 tree callee = gimple_call_fndecl (stmt);
2898
2899 /* Give up for always_inline inline builtins until they are
2900 inlined. */
2901 if (avoid_folding_inline_builtin (callee))
2902 return false;
2903
2904 unsigned n = gimple_call_num_args (stmt);
2905 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2906 switch (fcode)
2907 {
2908 case BUILT_IN_BZERO:
2909 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2910 gimple_call_arg (stmt, 1));
2911 case BUILT_IN_MEMSET:
2912 return gimple_fold_builtin_memset (gsi,
2913 gimple_call_arg (stmt, 1),
2914 gimple_call_arg (stmt, 2));
2915 case BUILT_IN_BCOPY:
2916 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2917 gimple_call_arg (stmt, 0), 3);
2918 case BUILT_IN_MEMCPY:
2919 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2920 gimple_call_arg (stmt, 1), 0);
2921 case BUILT_IN_MEMPCPY:
2922 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2923 gimple_call_arg (stmt, 1), 1);
2924 case BUILT_IN_MEMMOVE:
2925 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2926 gimple_call_arg (stmt, 1), 3);
2927 case BUILT_IN_SPRINTF_CHK:
2928 case BUILT_IN_VSPRINTF_CHK:
2929 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2930 case BUILT_IN_STRCAT_CHK:
2931 return gimple_fold_builtin_strcat_chk (gsi);
2932 case BUILT_IN_STRNCAT_CHK:
2933 return gimple_fold_builtin_strncat_chk (gsi);
2934 case BUILT_IN_STRLEN:
2935 return gimple_fold_builtin_strlen (gsi);
2936 case BUILT_IN_STRCPY:
2937 return gimple_fold_builtin_strcpy (gsi,
2938 gimple_call_arg (stmt, 0),
2939 gimple_call_arg (stmt, 1));
2940 case BUILT_IN_STRNCPY:
2941 return gimple_fold_builtin_strncpy (gsi,
2942 gimple_call_arg (stmt, 0),
2943 gimple_call_arg (stmt, 1),
2944 gimple_call_arg (stmt, 2));
2945 case BUILT_IN_STRCAT:
2946 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2947 gimple_call_arg (stmt, 1));
2948 case BUILT_IN_STRNCAT:
2949 return gimple_fold_builtin_strncat (gsi);
2950 case BUILT_IN_STRCHR:
2951 return gimple_fold_builtin_strchr (gsi);
2952 case BUILT_IN_FPUTS:
2953 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2954 gimple_call_arg (stmt, 1), false);
2955 case BUILT_IN_FPUTS_UNLOCKED:
2956 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2957 gimple_call_arg (stmt, 1), true);
2958 case BUILT_IN_MEMCPY_CHK:
2959 case BUILT_IN_MEMPCPY_CHK:
2960 case BUILT_IN_MEMMOVE_CHK:
2961 case BUILT_IN_MEMSET_CHK:
2962 return gimple_fold_builtin_memory_chk (gsi,
2963 gimple_call_arg (stmt, 0),
2964 gimple_call_arg (stmt, 1),
2965 gimple_call_arg (stmt, 2),
2966 gimple_call_arg (stmt, 3),
2967 fcode);
2968 case BUILT_IN_STPCPY:
2969 return gimple_fold_builtin_stpcpy (gsi);
2970 case BUILT_IN_STRCPY_CHK:
2971 case BUILT_IN_STPCPY_CHK:
2972 return gimple_fold_builtin_stxcpy_chk (gsi,
2973 gimple_call_arg (stmt, 0),
2974 gimple_call_arg (stmt, 1),
2975 gimple_call_arg (stmt, 2),
2976 fcode);
2977 case BUILT_IN_STRNCPY_CHK:
2978 case BUILT_IN_STPNCPY_CHK:
2979 return gimple_fold_builtin_stxncpy_chk (gsi,
2980 gimple_call_arg (stmt, 0),
2981 gimple_call_arg (stmt, 1),
2982 gimple_call_arg (stmt, 2),
2983 gimple_call_arg (stmt, 3),
2984 fcode);
2985 case BUILT_IN_SNPRINTF_CHK:
2986 case BUILT_IN_VSNPRINTF_CHK:
2987 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2988 case BUILT_IN_SNPRINTF:
2989 return gimple_fold_builtin_snprintf (gsi);
2990 case BUILT_IN_SPRINTF:
2991 return gimple_fold_builtin_sprintf (gsi);
2992 case BUILT_IN_FPRINTF:
2993 case BUILT_IN_FPRINTF_UNLOCKED:
2994 case BUILT_IN_VFPRINTF:
2995 if (n == 2 || n == 3)
2996 return gimple_fold_builtin_fprintf (gsi,
2997 gimple_call_arg (stmt, 0),
2998 gimple_call_arg (stmt, 1),
2999 n == 3
3000 ? gimple_call_arg (stmt, 2)
3001 : NULL_TREE,
3002 fcode);
3003 break;
3004 case BUILT_IN_FPRINTF_CHK:
3005 case BUILT_IN_VFPRINTF_CHK:
3006 if (n == 3 || n == 4)
3007 return gimple_fold_builtin_fprintf (gsi,
3008 gimple_call_arg (stmt, 0),
3009 gimple_call_arg (stmt, 2),
3010 n == 4
3011 ? gimple_call_arg (stmt, 3)
3012 : NULL_TREE,
3013 fcode);
3014 break;
3015 case BUILT_IN_PRINTF:
3016 case BUILT_IN_PRINTF_UNLOCKED:
3017 case BUILT_IN_VPRINTF:
3018 if (n == 1 || n == 2)
3019 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3020 n == 2
3021 ? gimple_call_arg (stmt, 1)
3022 : NULL_TREE, fcode);
3023 break;
3024 case BUILT_IN_PRINTF_CHK:
3025 case BUILT_IN_VPRINTF_CHK:
3026 if (n == 2 || n == 3)
3027 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3028 n == 3
3029 ? gimple_call_arg (stmt, 2)
3030 : NULL_TREE, fcode);
3031 break;
3032 case BUILT_IN_ACC_ON_DEVICE:
3033 return gimple_fold_builtin_acc_on_device (gsi,
3034 gimple_call_arg (stmt, 0));
3035 default:;
3036 }
3037
3038 /* Try the generic builtin folder. */
3039 bool ignore = (gimple_call_lhs (stmt) == NULL);
3040 tree result = fold_call_stmt (stmt, ignore);
3041 if (result)
3042 {
3043 if (ignore)
3044 STRIP_NOPS (result);
3045 else
3046 result = fold_convert (gimple_call_return_type (stmt), result);
3047 if (!update_call_from_tree (gsi, result))
3048 gimplify_and_update_call_from_tree (gsi, result);
3049 return true;
3050 }
3051
3052 return false;
3053 }
3054
3055 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3056 function calls to constants, where possible. */
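
/* For example, if the compile-time size of an axis (say, the gang
   dimension) is known to be 32, IFN_GOACC_DIM_SIZE for that axis folds
   to the constant 32, and if the size is 1, IFN_GOACC_DIM_POS folds to
   0, the only possible position.  */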
3057
3058 static tree
3059 fold_internal_goacc_dim (const gimple *call)
3060 {
3061 int axis = get_oacc_ifn_dim_arg (call);
3062 int size = get_oacc_fn_dim_size (current_function_decl, axis);
3063 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3064 tree result = NULL_TREE;
3065
3066 /* If the size is 1, or we only want the size and it is not dynamic,
3067 we know the answer. */
3068 if (size == 1 || (!is_pos && size))
3069 {
3070 tree type = TREE_TYPE (gimple_call_lhs (call));
3071 result = build_int_cst (type, size - is_pos);
3072 }
3073
3074 return result;
3075 }
3076
/* Return true if STMT is an __atomic_compare_exchange_N call which is
   suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
   argument is &var and var is only addressable because of such calls.  */
3080
3081 bool
3082 optimize_atomic_compare_exchange_p (gimple *stmt)
3083 {
3084 if (gimple_call_num_args (stmt) != 6
3085 || !flag_inline_atomics
3086 || !optimize
3087 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3088 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3089 || !gimple_vdef (stmt)
3090 || !gimple_vuse (stmt))
3091 return false;
3092
3093 tree fndecl = gimple_call_fndecl (stmt);
3094 switch (DECL_FUNCTION_CODE (fndecl))
3095 {
3096 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3097 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3098 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3099 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3100 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3101 break;
3102 default:
3103 return false;
3104 }
3105
3106 tree expected = gimple_call_arg (stmt, 1);
3107 if (TREE_CODE (expected) != ADDR_EXPR
3108 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3109 return false;
3110
3111 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3112 if (!is_gimple_reg_type (etype)
3113 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3114 || TREE_THIS_VOLATILE (etype)
3115 || VECTOR_TYPE_P (etype)
3116 || TREE_CODE (etype) == COMPLEX_TYPE
	 /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
	    might not preserve all the bits.  See PR71716.  */
3119 || SCALAR_FLOAT_TYPE_P (etype)
3120 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3121 return false;
3122
3123 tree weak = gimple_call_arg (stmt, 3);
3124 if (!integer_zerop (weak) && !integer_onep (weak))
3125 return false;
3126
3127 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3128 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3129 machine_mode mode = TYPE_MODE (itype);
3130
3131 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3132 == CODE_FOR_nothing
3133 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3134 return false;
3135
3136 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3137 return false;
3138
3139 return true;
3140 }
3141
3142 /* Fold
3143 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3144 into
3145 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3146 i = IMAGPART_EXPR <t>;
3147 r = (_Bool) i;
3148 e = REALPART_EXPR <t>; */
3149
3150 void
3151 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3152 {
3153 gimple *stmt = gsi_stmt (*gsi);
3154 tree fndecl = gimple_call_fndecl (stmt);
3155 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3156 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3157 tree ctype = build_complex_type (itype);
3158 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3159 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3160 expected);
3161 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3162 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3163 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3164 {
3165 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3166 build1 (VIEW_CONVERT_EXPR, itype,
3167 gimple_assign_lhs (g)));
3168 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3169 }
3170 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3171 + int_size_in_bytes (itype);
3172 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3173 gimple_call_arg (stmt, 0),
3174 gimple_assign_lhs (g),
3175 gimple_call_arg (stmt, 2),
3176 build_int_cst (integer_type_node, flag),
3177 gimple_call_arg (stmt, 4),
3178 gimple_call_arg (stmt, 5));
3179 tree lhs = make_ssa_name (ctype);
3180 gimple_call_set_lhs (g, lhs);
3181 gimple_set_vdef (g, gimple_vdef (stmt));
3182 gimple_set_vuse (g, gimple_vuse (stmt));
3183 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3184 if (gimple_call_lhs (stmt))
3185 {
3186 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3187 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3188 build1 (IMAGPART_EXPR, itype, lhs));
3189 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3190 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3191 gimple_assign_lhs (g));
3192 }
3193 gsi_replace (gsi, g, true);
3194 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3195 build1 (REALPART_EXPR, itype, lhs));
3196 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3197 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3198 {
3199 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3200 VIEW_CONVERT_EXPR,
3201 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3202 gimple_assign_lhs (g)));
3203 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3204 }
3205 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3206 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3207 *gsi = gsiret;
3208 }
3209
/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
   does not fit into TYPE.  The test for overflow is performed regardless
   of -fwrapv, and even for unsigned types.  */
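
/* For example, with TYPE an 8-bit unsigned type,
     arith_overflowed_p (PLUS_EXPR, type, 200, 100)
   is true, since the result 300 needs 9 bits, and
     arith_overflowed_p (MINUS_EXPR, type, 0, 1)
   is true, since the infinite-precision result -1 is negative.
   (Values are shown as plain integers for illustration.)  */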
3213
3214 bool
3215 arith_overflowed_p (enum tree_code code, const_tree type,
3216 const_tree arg0, const_tree arg1)
3217 {
3218 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3219 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3220 widest2_int_cst;
3221 widest2_int warg0 = widest2_int_cst (arg0);
3222 widest2_int warg1 = widest2_int_cst (arg1);
3223 widest2_int wres;
3224 switch (code)
3225 {
3226 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3227 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3228 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3229 default: gcc_unreachable ();
3230 }
3231 signop sign = TYPE_SIGN (type);
3232 if (sign == UNSIGNED && wi::neg_p (wres))
3233 return true;
3234 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3235 }
3236
3237 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3238 The statement may be replaced by another statement, e.g., if the call
3239 simplifies to a constant value. Return true if any changes were made.
3240 It is assumed that the operands have been previously folded. */
3241
3242 static bool
3243 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3244 {
3245 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3246 tree callee;
3247 bool changed = false;
3248 unsigned i;
3249
3250 /* Fold *& in call arguments. */
3251 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3252 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3253 {
3254 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3255 if (tmp)
3256 {
3257 gimple_call_set_arg (stmt, i, tmp);
3258 changed = true;
3259 }
3260 }
3261
3262 /* Check for virtual calls that became direct calls. */
3263 callee = gimple_call_fn (stmt);
3264 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3265 {
3266 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3267 {
3268 if (dump_file && virtual_method_call_p (callee)
3269 && !possible_polymorphic_call_target_p
3270 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3271 (OBJ_TYPE_REF_EXPR (callee)))))
3272 {
3273 fprintf (dump_file,
3274 "Type inheritance inconsistent devirtualization of ");
3275 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3276 fprintf (dump_file, " to ");
3277 print_generic_expr (dump_file, callee, TDF_SLIM);
3278 fprintf (dump_file, "\n");
3279 }
3280
3281 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3282 changed = true;
3283 }
3284 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3285 {
3286 bool final;
3287 vec <cgraph_node *>targets
3288 = possible_polymorphic_call_targets (callee, stmt, &final);
3289 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3290 {
3291 tree lhs = gimple_call_lhs (stmt);
3292 if (dump_enabled_p ())
3293 {
3294 location_t loc = gimple_location_safe (stmt);
3295 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3296 "folding virtual function call to %s\n",
3297 targets.length () == 1
3298 ? targets[0]->name ()
3299 : "__builtin_unreachable");
3300 }
3301 if (targets.length () == 1)
3302 {
3303 tree fndecl = targets[0]->decl;
3304 gimple_call_set_fndecl (stmt, fndecl);
3305 changed = true;
3306 /* If changing the call to __cxa_pure_virtual
3307 or similar noreturn function, adjust gimple_call_fntype
3308 too. */
3309 if (gimple_call_noreturn_p (stmt)
3310 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3311 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3312 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3313 == void_type_node))
3314 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3315 /* If the call becomes noreturn, remove the lhs. */
3316 if (lhs
3317 && gimple_call_noreturn_p (stmt)
3318 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3319 || should_remove_lhs_p (lhs)))
3320 {
3321 if (TREE_CODE (lhs) == SSA_NAME)
3322 {
3323 tree var = create_tmp_var (TREE_TYPE (lhs));
3324 tree def = get_or_create_ssa_default_def (cfun, var);
3325 gimple *new_stmt = gimple_build_assign (lhs, def);
3326 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3327 }
3328 gimple_call_set_lhs (stmt, NULL_TREE);
3329 }
3330 maybe_remove_unused_call_args (cfun, stmt);
3331 }
3332 else
3333 {
3334 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3335 gimple *new_stmt = gimple_build_call (fndecl, 0);
3336 gimple_set_location (new_stmt, gimple_location (stmt));
3337 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3338 {
3339 tree var = create_tmp_var (TREE_TYPE (lhs));
3340 tree def = get_or_create_ssa_default_def (cfun, var);
3341
		  /* To satisfy the condition for
		     cgraph_update_edges_for_call_stmt_node, we need to
		     preserve the GIMPLE_CALL statement at the position
		     of the GSI iterator.  */
3346 update_call_from_tree (gsi, def);
3347 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3348 }
3349 else
3350 {
3351 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3352 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3353 gsi_replace (gsi, new_stmt, false);
3354 }
3355 return true;
3356 }
3357 }
3358 }
3359 }
3360
3361 /* Check for indirect calls that became direct calls, and then
3362 no longer require a static chain. */
3363 if (gimple_call_chain (stmt))
3364 {
3365 tree fn = gimple_call_fndecl (stmt);
3366 if (fn && !DECL_STATIC_CHAIN (fn))
3367 {
3368 gimple_call_set_chain (stmt, NULL);
3369 changed = true;
3370 }
3371 else
3372 {
3373 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3374 if (tmp)
3375 {
3376 gimple_call_set_chain (stmt, tmp);
3377 changed = true;
3378 }
3379 }
3380 }
3381
3382 if (inplace)
3383 return changed;
3384
3385 /* Check for builtins that CCP can handle using information not
3386 available in the generic fold routines. */
3387 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3388 {
3389 if (gimple_fold_builtin (gsi))
3390 changed = true;
3391 }
3392 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3393 {
3394 changed |= targetm.gimple_fold_builtin (gsi);
3395 }
3396 else if (gimple_call_internal_p (stmt))
3397 {
3398 enum tree_code subcode = ERROR_MARK;
3399 tree result = NULL_TREE;
3400 bool cplx_result = false;
3401 tree overflow = NULL_TREE;
3402 switch (gimple_call_internal_fn (stmt))
3403 {
3404 case IFN_BUILTIN_EXPECT:
3405 result = fold_builtin_expect (gimple_location (stmt),
3406 gimple_call_arg (stmt, 0),
3407 gimple_call_arg (stmt, 1),
3408 gimple_call_arg (stmt, 2));
3409 break;
3410 case IFN_UBSAN_OBJECT_SIZE:
3411 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3412 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3413 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3414 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3415 gimple_call_arg (stmt, 2))))
3416 {
3417 gsi_replace (gsi, gimple_build_nop (), false);
3418 unlink_stmt_vdef (stmt);
3419 release_defs (stmt);
3420 return true;
3421 }
3422 break;
3423 case IFN_GOACC_DIM_SIZE:
3424 case IFN_GOACC_DIM_POS:
3425 result = fold_internal_goacc_dim (stmt);
3426 break;
3427 case IFN_UBSAN_CHECK_ADD:
3428 subcode = PLUS_EXPR;
3429 break;
3430 case IFN_UBSAN_CHECK_SUB:
3431 subcode = MINUS_EXPR;
3432 break;
3433 case IFN_UBSAN_CHECK_MUL:
3434 subcode = MULT_EXPR;
3435 break;
3436 case IFN_ADD_OVERFLOW:
3437 subcode = PLUS_EXPR;
3438 cplx_result = true;
3439 break;
3440 case IFN_SUB_OVERFLOW:
3441 subcode = MINUS_EXPR;
3442 cplx_result = true;
3443 break;
3444 case IFN_MUL_OVERFLOW:
3445 subcode = MULT_EXPR;
3446 cplx_result = true;
3447 break;
3448 default:
3449 break;
3450 }
3451 if (subcode != ERROR_MARK)
3452 {
3453 tree arg0 = gimple_call_arg (stmt, 0);
3454 tree arg1 = gimple_call_arg (stmt, 1);
3455 tree type = TREE_TYPE (arg0);
3456 if (cplx_result)
3457 {
3458 tree lhs = gimple_call_lhs (stmt);
3459 if (lhs == NULL_TREE)
3460 type = NULL_TREE;
3461 else
3462 type = TREE_TYPE (TREE_TYPE (lhs));
3463 }
3464 if (type == NULL_TREE)
3465 ;
3466 /* x = y + 0; x = y - 0; x = y * 0; */
3467 else if (integer_zerop (arg1))
3468 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3469 /* x = 0 + y; x = 0 * y; */
3470 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3471 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3472 /* x = y - y; */
3473 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3474 result = integer_zero_node;
3475 /* x = y * 1; x = 1 * y; */
3476 else if (subcode == MULT_EXPR && integer_onep (arg1))
3477 result = arg0;
3478 else if (subcode == MULT_EXPR && integer_onep (arg0))
3479 result = arg1;
3480 else if (TREE_CODE (arg0) == INTEGER_CST
3481 && TREE_CODE (arg1) == INTEGER_CST)
3482 {
3483 if (cplx_result)
3484 result = int_const_binop (subcode, fold_convert (type, arg0),
3485 fold_convert (type, arg1));
3486 else
3487 result = int_const_binop (subcode, arg0, arg1);
3488 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3489 {
3490 if (cplx_result)
3491 overflow = build_one_cst (type);
3492 else
3493 result = NULL_TREE;
3494 }
3495 }
3496 if (result)
3497 {
3498 if (result == integer_zero_node)
3499 result = build_zero_cst (type);
3500 else if (cplx_result && TREE_TYPE (result) != type)
3501 {
3502 if (TREE_CODE (result) == INTEGER_CST)
3503 {
3504 if (arith_overflowed_p (PLUS_EXPR, type, result,
3505 integer_zero_node))
3506 overflow = build_one_cst (type);
3507 }
3508 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3509 && TYPE_UNSIGNED (type))
3510 || (TYPE_PRECISION (type)
3511 < (TYPE_PRECISION (TREE_TYPE (result))
3512 + (TYPE_UNSIGNED (TREE_TYPE (result))
3513 && !TYPE_UNSIGNED (type)))))
3514 result = NULL_TREE;
3515 if (result)
3516 result = fold_convert (type, result);
3517 }
3518 }
3519 }
3520
3521 if (result)
3522 {
3523 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3524 result = drop_tree_overflow (result);
3525 if (cplx_result)
3526 {
3527 if (overflow == NULL_TREE)
3528 overflow = build_zero_cst (TREE_TYPE (result));
3529 tree ctype = build_complex_type (TREE_TYPE (result));
3530 if (TREE_CODE (result) == INTEGER_CST
3531 && TREE_CODE (overflow) == INTEGER_CST)
3532 result = build_complex (ctype, result, overflow);
3533 else
3534 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3535 ctype, result, overflow);
3536 }
3537 if (!update_call_from_tree (gsi, result))
3538 gimplify_and_update_call_from_tree (gsi, result);
3539 changed = true;
3540 }
3541 }
3542
3543 return changed;
3544 }
3545
3546
/* Return whether NAME has a use on STMT.  */
3548
3549 static bool
3550 has_use_on_stmt (tree name, gimple *stmt)
3551 {
3552 imm_use_iterator iter;
3553 use_operand_p use_p;
3554 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3555 if (USE_STMT (use_p) == stmt)
3556 return true;
3557 return false;
3558 }
3559
3560 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
3561 gimple_simplify.
3562
3563 Replaces *GSI with the simplification result in RCODE and OPS
3564 and the associated statements in *SEQ. Does the replacement
3565 according to INPLACE and returns true if the operation succeeded. */
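/* As an illustration (the SSA names here are arbitrary): given

     if (a_1 != 0)   with   a_1 = (b_2 == c_3)

   gimple_simplify may hand back RCODE == EQ_EXPR and OPS == { b_2, c_3 },
   and for such a GIMPLE_COND this routine rewrites the condition in
   place to

     if (b_2 == c_3)  */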
3566
3567 static bool
3568 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3569 code_helper rcode, tree *ops,
3570 gimple_seq *seq, bool inplace)
3571 {
3572 gimple *stmt = gsi_stmt (*gsi);
3573
3574 /* Play safe and do not allow abnormals to be mentioned in
3575 newly created statements. See also maybe_push_res_to_seq.
3576 As an exception allow such uses if there was a use of the
3577 same SSA name on the old stmt. */
3578 if ((TREE_CODE (ops[0]) == SSA_NAME
3579 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3580 && !has_use_on_stmt (ops[0], stmt))
3581 || (ops[1]
3582 && TREE_CODE (ops[1]) == SSA_NAME
3583 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3584 && !has_use_on_stmt (ops[1], stmt))
3585 || (ops[2]
3586 && TREE_CODE (ops[2]) == SSA_NAME
3587 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3588 && !has_use_on_stmt (ops[2], stmt))
3589 || (COMPARISON_CLASS_P (ops[0])
3590 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3591 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3592 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3593 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3594 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3595 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3596 return false;
3597
3598 /* Don't insert new statements when INPLACE is true, even if we could
3599 reuse STMT for the final statement. */
3600 if (inplace && !gimple_seq_empty_p (*seq))
3601 return false;
3602
3603 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3604 {
3605 gcc_assert (rcode.is_tree_code ());
3606 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3607 /* A GIMPLE_COND's condition may not throw. */
3608 && (!flag_exceptions
3609 || !cfun->can_throw_non_call_exceptions
3610 || !operation_could_trap_p (rcode,
3611 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3612 false, NULL_TREE)))
3613 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3614 else if (rcode == SSA_NAME)
3615 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3616 build_zero_cst (TREE_TYPE (ops[0])));
3617 else if (rcode == INTEGER_CST)
3618 {
3619 if (integer_zerop (ops[0]))
3620 gimple_cond_make_false (cond_stmt);
3621 else
3622 gimple_cond_make_true (cond_stmt);
3623 }
3624 else if (!inplace)
3625 {
3626 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3627 ops, seq);
3628 if (!res)
3629 return false;
3630 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3631 build_zero_cst (TREE_TYPE (res)));
3632 }
3633 else
3634 return false;
3635 if (dump_file && (dump_flags & TDF_DETAILS))
3636 {
3637 fprintf (dump_file, "gimple_simplified to ");
3638 if (!gimple_seq_empty_p (*seq))
3639 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3640 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3641 0, TDF_SLIM);
3642 }
3643 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3644 return true;
3645 }
3646 else if (is_gimple_assign (stmt)
3647 && rcode.is_tree_code ())
3648 {
3649 if (!inplace
3650 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3651 {
3652 maybe_build_generic_op (rcode,
3653 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3654 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3655 if (dump_file && (dump_flags & TDF_DETAILS))
3656 {
3657 fprintf (dump_file, "gimple_simplified to ");
3658 if (!gimple_seq_empty_p (*seq))
3659 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3660 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3661 0, TDF_SLIM);
3662 }
3663 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3664 return true;
3665 }
3666 }
3667 else if (rcode.is_fn_code ()
3668 && gimple_call_combined_fn (stmt) == rcode)
3669 {
3670 unsigned i;
3671 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3672 {
3673 gcc_assert (ops[i] != NULL_TREE);
3674 gimple_call_set_arg (stmt, i, ops[i]);
3675 }
3676 if (i < 3)
3677 gcc_assert (ops[i] == NULL_TREE);
3678 if (dump_file && (dump_flags & TDF_DETAILS))
3679 {
3680 fprintf (dump_file, "gimple_simplified to ");
3681 if (!gimple_seq_empty_p (*seq))
3682 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3683 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3684 }
3685 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3686 return true;
3687 }
3688 else if (!inplace)
3689 {
3690 if (gimple_has_lhs (stmt))
3691 {
3692 tree lhs = gimple_get_lhs (stmt);
3693 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3694 ops, seq, lhs))
3695 return false;
3696 if (dump_file && (dump_flags & TDF_DETAILS))
3697 {
3698 fprintf (dump_file, "gimple_simplified to ");
3699 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3700 }
3701 gsi_replace_with_seq_vops (gsi, *seq);
3702 return true;
3703 }
3704 else
3705 gcc_unreachable ();
3706 }
3707
3708 return false;
3709 }
3710
3711 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3712
3713 static bool
3714 maybe_canonicalize_mem_ref_addr (tree *t)
3715 {
3716 bool res = false;
3717
3718 if (TREE_CODE (*t) == ADDR_EXPR)
3719 t = &TREE_OPERAND (*t, 0);
3720
3721 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3722 generic vector extension. The actual vector referenced is
3723 view-converted to an array type for this purpose. If the index
3724 is constant the canonical representation in the middle-end is a
3725 BIT_FIELD_REF, so rewrite the former to the latter here. */
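/* For example (assuming 32-bit int), indexing element 2 of a vector V
   of four ints through VIEW_CONVERT_EXPR<int[4]>(V)[2] becomes
   BIT_FIELD_REF <V, 32, 64>, i.e. 32 bits at bit position 2 * 32.  */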
3726 if (TREE_CODE (*t) == ARRAY_REF
3727 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3728 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3729 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3730 {
3731 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3732 if (VECTOR_TYPE_P (vtype))
3733 {
3734 tree low = array_ref_low_bound (*t);
3735 if (TREE_CODE (low) == INTEGER_CST)
3736 {
3737 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3738 {
3739 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3740 wi::to_widest (low));
3741 idx = wi::mul (idx, wi::to_widest
3742 (TYPE_SIZE (TREE_TYPE (*t))));
3743 widest_int ext
3744 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3745 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3746 {
3747 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3748 TREE_TYPE (*t),
3749 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3750 TYPE_SIZE (TREE_TYPE (*t)),
3751 wide_int_to_tree (sizetype, idx));
3752 res = true;
3753 }
3754 }
3755 }
3756 }
3757 }
3758
3759 while (handled_component_p (*t))
3760 t = &TREE_OPERAND (*t, 0);
3761
3762 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
3763 of invariant addresses into an SSA name MEM_REF address. */
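/* E.g. MEM[&a.b, 4], where field b lives at byte offset 8 within a,
   is rewritten here to MEM[&a, 12].  */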
3764 if (TREE_CODE (*t) == MEM_REF
3765 || TREE_CODE (*t) == TARGET_MEM_REF)
3766 {
3767 tree addr = TREE_OPERAND (*t, 0);
3768 if (TREE_CODE (addr) == ADDR_EXPR
3769 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3770 || handled_component_p (TREE_OPERAND (addr, 0))))
3771 {
3772 tree base;
3773 HOST_WIDE_INT coffset;
3774 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3775 &coffset);
3776 if (!base)
3777 gcc_unreachable ();
3778
3779 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3780 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3781 TREE_OPERAND (*t, 1),
3782 size_int (coffset));
3783 res = true;
3784 }
3785 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3786 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3787 }
3788
3789 /* Canonicalize back MEM_REFs to plain reference trees if the object
3790 accessed is a decl that has the same access semantics as the MEM_REF. */
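/* E.g. MEM[(int *)&i, 0] for an int decl i becomes plain i, provided
   volatility, TBAA behavior, alignment and type of the access all
   match those of the decl.  */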
3791 if (TREE_CODE (*t) == MEM_REF
3792 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3793 && integer_zerop (TREE_OPERAND (*t, 1))
3794 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3795 {
3796 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3797 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3798 if (/* Same volatile qualification. */
3799 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3800 /* Same TBAA behavior with -fstrict-aliasing. */
3801 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3802 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3803 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3804 /* Same alignment. */
3805 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3806 /* We have to look out here to not drop a required conversion
3807 from the rhs to the lhs if *t appears on the lhs or vice-versa
3808 if it appears on the rhs. Thus require strict type
3809 compatibility. */
3810 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3811 {
3812 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3813 res = true;
3814 }
3815 }
3816
3817 /* Canonicalize TARGET_MEM_REF in particular with respect to
3818 the indexes becoming constant. */
3819 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3820 {
3821 tree tem = maybe_fold_tmr (*t);
3822 if (tem)
3823 {
3824 *t = tem;
3825 res = true;
3826 }
3827 }
3828
3829 return res;
3830 }
3831
3832 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3833 distinguishes both cases. */
3834
3835 static bool
3836 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3837 {
3838 bool changed = false;
3839 gimple *stmt = gsi_stmt (*gsi);
3840 bool nowarning = gimple_no_warning_p (stmt);
3841 unsigned i;
3842 fold_defer_overflow_warnings ();
3843
3844 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3845 after propagation.
3846 ??? This shouldn't be done in generic folding but in the
3847 propagation helpers which also know whether an address was
3848 propagated.
3849 Also canonicalize operand order. */
3850 switch (gimple_code (stmt))
3851 {
3852 case GIMPLE_ASSIGN:
3853 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3854 {
3855 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3856 if ((REFERENCE_CLASS_P (*rhs)
3857 || TREE_CODE (*rhs) == ADDR_EXPR)
3858 && maybe_canonicalize_mem_ref_addr (rhs))
3859 changed = true;
3860 tree *lhs = gimple_assign_lhs_ptr (stmt);
3861 if (REFERENCE_CLASS_P (*lhs)
3862 && maybe_canonicalize_mem_ref_addr (lhs))
3863 changed = true;
3864 }
3865 else
3866 {
3867 /* Canonicalize operand order. */
3868 enum tree_code code = gimple_assign_rhs_code (stmt);
3869 if (TREE_CODE_CLASS (code) == tcc_comparison
3870 || commutative_tree_code (code)
3871 || commutative_ternary_tree_code (code))
3872 {
3873 tree rhs1 = gimple_assign_rhs1 (stmt);
3874 tree rhs2 = gimple_assign_rhs2 (stmt);
3875 if (tree_swap_operands_p (rhs1, rhs2, false))
3876 {
3877 gimple_assign_set_rhs1 (stmt, rhs2);
3878 gimple_assign_set_rhs2 (stmt, rhs1);
3879 if (TREE_CODE_CLASS (code) == tcc_comparison)
3880 gimple_assign_set_rhs_code (stmt,
3881 swap_tree_comparison (code));
3882 changed = true;
3883 }
3884 }
3885 }
3886 break;
3887 case GIMPLE_CALL:
3888 {
3889 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3890 {
3891 tree *arg = gimple_call_arg_ptr (stmt, i);
3892 if (REFERENCE_CLASS_P (*arg)
3893 && maybe_canonicalize_mem_ref_addr (arg))
3894 changed = true;
3895 }
3896 tree *lhs = gimple_call_lhs_ptr (stmt);
3897 if (*lhs
3898 && REFERENCE_CLASS_P (*lhs)
3899 && maybe_canonicalize_mem_ref_addr (lhs))
3900 changed = true;
3901 break;
3902 }
3903 case GIMPLE_ASM:
3904 {
3905 gasm *asm_stmt = as_a <gasm *> (stmt);
3906 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3907 {
3908 tree link = gimple_asm_output_op (asm_stmt, i);
3909 tree op = TREE_VALUE (link);
3910 if (REFERENCE_CLASS_P (op)
3911 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3912 changed = true;
3913 }
3914 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3915 {
3916 tree link = gimple_asm_input_op (asm_stmt, i);
3917 tree op = TREE_VALUE (link);
3918 if ((REFERENCE_CLASS_P (op)
3919 || TREE_CODE (op) == ADDR_EXPR)
3920 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3921 changed = true;
3922 }
3923 }
3924 break;
3925 case GIMPLE_DEBUG:
3926 if (gimple_debug_bind_p (stmt))
3927 {
3928 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3929 if (*val
3930 && (REFERENCE_CLASS_P (*val)
3931 || TREE_CODE (*val) == ADDR_EXPR)
3932 && maybe_canonicalize_mem_ref_addr (val))
3933 changed = true;
3934 }
3935 break;
3936 case GIMPLE_COND:
3937 {
3938 /* Canonicalize operand order. */
3939 tree lhs = gimple_cond_lhs (stmt);
3940 tree rhs = gimple_cond_rhs (stmt);
3941 if (tree_swap_operands_p (lhs, rhs, false))
3942 {
3943 gcond *gc = as_a <gcond *> (stmt);
3944 gimple_cond_set_lhs (gc, rhs);
3945 gimple_cond_set_rhs (gc, lhs);
3946 gimple_cond_set_code (gc,
3947 swap_tree_comparison (gimple_cond_code (gc)));
3948 changed = true;
3949 }
3950 }
3951 default:;
3952 }
3953
3954 /* Dispatch to pattern-based folding. */
3955 if (!inplace
3956 || is_gimple_assign (stmt)
3957 || gimple_code (stmt) == GIMPLE_COND)
3958 {
3959 gimple_seq seq = NULL;
3960 code_helper rcode;
3961 tree ops[3] = {};
3962 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3963 valueize, valueize))
3964 {
3965 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3966 changed = true;
3967 else
3968 gimple_seq_discard (seq);
3969 }
3970 }
3971
3972 stmt = gsi_stmt (*gsi);
3973
3974 /* Fold the main computation performed by the statement. */
3975 switch (gimple_code (stmt))
3976 {
3977 case GIMPLE_ASSIGN:
3978 {
3979 /* Try to canonicalize for boolean-typed X the comparisons
3980 X == 0, X == 1, X != 0, and X != 1. */
3981 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3982 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3983 {
3984 tree lhs = gimple_assign_lhs (stmt);
3985 tree op1 = gimple_assign_rhs1 (stmt);
3986 tree op2 = gimple_assign_rhs2 (stmt);
3987 tree type = TREE_TYPE (op1);
3988
3989 /* Check whether the comparison operands are of the same boolean
3990 type as the result type is.
3991 Check that second operand is an integer-constant with value
3992 one or zero. */
3993 if (TREE_CODE (op2) == INTEGER_CST
3994 && (integer_zerop (op2) || integer_onep (op2))
3995 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3996 {
3997 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3998 bool is_logical_not = false;
3999
4000 /* X == 0 and X != 1 is a logical-not of X;
4001 X == 1 and X != 0 is X. */
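/* E.g. for a one-bit _Bool X:  X != 0  =>  X  and  X == 0  =>  ~X;
   for a boolean type wider than one bit the logical-not is expressed
   as X ^ 1 instead.  */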
4002 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4003 || (cmp_code == NE_EXPR && integer_onep (op2)))
4004 is_logical_not = true;
4005
4006 if (is_logical_not == false)
4007 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4008 /* Only for one-bit precision typed X is the transformation
4009 !X -> ~X valid. */
4010 else if (TYPE_PRECISION (type) == 1)
4011 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4012 /* Otherwise we use !X -> X ^ 1. */
4013 else
4014 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4015 build_int_cst (type, 1));
4016 changed = true;
4017 break;
4018 }
4019 }
4020
4021 unsigned old_num_ops = gimple_num_ops (stmt);
4022 tree lhs = gimple_assign_lhs (stmt);
4023 tree new_rhs = fold_gimple_assign (gsi);
4024 if (new_rhs
4025 && !useless_type_conversion_p (TREE_TYPE (lhs),
4026 TREE_TYPE (new_rhs)))
4027 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4028 if (new_rhs
4029 && (!inplace
4030 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4031 {
4032 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4033 changed = true;
4034 }
4035 break;
4036 }
4037
4038 case GIMPLE_CALL:
4039 changed |= gimple_fold_call (gsi, inplace);
4040 break;
4041
4042 case GIMPLE_ASM:
4043 /* Fold *& in asm operands. */
4044 {
4045 gasm *asm_stmt = as_a <gasm *> (stmt);
4046 size_t noutputs;
4047 const char **oconstraints;
4048 const char *constraint;
4049 bool allows_mem, allows_reg;
4050
4051 noutputs = gimple_asm_noutputs (asm_stmt);
4052 oconstraints = XALLOCAVEC (const char *, noutputs);
4053
4054 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4055 {
4056 tree link = gimple_asm_output_op (asm_stmt, i);
4057 tree op = TREE_VALUE (link);
4058 oconstraints[i]
4059 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4060 if (REFERENCE_CLASS_P (op)
4061 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4062 {
4063 TREE_VALUE (link) = op;
4064 changed = true;
4065 }
4066 }
4067 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4068 {
4069 tree link = gimple_asm_input_op (asm_stmt, i);
4070 tree op = TREE_VALUE (link);
4071 constraint
4072 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4073 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4074 oconstraints, &allows_mem, &allows_reg);
4075 if (REFERENCE_CLASS_P (op)
4076 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4077 != NULL_TREE)
4078 {
4079 TREE_VALUE (link) = op;
4080 changed = true;
4081 }
4082 }
4083 }
4084 break;
4085
4086 case GIMPLE_DEBUG:
4087 if (gimple_debug_bind_p (stmt))
4088 {
4089 tree val = gimple_debug_bind_get_value (stmt);
4090 if (val
4091 && REFERENCE_CLASS_P (val))
4092 {
4093 tree tem = maybe_fold_reference (val, false);
4094 if (tem)
4095 {
4096 gimple_debug_bind_set_value (stmt, tem);
4097 changed = true;
4098 }
4099 }
4100 else if (val
4101 && TREE_CODE (val) == ADDR_EXPR)
4102 {
4103 tree ref = TREE_OPERAND (val, 0);
4104 tree tem = maybe_fold_reference (ref, false);
4105 if (tem)
4106 {
4107 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4108 gimple_debug_bind_set_value (stmt, tem);
4109 changed = true;
4110 }
4111 }
4112 }
4113 break;
4114
4115 default:;
4116 }
4117
4118 stmt = gsi_stmt (*gsi);
4119
4120 /* Fold *& on the lhs. */
4121 if (gimple_has_lhs (stmt))
4122 {
4123 tree lhs = gimple_get_lhs (stmt);
4124 if (lhs && REFERENCE_CLASS_P (lhs))
4125 {
4126 tree new_lhs = maybe_fold_reference (lhs, true);
4127 if (new_lhs)
4128 {
4129 gimple_set_lhs (stmt, new_lhs);
4130 changed = true;
4131 }
4132 }
4133 }
4134
4135 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4136 return changed;
4137 }
4138
4139 /* Valueization callback that ends up not following SSA edges. */
4140
4141 tree
4142 no_follow_ssa_edges (tree)
4143 {
4144 return NULL_TREE;
4145 }
4146
4147 /* Valueization callback that ends up following single-use SSA edges only. */
4148
4149 tree
4150 follow_single_use_edges (tree val)
4151 {
4152 if (TREE_CODE (val) == SSA_NAME
4153 && !has_single_use (val))
4154 return NULL_TREE;
4155 return val;
4156 }
4157
4158 /* Fold the statement pointed to by GSI. In some cases, this function may
4159 replace the whole statement with a new one. Returns true iff folding
4160 makes any changes.
4161 The statement pointed to by GSI should be in valid gimple form but may
4162 be in an unfolded state resulting from, for example, constant
4163 propagation, which can produce *&x = 0. */
4164
4165 bool
4166 fold_stmt (gimple_stmt_iterator *gsi)
4167 {
4168 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4169 }
4170
4171 bool
4172 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4173 {
4174 return fold_stmt_1 (gsi, false, valueize);
4175 }
4176
4177 /* Perform the minimal folding on statement *GSI. Only operations like
4178 *&x created by constant propagation are handled. The statement cannot
4179 be replaced with a new one. Return true if the statement was
4180 changed, false otherwise.
4181 The statement *GSI should be in valid gimple form but may
4182 be in an unfolded state resulting from, for example, constant
4183 propagation, which can produce *&x = 0. */
4184
4185 bool
4186 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4187 {
4188 gimple *stmt = gsi_stmt (*gsi);
4189 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4190 gcc_assert (gsi_stmt (*gsi) == stmt);
4191 return changed;
4192 }
4193
4194 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4195 if EXPR is null or we don't know how.
4196 If non-null, the result always has boolean type. */
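/* For example, canonicalize_bool (a_1 < b_2, true) yields a_1 >= b_2,
   and canonicalize_bool (x_3, false) for an SSA name of non-boolean
   type yields x_3 != 0 (the SSA names here are arbitrary).  */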
4197
4198 static tree
4199 canonicalize_bool (tree expr, bool invert)
4200 {
4201 if (!expr)
4202 return NULL_TREE;
4203 else if (invert)
4204 {
4205 if (integer_nonzerop (expr))
4206 return boolean_false_node;
4207 else if (integer_zerop (expr))
4208 return boolean_true_node;
4209 else if (TREE_CODE (expr) == SSA_NAME)
4210 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4211 build_int_cst (TREE_TYPE (expr), 0));
4212 else if (COMPARISON_CLASS_P (expr))
4213 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4214 boolean_type_node,
4215 TREE_OPERAND (expr, 0),
4216 TREE_OPERAND (expr, 1));
4217 else
4218 return NULL_TREE;
4219 }
4220 else
4221 {
4222 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4223 return expr;
4224 if (integer_nonzerop (expr))
4225 return boolean_true_node;
4226 else if (integer_zerop (expr))
4227 return boolean_false_node;
4228 else if (TREE_CODE (expr) == SSA_NAME)
4229 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4230 build_int_cst (TREE_TYPE (expr), 0));
4231 else if (COMPARISON_CLASS_P (expr))
4232 return fold_build2 (TREE_CODE (expr),
4233 boolean_type_node,
4234 TREE_OPERAND (expr, 0),
4235 TREE_OPERAND (expr, 1));
4236 else
4237 return NULL_TREE;
4238 }
4239 }
4240
4241 /* Check to see if a boolean expression EXPR is logically equivalent to the
4242 comparison (OP1 CODE OP2). Check for various identities involving
4243 SSA_NAMEs. */
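/* E.g. a boolean SSA name t_1 defined by t_1 = (a_2 < b_3) is
   recognized as equivalent both to the comparison (a_2 < b_3) and to
   (t_1 != 0).  */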
4244
4245 static bool
4246 same_bool_comparison_p (const_tree expr, enum tree_code code,
4247 const_tree op1, const_tree op2)
4248 {
4249 gimple *s;
4250
4251 /* The obvious case. */
4252 if (TREE_CODE (expr) == code
4253 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4254 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4255 return true;
4256
4257 /* Check for comparing (name, name != 0) and the case where expr
4258 is an SSA_NAME with a definition matching the comparison. */
4259 if (TREE_CODE (expr) == SSA_NAME
4260 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4261 {
4262 if (operand_equal_p (expr, op1, 0))
4263 return ((code == NE_EXPR && integer_zerop (op2))
4264 || (code == EQ_EXPR && integer_nonzerop (op2)));
4265 s = SSA_NAME_DEF_STMT (expr);
4266 if (is_gimple_assign (s)
4267 && gimple_assign_rhs_code (s) == code
4268 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4269 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4270 return true;
4271 }
4272
4273 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4274 of name is a comparison, recurse. */
4275 if (TREE_CODE (op1) == SSA_NAME
4276 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4277 {
4278 s = SSA_NAME_DEF_STMT (op1);
4279 if (is_gimple_assign (s)
4280 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4281 {
4282 enum tree_code c = gimple_assign_rhs_code (s);
4283 if ((c == NE_EXPR && integer_zerop (op2))
4284 || (c == EQ_EXPR && integer_nonzerop (op2)))
4285 return same_bool_comparison_p (expr, c,
4286 gimple_assign_rhs1 (s),
4287 gimple_assign_rhs2 (s));
4288 if ((c == EQ_EXPR && integer_zerop (op2))
4289 || (c == NE_EXPR && integer_nonzerop (op2)))
4290 return same_bool_comparison_p (expr,
4291 invert_tree_comparison (c, false),
4292 gimple_assign_rhs1 (s),
4293 gimple_assign_rhs2 (s));
4294 }
4295 }
4296 return false;
4297 }
4298
4299 /* Check to see if two boolean expressions OP1 and OP2 are logically
4300 equivalent. */
4301
4302 static bool
4303 same_bool_result_p (const_tree op1, const_tree op2)
4304 {
4305 /* Simple cases first. */
4306 if (operand_equal_p (op1, op2, 0))
4307 return true;
4308
4309 /* Check the cases where at least one of the operands is a comparison.
4310 These are a bit smarter than operand_equal_p in that they apply some
4311 identities on SSA_NAMEs. */
4312 if (COMPARISON_CLASS_P (op2)
4313 && same_bool_comparison_p (op1, TREE_CODE (op2),
4314 TREE_OPERAND (op2, 0),
4315 TREE_OPERAND (op2, 1)))
4316 return true;
4317 if (COMPARISON_CLASS_P (op1)
4318 && same_bool_comparison_p (op2, TREE_CODE (op1),
4319 TREE_OPERAND (op1, 0),
4320 TREE_OPERAND (op1, 1)))
4321 return true;
4322
4323 /* Default case. */
4324 return false;
4325 }
4326
4327 /* Forward declarations for some mutually recursive functions. */
4328
4329 static tree
4330 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4331 enum tree_code code2, tree op2a, tree op2b);
4332 static tree
4333 and_var_with_comparison (tree var, bool invert,
4334 enum tree_code code2, tree op2a, tree op2b);
4335 static tree
4336 and_var_with_comparison_1 (gimple *stmt,
4337 enum tree_code code2, tree op2a, tree op2b);
4338 static tree
4339 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4340 enum tree_code code2, tree op2a, tree op2b);
4341 static tree
4342 or_var_with_comparison (tree var, bool invert,
4343 enum tree_code code2, tree op2a, tree op2b);
4344 static tree
4345 or_var_with_comparison_1 (gimple *stmt,
4346 enum tree_code code2, tree op2a, tree op2b);
4347
4348 /* Helper function for and_comparisons_1: try to simplify the AND of the
4349 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4350 If INVERT is true, invert the value of the VAR before doing the AND.
4351 Return NULL_TREE if we can't simplify this to a single expression. */
4352
4353 static tree
4354 and_var_with_comparison (tree var, bool invert,
4355 enum tree_code code2, tree op2a, tree op2b)
4356 {
4357 tree t;
4358 gimple *stmt = SSA_NAME_DEF_STMT (var);
4359
4360 /* We can only deal with variables whose definitions are assignments. */
4361 if (!is_gimple_assign (stmt))
4362 return NULL_TREE;
4363
4364 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4365 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4366 Then we only have to consider the simpler non-inverted cases. */
4367 if (invert)
4368 t = or_var_with_comparison_1 (stmt,
4369 invert_tree_comparison (code2, false),
4370 op2a, op2b);
4371 else
4372 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4373 return canonicalize_bool (t, invert);
4374 }
4375
4376 /* Try to simplify the AND of the ssa variable defined by the assignment
4377 STMT with the comparison specified by (OP2A CODE2 OP2B).
4378 Return NULL_TREE if we can't simplify this to a single expression. */
4379
4380 static tree
4381 and_var_with_comparison_1 (gimple *stmt,
4382 enum tree_code code2, tree op2a, tree op2b)
4383 {
4384 tree var = gimple_assign_lhs (stmt);
4385 tree true_test_var = NULL_TREE;
4386 tree false_test_var = NULL_TREE;
4387 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4388
4389 /* Check for identities like (var AND (var == 0)) => false. */
4390 if (TREE_CODE (op2a) == SSA_NAME
4391 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4392 {
4393 if ((code2 == NE_EXPR && integer_zerop (op2b))
4394 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4395 {
4396 true_test_var = op2a;
4397 if (var == true_test_var)
4398 return var;
4399 }
4400 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4401 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4402 {
4403 false_test_var = op2a;
4404 if (var == false_test_var)
4405 return boolean_false_node;
4406 }
4407 }
4408
4409 /* If the definition is a comparison, recurse on it. */
4410 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4411 {
4412 tree t = and_comparisons_1 (innercode,
4413 gimple_assign_rhs1 (stmt),
4414 gimple_assign_rhs2 (stmt),
4415 code2,
4416 op2a,
4417 op2b);
4418 if (t)
4419 return t;
4420 }
4421
4422 /* If the definition is an AND or OR expression, we may be able to
4423 simplify by reassociating. */
4424 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4425 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4426 {
4427 tree inner1 = gimple_assign_rhs1 (stmt);
4428 tree inner2 = gimple_assign_rhs2 (stmt);
4429 gimple *s;
4430 tree t;
4431 tree partial = NULL_TREE;
4432 bool is_and = (innercode == BIT_AND_EXPR);
4433
4434 /* Check for boolean identities that don't require recursive examination
4435 of inner1/inner2:
4436 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4437 inner1 AND (inner1 OR inner2) => inner1
4438 !inner1 AND (inner1 AND inner2) => false
4439 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4440 Likewise for similar cases involving inner2. */
4441 if (inner1 == true_test_var)
4442 return (is_and ? var : inner1);
4443 else if (inner2 == true_test_var)
4444 return (is_and ? var : inner2);
4445 else if (inner1 == false_test_var)
4446 return (is_and
4447 ? boolean_false_node
4448 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4449 else if (inner2 == false_test_var)
4450 return (is_and
4451 ? boolean_false_node
4452 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4453
4454 /* Next, redistribute/reassociate the AND across the inner tests.
4455 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4456 if (TREE_CODE (inner1) == SSA_NAME
4457 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4458 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4459 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4460 gimple_assign_rhs1 (s),
4461 gimple_assign_rhs2 (s),
4462 code2, op2a, op2b)))
4463 {
4464 /* Handle the AND case, where we are reassociating:
4465 (inner1 AND inner2) AND (op2a code2 op2b)
4466 => (t AND inner2)
4467 If the partial result t is a constant, we win. Otherwise
4468 continue on to try reassociating with the other inner test. */
4469 if (is_and)
4470 {
4471 if (integer_onep (t))
4472 return inner2;
4473 else if (integer_zerop (t))
4474 return boolean_false_node;
4475 }
4476
4477 /* Handle the OR case, where we are redistributing:
4478 (inner1 OR inner2) AND (op2a code2 op2b)
4479 => (t OR (inner2 AND (op2a code2 op2b))) */
4480 else if (integer_onep (t))
4481 return boolean_true_node;
4482
4483 /* Save partial result for later. */
4484 partial = t;
4485 }
4486
4487 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4488 if (TREE_CODE (inner2) == SSA_NAME
4489 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4490 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4491 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4492 gimple_assign_rhs1 (s),
4493 gimple_assign_rhs2 (s),
4494 code2, op2a, op2b)))
4495 {
4496 /* Handle the AND case, where we are reassociating:
4497 (inner1 AND inner2) AND (op2a code2 op2b)
4498 => (inner1 AND t) */
4499 if (is_and)
4500 {
4501 if (integer_onep (t))
4502 return inner1;
4503 else if (integer_zerop (t))
4504 return boolean_false_node;
4505 /* If both are the same, we can apply the identity
4506 (x AND x) == x. */
4507 else if (partial && same_bool_result_p (t, partial))
4508 return t;
4509 }
4510
4511 /* Handle the OR case, where we are redistributing:
4512 (inner1 OR inner2) AND (op2a code2 op2b)
4513 => (t OR (inner1 AND (op2a code2 op2b)))
4514 => (t OR partial) */
4515 else
4516 {
4517 if (integer_onep (t))
4518 return boolean_true_node;
4519 else if (partial)
4520 {
4521 /* We already got a simplification for the other
4522 operand to the redistributed OR expression. The
4523 interesting case is when at least one is false.
4524 Or, if both are the same, we can apply the identity
4525 (x OR x) == x. */
4526 if (integer_zerop (partial))
4527 return t;
4528 else if (integer_zerop (t))
4529 return partial;
4530 else if (same_bool_result_p (t, partial))
4531 return t;
4532 }
4533 }
4534 }
4535 }
4536 return NULL_TREE;
4537 }
4538
4539 /* Try to simplify the AND of two comparisons defined by
4540 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4541 If this can be done without constructing an intermediate value,
4542 return the resulting tree; otherwise NULL_TREE is returned.
4543 This function is deliberately asymmetric as it recurses on SSA_DEFs
4544 in the first comparison but not the second. */
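/* For example, and_comparisons_1 (LT_EXPR, x, 5, LT_EXPR, x, 7)
   yields x < 5 (the more restrictive bound), while
   and_comparisons_1 (LT_EXPR, x, 5, GT_EXPR, x, 7) yields
   boolean_false_node because the two ranges are disjoint.  */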
4545
4546 static tree
4547 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4548 enum tree_code code2, tree op2a, tree op2b)
4549 {
4550 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4551
4552 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4553 if (operand_equal_p (op1a, op2a, 0)
4554 && operand_equal_p (op1b, op2b, 0))
4555 {
4556 /* Result will be either NULL_TREE, or a combined comparison. */
4557 tree t = combine_comparisons (UNKNOWN_LOCATION,
4558 TRUTH_ANDIF_EXPR, code1, code2,
4559 truth_type, op1a, op1b);
4560 if (t)
4561 return t;
4562 }
4563
4564 /* Likewise the swapped case of the above. */
4565 if (operand_equal_p (op1a, op2b, 0)
4566 && operand_equal_p (op1b, op2a, 0))
4567 {
4568 /* Result will be either NULL_TREE, or a combined comparison. */
4569 tree t = combine_comparisons (UNKNOWN_LOCATION,
4570 TRUTH_ANDIF_EXPR, code1,
4571 swap_tree_comparison (code2),
4572 truth_type, op1a, op1b);
4573 if (t)
4574 return t;
4575 }
4576
4577 /* If both comparisons are of the same value against constants, we might
4578 be able to merge them. */
4579 if (operand_equal_p (op1a, op2a, 0)
4580 && TREE_CODE (op1b) == INTEGER_CST
4581 && TREE_CODE (op2b) == INTEGER_CST)
4582 {
4583 int cmp = tree_int_cst_compare (op1b, op2b);
4584
4585 /* If we have (op1a == op1b), we should either be able to
4586 return that or FALSE, depending on whether the constant op1b
4587 also satisfies the other comparison against op2b. */
4588 if (code1 == EQ_EXPR)
4589 {
4590 bool done = true;
4591 bool val;
4592 switch (code2)
4593 {
4594 case EQ_EXPR: val = (cmp == 0); break;
4595 case NE_EXPR: val = (cmp != 0); break;
4596 case LT_EXPR: val = (cmp < 0); break;
4597 case GT_EXPR: val = (cmp > 0); break;
4598 case LE_EXPR: val = (cmp <= 0); break;
4599 case GE_EXPR: val = (cmp >= 0); break;
4600 default: done = false;
4601 }
4602 if (done)
4603 {
4604 if (val)
4605 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4606 else
4607 return boolean_false_node;
4608 }
4609 }
4610 /* Likewise if the second comparison is an == comparison. */
4611 else if (code2 == EQ_EXPR)
4612 {
4613 bool done = true;
4614 bool val;
4615 switch (code1)
4616 {
4617 case EQ_EXPR: val = (cmp == 0); break;
4618 case NE_EXPR: val = (cmp != 0); break;
4619 case LT_EXPR: val = (cmp > 0); break;
4620 case GT_EXPR: val = (cmp < 0); break;
4621 case LE_EXPR: val = (cmp >= 0); break;
4622 case GE_EXPR: val = (cmp <= 0); break;
4623 default: done = false;
4624 }
4625 if (done)
4626 {
4627 if (val)
4628 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4629 else
4630 return boolean_false_node;
4631 }
4632 }
4633
4634 /* Same business with inequality tests. */
4635 else if (code1 == NE_EXPR)
4636 {
4637 bool val;
4638 switch (code2)
4639 {
4640 case EQ_EXPR: val = (cmp != 0); break;
4641 case NE_EXPR: val = (cmp == 0); break;
4642 case LT_EXPR: val = (cmp >= 0); break;
4643 case GT_EXPR: val = (cmp <= 0); break;
4644 case LE_EXPR: val = (cmp > 0); break;
4645 case GE_EXPR: val = (cmp < 0); break;
4646 default:
4647 val = false;
4648 }
4649 if (val)
4650 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4651 }
4652 else if (code2 == NE_EXPR)
4653 {
4654 bool val;
4655 switch (code1)
4656 {
4657 case EQ_EXPR: val = (cmp == 0); break;
4658 case NE_EXPR: val = (cmp != 0); break;
4659 case LT_EXPR: val = (cmp <= 0); break;
4660 case GT_EXPR: val = (cmp >= 0); break;
4661 case LE_EXPR: val = (cmp < 0); break;
4662 case GE_EXPR: val = (cmp > 0); break;
4663 default:
4664 val = false;
4665 }
4666 if (val)
4667 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4668 }
4669
4670 /* Choose the more restrictive of two < or <= comparisons. */
4671 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4672 && (code2 == LT_EXPR || code2 == LE_EXPR))
4673 {
4674 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4675 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4676 else
4677 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4678 }
4679
4680 /* Likewise choose the more restrictive of two > or >= comparisons. */
4681 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4682 && (code2 == GT_EXPR || code2 == GE_EXPR))
4683 {
4684 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4685 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4686 else
4687 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4688 }
4689
4690 /* Check for singleton ranges. */
4691 else if (cmp == 0
4692 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4693 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4694 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4695
4696 /* Check for disjoint ranges. */
4697 else if (cmp <= 0
4698 && (code1 == LT_EXPR || code1 == LE_EXPR)
4699 && (code2 == GT_EXPR || code2 == GE_EXPR))
4700 return boolean_false_node;
4701 else if (cmp >= 0
4702 && (code1 == GT_EXPR || code1 == GE_EXPR)
4703 && (code2 == LT_EXPR || code2 == LE_EXPR))
4704 return boolean_false_node;
4705 }
4706
4707 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4708 NAME's definition is a truth value. See if there are any simplifications
4709 that can be done against the NAME's definition. */
4710 if (TREE_CODE (op1a) == SSA_NAME
4711 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4712 && (integer_zerop (op1b) || integer_onep (op1b)))
4713 {
4714 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4715 || (code1 == NE_EXPR && integer_onep (op1b)));
4716 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4717 switch (gimple_code (stmt))
4718 {
4719 case GIMPLE_ASSIGN:
4720 /* Try to simplify by copy-propagating the definition. */
4721 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4722
4723 case GIMPLE_PHI:
4724 /* If every argument to the PHI produces the same result when
4725 ANDed with the second comparison, we win.
4726 Do not do this unless the type is bool since we need a bool
4727 result here anyway. */
4728 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4729 {
4730 tree result = NULL_TREE;
4731 unsigned i;
4732 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4733 {
4734 tree arg = gimple_phi_arg_def (stmt, i);
4735
4736 /* If this PHI has itself as an argument, ignore it.
4737 If all the other args produce the same result,
4738 we're still OK. */
4739 if (arg == gimple_phi_result (stmt))
4740 continue;
4741 else if (TREE_CODE (arg) == INTEGER_CST)
4742 {
4743 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4744 {
4745 if (!result)
4746 result = boolean_false_node;
4747 else if (!integer_zerop (result))
4748 return NULL_TREE;
4749 }
4750 else if (!result)
4751 result = fold_build2 (code2, boolean_type_node,
4752 op2a, op2b);
4753 else if (!same_bool_comparison_p (result,
4754 code2, op2a, op2b))
4755 return NULL_TREE;
4756 }
4757 else if (TREE_CODE (arg) == SSA_NAME
4758 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4759 {
4760 tree temp;
4761 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4762 /* In simple cases we can look through PHI nodes,
4763 but we have to be careful with loops.
4764 See PR49073. */
4765 if (! dom_info_available_p (CDI_DOMINATORS)
4766 || gimple_bb (def_stmt) == gimple_bb (stmt)
4767 || dominated_by_p (CDI_DOMINATORS,
4768 gimple_bb (def_stmt),
4769 gimple_bb (stmt)))
4770 return NULL_TREE;
4771 temp = and_var_with_comparison (arg, invert, code2,
4772 op2a, op2b);
4773 if (!temp)
4774 return NULL_TREE;
4775 else if (!result)
4776 result = temp;
4777 else if (!same_bool_result_p (result, temp))
4778 return NULL_TREE;
4779 }
4780 else
4781 return NULL_TREE;
4782 }
4783 return result;
4784 }
4785
4786 default:
4787 break;
4788 }
4789 }
4790 return NULL_TREE;
4791 }
4792
4793 /* Try to simplify the AND of two comparisons, specified by
4794 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4795 If this can be simplified to a single expression (without requiring
4796 introducing more SSA variables to hold intermediate values),
4797 return the resulting tree. Otherwise return NULL_TREE.
4798 If the result expression is non-null, it has boolean type. */
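/* E.g. (a == 0) AND (a != 0) simplifies to false here.  Trying both
   operand orders makes this entry point symmetric even though
   and_comparisons_1 itself is not.  */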
4799
4800 tree
4801 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4802 enum tree_code code2, tree op2a, tree op2b)
4803 {
4804 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4805 if (t)
4806 return t;
4807 else
4808 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4809 }
4810
4811 /* Helper function for or_comparisons_1: try to simplify the OR of the
4812 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4813 If INVERT is true, invert the value of VAR before doing the OR.
4814 Return NULL_TREE if we can't simplify this to a single expression. */
4815
4816 static tree
4817 or_var_with_comparison (tree var, bool invert,
4818 enum tree_code code2, tree op2a, tree op2b)
4819 {
4820 tree t;
4821 gimple *stmt = SSA_NAME_DEF_STMT (var);
4822
4823 /* We can only deal with variables whose definitions are assignments. */
4824 if (!is_gimple_assign (stmt))
4825 return NULL_TREE;
4826
4827 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4828 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4829 Then we only have to consider the simpler non-inverted cases. */
4830 if (invert)
4831 t = and_var_with_comparison_1 (stmt,
4832 invert_tree_comparison (code2, false),
4833 op2a, op2b);
4834 else
4835 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4836 return canonicalize_bool (t, invert);
4837 }
4838
4839 /* Try to simplify the OR of the ssa variable defined by the assignment
4840 STMT with the comparison specified by (OP2A CODE2 OP2B).
4841 Return NULL_TREE if we can't simplify this to a single expression. */
4842
4843 static tree
4844 or_var_with_comparison_1 (gimple *stmt,
4845 enum tree_code code2, tree op2a, tree op2b)
4846 {
4847 tree var = gimple_assign_lhs (stmt);
4848 tree true_test_var = NULL_TREE;
4849 tree false_test_var = NULL_TREE;
4850 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4851
4852 /* Check for identities like (var OR (var != 0)) => true. */
4853 if (TREE_CODE (op2a) == SSA_NAME
4854 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4855 {
4856 if ((code2 == NE_EXPR && integer_zerop (op2b))
4857 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4858 {
4859 true_test_var = op2a;
4860 if (var == true_test_var)
4861 return var;
4862 }
4863 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4864 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4865 {
4866 false_test_var = op2a;
4867 if (var == false_test_var)
4868 return boolean_true_node;
4869 }
4870 }
4871
4872 /* If the definition is a comparison, recurse on it. */
4873 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4874 {
4875 tree t = or_comparisons_1 (innercode,
4876 gimple_assign_rhs1 (stmt),
4877 gimple_assign_rhs2 (stmt),
4878 code2,
4879 op2a,
4880 op2b);
4881 if (t)
4882 return t;
4883 }
4884
4885 /* If the definition is an AND or OR expression, we may be able to
4886 simplify by reassociating. */
4887 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4888 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4889 {
4890 tree inner1 = gimple_assign_rhs1 (stmt);
4891 tree inner2 = gimple_assign_rhs2 (stmt);
4892 gimple *s;
4893 tree t;
4894 tree partial = NULL_TREE;
4895 bool is_or = (innercode == BIT_IOR_EXPR);
4896
4897 /* Check for boolean identities that don't require recursive examination
4898 of inner1/inner2:
4899 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4900 inner1 OR (inner1 AND inner2) => inner1
4901 !inner1 OR (inner1 OR inner2) => true
4902 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4903 */
4904 if (inner1 == true_test_var)
4905 return (is_or ? var : inner1);
4906 else if (inner2 == true_test_var)
4907 return (is_or ? var : inner2);
4908 else if (inner1 == false_test_var)
4909 return (is_or
4910 ? boolean_true_node
4911 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4912 else if (inner2 == false_test_var)
4913 return (is_or
4914 ? boolean_true_node
4915 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4916
4917 /* Next, redistribute/reassociate the OR across the inner tests.
4918 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4919 if (TREE_CODE (inner1) == SSA_NAME
4920 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4921 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4922 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4923 gimple_assign_rhs1 (s),
4924 gimple_assign_rhs2 (s),
4925 code2, op2a, op2b)))
4926 {
4927 /* Handle the OR case, where we are reassociating:
4928 (inner1 OR inner2) OR (op2a code2 op2b)
4929 => (t OR inner2)
4930 If the partial result t is a constant, we win. Otherwise
4931 continue on to try reassociating with the other inner test. */
4932 if (is_or)
4933 {
4934 if (integer_onep (t))
4935 return boolean_true_node;
4936 else if (integer_zerop (t))
4937 return inner2;
4938 }
4939
4940 /* Handle the AND case, where we are redistributing:
4941 (inner1 AND inner2) OR (op2a code2 op2b)
4942 => (t AND (inner2 OR (op2a code2 op2b))) */
4943 else if (integer_zerop (t))
4944 return boolean_false_node;
4945
4946 /* Save partial result for later. */
4947 partial = t;
4948 }
4949
4950 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4951 if (TREE_CODE (inner2) == SSA_NAME
4952 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4953 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4954 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4955 gimple_assign_rhs1 (s),
4956 gimple_assign_rhs2 (s),
4957 code2, op2a, op2b)))
4958 {
4959 /* Handle the OR case, where we are reassociating:
4960 (inner1 OR inner2) OR (op2a code2 op2b)
4961 => (inner1 OR t)
4962 => (t OR partial) */
4963 if (is_or)
4964 {
4965 if (integer_zerop (t))
4966 return inner1;
4967 else if (integer_onep (t))
4968 return boolean_true_node;
4969 /* If both are the same, we can apply the identity
4970 (x OR x) == x. */
4971 else if (partial && same_bool_result_p (t, partial))
4972 return t;
4973 }
4974
4975 /* Handle the AND case, where we are redistributing:
4976 (inner1 AND inner2) OR (op2a code2 op2b)
4977 => (t AND (inner1 OR (op2a code2 op2b)))
4978 => (t AND partial) */
4979 else
4980 {
4981 if (integer_zerop (t))
4982 return boolean_false_node;
4983 else if (partial)
4984 {
4985 /* We already got a simplification for the other
4986 operand to the redistributed AND expression. The
4987 interesting case is when at least one is true.
4988 Or, if both are the same, we can apply the identity
4989 (x AND x) == x. */
4990 if (integer_onep (partial))
4991 return t;
4992 else if (integer_onep (t))
4993 return partial;
4994 else if (same_bool_result_p (t, partial))
4995 return t;
4996 }
4997 }
4998 }
4999 }
5000 return NULL_TREE;
5001 }
5002
5003 /* Try to simplify the OR of two comparisons defined by
5004 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5005 If this can be done without constructing an intermediate value,
5006 return the resulting tree; otherwise NULL_TREE is returned.
5007 This function is deliberately asymmetric as it recurses on SSA_DEFs
5008 in the first comparison but not the second. */
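/* For example, or_comparisons_1 (LT_EXPR, x, 5, LT_EXPR, x, 7)
   yields x < 7 (the less restrictive bound), while
   or_comparisons_1 (LE_EXPR, x, 5, GE_EXPR, x, 3) yields
   boolean_true_node because the two ranges cover all values.  */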
5009
5010 static tree
5011 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5012 enum tree_code code2, tree op2a, tree op2b)
5013 {
5014 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5015
5016 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5017 if (operand_equal_p (op1a, op2a, 0)
5018 && operand_equal_p (op1b, op2b, 0))
5019 {
5020 /* Result will be either NULL_TREE, or a combined comparison. */
5021 tree t = combine_comparisons (UNKNOWN_LOCATION,
5022 TRUTH_ORIF_EXPR, code1, code2,
5023 truth_type, op1a, op1b);
5024 if (t)
5025 return t;
5026 }
5027
5028 /* Likewise the swapped case of the above. */
5029 if (operand_equal_p (op1a, op2b, 0)
5030 && operand_equal_p (op1b, op2a, 0))
5031 {
5032 /* Result will be either NULL_TREE, or a combined comparison. */
5033 tree t = combine_comparisons (UNKNOWN_LOCATION,
5034 TRUTH_ORIF_EXPR, code1,
5035 swap_tree_comparison (code2),
5036 truth_type, op1a, op1b);
5037 if (t)
5038 return t;
5039 }
5040
5041 /* If both comparisons are of the same value against constants, we might
5042 be able to merge them. */
5043 if (operand_equal_p (op1a, op2a, 0)
5044 && TREE_CODE (op1b) == INTEGER_CST
5045 && TREE_CODE (op2b) == INTEGER_CST)
5046 {
5047 int cmp = tree_int_cst_compare (op1b, op2b);
5048
5049 /* If we have (op1a != op1b), we should either be able to
5050 return that or TRUE, depending on whether the constant op1b
5051 also satisfies the other comparison against op2b. */
5052 if (code1 == NE_EXPR)
5053 {
5054 bool done = true;
5055 bool val;
5056 switch (code2)
5057 {
5058 case EQ_EXPR: val = (cmp == 0); break;
5059 case NE_EXPR: val = (cmp != 0); break;
5060 case LT_EXPR: val = (cmp < 0); break;
5061 case GT_EXPR: val = (cmp > 0); break;
5062 case LE_EXPR: val = (cmp <= 0); break;
5063 case GE_EXPR: val = (cmp >= 0); break;
5064 default: done = false;
5065 }
5066 if (done)
5067 {
5068 if (val)
5069 return boolean_true_node;
5070 else
5071 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5072 }
5073 }
5074 /* Likewise if the second comparison is a != comparison. */
5075 else if (code2 == NE_EXPR)
5076 {
5077 bool done = true;
5078 bool val;
5079 switch (code1)
5080 {
5081 case EQ_EXPR: val = (cmp == 0); break;
5082 case NE_EXPR: val = (cmp != 0); break;
5083 case LT_EXPR: val = (cmp > 0); break;
5084 case GT_EXPR: val = (cmp < 0); break;
5085 case LE_EXPR: val = (cmp >= 0); break;
5086 case GE_EXPR: val = (cmp <= 0); break;
5087 default: done = false;
5088 }
5089 if (done)
5090 {
5091 if (val)
5092 return boolean_true_node;
5093 else
5094 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5095 }
5096 }
5097
5098 /* See if an equality test is redundant with the other comparison. */
5099 else if (code1 == EQ_EXPR)
5100 {
5101 bool val;
5102 switch (code2)
5103 {
5104 case EQ_EXPR: val = (cmp == 0); break;
5105 case NE_EXPR: val = (cmp != 0); break;
5106 case LT_EXPR: val = (cmp < 0); break;
5107 case GT_EXPR: val = (cmp > 0); break;
5108 case LE_EXPR: val = (cmp <= 0); break;
5109 case GE_EXPR: val = (cmp >= 0); break;
5110 default:
5111 val = false;
5112 }
5113 if (val)
5114 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5115 }
5116 else if (code2 == EQ_EXPR)
5117 {
5118 bool val;
5119 switch (code1)
5120 {
5121 case EQ_EXPR: val = (cmp == 0); break;
5122 case NE_EXPR: val = (cmp != 0); break;
5123 case LT_EXPR: val = (cmp > 0); break;
5124 case GT_EXPR: val = (cmp < 0); break;
5125 case LE_EXPR: val = (cmp >= 0); break;
5126 case GE_EXPR: val = (cmp <= 0); break;
5127 default:
5128 val = false;
5129 }
5130 if (val)
5131 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5132 }
5133
5134 /* Choose the less restrictive of two < or <= comparisons. */
5135 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5136 && (code2 == LT_EXPR || code2 == LE_EXPR))
5137 {
5138 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5139 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5140 else
5141 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5142 }
5143
5144 /* Likewise choose the less restrictive of two > or >= comparisons. */
5145 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5146 && (code2 == GT_EXPR || code2 == GE_EXPR))
5147 {
5148 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5149 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5150 else
5151 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5152 }
5153
5154 /* Check for singleton ranges. */
5155 else if (cmp == 0
5156 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5157 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5158 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5159
5160 /* Check for less/greater pairs that don't restrict the range at all. */
5161 else if (cmp >= 0
5162 && (code1 == LT_EXPR || code1 == LE_EXPR)
5163 && (code2 == GT_EXPR || code2 == GE_EXPR))
5164 return boolean_true_node;
5165 else if (cmp <= 0
5166 && (code1 == GT_EXPR || code1 == GE_EXPR)
5167 && (code2 == LT_EXPR || code2 == LE_EXPR))
5168 return boolean_true_node;
5169 }
5170
5171 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5172 NAME's definition is a truth value. See if there are any simplifications
5173 that can be done against the NAME's definition. */
5174 if (TREE_CODE (op1a) == SSA_NAME
5175 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5176 && (integer_zerop (op1b) || integer_onep (op1b)))
5177 {
5178 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5179 || (code1 == NE_EXPR && integer_onep (op1b)));
5180 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5181 switch (gimple_code (stmt))
5182 {
5183 case GIMPLE_ASSIGN:
5184 /* Try to simplify by copy-propagating the definition. */
5185 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5186
5187 case GIMPLE_PHI:
5188 /* If every argument to the PHI produces the same result when
5189 ORed with the second comparison, we win.
5190 Do not do this unless the type is bool since we need a bool
5191 result here anyway. */
5192 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5193 {
5194 tree result = NULL_TREE;
5195 unsigned i;
5196 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5197 {
5198 tree arg = gimple_phi_arg_def (stmt, i);
5199
5200 /* If this PHI has itself as an argument, ignore it.
5201 If all the other args produce the same result,
5202 we're still OK. */
5203 if (arg == gimple_phi_result (stmt))
5204 continue;
5205 else if (TREE_CODE (arg) == INTEGER_CST)
5206 {
5207 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5208 {
5209 if (!result)
5210 result = boolean_true_node;
5211 else if (!integer_onep (result))
5212 return NULL_TREE;
5213 }
5214 else if (!result)
5215 result = fold_build2 (code2, boolean_type_node,
5216 op2a, op2b);
5217 else if (!same_bool_comparison_p (result,
5218 code2, op2a, op2b))
5219 return NULL_TREE;
5220 }
5221 else if (TREE_CODE (arg) == SSA_NAME
5222 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5223 {
5224 tree temp;
5225 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5226 /* In simple cases we can look through PHI nodes,
5227 but we have to be careful with loops.
5228 See PR49073. */
5229 if (! dom_info_available_p (CDI_DOMINATORS)
5230 || gimple_bb (def_stmt) == gimple_bb (stmt)
5231 || dominated_by_p (CDI_DOMINATORS,
5232 gimple_bb (def_stmt),
5233 gimple_bb (stmt)))
5234 return NULL_TREE;
5235 temp = or_var_with_comparison (arg, invert, code2,
5236 op2a, op2b);
5237 if (!temp)
5238 return NULL_TREE;
5239 else if (!result)
5240 result = temp;
5241 else if (!same_bool_result_p (result, temp))
5242 return NULL_TREE;
5243 }
5244 else
5245 return NULL_TREE;
5246 }
5247 return result;
5248 }
5249
5250 default:
5251 break;
5252 }
5253 }
5254 return NULL_TREE;
5255 }
5256
5257 /* Try to simplify the OR of two comparisons, specified by
5258 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5259 If this can be simplified to a single expression (without requiring
5260 introducing more SSA variables to hold intermediate values),
5261 return the resulting tree. Otherwise return NULL_TREE.
5262 If the result expression is non-null, it has boolean type. */
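/* E.g. (a == 0) OR (a != 0) simplifies to true here; as with the AND
   variant, both operand orders are tried.  */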
5263
5264 tree
5265 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5266 enum tree_code code2, tree op2a, tree op2b)
5267 {
5268 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5269 if (t)
5270 return t;
5271 else
5272 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5273 }
5274
5275
5276 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5277
5278 Either NULL_TREE, a simplified but non-constant expression, or a
5279 constant is returned.
5280
5281 ??? This should go into a gimple-fold-inline.h file to be eventually
5282 privatized with the single valueize function used in the various TUs
5283 to avoid the indirect function call overhead. */
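/* For example, with VALUEIZE mapping i_1 to the constant 4, a
   statement like x_2 = i_1 + 1 can fold to the constant 5; when no
   operand valueizes to something useful, NULL_TREE is returned.  */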
5284
5285 tree
5286 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5287 tree (*gvalueize) (tree))
5288 {
5289 code_helper rcode;
5290 tree ops[3] = {};
5291 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5292 edges if there are intermediate VARYING defs. For this reason
5293 do not follow SSA edges here even though SCCVN can technically
5294 deal with that just fine. */
5295 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5296 {
5297 tree res = NULL_TREE;
5298 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5299 res = ops[0];
5300 else if (mprts_hook)
5301 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5302 if (res)
5303 {
5304 if (dump_file && (dump_flags & TDF_DETAILS))
5305 {
5306 fprintf (dump_file, "Match-and-simplified ");
5307 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5308 fprintf (dump_file, " to ");
5309 print_generic_expr (dump_file, res, 0);
5310 fprintf (dump_file, "\n");
5311 }
5312 return res;
5313 }
5314 }
5315
5316 location_t loc = gimple_location (stmt);
5317 switch (gimple_code (stmt))
5318 {
5319 case GIMPLE_ASSIGN:
5320 {
5321 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5322
5323 switch (get_gimple_rhs_class (subcode))
5324 {
5325 case GIMPLE_SINGLE_RHS:
5326 {
5327 tree rhs = gimple_assign_rhs1 (stmt);
5328 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5329
5330 if (TREE_CODE (rhs) == SSA_NAME)
5331 {
5332 /* If the RHS is an SSA_NAME, return its known constant value,
5333 if any. */
5334 return (*valueize) (rhs);
5335 }
5336 /* Handle propagating invariant addresses into address
5337 operations. */
5338 else if (TREE_CODE (rhs) == ADDR_EXPR
5339 && !is_gimple_min_invariant (rhs))
5340 {
5341 HOST_WIDE_INT offset = 0;
5342 tree base;
5343 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5344 &offset,
5345 valueize);
5346 if (base
5347 && (CONSTANT_CLASS_P (base)
5348 || decl_address_invariant_p (base)))
5349 return build_invariant_address (TREE_TYPE (rhs),
5350 base, offset);
5351 }
5352 else if (TREE_CODE (rhs) == CONSTRUCTOR
5353 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5354 && (CONSTRUCTOR_NELTS (rhs)
5355 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5356 {
5357 unsigned i;
5358 tree val, *vec;
5359
5360 vec = XALLOCAVEC (tree,
5361 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5362 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5363 {
5364 val = (*valueize) (val);
5365 if (TREE_CODE (val) == INTEGER_CST
5366 || TREE_CODE (val) == REAL_CST
5367 || TREE_CODE (val) == FIXED_CST)
5368 vec[i] = val;
5369 else
5370 return NULL_TREE;
5371 }
5372
5373 return build_vector (TREE_TYPE (rhs), vec);
5374 }
5375 if (subcode == OBJ_TYPE_REF)
5376 {
5377 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5378 /* If callee is constant, we can fold away the wrapper. */
5379 if (is_gimple_min_invariant (val))
5380 return val;
5381 }
5382
5383 if (kind == tcc_reference)
5384 {
5385 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5386 || TREE_CODE (rhs) == REALPART_EXPR
5387 || TREE_CODE (rhs) == IMAGPART_EXPR)
5388 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5389 {
5390 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5391 return fold_unary_loc (EXPR_LOCATION (rhs),
5392 TREE_CODE (rhs),
5393 TREE_TYPE (rhs), val);
5394 }
5395 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5396 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5397 {
5398 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5399 return fold_ternary_loc (EXPR_LOCATION (rhs),
5400 TREE_CODE (rhs),
5401 TREE_TYPE (rhs), val,
5402 TREE_OPERAND (rhs, 1),
5403 TREE_OPERAND (rhs, 2));
5404 }
5405 else if (TREE_CODE (rhs) == MEM_REF
5406 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5407 {
5408 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5409 if (TREE_CODE (val) == ADDR_EXPR
5410 && is_gimple_min_invariant (val))
5411 {
5412 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5413 unshare_expr (val),
5414 TREE_OPERAND (rhs, 1));
5415 if (tem)
5416 rhs = tem;
5417 }
5418 }
5419 return fold_const_aggregate_ref_1 (rhs, valueize);
5420 }
5421 else if (kind == tcc_declaration)
5422 return get_symbol_constant_value (rhs);
5423 return rhs;
5424 }
5425
5426 case GIMPLE_UNARY_RHS:
5427 return NULL_TREE;
5428
5429 case GIMPLE_BINARY_RHS:
5430 /* Translate &x + CST into an invariant form suitable for
5431 further propagation. */
5432 if (subcode == POINTER_PLUS_EXPR)
5433 {
5434 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5435 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5436 if (TREE_CODE (op0) == ADDR_EXPR
5437 && TREE_CODE (op1) == INTEGER_CST)
5438 {
5439 tree off = fold_convert (ptr_type_node, op1);
5440 return build_fold_addr_expr_loc
5441 (loc,
5442 fold_build2 (MEM_REF,
5443 TREE_TYPE (TREE_TYPE (op0)),
5444 unshare_expr (op0), off));
5445 }
5446 }
5447 /* Canonicalize bool != 0 and bool == 0 appearing after
5448 valueization. While gimple_simplify handles this,
5449 it can get confused by the ~X == 1 -> X == 0 transform,
5450 which we can't reduce to an SSA name or a constant
5451 (and we have no way to tell gimple_simplify not to
5452 consider those transforms in the first place). */
5453 else if (subcode == EQ_EXPR
5454 || subcode == NE_EXPR)
5455 {
5456 tree lhs = gimple_assign_lhs (stmt);
5457 tree op0 = gimple_assign_rhs1 (stmt);
5458 if (useless_type_conversion_p (TREE_TYPE (lhs),
5459 TREE_TYPE (op0)))
5460 {
5461 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5462 op0 = (*valueize) (op0);
5463 if (TREE_CODE (op0) == INTEGER_CST)
5464 std::swap (op0, op1);
5465 if (TREE_CODE (op1) == INTEGER_CST
5466 && ((subcode == NE_EXPR && integer_zerop (op1))
5467 || (subcode == EQ_EXPR && integer_onep (op1))))
5468 return op0;
5469 }
5470 }
5471 return NULL_TREE;
5472
5473 case GIMPLE_TERNARY_RHS:
5474 {
5475 /* Handle ternary operators that can appear in GIMPLE form. */
5476 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5477 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5478 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5479 return fold_ternary_loc (loc, subcode,
5480 gimple_expr_type (stmt), op0, op1, op2);
5481 }
5482
5483 default:
5484 gcc_unreachable ();
5485 }
5486 }
5487
5488 case GIMPLE_CALL:
5489 {
5490 tree fn;
5491 gcall *call_stmt = as_a <gcall *> (stmt);
5492
5493 if (gimple_call_internal_p (stmt))
5494 {
5495 enum tree_code subcode = ERROR_MARK;
5496 switch (gimple_call_internal_fn (stmt))
5497 {
5498 case IFN_UBSAN_CHECK_ADD:
5499 subcode = PLUS_EXPR;
5500 break;
5501 case IFN_UBSAN_CHECK_SUB:
5502 subcode = MINUS_EXPR;
5503 break;
5504 case IFN_UBSAN_CHECK_MUL:
5505 subcode = MULT_EXPR;
5506 break;
5507 case IFN_BUILTIN_EXPECT:
5508 {
5509 tree arg0 = gimple_call_arg (stmt, 0);
5510 tree op0 = (*valueize) (arg0);
5511 if (TREE_CODE (op0) == INTEGER_CST)
5512 return op0;
5513 return NULL_TREE;
5514 }
5515 default:
5516 return NULL_TREE;
5517 }
5518 tree arg0 = gimple_call_arg (stmt, 0);
5519 tree arg1 = gimple_call_arg (stmt, 1);
5520 tree op0 = (*valueize) (arg0);
5521 tree op1 = (*valueize) (arg1);
5522
5523 if (TREE_CODE (op0) != INTEGER_CST
5524 || TREE_CODE (op1) != INTEGER_CST)
5525 {
5526 switch (subcode)
5527 {
5528 case MULT_EXPR:
5529 /* x * 0 = 0 * x = 0 without overflow. */
5530 if (integer_zerop (op0) || integer_zerop (op1))
5531 return build_zero_cst (TREE_TYPE (arg0));
5532 break;
5533 case MINUS_EXPR:
5534 /* y - y = 0 without overflow. */
5535 if (operand_equal_p (op0, op1, 0))
5536 return build_zero_cst (TREE_TYPE (arg0));
5537 break;
5538 default:
5539 break;
5540 }
5541 }
5542 tree res
5543 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5544 if (res
5545 && TREE_CODE (res) == INTEGER_CST
5546 && !TREE_OVERFLOW (res))
5547 return res;
5548 return NULL_TREE;
5549 }
5550
5551 fn = (*valueize) (gimple_call_fn (stmt));
5552 if (TREE_CODE (fn) == ADDR_EXPR
5553 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5554 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5555 && gimple_builtin_call_types_compatible_p (stmt,
5556 TREE_OPERAND (fn, 0)))
5557 {
5558 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5559 tree retval;
5560 unsigned i;
5561 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5562 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5563 retval = fold_builtin_call_array (loc,
5564 gimple_call_return_type (call_stmt),
5565 fn, gimple_call_num_args (stmt), args);
5566 if (retval)
5567 {
5568 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5569 STRIP_NOPS (retval);
5570 retval = fold_convert (gimple_call_return_type (call_stmt),
5571 retval);
5572 }
5573 return retval;
5574 }
5575 return NULL_TREE;
5576 }
5577
5578 default:
5579 return NULL_TREE;
5580 }
5581 }
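
/* Illustrative example for the ADDR_EXPR case above: if VALUEIZE maps
   i_2 to the constant 3, the assignment

     p_1 = &a[i_2];

   folds to an invariant address equivalent to &a[3] via
   get_addr_base_and_unit_offset_1, and the POINTER_PLUS_EXPR case
   likewise rewrites &x + 4 to the invariant &MEM[&x + 4B].  */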
5582
5583 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5584 Returns NULL_TREE if folding to a constant is not possible, otherwise
5585 returns a constant according to is_gimple_min_invariant. */
5586
5587 tree
5588 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5589 {
5590 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5591 if (res && is_gimple_min_invariant (res))
5592 return res;
5593 return NULL_TREE;
5594 }
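
/* Usage sketch (illustrative; identity_valueize is a hypothetical hook,
   not part of this file): a caller that tracks no lattice values can
   pass a valueization hook that hands each SSA name back unchanged:

     static tree
     identity_valueize (tree t)
     {
       return t;
     }

     tree val = gimple_fold_stmt_to_constant (stmt, identity_valueize);

   A NULL_TREE result means STMT did not fold to a value satisfying
   is_gimple_min_invariant.  */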
5595
5596
5597 /* The following set of functions is supposed to fold references using
5598 their constant initializers. */
5599
5600 /* See if we can find a constructor defining the value of BASE.
5601 When we reach the constructor at a constant offset (such as when
5602 BASE is array[40] and we know the constructor of array),
5603 BIT_OFFSET is adjusted accordingly.
5604
5605 As a special case, return error_mark_node when the constructor
5606 is not explicitly available but is known to be zero,
5607 as for 'static const int a;'. */
5608 static tree
5609 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5610 tree (*valueize)(tree))
5611 {
5612 HOST_WIDE_INT bit_offset2, size, max_size;
5613 bool reverse;
5614
5615 if (TREE_CODE (base) == MEM_REF)
5616 {
5617 if (!integer_zerop (TREE_OPERAND (base, 1)))
5618 {
5619 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5620 return NULL_TREE;
5621 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5622 * BITS_PER_UNIT);
5623 }
5624
5625 if (valueize
5626 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5627 base = valueize (TREE_OPERAND (base, 0));
5628 if (!base || TREE_CODE (base) != ADDR_EXPR)
5629 return NULL_TREE;
5630 base = TREE_OPERAND (base, 0);
5631 }
5632 else if (valueize
5633 && TREE_CODE (base) == SSA_NAME)
5634 base = valueize (base);
5635
5636 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5637 DECL_INITIAL. If BASE is a nested reference into another
5638 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5639 the inner reference. */
5640 switch (TREE_CODE (base))
5641 {
5642 case VAR_DECL:
5643 case CONST_DECL:
5644 {
5645 tree init = ctor_for_folding (base);
5646
5647 /* Our semantics are the exact opposite of ctor_for_folding's:
5648 NULL means unknown, while error_mark_node means zero. */
5649 if (init == error_mark_node)
5650 return NULL_TREE;
5651 if (!init)
5652 return error_mark_node;
5653 return init;
5654 }
5655
5656 case VIEW_CONVERT_EXPR:
5657 return get_base_constructor (TREE_OPERAND (base, 0),
5658 bit_offset, valueize);
5659
5660 case ARRAY_REF:
5661 case COMPONENT_REF:
5662 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5663 &reverse);
5664 if (max_size == -1 || size != max_size)
5665 return NULL_TREE;
5666 *bit_offset += bit_offset2;
5667 return get_base_constructor (base, bit_offset, valueize);
5668
5669 case CONSTRUCTOR:
5670 return base;
5671
5672 default:
5673 if (CONSTANT_CLASS_P (base))
5674 return base;
5675
5676 return NULL_TREE;
5677 }
5678 }
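
/* Illustrative example: for "static const int a[4] = {1, 2, 3, 4};" and
   BASE being MEM_REF <&a, 8>, the MEM_REF arm above adds
   8 * BITS_PER_UNIT to *BIT_OFFSET and returns the CONSTRUCTOR
   {1, 2, 3, 4}, while for "static const int b;" the result is
   error_mark_node, meaning a known zero initializer.  */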
5679
5680 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
5681 and size SIZE to the memory at bit OFFSET. */
5682
5683 static tree
5684 fold_array_ctor_reference (tree type, tree ctor,
5685 unsigned HOST_WIDE_INT offset,
5686 unsigned HOST_WIDE_INT size,
5687 tree from_decl)
5688 {
5689 offset_int low_bound;
5690 offset_int elt_size;
5691 offset_int access_index;
5692 tree domain_type = NULL_TREE;
5693 HOST_WIDE_INT inner_offset;
5694
5695 /* Compute low bound and elt size. */
5696 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5697 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5698 if (domain_type && TYPE_MIN_VALUE (domain_type))
5699 {
5700 /* Static constructors for variably sized objects make no sense. */
5701 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
5702 return NULL_TREE;
5703 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5704 }
5705 else
5706 low_bound = 0;
5707 /* Static constructors for variably sized objects make no sense. */
5708 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
5709 return NULL_TREE;
5710 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5711
5712 /* We can handle only constant-sized accesses that are known not
5713 to be larger than the size of the array element. */
5714 if (!TYPE_SIZE_UNIT (type)
5715 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5716 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5717 || elt_size == 0)
5718 return NULL_TREE;
5719
5720 /* Compute the array index we look for. */
5721 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5722 elt_size);
5723 access_index += low_bound;
5724
5725 /* And offset within the access. */
5726 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5727
5728 /* See if the array element is large enough to span the whole access.
5729 We do not care to fold accesses spanning multiple array indices. */
5730 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5731 return NULL_TREE;
5732 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5733 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5734
5735 /* When memory is not explicitly mentioned in the constructor,
5736 it is 0 (or out of range). */
5737 return build_zero_cst (type);
5738 }
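
/* Worked example (illustrative): with CTOR being the initializer of
   "static const int a[4] = {1, 2, 3, 4};", OFFSET == 64 and SIZE == 32,
   elt_size is 4, so access_index becomes (64 / BITS_PER_UNIT) / 4 == 2
   and the call returns the INTEGER_CST 3. An index with no explicit
   CONSTRUCTOR entry folds to build_zero_cst (type).  */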
5739
5740 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
5741 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5742
5743 static tree
5744 fold_nonarray_ctor_reference (tree type, tree ctor,
5745 unsigned HOST_WIDE_INT offset,
5746 unsigned HOST_WIDE_INT size,
5747 tree from_decl)
5748 {
5749 unsigned HOST_WIDE_INT cnt;
5750 tree cfield, cval;
5751
5752 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5753 cval)
5754 {
5755 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5756 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5757 tree field_size = DECL_SIZE (cfield);
5758 offset_int bitoffset;
5759 offset_int bitoffset_end, access_end;
5760
5761 /* Variable sized objects in static constructors make no sense,
5762 but field_size can be NULL for flexible array members. */
5763 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5764 && TREE_CODE (byte_offset) == INTEGER_CST
5765 && (field_size != NULL_TREE
5766 ? TREE_CODE (field_size) == INTEGER_CST
5767 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5768
5769 /* Compute bit offset of the field. */
5770 bitoffset = (wi::to_offset (field_offset)
5771 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5772 /* Compute bit offset where the field ends. */
5773 if (field_size != NULL_TREE)
5774 bitoffset_end = bitoffset + wi::to_offset (field_size);
5775 else
5776 bitoffset_end = 0;
5777
5778 access_end = offset_int (offset) + size;
5779
5780 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5781 [BITOFFSET, BITOFFSET_END)? */
5782 if (wi::cmps (access_end, bitoffset) > 0
5783 && (field_size == NULL_TREE
5784 || wi::lts_p (offset, bitoffset_end)))
5785 {
5786 offset_int inner_offset = offset_int (offset) - bitoffset;
5787 /* We do have overlap. Now see if the field is large enough to
5788 cover the access. Give up for accesses spanning multiple
5789 fields. */
5790 if (wi::cmps (access_end, bitoffset_end) > 0)
5791 return NULL_TREE;
5792 if (offset < bitoffset)
5793 return NULL_TREE;
5794 return fold_ctor_reference (type, cval,
5795 inner_offset.to_uhwi (), size,
5796 from_decl);
5797 }
5798 }
5799 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5800 return build_zero_cst (type);
5801 }
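
/* Worked example (illustrative, assuming the usual layout): for
   "static const struct { short s; int i; } c = { 1, 2 };", reading TYPE
   int at OFFSET == 32 and SIZE == 32 overlaps the field 'i' exactly
   (bitoffset 32, bitoffset_end 64), so we recurse into its value with
   inner_offset 0 and obtain the INTEGER_CST 2.  */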
5802
5803 /* CTOR is a value initializing memory; fold a reference of type TYPE and
5804 size SIZE to the memory at bit OFFSET. */
5805
5806 tree
5807 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5808 unsigned HOST_WIDE_INT size, tree from_decl)
5809 {
5810 tree ret;
5811
5812 /* We found the field with an exact match. */
5813 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5814 && !offset)
5815 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5816
5817 /* We are at the end of the walk; see if we can view-convert
5818 the result. */
5819 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5820 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5821 && !compare_tree_int (TYPE_SIZE (type), size)
5822 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5823 {
5824 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5825 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5826 if (ret)
5827 STRIP_USELESS_TYPE_CONVERSION (ret);
5828 return ret;
5829 }
5830 /* For constants and byte-aligned/sized reads try to go through
5831 native_encode/interpret. */
5832 if (CONSTANT_CLASS_P (ctor)
5833 && BITS_PER_UNIT == 8
5834 && offset % BITS_PER_UNIT == 0
5835 && size % BITS_PER_UNIT == 0
5836 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5837 {
5838 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5839 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5840 offset / BITS_PER_UNIT);
5841 if (len > 0)
5842 return native_interpret_expr (type, buf, len);
5843 }
5844 if (TREE_CODE (ctor) == CONSTRUCTOR)
5845 {
5846
5847 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5848 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5849 return fold_array_ctor_reference (type, ctor, offset, size,
5850 from_decl);
5851 else
5852 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5853 from_decl);
5854 }
5855
5856 return NULL_TREE;
5857 }
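
/* Illustrative note (assuming an IEEE single-precision target): a
   byte-aligned, size-matched read of "static const float f = 1.0f;"
   with TYPE int is handled by the view-convert / native_encode paths
   above and yields the INTEGER_CST 0x3f800000.  */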
5858
5859 /* Return the tree representing the element referenced by T if T is an
5860 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5861 names using VALUEIZE. Return NULL_TREE otherwise. */
5862
5863 tree
5864 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5865 {
5866 tree ctor, idx, base;
5867 HOST_WIDE_INT offset, size, max_size;
5868 tree tem;
5869 bool reverse;
5870
5871 if (TREE_THIS_VOLATILE (t))
5872 return NULL_TREE;
5873
5874 if (DECL_P (t))
5875 return get_symbol_constant_value (t);
5876
5877 tem = fold_read_from_constant_string (t);
5878 if (tem)
5879 return tem;
5880
5881 switch (TREE_CODE (t))
5882 {
5883 case ARRAY_REF:
5884 case ARRAY_RANGE_REF:
5885 /* Constant indices are handled well by get_base_constructor.
5886 We only need to special-case variable offsets here.
5887 FIXME: This code can't handle nested references with variable indices
5888 (they will be handled only by iterating CCP). Perhaps we can bring
5889 get_ref_base_and_extent here and make it use a valueize callback. */
5890 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5891 && valueize
5892 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5893 && TREE_CODE (idx) == INTEGER_CST)
5894 {
5895 tree low_bound, unit_size;
5896
5897 /* If the resulting bit-offset is constant, track it. */
5898 if ((low_bound = array_ref_low_bound (t),
5899 TREE_CODE (low_bound) == INTEGER_CST)
5900 && (unit_size = array_ref_element_size (t),
5901 tree_fits_uhwi_p (unit_size)))
5902 {
5903 offset_int woffset
5904 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5905 TYPE_PRECISION (TREE_TYPE (idx)));
5906
5907 if (wi::fits_shwi_p (woffset))
5908 {
5909 offset = woffset.to_shwi ();
5910 /* TODO: This code seems wrong; it should multiply first and
5911 then check whether the product fits. */
5912 offset *= tree_to_uhwi (unit_size);
5913 offset *= BITS_PER_UNIT;
5914
5915 base = TREE_OPERAND (t, 0);
5916 ctor = get_base_constructor (base, &offset, valueize);
5917 /* Empty constructor. Always fold to 0. */
5918 if (ctor == error_mark_node)
5919 return build_zero_cst (TREE_TYPE (t));
5920 /* Out-of-bounds array access. The value is undefined,
5921 but don't fold. */
5922 if (offset < 0)
5923 return NULL_TREE;
5924 /* We cannot determine the ctor. */
5925 if (!ctor)
5926 return NULL_TREE;
5927 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5928 tree_to_uhwi (unit_size)
5929 * BITS_PER_UNIT,
5930 base);
5931 }
5932 }
5933 }
5934 /* Fallthru. */
5935
5936 case COMPONENT_REF:
5937 case BIT_FIELD_REF:
5938 case TARGET_MEM_REF:
5939 case MEM_REF:
5940 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5941 ctor = get_base_constructor (base, &offset, valueize);
5942
5943 /* Empty constructor. Always fold to 0. */
5944 if (ctor == error_mark_node)
5945 return build_zero_cst (TREE_TYPE (t));
5946 /* We do not know the precise address. */
5947 if (max_size == -1 || max_size != size)
5948 return NULL_TREE;
5949 /* We cannot determine the ctor. */
5950 if (!ctor)
5951 return NULL_TREE;
5952
5953 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5954 if (offset < 0)
5955 return NULL_TREE;
5956
5957 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5958 base);
5959
5960 case REALPART_EXPR:
5961 case IMAGPART_EXPR:
5962 {
5963 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5964 if (c && TREE_CODE (c) == COMPLEX_CST)
5965 return fold_build1_loc (EXPR_LOCATION (t),
5966 TREE_CODE (t), TREE_TYPE (t), c);
5967 break;
5968 }
5969
5970 default:
5971 break;
5972 }
5973
5974 return NULL_TREE;
5975 }
5976
5977 tree
5978 fold_const_aggregate_ref (tree t)
5979 {
5980 return fold_const_aggregate_ref_1 (t, NULL);
5981 }
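
/* Usage sketch (illustrative): for "static const int a[4] = {1, 2, 3, 4};"
   a reference tree for a[2] needs no valueization, so

     tree val = fold_const_aggregate_ref (ref);

   yields the INTEGER_CST 3, whereas a[i_1] with a variable index can
   fold only through fold_const_aggregate_ref_1 with a valueize hook
   that knows the value of i_1.  */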
5982
5983 /* Look up the virtual method with index TOKEN in a virtual table V
5984 at OFFSET.
5985 If CAN_REFER is non-NULL, set it to false when the method
5986 is not referable or the virtual table is ill-formed (such as rewritten
5987 by a non-C++-produced symbol); otherwise just return NULL_TREE in that case. */
5988
5989 tree
5990 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5991 tree v,
5992 unsigned HOST_WIDE_INT offset,
5993 bool *can_refer)
5994 {
5995 tree vtable = v, init, fn;
5996 unsigned HOST_WIDE_INT size;
5997 unsigned HOST_WIDE_INT elt_size, access_index;
5998 tree domain_type;
5999
6000 if (can_refer)
6001 *can_refer = true;
6002
6003 /* First of all, double-check that we have a virtual table. */
6004 if (TREE_CODE (v) != VAR_DECL
6005 || !DECL_VIRTUAL_P (v))
6006 {
6007 /* Pass down that we lost track of the target. */
6008 if (can_refer)
6009 *can_refer = false;
6010 return NULL_TREE;
6011 }
6012
6013 init = ctor_for_folding (v);
6014
6015 /* The virtual tables should always be born with constructors
6016 and we should always assume that they are available for
6017 folding. At the moment we do not stream them in all cases,
6018 but it should never happen that the ctor seems unreachable. */
6019 gcc_assert (init);
6020 if (init == error_mark_node)
6021 {
6022 gcc_assert (in_lto_p);
6023 /* Pass down that we lost track of the target. */
6024 if (can_refer)
6025 *can_refer = false;
6026 return NULL_TREE;
6027 }
6028 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6029 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6030 offset *= BITS_PER_UNIT;
6031 offset += token * size;
6032
6033 /* Look up the value in the constructor, which is assumed to be an array.
6034 This is equivalent to
6035 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6036 offset, size, NULL);
6037 but runs in constant time. We expect that the frontend produced a simple
6038 array without indexed initializers. */
6039
6040 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6041 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6042 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6043 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6044
6045 access_index = offset / BITS_PER_UNIT / elt_size;
6046 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6047
6048 /* This code assumes that the C++ FE produced no indexed
6049 fields, so we can index the array directly. */
6050 if (access_index < CONSTRUCTOR_NELTS (init))
6051 {
6052 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6053 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6054 STRIP_NOPS (fn);
6055 }
6056 else
6057 fn = NULL;
6058
6059 /* For a type-inconsistent program we may end up looking up a virtual method
6060 in a virtual table that does not contain TOKEN entries. We may overrun
6061 the virtual table and pick up a constant or RTTI info pointer.
6062 In any case the call is undefined. */
6063 if (!fn
6064 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6065 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6066 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6067 else
6068 {
6069 fn = TREE_OPERAND (fn, 0);
6070
6071 /* When the cgraph node is missing and the function is not public, we cannot
6072 devirtualize. This can happen in WHOPR when the actual method
6073 ends up in another partition, because we found the devirtualization
6074 opportunity too late. */
6075 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6076 {
6077 if (can_refer)
6078 {
6079 *can_refer = false;
6080 return fn;
6081 }
6082 return NULL_TREE;
6083 }
6084 }
6085
6086 /* Make sure we create a cgraph node for functions we'll reference.
6087 They can be non-existent if the reference comes from an entry
6088 of an external vtable for example. */
6089 cgraph_node::get_create (fn);
6090
6091 return fn;
6092 }
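
/* Illustrative sketch (hypothetical vtable layout): with the usual C++
   ABI placing two pointer-sized slots (offset-to-top and RTTI) before
   the first virtual method, a vtable initializer of the shape

     { 0, &_ZTI1A, A::f, A::g }

   queried with OFFSET pointing at the A::f slot and TOKEN == 1 yields
   the FUNCTION_DECL for A::g, subject to the referability check
   above.  */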
6093
6094 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6095 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6096 KNOWN_BINFO carries the binfo describing the true type of
6097 OBJ_TYPE_REF_OBJECT(REF).
6098 If CAN_REFER is non-NULL, set it to false when the method
6099 is not referable or the virtual table is ill-formed (such as rewritten
6100 by a non-C++-produced symbol); otherwise just return NULL_TREE in that case. */
6101
6102 tree
6103 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6104 bool *can_refer)
6105 {
6106 unsigned HOST_WIDE_INT offset;
6107 tree v;
6108
6109 v = BINFO_VTABLE (known_binfo);
6110 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
6111 if (!v)
6112 return NULL_TREE;
6113
6114 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6115 {
6116 if (can_refer)
6117 *can_refer = false;
6118 return NULL_TREE;
6119 }
6120 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6121 }
6122
6123 /* Given a pointer value OP0, return a simplified version of an
6124 indirection through OP0, or NULL_TREE if no simplification is
6125 possible. Note that the resulting type may differ from the
6126 pointed-to type, as long as it is still compatible from the
6127 langhooks point of view. */
6128
6129 tree
6130 gimple_fold_indirect_ref (tree t)
6131 {
6132 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6133 tree sub = t;
6134 tree subtype;
6135
6136 STRIP_NOPS (sub);
6137 subtype = TREE_TYPE (sub);
6138 if (!POINTER_TYPE_P (subtype))
6139 return NULL_TREE;
6140
6141 if (TREE_CODE (sub) == ADDR_EXPR)
6142 {
6143 tree op = TREE_OPERAND (sub, 0);
6144 tree optype = TREE_TYPE (op);
6145 /* *&p => p */
6146 if (useless_type_conversion_p (type, optype))
6147 return op;
6148
6149 /* *(foo *)&fooarray => fooarray[0] */
6150 if (TREE_CODE (optype) == ARRAY_TYPE
6151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6152 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6153 {
6154 tree type_domain = TYPE_DOMAIN (optype);
6155 tree min_val = size_zero_node;
6156 if (type_domain && TYPE_MIN_VALUE (type_domain))
6157 min_val = TYPE_MIN_VALUE (type_domain);
6158 if (TREE_CODE (min_val) == INTEGER_CST)
6159 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6160 }
6161 /* *(foo *)&complexfoo => __real__ complexfoo */
6162 else if (TREE_CODE (optype) == COMPLEX_TYPE
6163 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6164 return fold_build1 (REALPART_EXPR, type, op);
6165 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6166 else if (TREE_CODE (optype) == VECTOR_TYPE
6167 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6168 {
6169 tree part_width = TYPE_SIZE (type);
6170 tree index = bitsize_int (0);
6171 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6172 }
6173 }
6174
6175 /* *(p + CST) -> ... */
6176 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6177 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6178 {
6179 tree addr = TREE_OPERAND (sub, 0);
6180 tree off = TREE_OPERAND (sub, 1);
6181 tree addrtype;
6182
6183 STRIP_NOPS (addr);
6184 addrtype = TREE_TYPE (addr);
6185
6186 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6187 if (TREE_CODE (addr) == ADDR_EXPR
6188 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6189 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6190 && tree_fits_uhwi_p (off))
6191 {
6192 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6193 tree part_width = TYPE_SIZE (type);
6194 unsigned HOST_WIDE_INT part_widthi
6195 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6196 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6197 tree index = bitsize_int (indexi);
6198 if (offset / part_widthi
6199 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6200 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6201 part_width, index);
6202 }
6203
6204 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6205 if (TREE_CODE (addr) == ADDR_EXPR
6206 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6207 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6208 {
6209 tree size = TYPE_SIZE_UNIT (type);
6210 if (tree_int_cst_equal (size, off))
6211 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6212 }
6213
6214 /* *(p + CST) -> MEM_REF <p, CST>. */
6215 if (TREE_CODE (addr) != ADDR_EXPR
6216 || DECL_P (TREE_OPERAND (addr, 0)))
6217 return fold_build2 (MEM_REF, type,
6218 addr,
6219 wide_int_to_tree (ptype, off));
6220 }
6221
6222 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6223 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6225 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6226 {
6227 tree type_domain;
6228 tree min_val = size_zero_node;
6229 tree osub = sub;
6230 sub = gimple_fold_indirect_ref (sub);
6231 if (! sub)
6232 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6233 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6234 if (type_domain && TYPE_MIN_VALUE (type_domain))
6235 min_val = TYPE_MIN_VALUE (type_domain);
6236 if (TREE_CODE (min_val) == INTEGER_CST)
6237 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6238 }
6239
6240 return NULL_TREE;
6241 }
6242
6243 /* Return true if CODE is an operation that, when operating on signed
6244 integer types, involves undefined behavior on overflow and the
6245 operation can be expressed with unsigned arithmetic. */
6246
6247 bool
6248 arith_code_with_undefined_signed_overflow (tree_code code)
6249 {
6250 switch (code)
6251 {
6252 case PLUS_EXPR:
6253 case MINUS_EXPR:
6254 case MULT_EXPR:
6255 case NEGATE_EXPR:
6256 case POINTER_PLUS_EXPR:
6257 return true;
6258 default:
6259 return false;
6260 }
6261 }
6262
6263 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6264 operation that can be transformed to unsigned arithmetic by converting
6265 its operands, carrying out the operation in the corresponding unsigned
6266 type and converting the result back to the original type.
6267
6268 Returns a sequence of statements that replace STMT and also contain
6269 a modified form of STMT itself. */
6270
6271 gimple_seq
6272 rewrite_to_defined_overflow (gimple *stmt)
6273 {
6274 if (dump_file && (dump_flags & TDF_DETAILS))
6275 {
6276 fprintf (dump_file, "rewriting stmt with undefined signed "
6277 "overflow ");
6278 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6279 }
6280
6281 tree lhs = gimple_assign_lhs (stmt);
6282 tree type = unsigned_type_for (TREE_TYPE (lhs));
6283 gimple_seq stmts = NULL;
6284 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6285 {
6286 tree op = gimple_op (stmt, i);
6287 op = gimple_convert (&stmts, type, op);
6288 gimple_set_op (stmt, i, op);
6289 }
6290 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6291 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6292 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6293 gimple_seq_add_stmt (&stmts, stmt);
6294 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6295 gimple_seq_add_stmt (&stmts, cvt);
6296
6297 return stmts;
6298 }
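
/* Worked example (illustrative): for the signed addition

     a_1 = b_2 + 1;

   the returned sequence looks like

     _3 = (unsigned int) b_2;
     _4 = _3 + 1;
     a_1 = (int) _4;

   where the middle statement is STMT itself with its operands and LHS
   retyped, so the addition now wraps instead of invoking undefined
   behavior on overflow.  */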
6299
6300
6301 /* The valueization hook we use for the gimple_build API simplification.
6302 This makes us match fold_buildN behavior by only combining with
6303 statements in the sequence(s) we are currently building. */
6304
6305 static tree
6306 gimple_build_valueize (tree op)
6307 {
6308 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6309 return op;
6310 return NULL_TREE;
6311 }
6312
6313 /* Build the expression CODE OP0 of type TYPE with location LOC,
6314 simplifying it first if possible. Returns the built
6315 expression value and appends statements possibly defining it
6316 to SEQ. */
6317
6318 tree
6319 gimple_build (gimple_seq *seq, location_t loc,
6320 enum tree_code code, tree type, tree op0)
6321 {
6322 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6323 if (!res)
6324 {
6325 if (gimple_in_ssa_p (cfun))
6326 res = make_ssa_name (type);
6327 else
6328 res = create_tmp_reg (type);
6329 gimple *stmt;
6330 if (code == REALPART_EXPR
6331 || code == IMAGPART_EXPR
6332 || code == VIEW_CONVERT_EXPR)
6333 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6334 else
6335 stmt = gimple_build_assign (res, code, op0);
6336 gimple_set_location (stmt, loc);
6337 gimple_seq_add_stmt_without_update (seq, stmt);
6338 }
6339 return res;
6340 }
6341
6342 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6343 simplifying it first if possible. Returns the built
6344 expression value and appends statements possibly defining it
6345 to SEQ. */
6346
6347 tree
6348 gimple_build (gimple_seq *seq, location_t loc,
6349 enum tree_code code, tree type, tree op0, tree op1)
6350 {
6351 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6352 if (!res)
6353 {
6354 if (gimple_in_ssa_p (cfun))
6355 res = make_ssa_name (type);
6356 else
6357 res = create_tmp_reg (type);
6358 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6359 gimple_set_location (stmt, loc);
6360 gimple_seq_add_stmt_without_update (seq, stmt);
6361 }
6362 return res;
6363 }
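
/* Usage sketch (illustrative): the gimple_build overloads chain
   naturally when materializing compound expressions, e.g. to build
   (a + b) * a into SEQ:

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, type, a, b);
     tree prod = gimple_build (&seq, loc, MULT_EXPR, type, sum, a);

   Each call tries gimple_simplify first, so SEQ receives statements
   only for the pieces that did not fold; the caller then inserts SEQ,
   for example with gsi_insert_seq_before.  */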
6364
6365 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6366 simplifying it first if possible. Returns the built
6367 expression value and appends statements possibly defining it
6368 to SEQ. */
6369
6370 tree
6371 gimple_build (gimple_seq *seq, location_t loc,
6372 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6373 {
6374 tree res = gimple_simplify (code, type, op0, op1, op2,
6375 seq, gimple_build_valueize);
6376 if (!res)
6377 {
6378 if (gimple_in_ssa_p (cfun))
6379 res = make_ssa_name (type);
6380 else
6381 res = create_tmp_reg (type);
6382 gimple *stmt;
6383 if (code == BIT_FIELD_REF)
6384 stmt = gimple_build_assign (res, code,
6385 build3 (code, type, op0, op1, op2));
6386 else
6387 stmt = gimple_build_assign (res, code, op0, op1, op2);
6388 gimple_set_location (stmt, loc);
6389 gimple_seq_add_stmt_without_update (seq, stmt);
6390 }
6391 return res;
6392 }
6393
6394 /* Build the call FN (ARG0) with a result of type TYPE
6395 (or no result if TYPE is void) with location LOC,
6396 simplifying it first if possible. Returns the built
6397 expression value (or NULL_TREE if TYPE is void) and appends
6398 statements possibly defining it to SEQ. */
6399
6400 tree
6401 gimple_build (gimple_seq *seq, location_t loc,
6402 enum built_in_function fn, tree type, tree arg0)
6403 {
6404 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6405 if (!res)
6406 {
6407 tree decl = builtin_decl_implicit (fn);
6408 gimple *stmt = gimple_build_call (decl, 1, arg0);
6409 if (!VOID_TYPE_P (type))
6410 {
6411 if (gimple_in_ssa_p (cfun))
6412 res = make_ssa_name (type);
6413 else
6414 res = create_tmp_reg (type);
6415 gimple_call_set_lhs (stmt, res);
6416 }
6417 gimple_set_location (stmt, loc);
6418 gimple_seq_add_stmt_without_update (seq, stmt);
6419 }
6420 return res;
6421 }
6422
6423 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6424 (or no result if TYPE is void) with location LOC,
6425 simplifying it first if possible. Returns the built
6426 expression value (or NULL_TREE if TYPE is void) and appends
6427 statements possibly defining it to SEQ. */
6428
6429 tree
6430 gimple_build (gimple_seq *seq, location_t loc,
6431 enum built_in_function fn, tree type, tree arg0, tree arg1)
6432 {
6433 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6434 if (!res)
6435 {
6436 tree decl = builtin_decl_implicit (fn);
6437 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6438 if (!VOID_TYPE_P (type))
6439 {
6440 if (gimple_in_ssa_p (cfun))
6441 res = make_ssa_name (type);
6442 else
6443 res = create_tmp_reg (type);
6444 gimple_call_set_lhs (stmt, res);
6445 }
6446 gimple_set_location (stmt, loc);
6447 gimple_seq_add_stmt_without_update (seq, stmt);
6448 }
6449 return res;
6450 }
6451
6452 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6453 (or no result if TYPE is void) with location LOC,
6454 simplifying it first if possible. Returns the built
6455 expression value (or NULL_TREE if TYPE is void) and appends
6456 statements possibly defining it to SEQ. */
6457
6458 tree
6459 gimple_build (gimple_seq *seq, location_t loc,
6460 enum built_in_function fn, tree type,
6461 tree arg0, tree arg1, tree arg2)
6462 {
6463 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6464 seq, gimple_build_valueize);
6465 if (!res)
6466 {
6467 tree decl = builtin_decl_implicit (fn);
6468 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6469 if (!VOID_TYPE_P (type))
6470 {
6471 if (gimple_in_ssa_p (cfun))
6472 res = make_ssa_name (type);
6473 else
6474 res = create_tmp_reg (type);
6475 gimple_call_set_lhs (stmt, res);
6476 }
6477 gimple_set_location (stmt, loc);
6478 gimple_seq_add_stmt_without_update (seq, stmt);
6479 }
6480 return res;
6481 }
6482
6483 /* Build the conversion (TYPE) OP with a result of type TYPE
6484 with location LOC if such a conversion is necessary in GIMPLE,
6485 simplifying it first.
6486 Returns the built expression value and appends
6487 statements possibly defining it to SEQ. */
6488
6489 tree
6490 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6491 {
6492 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6493 return op;
6494 return gimple_build (seq, loc, NOP_EXPR, type, op);
6495 }
6496
6497 /* Build the conversion (ptrofftype) OP with a result of a type
6498 compatible with ptrofftype with location LOC if such a conversion
6499 is necessary in GIMPLE, simplifying it first.
6500 Returns the built expression value and appends
6501 statements possibly defining it to SEQ. */
6502
6503 tree
6504 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6505 {
6506 if (ptrofftype_p (TREE_TYPE (op)))
6507 return op;
6508 return gimple_convert (seq, loc, sizetype, op);
6509 }
6510
6511 /* Return true if the result of assignment STMT is known to be non-negative.
6512 If the return value is based on the assumption that signed overflow is
6513 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6514 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6515
6516 static bool
6517 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6518 int depth)
6519 {
6520 enum tree_code code = gimple_assign_rhs_code (stmt);
6521 switch (get_gimple_rhs_class (code))
6522 {
6523 case GIMPLE_UNARY_RHS:
6524 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6525 gimple_expr_type (stmt),
6526 gimple_assign_rhs1 (stmt),
6527 strict_overflow_p, depth);
6528 case GIMPLE_BINARY_RHS:
6529 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6530 gimple_expr_type (stmt),
6531 gimple_assign_rhs1 (stmt),
6532 gimple_assign_rhs2 (stmt),
6533 strict_overflow_p, depth);
6534 case GIMPLE_TERNARY_RHS:
6535 return false;
6536 case GIMPLE_SINGLE_RHS:
6537 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6538 strict_overflow_p, depth);
6539 case GIMPLE_INVALID_RHS:
6540 break;
6541 }
6542 gcc_unreachable ();
6543 }
6544
6545 /* Return true if the return value of call STMT is known to be non-negative.
6546 If the return value is based on the assumption that signed overflow is
6547 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6548 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6549
6550 static bool
6551 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6552 int depth)
6553 {
6554 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6555 gimple_call_arg (stmt, 0) : NULL_TREE;
6556 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6557 gimple_call_arg (stmt, 1) : NULL_TREE;
6558
6559 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6560 gimple_call_combined_fn (stmt),
6561 arg0,
6562 arg1,
6563 strict_overflow_p, depth);
6564 }
6565
6566 /* Return true if the result of PHI node STMT is known to be non-negative.
6567 If the return value is based on the assumption that signed overflow is
6568 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6569 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6570
6571 static bool
6572 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6573 int depth)
6574 {
6575 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6576 {
6577 tree arg = gimple_phi_arg_def (stmt, i);
6578 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6579 return false;
6580 }
6581 return true;
6582 }
6583
6584 /* Return true if STMT is known to compute a non-negative value.
6585 If the return value is based on the assumption that signed overflow is
6586 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6587 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6588
6589 bool
6590 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6591 int depth)
6592 {
6593 switch (gimple_code (stmt))
6594 {
6595 case GIMPLE_ASSIGN:
6596 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6597 depth);
6598 case GIMPLE_CALL:
6599 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6600 depth);
6601 case GIMPLE_PHI:
6602 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6603 depth);
6604 default:
6605 return false;
6606 }
6607 }
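
/* Usage sketch (illustrative): callers typically seed the flag with
   false and inspect it afterwards:

     bool strict_overflow_p = false;
     if (gimple_stmt_nonnegative_warnv_p (stmt, &strict_overflow_p, 0)
         && strict_overflow_p)
       ...  the answer relies on signed overflow being undefined ...  */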
6608
6609 /* Return true if the floating-point value computed by assignment STMT
6610 is known to have an integer value. We also allow +Inf, -Inf and NaN
6611 to be considered integer values. Return false for signaling NaN.
6612
6613 DEPTH is the current nesting depth of the query. */
6614
6615 static bool
6616 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6617 {
6618 enum tree_code code = gimple_assign_rhs_code (stmt);
6619 switch (get_gimple_rhs_class (code))
6620 {
6621 case GIMPLE_UNARY_RHS:
6622 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6623 gimple_assign_rhs1 (stmt), depth);
6624 case GIMPLE_BINARY_RHS:
6625 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6626 gimple_assign_rhs1 (stmt),
6627 gimple_assign_rhs2 (stmt), depth);
6628 case GIMPLE_TERNARY_RHS:
6629 return false;
6630 case GIMPLE_SINGLE_RHS:
6631 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6632 case GIMPLE_INVALID_RHS:
6633 break;
6634 }
6635 gcc_unreachable ();
6636 }
6637
6638 /* Return true if the floating-point value computed by call STMT is known
6639 to have an integer value. We also allow +Inf, -Inf and NaN to be
6640 considered integer values. Return false for signaling NaN.
6641
6642 DEPTH is the current nesting depth of the query. */
6643
6644 static bool
6645 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6646 {
6647 tree arg0 = (gimple_call_num_args (stmt) > 0
6648 ? gimple_call_arg (stmt, 0)
6649 : NULL_TREE);
6650 tree arg1 = (gimple_call_num_args (stmt) > 1
6651 ? gimple_call_arg (stmt, 1)
6652 : NULL_TREE);
6653 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6654 arg0, arg1, depth);
6655 }
6656
6657 /* Return true if the floating-point result of phi STMT is known to have
6658 an integer value. We also allow +Inf, -Inf and NaN to be considered
6659 integer values. Return false for signaling NaN.
6660
6661 DEPTH is the current nesting depth of the query. */
6662
6663 static bool
6664 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6665 {
6666 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6667 {
6668 tree arg = gimple_phi_arg_def (stmt, i);
6669 if (!integer_valued_real_single_p (arg, depth + 1))
6670 return false;
6671 }
6672 return true;
6673 }
6674
6675 /* Return true if the floating-point value computed by STMT is known
6676 to have an integer value. We also allow +Inf, -Inf and NaN to be
6677 considered integer values. Return false for signaling NaN.
6678
6679 DEPTH is the current nesting depth of the query. */
6680
6681 bool
6682 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6683 {
6684 switch (gimple_code (stmt))
6685 {
6686 case GIMPLE_ASSIGN:
6687 return gimple_assign_integer_valued_real_p (stmt, depth);
6688 case GIMPLE_CALL:
6689 return gimple_call_integer_valued_real_p (stmt, depth);
6690 case GIMPLE_PHI:
6691 return gimple_phi_integer_valued_real_p (stmt, depth);
6692 default:
6693 return false;
6694 }
6695 }