PR tree-optimization/79376 - wrong lower bound with %s and non-constant
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58 #include "fold-const-call.h"
59
60 /* Return true when DECL can be referenced from current unit.
61 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
63 reasons:
64
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to other compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
70 set.
71 2) In WHOPR mode devirtualization might lead to reference
72 to method that was partitioned elsewhere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
75 declaring the body.
76 3) COMDAT functions referred by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
80 directly. */
81
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract declarations never have a body we could reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      /* A local symbol with no definition cannot be referenced anymore.  */
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its callers has no standalone body.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  /* Give up when the COMDAT has neither a local definition nor a copy in
     another partition that is forced to be output anyway.  */
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
159
160 /* Create a temporary for TYPE for a statement STMT. If the current function
161 is in SSA form, a SSA name is created. Otherwise a temporary register
162 is made. */
163
164 static tree
165 create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
166 {
167 if (gimple_in_ssa_p (cfun))
168 return make_ssa_name (type, stmt);
169 else
170 return create_tmp_reg (type);
171 }
172
173 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
174 acceptable form for is_gimple_min_invariant.
175 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
176
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST as &MEM[ptr + CST] so the result is a plain
     ADDR_EXPR that the code below can canonicalize further.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      /* For a compound literal take the underlying decl as the base.  */
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Refuse addresses of symbols the current unit cannot refer to
	 (see can_refer_decl_in_current_unit_p above).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* Taking the variable's address makes it addressable.  */
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the original type of the expression on the way out.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants; drop
     the overflow flag.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
234
235 /* If SYM is a constant variable with known value, return the value.
236 NULL_TREE is returned otherwise. */
237
238 tree
239 get_symbol_constant_value (tree sym)
240 {
241 tree val = ctor_for_folding (sym);
242 if (val != error_mark_node)
243 {
244 if (val)
245 {
246 val = canonicalize_constructor_val (unshare_expr (val), sym);
247 if (val && is_gimple_min_invariant (val))
248 return val;
249 else
250 return NULL_TREE;
251 }
252 /* Variables declared 'const' without an initializer
253 have zero as the initializer if they may not be
254 overridden at link or run time. */
255 if (!val
256 && is_gimple_reg_type (TREE_TYPE (sym)))
257 return build_zero_cst (TREE_TYPE (sym));
258 }
259
260 return NULL_TREE;
261 }
262
263
264
265 /* Subroutine of fold_stmt. We perform several simplifications of the
266 memory reference tree EXPR and make sure to re-gimplify them properly
267 after propagation of constant addresses. IS_LHS is true if the
268 reference is supposed to be an lvalue. */
269
270 static tree
271 maybe_fold_reference (tree expr, bool is_lhs)
272 {
273 tree result;
274
275 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
276 || TREE_CODE (expr) == REALPART_EXPR
277 || TREE_CODE (expr) == IMAGPART_EXPR)
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_unary_loc (EXPR_LOCATION (expr),
280 TREE_CODE (expr),
281 TREE_TYPE (expr),
282 TREE_OPERAND (expr, 0));
283 else if (TREE_CODE (expr) == BIT_FIELD_REF
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_ternary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0),
289 TREE_OPERAND (expr, 1),
290 TREE_OPERAND (expr, 2));
291
292 if (!is_lhs
293 && (result = fold_const_aggregate_ref (expr))
294 && is_gimple_min_invariant (result))
295 return result;
296
297 return NULL_TREE;
298 }
299
300
301 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
302 replacement rhs for the statement or NULL_TREE if no simplification
303 could be made. It is assumed that the operands have been previously
304 folded. */
305
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are markers, not values; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to resolve the virtual method address via the type
	       analysis machinery.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to the right type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    /* All elements must already be constants for this to work.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    /* Unary and binary RHSes are handled by the match-and-simplify
       machinery elsewhere; nothing to do here.  */
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
440
441
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts location and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
446
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  /* Carry the replaced statement's location over to the new sequence.  */
  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it stores to memory or is a call
	 with side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original statement's
	     VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
513
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
517 is replaced. If the call is expected to produces a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
523
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Every push below is balanced by a pop on each exit path.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  /* Nothing to emit; the call simply disappears.  */
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into STMTS and append an assignment of the final
	 value to the call's LHS.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, threading virtual operands through it.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
569
570
571 /* Replace the call at *GSI with the gimple value VAL. */
572
573 static void
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575 {
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
578 gimple *repl;
579 if (lhs)
580 {
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
593 gsi_replace (gsi, repl, false);
594 }
595
596 /* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599 static void
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
601 {
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
612 gsi_replace (gsi, repl, false);
613 fold_stmt (gsi);
614 }
615
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
617
618 static bool
619 var_decl_component_p (tree var)
620 {
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625 }
626
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
634
635 static bool
636 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
638 {
639 gimple *stmt = gsi_stmt (*gsi);
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
644
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
647 {
648 gimple *repl;
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
651 else
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
655 {
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
658 }
659 gsi_replace (gsi, repl, false);
660 return true;
661 }
662
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
666 {
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
670 if (!lhs)
671 {
672 gsi_replace (gsi, gimple_build_nop (), false);
673 return true;
674 }
675 goto done;
676 }
677 else
678 {
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
681 tree off0;
682
683 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
684 pointers as wide integer) and also may result in huge function
685 size because of inlined bounds copy. Thus don't inline for
686 functions we want to instrument. */
687 if (flag_check_pointer_bounds
688 && chkp_instrumentable_p (cfun->decl)
689 /* Even if data may contain pointers we can inline if copy
690 less than a pointer size. */
691 && (!tree_fits_uhwi_p (len)
692 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
693 return false;
694
695 /* Build accesses at offset zero with a ref-all character type. */
696 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
697 ptr_mode, true), 0);
698
699 /* If we can perform the copy efficiently with first doing all loads
700 and then all stores inline it that way. Currently efficiently
701 means that we can load all the memory into a single integer
702 register which is what MOVE_MAX gives us. */
703 src_align = get_pointer_alignment (src);
704 dest_align = get_pointer_alignment (dest);
705 if (tree_fits_uhwi_p (len)
706 && compare_tree_int (len, MOVE_MAX) <= 0
707 /* ??? Don't transform copies from strings with known length this
708 confuses the tree-ssa-strlen.c. This doesn't handle
709 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
710 reason. */
711 && !c_strlen (src, 2))
712 {
713 unsigned ilen = tree_to_uhwi (len);
714 if (pow2p_hwi (ilen))
715 {
716 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
717 if (type
718 && TYPE_MODE (type) != BLKmode
719 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
720 == ilen * 8)
721 /* If the destination pointer is not aligned we must be able
722 to emit an unaligned store. */
723 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
725 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
726 != CODE_FOR_nothing)))
727 {
728 tree srctype = type;
729 tree desttype = type;
730 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 srctype = build_aligned_type (type, src_align);
732 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
733 tree tem = fold_const_aggregate_ref (srcmem);
734 if (tem)
735 srcmem = tem;
736 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
737 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
738 src_align)
739 && (optab_handler (movmisalign_optab,
740 TYPE_MODE (type))
741 == CODE_FOR_nothing))
742 srcmem = NULL_TREE;
743 if (srcmem)
744 {
745 gimple *new_stmt;
746 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
747 {
748 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
749 srcmem
750 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
751 new_stmt);
752 gimple_assign_set_lhs (new_stmt, srcmem);
753 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
754 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
755 }
756 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
757 desttype = build_aligned_type (type, dest_align);
758 new_stmt
759 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
760 dest, off0),
761 srcmem);
762 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
763 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
764 if (gimple_vdef (new_stmt)
765 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
766 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
767 if (!lhs)
768 {
769 gsi_replace (gsi, new_stmt, false);
770 return true;
771 }
772 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
773 goto done;
774 }
775 }
776 }
777 }
778
779 if (endp == 3)
780 {
781 /* Both DEST and SRC must be pointer types.
782 ??? This is what old code did. Is the testing for pointer types
783 really mandatory?
784
785 If either SRC is readonly or length is 1, we can use memcpy. */
786 if (!dest_align || !src_align)
787 return false;
788 if (readonly_data_expr (src)
789 || (tree_fits_uhwi_p (len)
790 && (MIN (src_align, dest_align) / BITS_PER_UNIT
791 >= tree_to_uhwi (len))))
792 {
793 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
794 if (!fn)
795 return false;
796 gimple_call_set_fndecl (stmt, fn);
797 gimple_call_set_arg (stmt, 0, dest);
798 gimple_call_set_arg (stmt, 1, src);
799 fold_stmt (gsi);
800 return true;
801 }
802
803 /* If *src and *dest can't overlap, optimize into memcpy as well. */
804 if (TREE_CODE (src) == ADDR_EXPR
805 && TREE_CODE (dest) == ADDR_EXPR)
806 {
807 tree src_base, dest_base, fn;
808 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
809 HOST_WIDE_INT maxsize;
810
811 srcvar = TREE_OPERAND (src, 0);
812 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
813 if (src_base == NULL)
814 src_base = srcvar;
815 destvar = TREE_OPERAND (dest, 0);
816 dest_base = get_addr_base_and_unit_offset (destvar,
817 &dest_offset);
818 if (dest_base == NULL)
819 dest_base = destvar;
820 if (tree_fits_uhwi_p (len))
821 maxsize = tree_to_uhwi (len);
822 else
823 maxsize = -1;
824 if (SSA_VAR_P (src_base)
825 && SSA_VAR_P (dest_base))
826 {
827 if (operand_equal_p (src_base, dest_base, 0)
828 && ranges_overlap_p (src_offset, maxsize,
829 dest_offset, maxsize))
830 return false;
831 }
832 else if (TREE_CODE (src_base) == MEM_REF
833 && TREE_CODE (dest_base) == MEM_REF)
834 {
835 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
836 TREE_OPERAND (dest_base, 0), 0))
837 return false;
838 offset_int off = mem_ref_offset (src_base) + src_offset;
839 if (!wi::fits_shwi_p (off))
840 return false;
841 src_offset = off.to_shwi ();
842
843 off = mem_ref_offset (dest_base) + dest_offset;
844 if (!wi::fits_shwi_p (off))
845 return false;
846 dest_offset = off.to_shwi ();
847 if (ranges_overlap_p (src_offset, maxsize,
848 dest_offset, maxsize))
849 return false;
850 }
851 else
852 return false;
853
854 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
855 if (!fn)
856 return false;
857 gimple_call_set_fndecl (stmt, fn);
858 gimple_call_set_arg (stmt, 0, dest);
859 gimple_call_set_arg (stmt, 1, src);
860 fold_stmt (gsi);
861 return true;
862 }
863
864 /* If the destination and source do not alias optimize into
865 memcpy as well. */
866 if ((is_gimple_min_invariant (dest)
867 || TREE_CODE (dest) == SSA_NAME)
868 && (is_gimple_min_invariant (src)
869 || TREE_CODE (src) == SSA_NAME))
870 {
871 ao_ref destr, srcr;
872 ao_ref_init_from_ptr_and_size (&destr, dest, len);
873 ao_ref_init_from_ptr_and_size (&srcr, src, len);
874 if (!refs_may_alias_p_1 (&destr, &srcr, false))
875 {
876 tree fn;
877 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
878 if (!fn)
879 return false;
880 gimple_call_set_fndecl (stmt, fn);
881 gimple_call_set_arg (stmt, 0, dest);
882 gimple_call_set_arg (stmt, 1, src);
883 fold_stmt (gsi);
884 return true;
885 }
886 }
887
888 return false;
889 }
890
891 if (!tree_fits_shwi_p (len))
892 return false;
893 /* FIXME:
894 This logic lose for arguments like (type *)malloc (sizeof (type)),
895 since we strip the casts of up to VOID return value from malloc.
896 Perhaps we ought to inherit type from non-VOID argument here? */
897 STRIP_NOPS (src);
898 STRIP_NOPS (dest);
899 if (!POINTER_TYPE_P (TREE_TYPE (src))
900 || !POINTER_TYPE_P (TREE_TYPE (dest)))
901 return false;
902 /* In the following try to find a type that is most natural to be
903 used for the memcpy source and destination and that allows
904 the most optimization when memcpy is turned into a plain assignment
905 using that type. In theory we could always use a char[len] type
906 but that only gains us that the destination and source possibly
907 no longer will have their address taken. */
908 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
909 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
910 {
911 tree tem = TREE_OPERAND (src, 0);
912 STRIP_NOPS (tem);
913 if (tem != TREE_OPERAND (src, 0))
914 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
915 }
916 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
917 {
918 tree tem = TREE_OPERAND (dest, 0);
919 STRIP_NOPS (tem);
920 if (tem != TREE_OPERAND (dest, 0))
921 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
922 }
923 srctype = TREE_TYPE (TREE_TYPE (src));
924 if (TREE_CODE (srctype) == ARRAY_TYPE
925 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
926 {
927 srctype = TREE_TYPE (srctype);
928 STRIP_NOPS (src);
929 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
930 }
931 desttype = TREE_TYPE (TREE_TYPE (dest));
932 if (TREE_CODE (desttype) == ARRAY_TYPE
933 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
934 {
935 desttype = TREE_TYPE (desttype);
936 STRIP_NOPS (dest);
937 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
938 }
939 if (TREE_ADDRESSABLE (srctype)
940 || TREE_ADDRESSABLE (desttype))
941 return false;
942
943 /* Make sure we are not copying using a floating-point mode or
944 a type whose size possibly does not match its precision. */
945 if (FLOAT_MODE_P (TYPE_MODE (desttype))
946 || TREE_CODE (desttype) == BOOLEAN_TYPE
947 || TREE_CODE (desttype) == ENUMERAL_TYPE)
948 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
949 if (FLOAT_MODE_P (TYPE_MODE (srctype))
950 || TREE_CODE (srctype) == BOOLEAN_TYPE
951 || TREE_CODE (srctype) == ENUMERAL_TYPE)
952 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
953 if (!srctype)
954 srctype = desttype;
955 if (!desttype)
956 desttype = srctype;
957 if (!srctype)
958 return false;
959
960 src_align = get_pointer_alignment (src);
961 dest_align = get_pointer_alignment (dest);
962 if (dest_align < TYPE_ALIGN (desttype)
963 || src_align < TYPE_ALIGN (srctype))
964 return false;
965
966 destvar = dest;
967 STRIP_NOPS (destvar);
968 if (TREE_CODE (destvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (destvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
972 else
973 destvar = NULL_TREE;
974
975 srcvar = src;
976 STRIP_NOPS (srcvar);
977 if (TREE_CODE (srcvar) == ADDR_EXPR
978 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 {
981 if (!destvar
982 || src_align >= TYPE_ALIGN (desttype))
983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
984 srcvar, off0);
985 else if (!STRICT_ALIGNMENT)
986 {
987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
988 src_align);
989 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
990 }
991 else
992 srcvar = NULL_TREE;
993 }
994 else
995 srcvar = NULL_TREE;
996
997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
998 return false;
999
1000 if (srcvar == NULL_TREE)
1001 {
1002 STRIP_NOPS (src);
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1006 {
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 }
1013 }
1014 else if (destvar == NULL_TREE)
1015 {
1016 STRIP_NOPS (dest);
1017 if (dest_align >= TYPE_ALIGN (srctype))
1018 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1019 else
1020 {
1021 if (STRICT_ALIGNMENT)
1022 return false;
1023 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1024 dest_align);
1025 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 }
1027 }
1028
1029 gimple *new_stmt;
1030 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 {
1032 tree tem = fold_const_aggregate_ref (srcvar);
1033 if (tem)
1034 srcvar = tem;
1035 if (! is_gimple_min_invariant (srcvar))
1036 {
1037 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1038 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1039 new_stmt);
1040 gimple_assign_set_lhs (new_stmt, srcvar);
1041 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 }
1044 }
1045 new_stmt = gimple_build_assign (destvar, srcvar);
1046 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (gimple_vdef (new_stmt)
1049 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1050 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1051 if (!lhs)
1052 {
1053 gsi_replace (gsi, new_stmt, false);
1054 return true;
1055 }
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1057 }
1058
1059 done:
1060 gimple_seq stmts = NULL;
1061 if (endp == 0 || endp == 3)
1062 len = NULL_TREE;
1063 else if (endp == 2)
1064 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1065 ssize_int (1));
1066 if (endp == 2 || endp == 1)
1067 {
1068 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1069 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1070 TREE_TYPE (dest), dest, len);
1071 }
1072
1073 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1074 gimple *repl = gimple_build_assign (lhs, dest);
1075 gsi_replace (gsi, repl, false);
1076 return true;
1077 }
1078
/* Fold function call to builtin memset or bzero at *GSI, setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */
1081
1082 static bool
1083 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1084 {
1085 gimple *stmt = gsi_stmt (*gsi);
1086 tree etype;
1087 unsigned HOST_WIDE_INT length, cval;
1088
1089 /* If the LEN parameter is zero, return DEST. */
1090 if (integer_zerop (len))
1091 {
1092 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1093 return true;
1094 }
1095
1096 if (! tree_fits_uhwi_p (len))
1097 return false;
1098
1099 if (TREE_CODE (c) != INTEGER_CST)
1100 return false;
1101
1102 tree dest = gimple_call_arg (stmt, 0);
1103 tree var = dest;
1104 if (TREE_CODE (var) != ADDR_EXPR)
1105 return false;
1106
1107 var = TREE_OPERAND (var, 0);
1108 if (TREE_THIS_VOLATILE (var))
1109 return false;
1110
1111 etype = TREE_TYPE (var);
1112 if (TREE_CODE (etype) == ARRAY_TYPE)
1113 etype = TREE_TYPE (etype);
1114
1115 if (!INTEGRAL_TYPE_P (etype)
1116 && !POINTER_TYPE_P (etype))
1117 return NULL_TREE;
1118
1119 if (! var_decl_component_p (var))
1120 return NULL_TREE;
1121
1122 length = tree_to_uhwi (len);
1123 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1124 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1125 return NULL_TREE;
1126
1127 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1128 return NULL_TREE;
1129
1130 if (integer_zerop (c))
1131 cval = 0;
1132 else
1133 {
1134 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1135 return NULL_TREE;
1136
1137 cval = TREE_INT_CST_LOW (c);
1138 cval &= 0xff;
1139 cval |= cval << 8;
1140 cval |= cval << 16;
1141 cval |= (cval << 31) << 1;
1142 }
1143
1144 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1145 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1146 gimple_set_vuse (store, gimple_vuse (stmt));
1147 tree vdef = gimple_vdef (stmt);
1148 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1149 {
1150 gimple_set_vdef (store, gimple_vdef (stmt));
1151 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1152 }
1153 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1154 if (gimple_call_lhs (stmt))
1155 {
1156 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1157 gsi_replace (gsi, asgn, false);
1158 }
1159 else
1160 {
1161 gimple_stmt_iterator gsi2 = *gsi;
1162 gsi_prev (gsi);
1163 gsi_remove (&gsi2, true);
1164 }
1165
1166 return true;
1167 }
1168
1169
1170 /* Obtain the minimum and maximum string length or minimum and maximum
1171 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1172 If ARG is an SSA name variable, follow its use-def chains. When
1173 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1174 if we are unable to determine the length or value, return False.
1175 VISITED is a bitmap of visited variables.
1176 TYPE is 0 if string length should be obtained, 1 for maximum string
1177 length and 2 for maximum value ARG can have.
1178 When FUZZY is set and the length of a string cannot be determined,
1179 the function instead considers as the maximum possible length the
1180 size of a character array it may refer to.
1181 Set *FLEXP to true if the range of the string lengths has been
1182 obtained from the upper bound of an array at the end of a struct.
1183 Such an array may hold a string that's longer than its upper bound
1184 due to it being used as a poor-man's flexible array member. */
1185
static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length. The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      /* In mode 2 ARG itself is the value; it must be a non-negative
	 integer constant.  Otherwise try to compute the length of the
	 string ARG points to.  */
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* When the length is unknown and FUZZY is set, fall back on the
	 bound of a character array ARG may refer to.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg, true))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      /* The longest string the array can hold is its size less
		 one byte for the terminating NUL.  */
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Update the lower bound: take VAL when none has been seen yet,
	 or, in range mode (TYPE > 0), the smaller of the two constants.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      /* Merge VAL into the upper bound.  In range mode keep the larger
	 constant; in exact mode (TYPE == 0) any disagreement means the
	 length is not uniquely determined.  */
      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  /* ARG is an SSA name: recurse through its defining statement.  */
  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR contributes both arms to the range.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In FUZZY mode an undetermined argument widens the
		   maximum to "unbounded" instead of failing.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1353
1354 /* Determine the minimum and maximum value or string length that ARG
1355 refers to and store each in the first two elements of MINMAXLEN.
1356 For expressions that point to strings of unknown lengths that are
1357 character arrays, use the upper bound of the array as the maximum
1358 length. For example, given an expression like 'x ? array : "xyz"'
1359 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1360 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1361 stored in array.
1362 Return true if the range of the string lengths has been obtained
1363 from the upper bound of an array at the end of a struct. Such
1364 an array may hold a string that's longer than its upper bound
1365 due to it being used as a poor-man's flexible array member. */
1366
1367 bool
1368 get_range_strlen (tree arg, tree minmaxlen[2])
1369 {
1370 bitmap visited = NULL;
1371
1372 minmaxlen[0] = NULL_TREE;
1373 minmaxlen[1] = NULL_TREE;
1374
1375 bool flexarray = false;
1376 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1377
1378 if (visited)
1379 BITMAP_FREE (visited);
1380
1381 return flexarray;
1382 }
1383
1384 tree
1385 get_maxval_strlen (tree arg, int type)
1386 {
1387 bitmap visited = NULL;
1388 tree len[2] = { NULL_TREE, NULL_TREE };
1389
1390 bool dummy;
1391 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1392 len[1] = NULL_TREE;
1393 if (visited)
1394 BITMAP_FREE (visited);
1395
1396 return len[1];
1397 }
1398
1399
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if a simplification was made, false otherwise.  */
1403
1404 static bool
1405 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1406 tree dest, tree src)
1407 {
1408 location_t loc = gimple_location (gsi_stmt (*gsi));
1409 tree fn;
1410
1411 /* If SRC and DEST are the same (and not volatile), return DEST. */
1412 if (operand_equal_p (src, dest, 0))
1413 {
1414 replace_call_with_value (gsi, dest);
1415 return true;
1416 }
1417
1418 if (optimize_function_for_size_p (cfun))
1419 return false;
1420
1421 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1422 if (!fn)
1423 return false;
1424
1425 tree len = get_maxval_strlen (src, 0);
1426 if (!len)
1427 return false;
1428
1429 len = fold_convert_loc (loc, size_type_node, len);
1430 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1431 len = force_gimple_operand_gsi (gsi, len, true,
1432 NULL_TREE, true, GSI_SAME_STMT);
1433 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1434 replace_call_with_call_and_fold (gsi, repl);
1435 return true;
1436 }
1437
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if a simplification was made, false otherwise.  */
1441
1442 static bool
1443 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1444 tree dest, tree src, tree len)
1445 {
1446 location_t loc = gimple_location (gsi_stmt (*gsi));
1447 tree fn;
1448
1449 /* If the LEN parameter is zero, return DEST. */
1450 if (integer_zerop (len))
1451 {
1452 replace_call_with_value (gsi, dest);
1453 return true;
1454 }
1455
1456 /* We can't compare slen with len as constants below if len is not a
1457 constant. */
1458 if (TREE_CODE (len) != INTEGER_CST)
1459 return false;
1460
1461 /* Now, we must be passed a constant src ptr parameter. */
1462 tree slen = get_maxval_strlen (src, 0);
1463 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1464 return false;
1465
1466 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1467
1468 /* We do not support simplification of this case, though we do
1469 support it when expanding trees into RTL. */
1470 /* FIXME: generate a call to __builtin_memset. */
1471 if (tree_int_cst_lt (slen, len))
1472 return false;
1473
1474 /* OK transform into builtin memcpy. */
1475 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1476 if (!fn)
1477 return false;
1478
1479 len = fold_convert_loc (loc, size_type_node, len);
1480 len = force_gimple_operand_gsi (gsi, len, true,
1481 NULL_TREE, true, GSI_SAME_STMT);
1482 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1483 replace_call_with_call_and_fold (gsi, repl);
1484 return true;
1485 }
1486
1487 /* Fold function call to builtin strchr or strrchr.
1488 If both arguments are constant, evaluate and fold the result,
1489 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1490 In general strlen is significantly faster than strchr
1491 due to being a simpler operation. */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The result must be used for any of the transformations below
     to pay off.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* With a constant string and a constant character, evaluate the
     search at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      /* Character not found: the result is a null pointer.  */
      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Character found: fold to STR + constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for the
     terminating NUL.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise fold str(r)chr (str, 0) into str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1570
1571 /* Fold function call to builtin strstr.
1572 If both arguments are constant, evaluate and fold the result,
1573 additionally fold strstr (x, "") into x and strstr (x, "c")
1574 into strchr (x, 'c'). */
1575 static bool
1576 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1577 {
1578 gimple *stmt = gsi_stmt (*gsi);
1579 tree haystack = gimple_call_arg (stmt, 0);
1580 tree needle = gimple_call_arg (stmt, 1);
1581 const char *p, *q;
1582
1583 if (!gimple_call_lhs (stmt))
1584 return false;
1585
1586 q = c_getstr (needle);
1587 if (q == NULL)
1588 return false;
1589
1590 if ((p = c_getstr (haystack)))
1591 {
1592 const char *r = strstr (p, q);
1593
1594 if (r == NULL)
1595 {
1596 replace_call_with_value (gsi, integer_zero_node);
1597 return true;
1598 }
1599
1600 tree len = build_int_cst (size_type_node, r - p);
1601 gimple_seq stmts = NULL;
1602 gimple *new_stmt
1603 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1604 haystack, len);
1605 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1606 gsi_replace_with_seq_vops (gsi, stmts);
1607 return true;
1608 }
1609
1610 /* For strstr (x, "") return x. */
1611 if (q[0] == '\0')
1612 {
1613 replace_call_with_value (gsi, haystack);
1614 return true;
1615 }
1616
1617 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1618 if (q[1] == '\0')
1619 {
1620 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1621 if (strchr_fn)
1622 {
1623 tree c = build_int_cst (integer_type_node, q[0]);
1624 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1625 replace_call_with_call_and_fold (gsi, repl);
1626 return true;
1627 }
1628 }
1629
1630 return false;
1631 }
1632
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was made (replacing the call with
   DST itself or with an equivalent strlen/memcpy sequence), false if
   no simplification was possible.  */
1650
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion below trades code size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating NUL is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; keep any use of the call's result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1724
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call at *GSI.  */
1727
1728 static bool
1729 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1730 {
1731 gimple *stmt = gsi_stmt (*gsi);
1732 tree dest = gimple_call_arg (stmt, 0);
1733 tree src = gimple_call_arg (stmt, 1);
1734 tree size = gimple_call_arg (stmt, 2);
1735 tree fn;
1736 const char *p;
1737
1738
1739 p = c_getstr (src);
1740 /* If the SRC parameter is "", return DEST. */
1741 if (p && *p == '\0')
1742 {
1743 replace_call_with_value (gsi, dest);
1744 return true;
1745 }
1746
1747 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1748 return false;
1749
1750 /* If __builtin_strcat_chk is used, assume strcat is available. */
1751 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1752 if (!fn)
1753 return false;
1754
1755 gimple *repl = gimple_build_call (fn, 2, dest, src);
1756 replace_call_with_call_and_fold (gsi, repl);
1757 return true;
1758 }
1759
1760 /* Simplify a call to the strncat builtin. */
1761
1762 static bool
1763 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1764 {
1765 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1766 tree dst = gimple_call_arg (stmt, 0);
1767 tree src = gimple_call_arg (stmt, 1);
1768 tree len = gimple_call_arg (stmt, 2);
1769
1770 const char *p = c_getstr (src);
1771
1772 /* If the requested length is zero, or the src parameter string
1773 length is zero, return the dst parameter. */
1774 if (integer_zerop (len) || (p && *p == '\0'))
1775 {
1776 replace_call_with_value (gsi, dst);
1777 return true;
1778 }
1779
1780 /* If the requested len is greater than or equal to the string
1781 length, call strcat. */
1782 if (TREE_CODE (len) == INTEGER_CST && p
1783 && compare_tree_int (len, strlen (p)) >= 0)
1784 {
1785 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1786
1787 /* If the replacement _DECL isn't initialized, don't do the
1788 transformation. */
1789 if (!fn)
1790 return false;
1791
1792 gcall *repl = gimple_build_call (fn, 2, dst, src);
1793 replace_call_with_call_and_fold (gsi, repl);
1794 return true;
1795 }
1796
1797 return false;
1798 }
1799
1800 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1801 LEN, and SIZE. */
1802
1803 static bool
1804 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1805 {
1806 gimple *stmt = gsi_stmt (*gsi);
1807 tree dest = gimple_call_arg (stmt, 0);
1808 tree src = gimple_call_arg (stmt, 1);
1809 tree len = gimple_call_arg (stmt, 2);
1810 tree size = gimple_call_arg (stmt, 3);
1811 tree fn;
1812 const char *p;
1813
1814 p = c_getstr (src);
1815 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1816 if ((p && *p == '\0')
1817 || integer_zerop (len))
1818 {
1819 replace_call_with_value (gsi, dest);
1820 return true;
1821 }
1822
1823 if (! tree_fits_uhwi_p (size))
1824 return false;
1825
1826 if (! integer_all_onesp (size))
1827 {
1828 tree src_len = c_strlen (src, 1);
1829 if (src_len
1830 && tree_fits_uhwi_p (src_len)
1831 && tree_fits_uhwi_p (len)
1832 && ! tree_int_cst_lt (len, src_len))
1833 {
1834 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1835 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1836 if (!fn)
1837 return false;
1838
1839 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1840 replace_call_with_call_and_fold (gsi, repl);
1841 return true;
1842 }
1843 return false;
1844 }
1845
1846 /* If __builtin_strncat_chk is used, assume strncat is available. */
1847 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1848 if (!fn)
1849 return false;
1850
1851 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1852 replace_call_with_call_and_fold (gsi, repl);
1853 return true;
1854 }
1855
1856 /* Build and append gimple statements to STMTS that would load a first
1857 character of a memory location identified by STR. LOC is location
1858 of the statement. */
1859
1860 static tree
1861 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1862 {
1863 tree var;
1864
1865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1866 tree cst_uchar_ptr_node
1867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1868 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1869
1870 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1871 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1872 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1873
1874 gimple_assign_set_lhs (stmt, var);
1875 gimple_seq_add_stmt_without_update (stmts, stmt);
1876
1877 return var;
1878 }
1879
1880 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1881 FCODE is the name of the builtin. */
1882
1883 static bool
1884 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1885 {
1886 gimple *stmt = gsi_stmt (*gsi);
1887 tree callee = gimple_call_fndecl (stmt);
1888 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1889
1890 tree type = integer_type_node;
1891 tree str1 = gimple_call_arg (stmt, 0);
1892 tree str2 = gimple_call_arg (stmt, 1);
1893 tree lhs = gimple_call_lhs (stmt);
1894 HOST_WIDE_INT length = -1;
1895
1896 /* Handle strncmp and strncasecmp functions. */
1897 if (gimple_call_num_args (stmt) == 3)
1898 {
1899 tree len = gimple_call_arg (stmt, 2);
1900 if (tree_fits_uhwi_p (len))
1901 length = tree_to_uhwi (len);
1902 }
1903
1904 /* If the LEN parameter is zero, return zero. */
1905 if (length == 0)
1906 {
1907 replace_call_with_value (gsi, integer_zero_node);
1908 return true;
1909 }
1910
1911 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
1912 if (operand_equal_p (str1, str2, 0))
1913 {
1914 replace_call_with_value (gsi, integer_zero_node);
1915 return true;
1916 }
1917
1918 const char *p1 = c_getstr (str1);
1919 const char *p2 = c_getstr (str2);
1920
1921 /* For known strings, return an immediate value. */
1922 if (p1 && p2)
1923 {
1924 int r = 0;
1925 bool known_result = false;
1926
1927 switch (fcode)
1928 {
1929 case BUILT_IN_STRCMP:
1930 {
1931 r = strcmp (p1, p2);
1932 known_result = true;
1933 break;
1934 }
1935 case BUILT_IN_STRNCMP:
1936 {
1937 if (length == -1)
1938 break;
1939 r = strncmp (p1, p2, length);
1940 known_result = true;
1941 break;
1942 }
1943 /* Only handleable situation is where the string are equal (result 0),
1944 which is already handled by operand_equal_p case. */
1945 case BUILT_IN_STRCASECMP:
1946 break;
1947 case BUILT_IN_STRNCASECMP:
1948 {
1949 if (length == -1)
1950 break;
1951 r = strncmp (p1, p2, length);
1952 if (r == 0)
1953 known_result = true;
1954 break;;
1955 }
1956 default:
1957 gcc_unreachable ();
1958 }
1959
1960 if (known_result)
1961 {
1962 replace_call_with_value (gsi, build_cmp_result (type, r));
1963 return true;
1964 }
1965 }
1966
1967 bool nonzero_length = length >= 1
1968 || fcode == BUILT_IN_STRCMP
1969 || fcode == BUILT_IN_STRCASECMP;
1970
1971 location_t loc = gimple_location (stmt);
1972
1973 /* If the second arg is "", return *(const unsigned char*)arg1. */
1974 if (p2 && *p2 == '\0' && nonzero_length)
1975 {
1976 gimple_seq stmts = NULL;
1977 tree var = gimple_load_first_char (loc, str1, &stmts);
1978 if (lhs)
1979 {
1980 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
1981 gimple_seq_add_stmt_without_update (&stmts, stmt);
1982 }
1983
1984 gsi_replace_with_seq_vops (gsi, stmts);
1985 return true;
1986 }
1987
1988 /* If the first arg is "", return -*(const unsigned char*)arg2. */
1989 if (p1 && *p1 == '\0' && nonzero_length)
1990 {
1991 gimple_seq stmts = NULL;
1992 tree var = gimple_load_first_char (loc, str2, &stmts);
1993
1994 if (lhs)
1995 {
1996 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
1997 stmt = gimple_build_assign (c, NOP_EXPR, var);
1998 gimple_seq_add_stmt_without_update (&stmts, stmt);
1999
2000 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2001 gimple_seq_add_stmt_without_update (&stmts, stmt);
2002 }
2003
2004 gsi_replace_with_seq_vops (gsi, stmts);
2005 return true;
2006 }
2007
2008 /* If len parameter is one, return an expression corresponding to
2009 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2010 if (fcode == BUILT_IN_STRNCMP && length == 1)
2011 {
2012 gimple_seq stmts = NULL;
2013 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2014 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2015
2016 if (lhs)
2017 {
2018 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2019 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2020 gimple_seq_add_stmt_without_update (&stmts, convert1);
2021
2022 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2023 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2024 gimple_seq_add_stmt_without_update (&stmts, convert2);
2025
2026 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2027 gimple_seq_add_stmt_without_update (&stmts, stmt);
2028 }
2029
2030 gsi_replace_with_seq_vops (gsi, stmts);
2031 return true;
2032 }
2033
2034 return false;
2035 }
2036
2037 /* Fold a call to the memchr pointed by GSI iterator. */
2038
static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constants,
     and the character representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only within the known constant contents of ARG1.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* Fold to null only when the entire searched range is known;
	     if LENGTH exceeds the constant string we cannot tell what
	     the trailing bytes contain.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: fold to ARG1 + offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* Result unused; replace the call with a no-op.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2098
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
2105
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* If the single character isn't known at compile time
	 (c_getstr returned NULL), fall through and emit fwrite
	 just like for longer strings.  */
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2175
2176 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2177 DEST, SRC, LEN, and SIZE are the arguments to the call.
2178 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2179 code of the builtin. If MAXLEN is not NULL, it is maximum length
2180 passed as third argument. */
2181
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns a pointer past the copied region,
	     so materialize DEST + LEN before the call.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* The destination object size must be a compile-time constant to
     reason about the check below.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* A SIZE of all ones means the object size is unknown, in which
     case no overflow check can ever fail.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking variant if the copy may not fit.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  /* The access provably fits (or can't be checked): drop the _chk
     variant in favor of the plain builtin.  */
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2276
2277 /* Fold a call to the __st[rp]cpy_chk builtin.
2278 DEST, SRC, and SIZE are the arguments to the call.
2279 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2280 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2281 strings passed as second argument. */
2282
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  /* A SIZE of all ones means the object size is unknown; then no
     overflow check is needed at all.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes so the terminating NUL is included.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking variant unless the copy (plus NUL) provably
	 fits, i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2367
2368 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2369 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2370 length passed as third argument. IGNORE is true if return value can be
2371 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2372
2373 static bool
2374 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2375 tree dest, tree src,
2376 tree len, tree size,
2377 enum built_in_function fcode)
2378 {
2379 gimple *stmt = gsi_stmt (*gsi);
2380 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2381 tree fn;
2382
2383 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2384 {
2385 /* If return value of __stpncpy_chk is ignored,
2386 optimize into __strncpy_chk. */
2387 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2388 if (fn)
2389 {
2390 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2391 replace_call_with_call_and_fold (gsi, repl);
2392 return true;
2393 }
2394 }
2395
2396 if (! tree_fits_uhwi_p (size))
2397 return false;
2398
2399 tree maxlen = get_maxval_strlen (len, 2);
2400 if (! integer_all_onesp (size))
2401 {
2402 if (! tree_fits_uhwi_p (len))
2403 {
2404 /* If LEN is not constant, try MAXLEN too.
2405 For MAXLEN only allow optimizing into non-_ocs function
2406 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2407 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2408 return false;
2409 }
2410 else
2411 maxlen = len;
2412
2413 if (tree_int_cst_lt (size, maxlen))
2414 return false;
2415 }
2416
2417 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2418 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2419 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2420 if (!fn)
2421 return false;
2422
2423 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2424 replace_call_with_call_and_fold (gsi, repl);
2425 return true;
2426 }
2427
2428 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2429 Return NULL_TREE if no simplification can be made. */
2430
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* The transformation below requires a constant source length.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands of the original call to the new
     memcpy call by hand, since it is inserted rather than replacing
     the statement directly.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2491
2492 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2493 NULL_TREE if a normal call should be emitted rather than expanding
2494 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2495 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2496 passed as second argument. */
2497
static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE of all ones means the object size is unknown, in which
     case the check below is unnecessary.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs: drop the FLAG and SIZE arguments and
     shift the remaining ones down by two slots.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2572
2573 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2574 Return NULL_TREE if a normal call should be emitted rather than
2575 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2576 or BUILT_IN_VSPRINTF_CHK. */
2577
static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN is the number of bytes the call will write, when we can
     determine it from the format below.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* A SIZE of all ones means the object size is unknown; otherwise
     the output must provably fit (LEN < SIZE leaves room for the
     terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs: drop the FLAG and SIZE arguments and
     shift the remaining ones down by two slots.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2668
2669 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2670 ORIG may be null if this is a 2-argument call. We don't attempt to
2671 simplify calls with more than 3 arguments.
2672
2673 Return NULL_TREE if no simplification was possible, otherwise return the
2674 simplified form of the call as a tree. If IGNORED is true, it means that
2675 the caller does not use the returned value of the function. */
2676
static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     for a %-free format is simply the format's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* If the return value is used, the source's length must be
	 known so the result can be materialized as a constant.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2793
2794 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2795 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2796 attempt to simplify calls with more than 4 arguments.
2797
2798 Return NULL_TREE if no simplification was possible, otherwise return the
2799 simplified form of the call as a tree. If IGNORED is true, it means that
2800 the caller does not use the returned value of the function. */
2801
static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* We handle 'snprintf (dest, destsize, fmt)' and
     'snprintf (dest, destsize, "%s", orig)' only.  */
  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove the output fits.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written; here that is the format's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The source length must be a known constant to prove the copy
	 fits and to materialize the return value.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2932
2933 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2934 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2935 more than 3 arguments, and ARG may be null in the 2-argument case.
2936
2937 Return NULL_TREE if no simplification was possible, otherwise return the
2938 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2939 code of the function to be simplified. */
2940
2941 static bool
2942 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2943 tree fp, tree fmt, tree arg,
2944 enum built_in_function fcode)
2945 {
2946 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2947 tree fn_fputc, fn_fputs;
2948 const char *fmt_str = NULL;
2949
2950 /* If the return value is used, don't do the transformation. */
2951 if (gimple_call_lhs (stmt) != NULL_TREE)
2952 return false;
2953
2954 /* Check whether the format is a literal string constant. */
2955 fmt_str = c_getstr (fmt);
2956 if (fmt_str == NULL)
2957 return false;
2958
2959 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2960 {
2961 /* If we're using an unlocked function, assume the other
2962 unlocked functions exist explicitly. */
2963 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2964 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2965 }
2966 else
2967 {
2968 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2969 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2970 }
2971
2972 if (!init_target_chars ())
2973 return false;
2974
2975 /* If the format doesn't contain % args or %%, use strcpy. */
2976 if (strchr (fmt_str, target_percent) == NULL)
2977 {
2978 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2979 && arg)
2980 return false;
2981
2982 /* If the format specifier was "", fprintf does nothing. */
2983 if (fmt_str[0] == '\0')
2984 {
2985 replace_call_with_value (gsi, NULL_TREE);
2986 return true;
2987 }
2988
2989 /* When "string" doesn't contain %, replace all cases of
2990 fprintf (fp, string) with fputs (string, fp). The fputs
2991 builtin will take care of special cases like length == 1. */
2992 if (fn_fputs)
2993 {
2994 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2995 replace_call_with_call_and_fold (gsi, repl);
2996 return true;
2997 }
2998 }
2999
3000 /* The other optimizations can be done only on the non-va_list variants. */
3001 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3002 return false;
3003
3004 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3005 else if (strcmp (fmt_str, target_percent_s) == 0)
3006 {
3007 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3008 return false;
3009 if (fn_fputs)
3010 {
3011 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3012 replace_call_with_call_and_fold (gsi, repl);
3013 return true;
3014 }
3015 }
3016
3017 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3018 else if (strcmp (fmt_str, target_percent_c) == 0)
3019 {
3020 if (!arg
3021 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3022 return false;
3023 if (fn_fputc)
3024 {
3025 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3026 replace_call_with_call_and_fold (gsi, repl);
3027 return true;
3028 }
3029 }
3030
3031 return false;
3032 }
3033
3034 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3035 FMT and ARG are the arguments to the call; we don't fold cases with
3036 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3037
3038 Return NULL_TREE if no simplification was possible, otherwise return the
3039 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3040 code of the function to be simplified. */
3041
static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     replacements below do not produce the printf return value.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  /* Pick the replacement functions.  Either lookup may yield NULL_TREE;
     each transformation below guards on that before building a call.  */
  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  /* Set up the target-charset representations of '%', "%s", etc.;
     bail out if that fails.  */
  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      /* STR is the string that will actually be printed: either the
	 constant string argument of a "%s" format, or the format
	 itself when it contains no '%' at all.  */
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  /* The (size_t)(int) round-trip check guards against lengths
	     that do not fit in an int (TREE_STRING_LENGTH is int).  */
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3195
3196
3197
/* Fold a call to __builtin_strlen when the length of its argument's
   string can be determined.  */
3199
static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Ask the strlen machinery about the argument; a NULL result means
     no length could be determined, so leave the call alone.  */
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  /* Gimplify LEN (it may be a non-trivial expression) before
     substituting it for the call's result.  */
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
3211
3212 /* Fold a call to __builtin_acc_on_device. */
3213
static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  /* The pair of device codes ARG0 is matched against; for the host
     compiler these are (HOST, NONE), for an accelerator compiler they
     are overridden below.  */
  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq, which replaces the call's value.  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
3253
3254 /* Fold the non-target builtin at *GSI and return whether any simplification
3255 was made. */
3256
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  /* Dispatch on the builtin's function code.  Note the argument
     positions passed to each helper differ between plain and _chk
     variants (the _chk forms carry extra size/flag arguments).  */
  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      /* bzero (p, n) is memset (p, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* bcopy has (src, dest) argument order, hence the swap.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      /* fprintf (fp, fmt[, arg]); only the 2- and 3-argument forms
	 are handled.  */
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* __fprintf_chk (fp, flag, fmt[, arg]); the flag at index 1 is
	 skipped when forwarding the format and argument.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      /* printf (fmt[, arg]).  */
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* __printf_chk (flag, fmt[, arg]); the format is at index 1.  */
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
3431
3432 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3433 function calls to constants, where possible. */
3434
static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  A size of 1 forces a position of 0; a
     nonzero size answers DIM_SIZE directly (0 means dynamic).  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      /* size - is_pos is SIZE for DIM_SIZE and 0 for DIM_POS (which
	 only reaches here when size == 1).  */
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}
3453
3454 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3455 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3456 &var where var is only addressable because of such calls. */
3457
bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Only well-formed 6-argument __atomic_compare_exchange_N builtin
     calls with virtual operands qualify, and only when atomics may be
     inlined and no sanitizer needs to see the original call.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a plain automatic
     variable, so that taking its address can later be undone.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  /* The weak argument must be a compile-time 0 or 1, since it is
     encoded into the internal-fn flag operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* Extract the integral type of the third parameter of the builtin
     and check the target can expand a compare-and-swap in its mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
3518
3519 /* Fold
3520 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3521 into
3522 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3523 i = IMAGPART_EXPR <t>;
3524 r = (_Bool) i;
3525 e = REALPART_EXPR <t>; */
3526
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the integral type of the builtin's third parameter;
     CTYPE is the complex type the internal fn returns (value in the
     real part, success flag in the imaginary part).  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* EXPECTED is the variable whose address was passed as &e; load its
     current value into an SSA name.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to leave *GSI on return: at the load of EXPECTED.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode w * 256 + N into the flag operand, as documented above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the original call's virtual operands to the new call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  if (gimple_call_lhs (stmt))
    {
      /* r = (_Bool) IMAGPART_EXPR <t>; the final assignment to r
	 replaces the original call via gsi_replace below.  */
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
			       gimple_assign_lhs (g));
    }
  gsi_replace (gsi, g, true);
  /* Store REALPART_EXPR <t> back into EXPECTED, converting back from
     ITYPE if needed.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
3586
3587 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3588 doesn't fit into TYPE. The test for overflow should be regardless of
3589 -fwrapv, and even for unsigned types. */
3590
3591 bool
3592 arith_overflowed_p (enum tree_code code, const_tree type,
3593 const_tree arg0, const_tree arg1)
3594 {
3595 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3596 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3597 widest2_int_cst;
3598 widest2_int warg0 = widest2_int_cst (arg0);
3599 widest2_int warg1 = widest2_int_cst (arg1);
3600 widest2_int wres;
3601 switch (code)
3602 {
3603 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3604 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3605 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3606 default: gcc_unreachable ();
3607 }
3608 signop sign = TYPE_SIGN (type);
3609 if (sign == UNSIGNED && wi::neg_p (wres))
3610 return true;
3611 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3612 }
3613
3614 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3615 The statement may be replaced by another statement, e.g., if the call
3616 simplifies to a constant value. Return true if any changes were made.
3617 It is assumed that the operands have been previously folded. */
3618
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolves to a known fndecl; strip
	     the wrapper, dumping a diagnostic if the target contradicts
	     the type-inheritance information.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Speculative devirtualization: when type analysis proves the
	     complete set of possible targets, a unique target becomes a
	     direct call and an empty set becomes __builtin_unreachable.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: replace the call with
		     __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement; stop here when only
     in-place modification is allowed.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE is the arithmetic operation for the UBSAN_CHECK_* and
	 *_OVERFLOW internal fns; CPLX_RESULT distinguishes the latter,
	 which return a complex value of (result, overflow flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Remove the check when the object size is unknown (all ones)
	     or provably large enough.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* For the complex-result fns the computation type is the
	     element type of the lhs; with no lhs nothing is folded.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* On overflow, the complex-result fns record it in the
		     flag; the UBSAN checks simply aren't folded.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* RESULT was one of the original arguments; it may
		     need converting to TYPE, which is only valid when
		     no value change (overflow) can occur.  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package (result, overflow) into the complex return
		 value expected by the *_OVERFLOW internal fns.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3922
3923
3924 /* Return true whether NAME has a use on STMT. */
3925
3926 static bool
3927 has_use_on_stmt (tree name, gimple *stmt)
3928 {
3929 imm_use_iterator iter;
3930 use_operand_p use_p;
3931 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3932 if (USE_STMT (use_p) == stmt)
3933 return true;
3934 return false;
3935 }
3936
3937 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3938 gimple_simplify.
3939
3940 Replaces *GSI with the simplification result in RCODE and OPS
3941 and the associated statements in *SEQ. Does the replacement
3942 according to INPLACE and returns true if the operation succeeded. */
3943
3944 static bool
3945 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3946 code_helper rcode, tree *ops,
3947 gimple_seq *seq, bool inplace)
3948 {
3949 gimple *stmt = gsi_stmt (*gsi);
3950
3951 /* Play safe and do not allow abnormals to be mentioned in
3952 newly created statements. See also maybe_push_res_to_seq.
3953 As an exception allow such uses if there was a use of the
3954 same SSA name on the old stmt. */
3955 if ((TREE_CODE (ops[0]) == SSA_NAME
3956 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3957 && !has_use_on_stmt (ops[0], stmt))
3958 || (ops[1]
3959 && TREE_CODE (ops[1]) == SSA_NAME
3960 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3961 && !has_use_on_stmt (ops[1], stmt))
3962 || (ops[2]
3963 && TREE_CODE (ops[2]) == SSA_NAME
3964 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3965 && !has_use_on_stmt (ops[2], stmt))
3966 || (COMPARISON_CLASS_P (ops[0])
3967 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3968 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3969 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3970 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3971 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3972 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3973 return false;
3974
3975 /* Don't insert new statements when INPLACE is true, even if we could
3976 reuse STMT for the final statement. */
3977 if (inplace && !gimple_seq_empty_p (*seq))
3978 return false;
3979
3980 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3981 {
3982 gcc_assert (rcode.is_tree_code ());
3983 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3984 /* GIMPLE_CONDs condition may not throw. */
3985 && (!flag_exceptions
3986 || !cfun->can_throw_non_call_exceptions
3987 || !operation_could_trap_p (rcode,
3988 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3989 false, NULL_TREE)))
3990 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3991 else if (rcode == SSA_NAME)
3992 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3993 build_zero_cst (TREE_TYPE (ops[0])));
3994 else if (rcode == INTEGER_CST)
3995 {
3996 if (integer_zerop (ops[0]))
3997 gimple_cond_make_false (cond_stmt);
3998 else
3999 gimple_cond_make_true (cond_stmt);
4000 }
4001 else if (!inplace)
4002 {
4003 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4004 ops, seq);
4005 if (!res)
4006 return false;
4007 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4008 build_zero_cst (TREE_TYPE (res)));
4009 }
4010 else
4011 return false;
4012 if (dump_file && (dump_flags & TDF_DETAILS))
4013 {
4014 fprintf (dump_file, "gimple_simplified to ");
4015 if (!gimple_seq_empty_p (*seq))
4016 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4017 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4018 0, TDF_SLIM);
4019 }
4020 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4021 return true;
4022 }
4023 else if (is_gimple_assign (stmt)
4024 && rcode.is_tree_code ())
4025 {
4026 if (!inplace
4027 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4028 {
4029 maybe_build_generic_op (rcode,
4030 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4031 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4032 if (dump_file && (dump_flags & TDF_DETAILS))
4033 {
4034 fprintf (dump_file, "gimple_simplified to ");
4035 if (!gimple_seq_empty_p (*seq))
4036 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4037 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4038 0, TDF_SLIM);
4039 }
4040 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4041 return true;
4042 }
4043 }
4044 else if (rcode.is_fn_code ()
4045 && gimple_call_combined_fn (stmt) == rcode)
4046 {
4047 unsigned i;
4048 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4049 {
4050 gcc_assert (ops[i] != NULL_TREE);
4051 gimple_call_set_arg (stmt, i, ops[i]);
4052 }
4053 if (i < 3)
4054 gcc_assert (ops[i] == NULL_TREE);
4055 if (dump_file && (dump_flags & TDF_DETAILS))
4056 {
4057 fprintf (dump_file, "gimple_simplified to ");
4058 if (!gimple_seq_empty_p (*seq))
4059 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4060 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4061 }
4062 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4063 return true;
4064 }
4065 else if (!inplace)
4066 {
4067 if (gimple_has_lhs (stmt))
4068 {
4069 tree lhs = gimple_get_lhs (stmt);
4070 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4071 ops, seq, lhs))
4072 return false;
4073 if (dump_file && (dump_flags & TDF_DETAILS))
4074 {
4075 fprintf (dump_file, "gimple_simplified to ");
4076 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4077 }
4078 gsi_replace_with_seq_vops (gsi, *seq);
4079 return true;
4080 }
4081 else
4082 gcc_unreachable ();
4083 }
4084
4085 return false;
4086 }
4087
4088 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4089
4090 static bool
4091 maybe_canonicalize_mem_ref_addr (tree *t)
4092 {
4093 bool res = false;
4094
4095 if (TREE_CODE (*t) == ADDR_EXPR)
4096 t = &TREE_OPERAND (*t, 0);
4097
4098 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4099 generic vector extension. The actual vector referenced is
4100 view-converted to an array type for this purpose. If the index
4101 is constant the canonical representation in the middle-end is a
4102 BIT_FIELD_REF so re-write the former to the latter here. */
4103 if (TREE_CODE (*t) == ARRAY_REF
4104 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4105 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4106 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4107 {
4108 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4109 if (VECTOR_TYPE_P (vtype))
4110 {
4111 tree low = array_ref_low_bound (*t);
4112 if (TREE_CODE (low) == INTEGER_CST)
4113 {
4114 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4115 {
4116 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4117 wi::to_widest (low));
4118 idx = wi::mul (idx, wi::to_widest
4119 (TYPE_SIZE (TREE_TYPE (*t))));
4120 widest_int ext
4121 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4122 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4123 {
4124 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4125 TREE_TYPE (*t),
4126 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4127 TYPE_SIZE (TREE_TYPE (*t)),
4128 wide_int_to_tree (sizetype, idx));
4129 res = true;
4130 }
4131 }
4132 }
4133 }
4134 }
4135
4136 while (handled_component_p (*t))
4137 t = &TREE_OPERAND (*t, 0);
4138
4139 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4140 of invariant addresses into a SSA name MEM_REF address. */
4141 if (TREE_CODE (*t) == MEM_REF
4142 || TREE_CODE (*t) == TARGET_MEM_REF)
4143 {
4144 tree addr = TREE_OPERAND (*t, 0);
4145 if (TREE_CODE (addr) == ADDR_EXPR
4146 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4147 || handled_component_p (TREE_OPERAND (addr, 0))))
4148 {
4149 tree base;
4150 HOST_WIDE_INT coffset;
4151 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4152 &coffset);
4153 if (!base)
4154 gcc_unreachable ();
4155
4156 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4157 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4158 TREE_OPERAND (*t, 1),
4159 size_int (coffset));
4160 res = true;
4161 }
4162 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4163 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4164 }
4165
4166 /* Canonicalize back MEM_REFs to plain reference trees if the object
4167 accessed is a decl that has the same access semantics as the MEM_REF. */
4168 if (TREE_CODE (*t) == MEM_REF
4169 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4170 && integer_zerop (TREE_OPERAND (*t, 1))
4171 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4172 {
4173 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4174 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4175 if (/* Same volatile qualification. */
4176 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4177 /* Same TBAA behavior with -fstrict-aliasing. */
4178 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4179 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4180 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4181 /* Same alignment. */
4182 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4183 /* We have to look out here to not drop a required conversion
4184 from the rhs to the lhs if *t appears on the lhs or vice-versa
4185 if it appears on the rhs. Thus require strict type
4186 compatibility. */
4187 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4188 {
4189 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4190 res = true;
4191 }
4192 }
4193
4194 /* Canonicalize TARGET_MEM_REF in particular with respect to
4195 the indexes becoming constant. */
4196 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4197 {
4198 tree tem = maybe_fold_tmr (*t);
4199 if (tem)
4200 {
4201 *t = tem;
4202 res = true;
4203 }
4204 }
4205
4206 return res;
4207 }
4208
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement may not be
   replaced and no new statements may be inserted.  VALUEIZE is used to
   look through SSA names while simplifying.  Returns true if any change
   was made to the statement.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember the no-warning flag so we can restore the proper state
     for deferred overflow warnings at the end.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Canonicalize memory references on both sides of the copy.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping a comparison's operands requires swapping
		     the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  /* Canonicalize the bound value of a debug bind as well.  */
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the (empty) default case.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When INPLACE we pass a NULL seq so gimple_simplify may not
	 build new statements.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    /* The simplification was not usable; free the sequence.  */
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	/* Insert a conversion if folding changed the type.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When INPLACE only accept the new rhs if it needs fewer
	   operand slots than the statement currently has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* An input that must be in memory may only be folded as
	       an lvalue, hence the constraint parsing.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the address and rebuild the
		 ADDR_EXPR with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4533
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells the simplifier not to look at the
     defining statement of any SSA name.  */
  return NULL_TREE;
}
4541
4542 /* Valueization callback that ends up following single-use SSA edges only. */
4543
4544 tree
4545 follow_single_use_edges (tree val)
4546 {
4547 if (TREE_CODE (val) == SSA_NAME
4548 && !has_single_use (val))
4549 return NULL_TREE;
4550 return val;
4551 }
4552
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without following SSA use-def chains.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
4565
/* Overload of fold_stmt that additionally valueizes operands through
   the VALUEIZE callback while simplifying.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4571
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4588
4589 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4590 if EXPR is null or we don't know how.
4591 If non-null, the result always has boolean type. */
4592
4593 static tree
4594 canonicalize_bool (tree expr, bool invert)
4595 {
4596 if (!expr)
4597 return NULL_TREE;
4598 else if (invert)
4599 {
4600 if (integer_nonzerop (expr))
4601 return boolean_false_node;
4602 else if (integer_zerop (expr))
4603 return boolean_true_node;
4604 else if (TREE_CODE (expr) == SSA_NAME)
4605 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4606 build_int_cst (TREE_TYPE (expr), 0));
4607 else if (COMPARISON_CLASS_P (expr))
4608 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4609 boolean_type_node,
4610 TREE_OPERAND (expr, 0),
4611 TREE_OPERAND (expr, 1));
4612 else
4613 return NULL_TREE;
4614 }
4615 else
4616 {
4617 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4618 return expr;
4619 if (integer_nonzerop (expr))
4620 return boolean_true_node;
4621 else if (integer_zerop (expr))
4622 return boolean_false_node;
4623 else if (TREE_CODE (expr) == SSA_NAME)
4624 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4625 build_int_cst (TREE_TYPE (expr), 0));
4626 else if (COMPARISON_CLASS_P (expr))
4627 return fold_build2 (TREE_CODE (expr),
4628 boolean_type_node,
4629 TREE_OPERAND (expr, 0),
4630 TREE_OPERAND (expr, 1));
4631 else
4632 return NULL_TREE;
4633 }
4634 }
4635
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name matches (name != 0) and (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the statement defining EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == nonzero) preserve the sense of
	     the inner comparison ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... while (name == 0) and (name != nonzero) invert it.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4693
4694 /* Check to see if two boolean expressions OP1 and OP2 are logically
4695 equivalent. */
4696
4697 static bool
4698 same_bool_result_p (const_tree op1, const_tree op2)
4699 {
4700 /* Simple cases first. */
4701 if (operand_equal_p (op1, op2, 0))
4702 return true;
4703
4704 /* Check the cases where at least one of the operands is a comparison.
4705 These are a bit smarter than operand_equal_p in that they apply some
4706 identifies on SSA_NAMEs. */
4707 if (COMPARISON_CLASS_P (op2)
4708 && same_bool_comparison_p (op1, TREE_CODE (op2),
4709 TREE_OPERAND (op2, 0),
4710 TREE_OPERAND (op2, 1)))
4711 return true;
4712 if (COMPARISON_CLASS_P (op1)
4713 && same_bool_comparison_p (op2, TREE_CODE (op1),
4714 TREE_OPERAND (op1, 0),
4715 TREE_OPERAND (op1, 1)))
4716 return true;
4717
4718 /* Default case. */
4719 return false;
4720 }
4721
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers below recurse into each other via
   DeMorgan rewrites, hence all six must be declared up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
4742
4743 /* Helper function for and_comparisons_1: try to simplify the AND of the
4744 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4745 If INVERT is true, invert the value of the VAR before doing the AND.
4746 Return NULL_EXPR if we can't simplify this to a single expression. */
4747
4748 static tree
4749 and_var_with_comparison (tree var, bool invert,
4750 enum tree_code code2, tree op2a, tree op2b)
4751 {
4752 tree t;
4753 gimple *stmt = SSA_NAME_DEF_STMT (var);
4754
4755 /* We can only deal with variables whose definitions are assignments. */
4756 if (!is_gimple_assign (stmt))
4757 return NULL_TREE;
4758
4759 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4760 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4761 Then we only have to consider the simpler non-inverted cases. */
4762 if (invert)
4763 t = or_var_with_comparison_1 (stmt,
4764 invert_tree_comparison (code2, false),
4765 op2a, op2b);
4766 else
4767 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4768 return canonicalize_bool (t, invert);
4769 }
4770
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record an OP2A whose comparison
     against OP2B tests it for true resp. false.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) / (op2a == nonzero) is just op2a.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) / (op2a != nonzero) is !op2a.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds the simplification of (innerN AND (op2a code2
	 op2b)) from the first inner operand, if any, for reuse when
	 processing the second one.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4933
4934 /* Try to simplify the AND of two comparisons defined by
4935 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4936 If this can be done without constructing an intermediate value,
4937 return the resulting tree; otherwise NULL_TREE is returned.
4938 This function is deliberately asymmetric as it recurses on SSA_DEFs
4939 in the first comparison but not the second. */
4940
4941 static tree
4942 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4943 enum tree_code code2, tree op2a, tree op2b)
4944 {
4945 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4946
4947 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4948 if (operand_equal_p (op1a, op2a, 0)
4949 && operand_equal_p (op1b, op2b, 0))
4950 {
4951 /* Result will be either NULL_TREE, or a combined comparison. */
4952 tree t = combine_comparisons (UNKNOWN_LOCATION,
4953 TRUTH_ANDIF_EXPR, code1, code2,
4954 truth_type, op1a, op1b);
4955 if (t)
4956 return t;
4957 }
4958
4959 /* Likewise the swapped case of the above. */
4960 if (operand_equal_p (op1a, op2b, 0)
4961 && operand_equal_p (op1b, op2a, 0))
4962 {
4963 /* Result will be either NULL_TREE, or a combined comparison. */
4964 tree t = combine_comparisons (UNKNOWN_LOCATION,
4965 TRUTH_ANDIF_EXPR, code1,
4966 swap_tree_comparison (code2),
4967 truth_type, op1a, op1b);
4968 if (t)
4969 return t;
4970 }
4971
4972 /* If both comparisons are of the same value against constants, we might
4973 be able to merge them. */
4974 if (operand_equal_p (op1a, op2a, 0)
4975 && TREE_CODE (op1b) == INTEGER_CST
4976 && TREE_CODE (op2b) == INTEGER_CST)
4977 {
4978 int cmp = tree_int_cst_compare (op1b, op2b);
4979
4980 /* If we have (op1a == op1b), we should either be able to
4981 return that or FALSE, depending on whether the constant op1b
4982 also satisfies the other comparison against op2b. */
4983 if (code1 == EQ_EXPR)
4984 {
4985 bool done = true;
4986 bool val;
4987 switch (code2)
4988 {
4989 case EQ_EXPR: val = (cmp == 0); break;
4990 case NE_EXPR: val = (cmp != 0); break;
4991 case LT_EXPR: val = (cmp < 0); break;
4992 case GT_EXPR: val = (cmp > 0); break;
4993 case LE_EXPR: val = (cmp <= 0); break;
4994 case GE_EXPR: val = (cmp >= 0); break;
4995 default: done = false;
4996 }
4997 if (done)
4998 {
4999 if (val)
5000 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5001 else
5002 return boolean_false_node;
5003 }
5004 }
5005 /* Likewise if the second comparison is an == comparison. */
5006 else if (code2 == EQ_EXPR)
5007 {
5008 bool done = true;
5009 bool val;
5010 switch (code1)
5011 {
5012 case EQ_EXPR: val = (cmp == 0); break;
5013 case NE_EXPR: val = (cmp != 0); break;
5014 case LT_EXPR: val = (cmp > 0); break;
5015 case GT_EXPR: val = (cmp < 0); break;
5016 case LE_EXPR: val = (cmp >= 0); break;
5017 case GE_EXPR: val = (cmp <= 0); break;
5018 default: done = false;
5019 }
5020 if (done)
5021 {
5022 if (val)
5023 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5024 else
5025 return boolean_false_node;
5026 }
5027 }
5028
5029 /* Same business with inequality tests. */
5030 else if (code1 == NE_EXPR)
5031 {
5032 bool val;
5033 switch (code2)
5034 {
5035 case EQ_EXPR: val = (cmp != 0); break;
5036 case NE_EXPR: val = (cmp == 0); break;
5037 case LT_EXPR: val = (cmp >= 0); break;
5038 case GT_EXPR: val = (cmp <= 0); break;
5039 case LE_EXPR: val = (cmp > 0); break;
5040 case GE_EXPR: val = (cmp < 0); break;
5041 default:
5042 val = false;
5043 }
5044 if (val)
5045 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5046 }
5047 else if (code2 == NE_EXPR)
5048 {
5049 bool val;
5050 switch (code1)
5051 {
5052 case EQ_EXPR: val = (cmp == 0); break;
5053 case NE_EXPR: val = (cmp != 0); break;
5054 case LT_EXPR: val = (cmp <= 0); break;
5055 case GT_EXPR: val = (cmp >= 0); break;
5056 case LE_EXPR: val = (cmp < 0); break;
5057 case GE_EXPR: val = (cmp > 0); break;
5058 default:
5059 val = false;
5060 }
5061 if (val)
5062 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5063 }
5064
5065 /* Chose the more restrictive of two < or <= comparisons. */
5066 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5067 && (code2 == LT_EXPR || code2 == LE_EXPR))
5068 {
5069 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5070 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5071 else
5072 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5073 }
5074
5075 /* Likewise chose the more restrictive of two > or >= comparisons. */
5076 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5077 && (code2 == GT_EXPR || code2 == GE_EXPR))
5078 {
5079 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5080 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5081 else
5082 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5083 }
5084
5085 /* Check for singleton ranges. */
5086 else if (cmp == 0
5087 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5088 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5089 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5090
5091 /* Check for disjoint ranges. */
5092 else if (cmp <= 0
5093 && (code1 == LT_EXPR || code1 == LE_EXPR)
5094 && (code2 == GT_EXPR || code2 == GE_EXPR))
5095 return boolean_false_node;
5096 else if (cmp >= 0
5097 && (code1 == GT_EXPR || code1 == GE_EXPR)
5098 && (code2 == LT_EXPR || code2 == LE_EXPR))
5099 return boolean_false_node;
5100 }
5101
5102 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5103 NAME's definition is a truth value. See if there are any simplifications
5104 that can be done against the NAME's definition. */
5105 if (TREE_CODE (op1a) == SSA_NAME
5106 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5107 && (integer_zerop (op1b) || integer_onep (op1b)))
5108 {
5109 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5110 || (code1 == NE_EXPR && integer_onep (op1b)));
5111 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5112 switch (gimple_code (stmt))
5113 {
5114 case GIMPLE_ASSIGN:
5115 /* Try to simplify by copy-propagating the definition. */
5116 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5117
5118 case GIMPLE_PHI:
5119 /* If every argument to the PHI produces the same result when
5120 ANDed with the second comparison, we win.
5121 Do not do this unless the type is bool since we need a bool
5122 result here anyway. */
5123 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5124 {
5125 tree result = NULL_TREE;
5126 unsigned i;
5127 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5128 {
5129 tree arg = gimple_phi_arg_def (stmt, i);
5130
5131 /* If this PHI has itself as an argument, ignore it.
5132 If all the other args produce the same result,
5133 we're still OK. */
5134 if (arg == gimple_phi_result (stmt))
5135 continue;
5136 else if (TREE_CODE (arg) == INTEGER_CST)
5137 {
5138 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5139 {
5140 if (!result)
5141 result = boolean_false_node;
5142 else if (!integer_zerop (result))
5143 return NULL_TREE;
5144 }
5145 else if (!result)
5146 result = fold_build2 (code2, boolean_type_node,
5147 op2a, op2b);
5148 else if (!same_bool_comparison_p (result,
5149 code2, op2a, op2b))
5150 return NULL_TREE;
5151 }
5152 else if (TREE_CODE (arg) == SSA_NAME
5153 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5154 {
5155 tree temp;
5156 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5157 /* In simple cases we can look through PHI nodes,
5158 but we have to be careful with loops.
5159 See PR49073. */
5160 if (! dom_info_available_p (CDI_DOMINATORS)
5161 || gimple_bb (def_stmt) == gimple_bb (stmt)
5162 || dominated_by_p (CDI_DOMINATORS,
5163 gimple_bb (def_stmt),
5164 gimple_bb (stmt)))
5165 return NULL_TREE;
5166 temp = and_var_with_comparison (arg, invert, code2,
5167 op2a, op2b);
5168 if (!temp)
5169 return NULL_TREE;
5170 else if (!result)
5171 result = temp;
5172 else if (!same_bool_result_p (result, temp))
5173 return NULL_TREE;
5174 }
5175 else
5176 return NULL_TREE;
5177 }
5178 return result;
5179 }
5180
5181 default:
5182 break;
5183 }
5184 }
5185 return NULL_TREE;
5186 }
5187
5188 /* Try to simplify the AND of two comparisons, specified by
5189 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5190 If this can be simplified to a single expression (without requiring
5191 introducing more SSA variables to hold intermediate values),
5192 return the resulting tree. Otherwise return NULL_TREE.
5193 If the result expression is non-null, it has boolean type. */
5194
5195 tree
5196 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5197 enum tree_code code2, tree op2a, tree op2b)
5198 {
5199 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5200 if (t)
5201 return t;
5202 else
5203 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5204 }
5205
5206 /* Helper function for or_comparisons_1: try to simplify the OR of the
5207 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5208 If INVERT is true, invert the value of VAR before doing the OR.
5209 Return NULL_EXPR if we can't simplify this to a single expression. */
5210
5211 static tree
5212 or_var_with_comparison (tree var, bool invert,
5213 enum tree_code code2, tree op2a, tree op2b)
5214 {
5215 tree t;
5216 gimple *stmt = SSA_NAME_DEF_STMT (var);
5217
5218 /* We can only deal with variables whose definitions are assignments. */
5219 if (!is_gimple_assign (stmt))
5220 return NULL_TREE;
5221
5222 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5223 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5224 Then we only have to consider the simpler non-inverted cases. */
5225 if (invert)
5226 t = and_var_with_comparison_1 (stmt,
5227 invert_tree_comparison (code2, false),
5228 op2a, op2b);
5229 else
5230 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5231 return canonicalize_bool (t, invert);
5232 }
5233
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* When the second comparison amounts to a truth test of OP2A, these
     record whether it tests OP2A for true or for false.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplified form of (inner1 OR (op2a code2 op2b)), if any, kept
	 around while the second inner operand is tried.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5397
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: negative, zero or positive
	 for op1b less than, equal to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C OR x > C) is (x != C).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5651
5652 /* Try to simplify the OR of two comparisons, specified by
5653 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5654 If this can be simplified to a single expression (without requiring
5655 introducing more SSA variables to hold intermediate values),
5656 return the resulting tree. Otherwise return NULL_TREE.
5657 If the result expression is non-null, it has boolean type. */
5658
5659 tree
5660 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5661 enum tree_code code2, tree op2a, tree op2b)
5662 {
5663 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5664 if (t)
5665 return t;
5666 else
5667 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5668 }
5669
5670
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* First try the generic match-and-simplify machinery.
     ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Otherwise fold by hand according to the statement's shape.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A vector CONSTRUCTOR with one value per element folds to
                 a VECTOR_CST if every valueized element is a constant.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  /* Unary references on a valueized SSA operand fold via
                     fold_unary_loc.  */
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      /* Replace the MEM_REF base with its invariant address
                         value, if known, before folding the reference.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we cant reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put a constant, if any, in op1 so the tests below
                       only need to look at one operand.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Map the UBSAN check to the arithmetic operation it guards;
	       folding succeeds only when the result does not overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that argument is constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* For a regular call, fold builtins with all-constant arguments.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5977
5978 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5979 Returns NULL_TREE if folding to a constant is not possible, otherwise
5980 returns a constant according to is_gimple_min_invariant. */
5981
5982 tree
5983 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5984 {
5985 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5986 if (res && is_gimple_min_invariant (res))
5987 return res;
5988 return NULL_TREE;
5989 }
5990
5991
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant MEM_REF offset into *BIT_OFFSET and strip the
	 MEM_REF down to the (possibly valueized) address it wraps.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-sized, non-variable component references can be
	 resolved to a fixed bit offset into the outer object.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6074
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, when non-NULL, is the
   declaration the constructor comes from, passed on for canonicalization.
   Return NULL_TREE if the access cannot be resolved to a constant.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
6134
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is passed through for canonicalization of the resulting value.
   Return NULL_TREE when the access cannot be folded.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk all initialized fields looking for one that covers the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array member
	 has no known end; zero is used as a placeholder and the
	 FIELD_SIZE == NULL_TREE checks below avoid relying on it.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  /* Recurse into the value of the covering field.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
6197
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, when non-NULL, is the declaration
   the value originates from and is used for canonicalization.  Return
   NULL_TREE if the reference cannot be folded to a constant.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* For an actual CONSTRUCTOR dispatch on array vs. non-array folding.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6253
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold volatile accesses.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literal constants fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Index relative to the array's low bound, sign-extended to
		 the index type's precision.  */
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the requested part
	   if it turned out constant.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6371
6372 tree
6373 fold_const_aggregate_ref (tree t)
6374 {
6375 return fold_const_aggregate_ref_1 (t, NULL);
6376 }
6377
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* A missing vtable initializer can only happen under LTO.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Turn OFFSET plus TOKEN into a bit offset into the vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6487
6488 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6489 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6490 KNOWN_BINFO carries the binfo describing the true type of
6491 OBJ_TYPE_REF_OBJECT(REF).
6492 Set CAN_REFER if non-NULL to false if method
6493 is not referable or if the virtual table is ill-formed (such as rewriten
6494 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6495
6496 tree
6497 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6498 bool *can_refer)
6499 {
6500 unsigned HOST_WIDE_INT offset;
6501 tree v;
6502
6503 v = BINFO_VTABLE (known_binfo);
6504 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6505 if (!v)
6506 return NULL_TREE;
6507
6508 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6509 {
6510 if (can_refer)
6511 *can_refer = false;
6512 return NULL_TREE;
6513 }
6514 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6515 }
6516
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's low bound when it has one.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element index is within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The imaginary part lives exactly one element size past the
	     real part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Simplify the inner dereference first if possible.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6636
6637 /* Return true if CODE is an operation that when operating on signed
6638 integer types involves undefined behavior on overflow and the
6639 operation can be expressed with unsigned arithmetic. */
6640
6641 bool
6642 arith_code_with_undefined_signed_overflow (tree_code code)
6643 {
6644 switch (code)
6645 {
6646 case PLUS_EXPR:
6647 case MINUS_EXPR:
6648 case MULT_EXPR:
6649 case NEGATE_EXPR:
6650 case POINTER_PLUS_EXPR:
6651 return true;
6652 default:
6653 return false;
6654 }
6655 }
6656
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand of STMT to the unsigned counterpart type,
     accumulating the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-established
     by the trailing conversion appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* In the unsigned type POINTER_PLUS_EXPR becomes a plain PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6693
6694
6695 /* The valueization hook we use for the gimple_build API simplification.
6696 This makes us match fold_buildN behavior by only combining with
6697 statements in the sequence(s) we are currently building. */
6698
6699 static tree
6700 gimple_build_valueize (tree op)
6701 {
6702 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6703 return op;
6704 return NULL_TREE;
6705 }
6706
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are wrapped as a unary tree in a single-RHS assign
	 rather than expressed as a plain unary assignment.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
6732
6733 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6734 simplifying it first if possible. Returns the built
6735 expression value and appends statements possibly defining it
6736 to SEQ. */
6737
6738 tree
6739 gimple_build (gimple_seq *seq, location_t loc,
6740 enum tree_code code, tree type, tree op0, tree op1)
6741 {
6742 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6743 if (!res)
6744 {
6745 res = create_tmp_reg_or_ssa_name (type);
6746 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6747 gimple_set_location (stmt, loc);
6748 gimple_seq_add_stmt_without_update (seq, stmt);
6749 }
6750 return res;
6751 }
6752
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is wrapped as a ternary tree in a single-RHS
	 assign rather than expressed as a plain ternary assignment.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
6778
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      /* A void call gets no LHS, and RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
6804
6805 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6806 (or no result if TYPE is void) with location LOC,
6807 simplifying it first if possible. Returns the built
6808 expression value (or NULL_TREE if TYPE is void) and appends
6809 statements possibly defining it to SEQ. */
6810
6811 tree
6812 gimple_build (gimple_seq *seq, location_t loc,
6813 enum built_in_function fn, tree type, tree arg0, tree arg1)
6814 {
6815 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6816 if (!res)
6817 {
6818 tree decl = builtin_decl_implicit (fn);
6819 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6820 if (!VOID_TYPE_P (type))
6821 {
6822 res = create_tmp_reg_or_ssa_name (type);
6823 gimple_call_set_lhs (stmt, res);
6824 }
6825 gimple_set_location (stmt, loc);
6826 gimple_seq_add_stmt_without_update (seq, stmt);
6827 }
6828 return res;
6829 }
6830
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      /* A void call gets no LHS, and RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
6858
6859 /* Build the conversion (TYPE) OP with a result of type TYPE
6860 with location LOC if such conversion is neccesary in GIMPLE,
6861 simplifying it first.
6862 Returns the built expression value and appends
6863 statements possibly defining it to SEQ. */
6864
6865 tree
6866 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6867 {
6868 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6869 return op;
6870 return gimple_build (seq, loc, NOP_EXPR, type, op);
6871 }
6872
6873 /* Build the conversion (ptrofftype) OP with a result of a type
6874 compatible with ptrofftype with location LOC if such conversion
6875 is neccesary in GIMPLE, simplifying it first.
6876 Returns the built expression value and appends
6877 statements possibly defining it to SEQ. */
6878
6879 tree
6880 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6881 {
6882 if (ptrofftype_p (TREE_TYPE (op)))
6883 return op;
6884 return gimple_convert (seq, loc, sizetype, op);
6885 }
6886
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the shape of the assignment's RHS.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* Ternary RHSes are not analyzed; answer conservatively.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6920
6921 /* Return true if return value of call STMT is known to be non-negative.
6922 If the return value is based on the assumption that signed overflow is
6923 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6924 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6925
6926 static bool
6927 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6928 int depth)
6929 {
6930 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6931 gimple_call_arg (stmt, 0) : NULL_TREE;
6932 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6933 gimple_call_arg (stmt, 1) : NULL_TREE;
6934
6935 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6936 gimple_call_combined_fn (stmt),
6937 arg0,
6938 arg1,
6939 strict_overflow_p, depth);
6940 }
6941
6942 /* Return true if return value of call STMT is known to be non-negative.
6943 If the return value is based on the assumption that signed overflow is
6944 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6945 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6946
6947 static bool
6948 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6949 int depth)
6950 {
6951 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6952 {
6953 tree arg = gimple_phi_arg_def (stmt, i);
6954 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6955 return false;
6956 }
6957 return true;
6958 }
6959
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* Dispatch to the handler for the statement kind; only assignments,
     calls and PHIs are analyzed.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      /* Anything else is conservatively assumed possibly negative.  */
      return false;
    }
}
6984
6985 /* Return true if the floating-point value computed by assignment STMT
6986 is known to have an integer value. We also allow +Inf, -Inf and NaN
6987 to be considered integer values. Return false for signaling NaN.
6988
6989 DEPTH is the current nesting depth of the query. */
6990
6991 static bool
6992 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6993 {
6994 enum tree_code code = gimple_assign_rhs_code (stmt);
6995 switch (get_gimple_rhs_class (code))
6996 {
6997 case GIMPLE_UNARY_RHS:
6998 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6999 gimple_assign_rhs1 (stmt), depth);
7000 case GIMPLE_BINARY_RHS:
7001 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7002 gimple_assign_rhs1 (stmt),
7003 gimple_assign_rhs2 (stmt), depth);
7004 case GIMPLE_TERNARY_RHS:
7005 return false;
7006 case GIMPLE_SINGLE_RHS:
7007 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7008 case GIMPLE_INVALID_RHS:
7009 break;
7010 }
7011 gcc_unreachable ();
7012 }
7013
7014 /* Return true if the floating-point value computed by call STMT is known
7015 to have an integer value. We also allow +Inf, -Inf and NaN to be
7016 considered integer values. Return false for signaling NaN.
7017
7018 DEPTH is the current nesting depth of the query. */
7019
7020 static bool
7021 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7022 {
7023 tree arg0 = (gimple_call_num_args (stmt) > 0
7024 ? gimple_call_arg (stmt, 0)
7025 : NULL_TREE);
7026 tree arg1 = (gimple_call_num_args (stmt) > 1
7027 ? gimple_call_arg (stmt, 1)
7028 : NULL_TREE);
7029 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7030 arg0, arg1, depth);
7031 }
7032
7033 /* Return true if the floating-point result of phi STMT is known to have
7034 an integer value. We also allow +Inf, -Inf and NaN to be considered
7035 integer values. Return false for signaling NaN.
7036
7037 DEPTH is the current nesting depth of the query. */
7038
7039 static bool
7040 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7041 {
7042 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7043 {
7044 tree arg = gimple_phi_arg_def (stmt, i);
7045 if (!integer_valued_real_single_p (arg, depth + 1))
7046 return false;
7047 }
7048 return true;
7049 }
7050
7051 /* Return true if the floating-point value computed by STMT is known
7052 to have an integer value. We also allow +Inf, -Inf and NaN to be
7053 considered integer values. Return false for signaling NaN.
7054
7055 DEPTH is the current nesting depth of the query. */
7056
7057 bool
7058 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7059 {
7060 switch (gimple_code (stmt))
7061 {
7062 case GIMPLE_ASSIGN:
7063 return gimple_assign_integer_valued_real_p (stmt, depth);
7064 case GIMPLE_CALL:
7065 return gimple_call_integer_valued_real_p (stmt, depth);
7066 case GIMPLE_PHI:
7067 return gimple_phi_integer_valued_real_p (stmt, depth);
7068 default:
7069 return false;
7070 }
7071 }