1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58 #include "fold-const-call.h"
59
 60 /* Return true when DECL can be referenced from the current unit.
 61    FROM_DECL (if non-null) specifies the variable whose constructor
       DECL was taken from.
62 We can get declarations that are not possible to reference for various
63 reasons:
64
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
 67 when they are keyed to another compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
70 set.
 71 2) In WHOPR mode devirtualization might lead to a reference
 72 to a method that was partitioned elsewhere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
75 declaring the body.
 76 3) COMDAT functions referred to by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
80 directly. */
81
82 static bool
83 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 {
85 varpool_node *vnode;
86 struct cgraph_node *node;
87 symtab_node *snode;
88
89 if (DECL_ABSTRACT_P (decl))
90 return false;
91
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
94 || !VAR_OR_FUNCTION_DECL_P (decl))
95 return true;
96
 97 /* Static objects can be referred to only if they were not optimized out yet. */
98 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 {
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab->function_flags_ready)
103 return true;
104 snode = symtab_node::get (decl);
105 if (!snode || !snode->definition)
106 return false;
107 node = dyn_cast <cgraph_node *> (snode);
108 return !node || !node->global.inlined_to;
109 }
110
111 /* We will later output the initializer, so we can refer to it.
 112 So we are concerned only when DECL comes from the initializer of
 113 an external var or a var that has been optimized out. */
114 if (!from_decl
115 || !VAR_P (from_decl)
116 || (!DECL_EXTERNAL (from_decl)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->definition)
119 || (flag_ltrans
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->in_other_partition))
122 return true;
 123 /* We are folding a reference from an external vtable. The vtable may refer
 124 to a symbol keyed to another compilation unit. The other compilation
 125 unit may be in a separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl)
127 && DECL_EXTERNAL (decl)
128 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
129 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 return false;
 131 /* When the function is public, we can always introduce a new reference.
 132 The exception is COMDAT functions, where introducing a direct reference
 133 implies the need to include the function body in the current unit. */
134 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 return true;
 136 /* We have a COMDAT. We are going to check if we still have a definition
 137 or if the definition is going to be output in another partition.
138 Bypass this when gimplifying; all needed functions will be produced.
139
140 As observed in PR20991 for already optimized out comdat virtual functions
141 it may be tempting to not necessarily give up because the copy will be
 142 output elsewhere when the corresponding vtable is output.
 143 This is however not possible - the ABI specifies that COMDATs are output
 144 in units where they are used, and when the other unit was compiled with LTO
 145 it is possible that the vtable was kept public while the function itself
 146 was privatized. */
147 if (!symtab->function_flags_ready)
148 return true;
149
150 snode = symtab_node::get (decl);
151 if (!snode
152 || ((!snode->definition || DECL_EXTERNAL (decl))
153 && (!snode->in_other_partition
154 || (!snode->forced_by_abi && !snode->force_output))))
155 return false;
156 node = dyn_cast <cgraph_node *> (snode);
157 return !node || !node->global.inlined_to;
158 }
159
160 /* Create a temporary for TYPE for a statement STMT. If the current function
 161 is in SSA form, an SSA name is created. Otherwise a temporary register
162 is made. */
163
164 static tree
165 create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
166 {
167 if (gimple_in_ssa_p (cfun))
168 return make_ssa_name (type, stmt);
169 else
170 return create_tmp_reg (type);
171 }
172
 173 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
 174 it into a form acceptable to is_gimple_min_invariant.
 175 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
176
177 tree
178 canonicalize_constructor_val (tree cval, tree from_decl)
179 {
180 tree orig_cval = cval;
181 STRIP_NOPS (cval);
182 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
183 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
184 {
185 tree ptr = TREE_OPERAND (cval, 0);
186 if (is_gimple_min_invariant (ptr))
187 cval = build1_loc (EXPR_LOCATION (cval),
188 ADDR_EXPR, TREE_TYPE (ptr),
189 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
190 ptr,
191 fold_convert (ptr_type_node,
192 TREE_OPERAND (cval, 1))));
193 }
194 if (TREE_CODE (cval) == ADDR_EXPR)
195 {
196 tree base = NULL_TREE;
197 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
198 {
199 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
200 if (base)
201 TREE_OPERAND (cval, 0) = base;
202 }
203 else
204 base = get_base_address (TREE_OPERAND (cval, 0));
205 if (!base)
206 return NULL_TREE;
207
208 if (VAR_OR_FUNCTION_DECL_P (base)
209 && !can_refer_decl_in_current_unit_p (base, from_decl))
210 return NULL_TREE;
211 if (TREE_TYPE (base) == error_mark_node)
212 return NULL_TREE;
213 if (VAR_P (base))
214 TREE_ADDRESSABLE (base) = 1;
215 else if (TREE_CODE (base) == FUNCTION_DECL)
216 {
217 /* Make sure we create a cgraph node for functions we'll reference.
218 They can be non-existent if the reference comes from an entry
219 of an external vtable for example. */
220 cgraph_node::get_create (base);
221 }
222 /* Fixup types in global initializers. */
223 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
224 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
225
226 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
227 cval = fold_convert (TREE_TYPE (orig_cval), cval);
228 return cval;
229 }
230 if (TREE_OVERFLOW_P (cval))
231 return drop_tree_overflow (cval);
232 return orig_cval;
233 }
234
235 /* If SYM is a constant variable with known value, return the value.
236 NULL_TREE is returned otherwise. */
237
238 tree
239 get_symbol_constant_value (tree sym)
240 {
241 tree val = ctor_for_folding (sym);
242 if (val != error_mark_node)
243 {
244 if (val)
245 {
246 val = canonicalize_constructor_val (unshare_expr (val), sym);
247 if (val && is_gimple_min_invariant (val))
248 return val;
249 else
250 return NULL_TREE;
251 }
252 /* Variables declared 'const' without an initializer
253 have zero as the initializer if they may not be
254 overridden at link or run time. */
255 if (!val
256 && is_gimple_reg_type (TREE_TYPE (sym)))
257 return build_zero_cst (TREE_TYPE (sym));
258 }
259
260 return NULL_TREE;
261 }
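
/* Illustrative example (a sketch, not part of the folding logic): at the
   source level the lookup above lets loads such as

     static const int answer = 42;
     int f (void) { return answer; }    ->  return 42;

     static const int zeroed;           -- 'const' with no initializer
     int g (void) { return zeroed; }    ->  return 0;

   fold to constants, provided the initializer canonicalizes to a GIMPLE
   minimum invariant and the symbol cannot be overridden at link time.  */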
262
263
264
265 /* Subroutine of fold_stmt. We perform several simplifications of the
266 memory reference tree EXPR and make sure to re-gimplify them properly
267 after propagation of constant addresses. IS_LHS is true if the
268 reference is supposed to be an lvalue. */
269
270 static tree
271 maybe_fold_reference (tree expr, bool is_lhs)
272 {
273 tree result;
274
275 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
276 || TREE_CODE (expr) == REALPART_EXPR
277 || TREE_CODE (expr) == IMAGPART_EXPR)
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_unary_loc (EXPR_LOCATION (expr),
280 TREE_CODE (expr),
281 TREE_TYPE (expr),
282 TREE_OPERAND (expr, 0));
283 else if (TREE_CODE (expr) == BIT_FIELD_REF
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_ternary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0),
289 TREE_OPERAND (expr, 1),
290 TREE_OPERAND (expr, 2));
291
292 if (!is_lhs
293 && (result = fold_const_aggregate_ref (expr))
294 && is_gimple_min_invariant (result))
295 return result;
296
297 return NULL_TREE;
298 }
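
/* For illustration (a sketch): with a constant operand the unary cases
   above fold directly, e.g. REALPART_EXPR of the complex constant
   __complex__ (1.0, 2.0) becomes 1.0; likewise, a read of a[1] from
   static const int a[2] = { 3, 7 };  folds to 7 via
   fold_const_aggregate_ref.  */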
299
300
301 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
302 replacement rhs for the statement or NULL_TREE if no simplification
303 could be made. It is assumed that the operands have been previously
304 folded. */
305
306 static tree
307 fold_gimple_assign (gimple_stmt_iterator *si)
308 {
309 gimple *stmt = gsi_stmt (*si);
310 enum tree_code subcode = gimple_assign_rhs_code (stmt);
311 location_t loc = gimple_location (stmt);
312
313 tree result = NULL_TREE;
314
315 switch (get_gimple_rhs_class (subcode))
316 {
317 case GIMPLE_SINGLE_RHS:
318 {
319 tree rhs = gimple_assign_rhs1 (stmt);
320
321 if (TREE_CLOBBER_P (rhs))
322 return NULL_TREE;
323
324 if (REFERENCE_CLASS_P (rhs))
325 return maybe_fold_reference (rhs, false);
326
327 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
328 {
329 tree val = OBJ_TYPE_REF_EXPR (rhs);
330 if (is_gimple_min_invariant (val))
331 return val;
332 else if (flag_devirtualize && virtual_method_call_p (rhs))
333 {
334 bool final;
335 vec <cgraph_node *>targets
336 = possible_polymorphic_call_targets (rhs, stmt, &final);
337 if (final && targets.length () <= 1 && dbg_cnt (devirt))
338 {
339 if (dump_enabled_p ())
340 {
341 location_t loc = gimple_location_safe (stmt);
342 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
343 "resolving virtual function address "
344 "reference to function %s\n",
345 targets.length () == 1
346 ? targets[0]->name ()
347 : "NULL");
348 }
349 if (targets.length () == 1)
350 {
351 val = fold_convert (TREE_TYPE (val),
352 build_fold_addr_expr_loc
353 (loc, targets[0]->decl));
354 STRIP_USELESS_TYPE_CONVERSION (val);
355 }
356 else
 357 /* We cannot use __builtin_unreachable here because it
 358 cannot have its address taken. */
359 val = build_int_cst (TREE_TYPE (val), 0);
360 return val;
361 }
362 }
363 }
364
365 else if (TREE_CODE (rhs) == ADDR_EXPR)
366 {
367 tree ref = TREE_OPERAND (rhs, 0);
368 tree tem = maybe_fold_reference (ref, true);
369 if (tem
370 && TREE_CODE (tem) == MEM_REF
371 && integer_zerop (TREE_OPERAND (tem, 1)))
372 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
373 else if (tem)
374 result = fold_convert (TREE_TYPE (rhs),
375 build_fold_addr_expr_loc (loc, tem));
376 else if (TREE_CODE (ref) == MEM_REF
377 && integer_zerop (TREE_OPERAND (ref, 1)))
378 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
379
380 if (result)
381 {
382 /* Strip away useless type conversions. Both the
383 NON_LVALUE_EXPR that may have been added by fold, and
384 "useless" type conversions that might now be apparent
385 due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
387
388 if (result != rhs && valid_gimple_rhs_p (result))
389 return result;
390 }
391 }
392
393 else if (TREE_CODE (rhs) == CONSTRUCTOR
394 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
395 {
396 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
397 unsigned i;
398 tree val;
399
400 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
401 if (! CONSTANT_CLASS_P (val))
402 return NULL_TREE;
403
404 return build_vector_from_ctor (TREE_TYPE (rhs),
405 CONSTRUCTOR_ELTS (rhs));
406 }
407
408 else if (DECL_P (rhs))
409 return get_symbol_constant_value (rhs);
410 }
411 break;
412
413 case GIMPLE_UNARY_RHS:
414 break;
415
416 case GIMPLE_BINARY_RHS:
417 break;
418
419 case GIMPLE_TERNARY_RHS:
420 result = fold_ternary_loc (loc, subcode,
421 TREE_TYPE (gimple_assign_lhs (stmt)),
422 gimple_assign_rhs1 (stmt),
423 gimple_assign_rhs2 (stmt),
424 gimple_assign_rhs3 (stmt));
425
426 if (result)
427 {
428 STRIP_USELESS_TYPE_CONVERSION (result);
429 if (valid_gimple_rhs_p (result))
430 return result;
431 }
432 break;
433
434 case GIMPLE_INVALID_RHS:
435 gcc_unreachable ();
436 }
437
438 return NULL_TREE;
439 }
440
441
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
 443 adjusting the replacement stmts' locations and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
446
447 static void
448 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
449 {
450 gimple *stmt = gsi_stmt (*si_p);
451
452 if (gimple_has_location (stmt))
453 annotate_all_with_location (stmts, gimple_location (stmt));
454
455 /* First iterate over the replacement statements backward, assigning
456 virtual operands to their defining statements. */
457 gimple *laststore = NULL;
458 for (gimple_stmt_iterator i = gsi_last (stmts);
459 !gsi_end_p (i); gsi_prev (&i))
460 {
461 gimple *new_stmt = gsi_stmt (i);
462 if ((gimple_assign_single_p (new_stmt)
463 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
464 || (is_gimple_call (new_stmt)
465 && (gimple_call_flags (new_stmt)
466 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
467 {
468 tree vdef;
469 if (!laststore)
470 vdef = gimple_vdef (stmt);
471 else
472 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
473 gimple_set_vdef (new_stmt, vdef);
474 if (vdef && TREE_CODE (vdef) == SSA_NAME)
475 SSA_NAME_DEF_STMT (vdef) = new_stmt;
476 laststore = new_stmt;
477 }
478 }
479
480 /* Second iterate over the statements forward, assigning virtual
481 operands to their uses. */
482 tree reaching_vuse = gimple_vuse (stmt);
483 for (gimple_stmt_iterator i = gsi_start (stmts);
484 !gsi_end_p (i); gsi_next (&i))
485 {
486 gimple *new_stmt = gsi_stmt (i);
 487 /* If the new statement possibly has a VUSE, update it with the exact SSA
 488 name we know will reach this one. */
489 if (gimple_has_mem_ops (new_stmt))
490 gimple_set_vuse (new_stmt, reaching_vuse);
491 gimple_set_modified (new_stmt, true);
492 if (gimple_vdef (new_stmt))
493 reaching_vuse = gimple_vdef (new_stmt);
494 }
495
 496 /* If the new sequence does not do a store, release the virtual
 497 definition of the original statement. */
498 if (reaching_vuse
499 && reaching_vuse == gimple_vuse (stmt))
500 {
501 tree vdef = gimple_vdef (stmt);
502 if (vdef
503 && TREE_CODE (vdef) == SSA_NAME)
504 {
505 unlink_stmt_vdef (stmt);
506 release_ssa_name (vdef);
507 }
508 }
509
510 /* Finally replace the original statement with the sequence. */
511 gsi_replace_with_seq (si_p, stmts, false);
512 }
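
/* Illustrative sketch (hypothetical SSA names): replacing the single store

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;

   with a two-store sequence wires the virtual operands as

     # .MEM_9 = VDEF <.MEM_2>   <- first store takes the original VUSE
     *p_1 = x_4;
     # .MEM_3 = VDEF <.MEM_9>   <- last store reuses the original VDEF
     *q_5 = y_6;

   so downstream uses of .MEM_3 need no SSA update.  */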
513
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
 516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
 517 is replaced. If the call is expected to produce a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
523
524 void
525 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
526 {
527 tree lhs;
528 gimple *stmt, *new_stmt;
529 gimple_stmt_iterator i;
530 gimple_seq stmts = NULL;
531
532 stmt = gsi_stmt (*si_p);
533
534 gcc_assert (is_gimple_call (stmt));
535
536 push_gimplify_context (gimple_in_ssa_p (cfun));
537
538 lhs = gimple_call_lhs (stmt);
539 if (lhs == NULL_TREE)
540 {
541 gimplify_and_add (expr, &stmts);
542 /* We can end up with folding a memcpy of an empty class assignment
543 which gets optimized away by C++ gimplification. */
544 if (gimple_seq_empty_p (stmts))
545 {
546 pop_gimplify_context (NULL);
547 if (gimple_in_ssa_p (cfun))
548 {
549 unlink_stmt_vdef (stmt);
550 release_defs (stmt);
551 }
552 gsi_replace (si_p, gimple_build_nop (), false);
553 return;
554 }
555 }
556 else
557 {
558 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
559 new_stmt = gimple_build_assign (lhs, tmp);
560 i = gsi_last (stmts);
561 gsi_insert_after_without_update (&i, new_stmt,
562 GSI_CONTINUE_LINKING);
563 }
564
565 pop_gimplify_context (NULL);
566
567 gsi_replace_with_seq_vops (si_p, stmts);
568 }
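
/* Usage sketch (hypothetical folding, names invented for illustration):
   to replace  lhs = foo (x_1);  with the equivalent expression  x_1 + 1,
   a caller would do

     tree expr = fold_build2 (PLUS_EXPR, TREE_TYPE (lhs), x, one);
     gimplify_and_update_call_from_tree (gsi, expr);

   and the call statement becomes  lhs = x_1 + 1;  with the virtual
   operands rewired as described above.  */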
569
570
571 /* Replace the call at *GSI with the gimple value VAL. */
572
573 static void
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575 {
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
578 gimple *repl;
579 if (lhs)
580 {
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
593 gsi_replace (gsi, repl, false);
594 }
595
596 /* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599 static void
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
601 {
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
612 gsi_replace (gsi, repl, false);
613 fold_stmt (gsi);
614 }
615
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
617
618 static bool
619 var_decl_component_p (tree var)
620 {
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625 }
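
/* For example (a sketch): given  struct S { int a[4]; } s;  the reference
   s.a[1] strips down to the VAR_DECL 's' and satisfies this predicate,
   whereas a dereference *p_1 of a pointer does not.  */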
626
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
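
/* An illustrative mapping of the ENDP encodings (a sketch in plain C):

     memcpy (d, s, n)    returns d            ENDP == 0
     mempcpy (d, s, n)   returns d + n        ENDP == 1
     stpcpy-style        returns d + n - 1    ENDP == 2
     memmove (d, s, n)   returns d            ENDP == 3, regions may overlap  */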
634
635 static bool
636 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
638 {
639 gimple *stmt = gsi_stmt (*gsi);
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
644
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
647 {
648 gimple *repl;
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
651 else
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
655 {
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
658 }
659 gsi_replace (gsi, repl, false);
660 return true;
661 }
662
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
666 {
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
670 if (!lhs)
671 {
672 gsi_replace (gsi, gimple_build_nop (), false);
673 return true;
674 }
675 goto done;
676 }
677 else
678 {
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
681 tree off0;
682
 683 /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
 684 pointers as wide integers) and may also result in huge function
 685 size because of the inlined bounds copies. Thus don't inline for
 686 functions we want to instrument. */
687 if (flag_check_pointer_bounds
688 && chkp_instrumentable_p (cfun->decl)
 689 /* Even if the data may contain pointers we can inline if we copy
 690 less than a pointer's size. */
691 && (!tree_fits_uhwi_p (len)
692 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
693 return false;
694
695 /* Build accesses at offset zero with a ref-all character type. */
696 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
697 ptr_mode, true), 0);
698
 699 /* If we can perform the copy efficiently by first doing all loads
 700 and then all stores, inline it that way. Currently "efficiently"
 701 means that we can load all the memory into a single integer
 702 register, which is what MOVE_MAX gives us. */
703 src_align = get_pointer_alignment (src);
704 dest_align = get_pointer_alignment (dest);
705 if (tree_fits_uhwi_p (len)
706 && compare_tree_int (len, MOVE_MAX) <= 0
 707 /* ??? Don't transform copies from strings with known length; this
 708 confuses tree-ssa-strlen.c. This doesn't handle
 709 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
 710 reason. */
711 && !c_strlen (src, 2))
712 {
713 unsigned ilen = tree_to_uhwi (len);
714 if (pow2p_hwi (ilen))
715 {
716 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
717 if (type
718 && TYPE_MODE (type) != BLKmode
719 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
720 == ilen * 8)
721 /* If the destination pointer is not aligned we must be able
722 to emit an unaligned store. */
723 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
725 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
726 != CODE_FOR_nothing)))
727 {
728 tree srctype = type;
729 tree desttype = type;
730 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 srctype = build_aligned_type (type, src_align);
732 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
733 tree tem = fold_const_aggregate_ref (srcmem);
734 if (tem)
735 srcmem = tem;
736 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
737 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
738 src_align)
739 && (optab_handler (movmisalign_optab,
740 TYPE_MODE (type))
741 == CODE_FOR_nothing))
742 srcmem = NULL_TREE;
743 if (srcmem)
744 {
745 gimple *new_stmt;
746 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
747 {
748 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
749 srcmem
750 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
751 new_stmt);
752 gimple_assign_set_lhs (new_stmt, srcmem);
753 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
754 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
755 }
756 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
757 desttype = build_aligned_type (type, dest_align);
758 new_stmt
759 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
760 dest, off0),
761 srcmem);
762 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
763 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
764 if (gimple_vdef (new_stmt)
765 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
766 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
767 if (!lhs)
768 {
769 gsi_replace (gsi, new_stmt, false);
770 return true;
771 }
772 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
773 goto done;
774 }
775 }
776 }
777 }
778
779 if (endp == 3)
780 {
781 /* Both DEST and SRC must be pointer types.
782 ??? This is what old code did. Is the testing for pointer types
783 really mandatory?
784
785 If either SRC is readonly or length is 1, we can use memcpy. */
786 if (!dest_align || !src_align)
787 return false;
788 if (readonly_data_expr (src)
789 || (tree_fits_uhwi_p (len)
790 && (MIN (src_align, dest_align) / BITS_PER_UNIT
791 >= tree_to_uhwi (len))))
792 {
793 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
794 if (!fn)
795 return false;
796 gimple_call_set_fndecl (stmt, fn);
797 gimple_call_set_arg (stmt, 0, dest);
798 gimple_call_set_arg (stmt, 1, src);
799 fold_stmt (gsi);
800 return true;
801 }
802
803 /* If *src and *dest can't overlap, optimize into memcpy as well. */
804 if (TREE_CODE (src) == ADDR_EXPR
805 && TREE_CODE (dest) == ADDR_EXPR)
806 {
807 tree src_base, dest_base, fn;
808 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
809 HOST_WIDE_INT maxsize;
810
811 srcvar = TREE_OPERAND (src, 0);
812 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
813 if (src_base == NULL)
814 src_base = srcvar;
815 destvar = TREE_OPERAND (dest, 0);
816 dest_base = get_addr_base_and_unit_offset (destvar,
817 &dest_offset);
818 if (dest_base == NULL)
819 dest_base = destvar;
820 if (tree_fits_uhwi_p (len))
821 maxsize = tree_to_uhwi (len);
822 else
823 maxsize = -1;
824 if (SSA_VAR_P (src_base)
825 && SSA_VAR_P (dest_base))
826 {
827 if (operand_equal_p (src_base, dest_base, 0)
828 && ranges_overlap_p (src_offset, maxsize,
829 dest_offset, maxsize))
830 return false;
831 }
832 else if (TREE_CODE (src_base) == MEM_REF
833 && TREE_CODE (dest_base) == MEM_REF)
834 {
835 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
836 TREE_OPERAND (dest_base, 0), 0))
837 return false;
838 offset_int off = mem_ref_offset (src_base) + src_offset;
839 if (!wi::fits_shwi_p (off))
840 return false;
841 src_offset = off.to_shwi ();
842
843 off = mem_ref_offset (dest_base) + dest_offset;
844 if (!wi::fits_shwi_p (off))
845 return false;
846 dest_offset = off.to_shwi ();
847 if (ranges_overlap_p (src_offset, maxsize,
848 dest_offset, maxsize))
849 return false;
850 }
851 else
852 return false;
853
854 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
855 if (!fn)
856 return false;
857 gimple_call_set_fndecl (stmt, fn);
858 gimple_call_set_arg (stmt, 0, dest);
859 gimple_call_set_arg (stmt, 1, src);
860 fold_stmt (gsi);
861 return true;
862 }
863
864 /* If the destination and source do not alias optimize into
865 memcpy as well. */
866 if ((is_gimple_min_invariant (dest)
867 || TREE_CODE (dest) == SSA_NAME)
868 && (is_gimple_min_invariant (src)
869 || TREE_CODE (src) == SSA_NAME))
870 {
871 ao_ref destr, srcr;
872 ao_ref_init_from_ptr_and_size (&destr, dest, len);
873 ao_ref_init_from_ptr_and_size (&srcr, src, len);
874 if (!refs_may_alias_p_1 (&destr, &srcr, false))
875 {
876 tree fn;
877 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
878 if (!fn)
879 return false;
880 gimple_call_set_fndecl (stmt, fn);
881 gimple_call_set_arg (stmt, 0, dest);
882 gimple_call_set_arg (stmt, 1, src);
883 fold_stmt (gsi);
884 return true;
885 }
886 }
887
888 return false;
889 }
890
891 if (!tree_fits_shwi_p (len))
892 return false;
 893 /* FIXME:
 894 This logic loses for arguments like (type *)malloc (sizeof (type)),
 895 since we strip the casts off the VOID return value from malloc.
 896 Perhaps we ought to inherit the type from the non-VOID argument here? */
897 STRIP_NOPS (src);
898 STRIP_NOPS (dest);
899 if (!POINTER_TYPE_P (TREE_TYPE (src))
900 || !POINTER_TYPE_P (TREE_TYPE (dest)))
901 return false;
 902 /* In the following, try to find a type that is most natural to be
 903 used for the memcpy source and destination and that allows
 904 the most optimization when memcpy is turned into a plain assignment
 905 using that type. In theory we could always use a char[len] type
 906 but that would only gain us that the destination and source will
 907 possibly no longer have their address taken. */
908 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
909 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
910 {
911 tree tem = TREE_OPERAND (src, 0);
912 STRIP_NOPS (tem);
913 if (tem != TREE_OPERAND (src, 0))
914 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
915 }
916 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
917 {
918 tree tem = TREE_OPERAND (dest, 0);
919 STRIP_NOPS (tem);
920 if (tem != TREE_OPERAND (dest, 0))
921 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
922 }
923 srctype = TREE_TYPE (TREE_TYPE (src));
924 if (TREE_CODE (srctype) == ARRAY_TYPE
925 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
926 {
927 srctype = TREE_TYPE (srctype);
928 STRIP_NOPS (src);
929 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
930 }
931 desttype = TREE_TYPE (TREE_TYPE (dest));
932 if (TREE_CODE (desttype) == ARRAY_TYPE
933 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
934 {
935 desttype = TREE_TYPE (desttype);
936 STRIP_NOPS (dest);
937 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
938 }
939 if (TREE_ADDRESSABLE (srctype)
940 || TREE_ADDRESSABLE (desttype))
941 return false;
942
943 /* Make sure we are not copying using a floating-point mode or
944 a type whose size possibly does not match its precision. */
945 if (FLOAT_MODE_P (TYPE_MODE (desttype))
946 || TREE_CODE (desttype) == BOOLEAN_TYPE
947 || TREE_CODE (desttype) == ENUMERAL_TYPE)
948 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
949 if (FLOAT_MODE_P (TYPE_MODE (srctype))
950 || TREE_CODE (srctype) == BOOLEAN_TYPE
951 || TREE_CODE (srctype) == ENUMERAL_TYPE)
952 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
953 if (!srctype)
954 srctype = desttype;
955 if (!desttype)
956 desttype = srctype;
957 if (!srctype)
958 return false;
959
960 src_align = get_pointer_alignment (src);
961 dest_align = get_pointer_alignment (dest);
962 if (dest_align < TYPE_ALIGN (desttype)
963 || src_align < TYPE_ALIGN (srctype))
964 return false;
965
966 destvar = dest;
967 STRIP_NOPS (destvar);
968 if (TREE_CODE (destvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (destvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
972 else
973 destvar = NULL_TREE;
974
975 srcvar = src;
976 STRIP_NOPS (srcvar);
977 if (TREE_CODE (srcvar) == ADDR_EXPR
978 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 {
981 if (!destvar
982 || src_align >= TYPE_ALIGN (desttype))
983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
984 srcvar, off0);
985 else if (!STRICT_ALIGNMENT)
986 {
987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
988 src_align);
989 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
990 }
991 else
992 srcvar = NULL_TREE;
993 }
994 else
995 srcvar = NULL_TREE;
996
997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
998 return false;
999
1000 if (srcvar == NULL_TREE)
1001 {
1002 STRIP_NOPS (src);
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1006 {
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 }
1013 }
1014 else if (destvar == NULL_TREE)
1015 {
1016 STRIP_NOPS (dest);
1017 if (dest_align >= TYPE_ALIGN (srctype))
1018 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1019 else
1020 {
1021 if (STRICT_ALIGNMENT)
1022 return false;
1023 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1024 dest_align);
1025 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 }
1027 }
1028
1029 gimple *new_stmt;
1030 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 {
1032 tree tem = fold_const_aggregate_ref (srcvar);
1033 if (tem)
1034 srcvar = tem;
1035 if (! is_gimple_min_invariant (srcvar))
1036 {
1037 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1038 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1039 new_stmt);
1040 gimple_assign_set_lhs (new_stmt, srcvar);
1041 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 }
1044 }
1045 new_stmt = gimple_build_assign (destvar, srcvar);
1046 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (gimple_vdef (new_stmt)
1049 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1050 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1051 if (!lhs)
1052 {
1053 gsi_replace (gsi, new_stmt, false);
1054 return true;
1055 }
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1057 }
1058
1059 done:
1060 gimple_seq stmts = NULL;
1061 if (endp == 0 || endp == 3)
1062 len = NULL_TREE;
1063 else if (endp == 2)
1064 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1065 ssize_int (1));
1066 if (endp == 2 || endp == 1)
1067 {
1068 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1069 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1070 TREE_TYPE (dest), dest, len);
1071 }
1072
1073 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1074 gimple *repl = gimple_build_assign (lhs, dest);
1075 gsi_replace (gsi, repl, false);
1076 return true;
1077 }
1078
 1079 /* Fold a function call to builtin memset or bzero at *GSI, setting the
 1080 memory of size LEN to the value C. Return whether a simplification was made. */
1081
1082 static bool
1083 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1084 {
1085 gimple *stmt = gsi_stmt (*gsi);
1086 tree etype;
1087 unsigned HOST_WIDE_INT length, cval;
1088
1089 /* If the LEN parameter is zero, return DEST. */
1090 if (integer_zerop (len))
1091 {
1092 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1093 return true;
1094 }
1095
1096 if (! tree_fits_uhwi_p (len))
1097 return false;
1098
1099 if (TREE_CODE (c) != INTEGER_CST)
1100 return false;
1101
1102 tree dest = gimple_call_arg (stmt, 0);
1103 tree var = dest;
1104 if (TREE_CODE (var) != ADDR_EXPR)
1105 return false;
1106
1107 var = TREE_OPERAND (var, 0);
1108 if (TREE_THIS_VOLATILE (var))
1109 return false;
1110
1111 etype = TREE_TYPE (var);
1112 if (TREE_CODE (etype) == ARRAY_TYPE)
1113 etype = TREE_TYPE (etype);
1114
1115 if (!INTEGRAL_TYPE_P (etype)
1116 && !POINTER_TYPE_P (etype))
 1117 return false;
1118
1119 if (! var_decl_component_p (var))
 1120 return false;
1121
1122 length = tree_to_uhwi (len);
1123 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1124 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
 1125 return false;
1126
1127 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
 1128 return false;
1129
1130 if (integer_zerop (c))
1131 cval = 0;
1132 else
1133 {
1134 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
 1135 return false;
1136
1137 cval = TREE_INT_CST_LOW (c);
1138 cval &= 0xff;
1139 cval |= cval << 8;
1140 cval |= cval << 16;
1141 cval |= (cval << 31) << 1;
1142 }
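      /* Worked example of the splat above (a sketch, assuming 8-bit bytes):
	 for C == 0xAB the value grows as
	   0xAB -> 0xABAB -> 0xABABABAB -> 0xABABABABABABABAB.
	 The final shift is written (cval << 31) << 1, presumably so that a
	 32-bit HOST_WIDE_INT is never shifted by its full width.  */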
1143
1144 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1145 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1146 gimple_set_vuse (store, gimple_vuse (stmt));
1147 tree vdef = gimple_vdef (stmt);
1148 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1149 {
1150 gimple_set_vdef (store, gimple_vdef (stmt));
1151 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1152 }
1153 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1154 if (gimple_call_lhs (stmt))
1155 {
1156 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1157 gsi_replace (gsi, asgn, false);
1158 }
1159 else
1160 {
1161 gimple_stmt_iterator gsi2 = *gsi;
1162 gsi_prev (gsi);
1163 gsi_remove (&gsi2, true);
1164 }
1165
1166 return true;
1167 }
1168
1169
1170 /* Obtain the minimum and maximum string length or minimum and maximum
1171 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1172 If ARG is an SSA name variable, follow its use-def chains. When
1173 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
 1174 if we are unable to determine the length or value, return false.
1175 VISITED is a bitmap of visited variables.
1176 TYPE is 0 if string length should be obtained, 1 for maximum string
1177 length and 2 for maximum value ARG can have.
1178 When FUZZY is set and the length of a string cannot be determined,
1179 the function instead considers as the maximum possible length the
1180 size of a character array it may refer to.
1181 Set *FLEXP to true if the range of the string lengths has been
1182 obtained from the upper bound of an array at the end of a struct.
1183 Such an array may hold a string that's longer than its upper bound
1184 due to it being used as a poor-man's flexible array member. */
1185
1186 static bool
1187 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1188 bool fuzzy, bool *flexp)
1189 {
1190 tree var, val;
1191 gimple *def_stmt;
1192
1193 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1194 but MINLEN may be cleared during the execution of the function. */
1195 tree *minlen = length;
1196 tree *const maxlen = length + 1;
1197
1198 if (TREE_CODE (arg) != SSA_NAME)
1199 {
1200 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1201 if (TREE_CODE (arg) == ADDR_EXPR
1202 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1203 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1204 {
1205 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1206 if (TREE_CODE (aop0) == INDIRECT_REF
1207 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1208 return get_range_strlen (TREE_OPERAND (aop0, 0),
1209 length, visited, type, fuzzy, flexp);
1210 }
1211
1212 if (type == 2)
1213 {
1214 val = arg;
1215 if (TREE_CODE (val) != INTEGER_CST
1216 || tree_int_cst_sgn (val) < 0)
1217 return false;
1218 }
1219 else
1220 val = c_strlen (arg, 1);
1221
1222 if (!val && fuzzy)
1223 {
1224 if (TREE_CODE (arg) == ADDR_EXPR)
1225 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1226 visited, type, fuzzy, flexp);
1227
1228 if (TREE_CODE (arg) == COMPONENT_REF
1229 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1230 {
1231 /* Use the type of the member array to determine the upper
1232 bound on the length of the array. This may be overly
1233 optimistic if the array itself isn't NUL-terminated and
1234 the caller relies on the subsequent member to contain
1235 the NUL.
1236 Set *FLEXP to true if the array whose bound is being
1237 used is at the end of a struct. */
1238 if (array_at_struct_end_p (arg, true))
1239 *flexp = true;
1240
1241 arg = TREE_OPERAND (arg, 1);
1242 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1243 if (!val || integer_zerop (val))
1244 return false;
1245 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1246 integer_one_node);
1247 /* Avoid using the array size as the minimum. */
1248 minlen = NULL;
1249 }
1250 }
1251
1252 if (!val)
1253 return false;
1254
1255 if (minlen
1256 && (!*minlen
1257 || (type > 0
1258 && TREE_CODE (*minlen) == INTEGER_CST
1259 && TREE_CODE (val) == INTEGER_CST
1260 && tree_int_cst_lt (val, *minlen))))
1261 *minlen = val;
1262
1263 if (*maxlen)
1264 {
1265 if (type > 0)
1266 {
1267 if (TREE_CODE (*maxlen) != INTEGER_CST
1268 || TREE_CODE (val) != INTEGER_CST)
1269 return false;
1270
1271 if (tree_int_cst_lt (*maxlen, val))
1272 *maxlen = val;
1273 return true;
1274 }
1275 else if (simple_cst_equal (val, *maxlen) != 1)
1276 return false;
1277 }
1278
1279 *maxlen = val;
1280 return true;
1281 }
1282
1283 /* If ARG is registered for SSA update we cannot look at its defining
1284 statement. */
1285 if (name_registered_for_update_p (arg))
1286 return false;
1287
1288 /* If we were already here, break the infinite cycle. */
1289 if (!*visited)
1290 *visited = BITMAP_ALLOC (NULL);
1291 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1292 return true;
1293
1294 var = arg;
1295 def_stmt = SSA_NAME_DEF_STMT (var);
1296
1297 switch (gimple_code (def_stmt))
1298 {
1299 case GIMPLE_ASSIGN:
1300 /* The RHS of the statement defining VAR must either have a
1301 constant length or come from another SSA_NAME with a constant
1302 length. */
1303 if (gimple_assign_single_p (def_stmt)
1304 || gimple_assign_unary_nop_p (def_stmt))
1305 {
1306 tree rhs = gimple_assign_rhs1 (def_stmt);
1307 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1308 }
1309 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1310 {
1311 tree op2 = gimple_assign_rhs2 (def_stmt);
1312 tree op3 = gimple_assign_rhs3 (def_stmt);
1313 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1314 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1315 }
1316 return false;
1317
1318 case GIMPLE_PHI:
1319 {
1320 /* All the arguments of the PHI node must have the same constant
1321 length. */
1322 unsigned i;
1323
1324 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1325 {
1326 tree arg = gimple_phi_arg (def_stmt, i)->def;
1327
1328 /* If this PHI has itself as an argument, we cannot
1329 determine the string length of this argument. However,
1330 if we can find a constant string length for the other
1331 PHI args then we can still be sure that this is a
1332 constant string length. So be optimistic and just
1333 continue with the next argument. */
1334 if (arg == gimple_phi_result (def_stmt))
1335 continue;
1336
1337 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1338 {
1339 if (fuzzy)
1340 *maxlen = build_all_ones_cst (size_type_node);
1341 else
1342 return false;
1343 }
1344 }
1345 }
1346 return true;
1347
1348 default:
1349 return false;
1350 }
1351 }
1352
1353 /* Determine the minimum and maximum value or string length that ARG
1354 refers to and store each in the first two elements of MINMAXLEN.
1355 For expressions that point to strings of unknown lengths that are
1356 character arrays, use the upper bound of the array as the maximum
1357 length. For example, given an expression like 'x ? array : "xyz"'
1358 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1359 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1360 stored in array.
1361 Return true if the range of the string lengths has been obtained
1362 from the upper bound of an array at the end of a struct. Such
1363 an array may hold a string that's longer than its upper bound
1364 due to it being used as a poor-man's flexible array member. */
1365
1366 bool
1367 get_range_strlen (tree arg, tree minmaxlen[2])
1368 {
1369 bitmap visited = NULL;
1370
1371 minmaxlen[0] = NULL_TREE;
1372 minmaxlen[1] = NULL_TREE;
1373
1374 bool flexarray = false;
1375 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1376
1377 if (visited)
1378 BITMAP_FREE (visited);
1379
1380 return flexarray;
1381 }
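
/* Usage sketch (a hypothetical caller):

     tree lenrange[2];
     bool flexarray = get_range_strlen (arg, lenrange);

   leaves the minimum and maximum lengths (or NULL_TREE) in LENRANGE[0]
   and LENRANGE[1], with FLEXARRAY set when the upper bound came from a
   trailing array used as a poor-man's flexible array member.  */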
1382
1383 tree
1384 get_maxval_strlen (tree arg, int type)
1385 {
1386 bitmap visited = NULL;
1387 tree len[2] = { NULL_TREE, NULL_TREE };
1388
1389 bool dummy;
1390 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1391 len[1] = NULL_TREE;
1392 if (visited)
1393 BITMAP_FREE (visited);
1394
1395 return len[1];
1396 }
1397
1398
 1399 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
 1400 Return true if a simplification was made and the statement at *GSI
 1401 was replaced, false otherwise. */
1402
1403 static bool
1404 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1405 tree dest, tree src)
1406 {
1407 location_t loc = gimple_location (gsi_stmt (*gsi));
1408 tree fn;
1409
1410 /* If SRC and DEST are the same (and not volatile), return DEST. */
1411 if (operand_equal_p (src, dest, 0))
1412 {
1413 replace_call_with_value (gsi, dest);
1414 return true;
1415 }
1416
1417 if (optimize_function_for_size_p (cfun))
1418 return false;
1419
1420 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1421 if (!fn)
1422 return false;
1423
1424 tree len = get_maxval_strlen (src, 0);
1425 if (!len)
1426 return false;
1427
1428 len = fold_convert_loc (loc, size_type_node, len);
1429 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1430 len = force_gimple_operand_gsi (gsi, len, true,
1431 NULL_TREE, true, GSI_SAME_STMT);
1432 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1433 replace_call_with_call_and_fold (gsi, repl);
1434 return true;
1435 }
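
/* For illustration (a sketch): when the source length is known, the
   transformation above rewrites

     strcpy (dst, "abc");

   into

     memcpy (dst, "abc", 4);   <- strlen ("abc") + 1 for the terminating NUL  */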
1436
 1437 /* Fold a function call to builtin strncpy with arguments DEST, SRC, and LEN.
 1438 Return true if a simplification was made and the statement at *GSI
 1439 was replaced, false otherwise. */
1440
1441 static bool
1442 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1443 tree dest, tree src, tree len)
1444 {
1445 location_t loc = gimple_location (gsi_stmt (*gsi));
1446 tree fn;
1447
1448 /* If the LEN parameter is zero, return DEST. */
1449 if (integer_zerop (len))
1450 {
1451 replace_call_with_value (gsi, dest);
1452 return true;
1453 }
1454
1455 /* We can't compare slen with len as constants below if len is not a
1456 constant. */
1457 if (TREE_CODE (len) != INTEGER_CST)
1458 return false;
1459
1460 /* Now, we must be passed a constant src ptr parameter. */
1461 tree slen = get_maxval_strlen (src, 0);
1462 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1463 return false;
1464
1465 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1466
1467 /* We do not support simplification of this case, though we do
1468 support it when expanding trees into RTL. */
1469 /* FIXME: generate a call to __builtin_memset. */
1470 if (tree_int_cst_lt (slen, len))
1471 return false;
1472
1473 /* OK transform into builtin memcpy. */
1474 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1475 if (!fn)
1476 return false;
1477
1478 len = fold_convert_loc (loc, size_type_node, len);
1479 len = force_gimple_operand_gsi (gsi, len, true,
1480 NULL_TREE, true, GSI_SAME_STMT);
1481 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1482 replace_call_with_call_and_fold (gsi, repl);
1483 return true;
1484 }
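
/* Sketch of the case above: with a known source length,

     strncpy (dst, "ab", 3)  becomes  memcpy (dst, "ab", 3);

   while  strncpy (dst, "ab", 8)  is left alone, since LEN > strlen (SRC) + 1
   would additionally require zero padding of the destination.  */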
1485
1486 /* Fold function call to builtin strchr or strrchr.
1487 If both arguments are constant, evaluate and fold the result,
1488 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1489 In general strlen is significantly faster than strchr
1490 due to being a simpler operation. */
1491 static bool
1492 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1493 {
1494 gimple *stmt = gsi_stmt (*gsi);
1495 tree str = gimple_call_arg (stmt, 0);
1496 tree c = gimple_call_arg (stmt, 1);
1497 location_t loc = gimple_location (stmt);
1498 const char *p;
1499 char ch;
1500
1501 if (!gimple_call_lhs (stmt))
1502 return false;
1503
1504 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1505 {
1506 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1507
1508 if (p1 == NULL)
1509 {
1510 replace_call_with_value (gsi, integer_zero_node);
1511 return true;
1512 }
1513
1514 tree len = build_int_cst (size_type_node, p1 - p);
1515 gimple_seq stmts = NULL;
1516 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1517 POINTER_PLUS_EXPR, str, len);
1518 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1519 gsi_replace_with_seq_vops (gsi, stmts);
1520 return true;
1521 }
1522
1523 if (!integer_zerop (c))
1524 return false;
1525
1526 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1527 if (is_strrchr && optimize_function_for_size_p (cfun))
1528 {
1529 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1530
1531 if (strchr_fn)
1532 {
1533 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1534 replace_call_with_call_and_fold (gsi, repl);
1535 return true;
1536 }
1537
1538 return false;
1539 }
1540
1541 tree len;
1542 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1543
1544 if (!strlen_fn)
1545 return false;
1546
1547 /* Create newstr = strlen (str). */
1548 gimple_seq stmts = NULL;
1549 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1550 gimple_set_location (new_stmt, loc);
1551 len = create_tmp_reg_or_ssa_name (size_type_node);
1552 gimple_call_set_lhs (new_stmt, len);
1553 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1554
1555 /* Create (str p+ strlen (str)). */
1556 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1557 POINTER_PLUS_EXPR, str, len);
1558 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1559 gsi_replace_with_seq_vops (gsi, stmts);
1560 /* gsi now points at the assignment to the lhs, get a
1561 stmt iterator to the strlen.
1562 ??? We can't use gsi_for_stmt as that doesn't work when the
1563 CFG isn't built yet. */
1564 gimple_stmt_iterator gsi2 = *gsi;
1565 gsi_prev (&gsi2);
1566 fold_stmt (&gsi2);
1567 return true;
1568 }
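
/* Illustrative sketch: for a non-constant string the folding above turns

     p = strchr (s, 0);

   into the cheaper sequence

     tmp = strlen (s);
     p = s + tmp;              <- pointer to the terminating NUL  */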
1569
1570 /* Fold function call to builtin strstr.
1571 If both arguments are constant, evaluate and fold the result,
1572 additionally fold strstr (x, "") into x and strstr (x, "c")
1573 into strchr (x, 'c'). */
1574 static bool
1575 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1576 {
1577 gimple *stmt = gsi_stmt (*gsi);
1578 tree haystack = gimple_call_arg (stmt, 0);
1579 tree needle = gimple_call_arg (stmt, 1);
1580 const char *p, *q;
1581
1582 if (!gimple_call_lhs (stmt))
1583 return false;
1584
1585 q = c_getstr (needle);
1586 if (q == NULL)
1587 return false;
1588
1589 if ((p = c_getstr (haystack)))
1590 {
1591 const char *r = strstr (p, q);
1592
1593 if (r == NULL)
1594 {
1595 replace_call_with_value (gsi, integer_zero_node);
1596 return true;
1597 }
1598
1599 tree len = build_int_cst (size_type_node, r - p);
1600 gimple_seq stmts = NULL;
1601 gimple *new_stmt
1602 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1603 haystack, len);
1604 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1605 gsi_replace_with_seq_vops (gsi, stmts);
1606 return true;
1607 }
1608
1609 /* For strstr (x, "") return x. */
1610 if (q[0] == '\0')
1611 {
1612 replace_call_with_value (gsi, haystack);
1613 return true;
1614 }
1615
1616 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1617 if (q[1] == '\0')
1618 {
1619 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1620 if (strchr_fn)
1621 {
1622 tree c = build_int_cst (integer_type_node, q[0]);
1623 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1624 replace_call_with_call_and_fold (gsi, repl);
1625 return true;
1626 }
1627 }
1628
1629 return false;
1630 }
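
/* For example (a sketch): with both arguments constant,
   p = strstr ("hello", "ll")  folds to  p = "hello" + 2.  */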
1631
 1632 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
 1633 to the call.
 1634
 1635 Return false if no simplification was possible, otherwise return true
 1636 after replacing the call at *GSI with a simplified form that
 1637 computes the same value, but in a more efficient manner (including
 1638 calls to other builtin functions). */
1649
1650 static bool
1651 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1652 {
1653 gimple *stmt = gsi_stmt (*gsi);
1654 location_t loc = gimple_location (stmt);
1655
1656 const char *p = c_getstr (src);
1657
1658 /* If the string length is zero, return the dst parameter. */
1659 if (p && *p == '\0')
1660 {
1661 replace_call_with_value (gsi, dst);
1662 return true;
1663 }
1664
1665 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1666 return false;
1667
1668 /* See if we can store by pieces into (dst + strlen(dst)). */
1669 tree newdst;
1670 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1671 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1672
1673 if (!strlen_fn || !memcpy_fn)
1674 return false;
1675
 1676 /* If the length of the source string isn't computable, don't
1677 split strcat into strlen and memcpy. */
1678 tree len = get_maxval_strlen (src, 0);
1679 if (! len)
1680 return false;
1681
1682 /* Create strlen (dst). */
1683 gimple_seq stmts = NULL, stmts2;
1684 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1685 gimple_set_location (repl, loc);
1686 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1687 gimple_call_set_lhs (repl, newdst);
1688 gimple_seq_add_stmt_without_update (&stmts, repl);
1689
1690 /* Create (dst p+ strlen (dst)). */
1691 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1692 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1693 gimple_seq_add_seq_without_update (&stmts, stmts2);
1694
1695 len = fold_convert_loc (loc, size_type_node, len);
1696 len = size_binop_loc (loc, PLUS_EXPR, len,
1697 build_int_cst (size_type_node, 1));
1698 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1699 gimple_seq_add_seq_without_update (&stmts, stmts2);
1700
1701 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1702 gimple_seq_add_stmt_without_update (&stmts, repl);
1703 if (gimple_call_lhs (stmt))
1704 {
1705 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1706 gimple_seq_add_stmt_without_update (&stmts, repl);
1707 gsi_replace_with_seq_vops (gsi, stmts);
1708 /* gsi now points at the assignment to the lhs, get a
1709 stmt iterator to the memcpy call.
1710 ??? We can't use gsi_for_stmt as that doesn't work when the
1711 CFG isn't built yet. */
1712 gimple_stmt_iterator gsi2 = *gsi;
1713 gsi_prev (&gsi2);
1714 fold_stmt (&gsi2);
1715 }
1716 else
1717 {
1718 gsi_replace_with_seq_vops (gsi, stmts);
1719 fold_stmt (gsi);
1720 }
1721 return true;
1722 }
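
/* Sketch of the decomposition above (hypothetical temporaries): when the
   source length is known,  strcat (dst, "xy");  becomes

     tmp = strlen (dst);
     newdst = dst + tmp;
     memcpy (newdst, "xy", 3);  <- strlen ("xy") + 1 for the terminating NUL

   which exposes the copy to store-by-pieces expansion.  */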
1723
 1724 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
 1725 are the arguments to the call. */
1726
1727 static bool
1728 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1729 {
1730 gimple *stmt = gsi_stmt (*gsi);
1731 tree dest = gimple_call_arg (stmt, 0);
1732 tree src = gimple_call_arg (stmt, 1);
1733 tree size = gimple_call_arg (stmt, 2);
1734 tree fn;
1735 const char *p;
1736
1737
1738 p = c_getstr (src);
1739 /* If the SRC parameter is "", return DEST. */
1740 if (p && *p == '\0')
1741 {
1742 replace_call_with_value (gsi, dest);
1743 return true;
1744 }
1745
1746 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1747 return false;
1748
1749 /* If __builtin_strcat_chk is used, assume strcat is available. */
1750 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1751 if (!fn)
1752 return false;
1753
1754 gimple *repl = gimple_build_call (fn, 2, dest, src);
1755 replace_call_with_call_and_fold (gsi, repl);
1756 return true;
1757 }
1758
1759 /* Simplify a call to the strncat builtin. */
1760
1761 static bool
1762 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1763 {
1764 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1765 tree dst = gimple_call_arg (stmt, 0);
1766 tree src = gimple_call_arg (stmt, 1);
1767 tree len = gimple_call_arg (stmt, 2);
1768
1769 const char *p = c_getstr (src);
1770
1771 /* If the requested length is zero, or the src parameter string
1772 length is zero, return the dst parameter. */
1773 if (integer_zerop (len) || (p && *p == '\0'))
1774 {
1775 replace_call_with_value (gsi, dst);
1776 return true;
1777 }
1778
1779 /* If the requested len is greater than or equal to the string
1780 length, call strcat. */
1781 if (TREE_CODE (len) == INTEGER_CST && p
1782 && compare_tree_int (len, strlen (p)) >= 0)
1783 {
1784 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1785
1786 /* If the replacement _DECL isn't initialized, don't do the
1787 transformation. */
1788 if (!fn)
1789 return false;
1790
1791 gcall *repl = gimple_build_call (fn, 2, dst, src);
1792 replace_call_with_call_and_fold (gsi, repl);
1793 return true;
1794 }
1795
1796 return false;
1797 }
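
/* For example (an illustrative sketch; D and S are hypothetical):

     strncat (d, s, 0);       =>  d  (call elided)
     strncat (d, "abc", 8);   =>  strcat (d, "abc")

   the latter because a bound >= the length of the source string makes
   the two calls equivalent. */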
1798
1799 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1800 LEN, and SIZE. */
1801
1802 static bool
1803 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1804 {
1805 gimple *stmt = gsi_stmt (*gsi);
1806 tree dest = gimple_call_arg (stmt, 0);
1807 tree src = gimple_call_arg (stmt, 1);
1808 tree len = gimple_call_arg (stmt, 2);
1809 tree size = gimple_call_arg (stmt, 3);
1810 tree fn;
1811 const char *p;
1812
1813 p = c_getstr (src);
1814 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1815 if ((p && *p == '\0')
1816 || integer_zerop (len))
1817 {
1818 replace_call_with_value (gsi, dest);
1819 return true;
1820 }
1821
1822 if (! tree_fits_uhwi_p (size))
1823 return false;
1824
1825 if (! integer_all_onesp (size))
1826 {
1827 tree src_len = c_strlen (src, 1);
1828 if (src_len
1829 && tree_fits_uhwi_p (src_len)
1830 && tree_fits_uhwi_p (len)
1831 && ! tree_int_cst_lt (len, src_len))
1832 {
1833 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1834 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1835 if (!fn)
1836 return false;
1837
1838 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1839 replace_call_with_call_and_fold (gsi, repl);
1840 return true;
1841 }
1842 return false;
1843 }
1844
1845 /* If __builtin_strncat_chk is used, assume strncat is available. */
1846 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1847 if (!fn)
1848 return false;
1849
1850 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1851 replace_call_with_call_and_fold (gsi, repl);
1852 return true;
1853 }
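
/* A sketch of the possible outcomes (illustrative only; D and S are
   hypothetical, OS stands for a known object-size constant):

     __builtin___strncat_chk (d, "", n, os);          =>  d
     __builtin___strncat_chk (d, "ab", 4, os);        =>  __builtin___strcat_chk (d, "ab", os)
     __builtin___strncat_chk (d, s, n, (size_t) -1);  =>  strncat (d, s, n)  */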
1854
1855 /* Build and append gimple statements to STMTS that load the first
1856 character of the memory location identified by STR. LOC is the
1857 location of the statement. */
1858
1859 static tree
1860 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1861 {
1862 tree var;
1863
1864 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1865 tree cst_uchar_ptr_node
1866 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1867 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1868
1869 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1870 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1871 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1872
1873 gimple_assign_set_lhs (stmt, var);
1874 gimple_seq_add_stmt_without_update (stmts, stmt);
1875
1876 return var;
1877 }
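
/* The appended sequence is a single one-byte load through a const
   unsigned char pointer, roughly (a GIMPLE sketch; the SSA names are
   illustrative):

     _1 = MEM[(const unsigned char *)str_2];  */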
1878
1879 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
1880 iterator. FCODE is the BUILT_IN_* code of the builtin. */
1881
1882 static bool
1883 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1884 {
1885 gimple *stmt = gsi_stmt (*gsi);
1886 tree callee = gimple_call_fndecl (stmt);
1887 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1888
1889 tree type = integer_type_node;
1890 tree str1 = gimple_call_arg (stmt, 0);
1891 tree str2 = gimple_call_arg (stmt, 1);
1892 tree lhs = gimple_call_lhs (stmt);
1893 HOST_WIDE_INT length = -1;
1894
1895 /* Handle strncmp and strncasecmp functions. */
1896 if (gimple_call_num_args (stmt) == 3)
1897 {
1898 tree len = gimple_call_arg (stmt, 2);
1899 if (tree_fits_uhwi_p (len))
1900 length = tree_to_uhwi (len);
1901 }
1902
1903 /* If the LEN parameter is zero, return zero. */
1904 if (length == 0)
1905 {
1906 replace_call_with_value (gsi, integer_zero_node);
1907 return true;
1908 }
1909
1910 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
1911 if (operand_equal_p (str1, str2, 0))
1912 {
1913 replace_call_with_value (gsi, integer_zero_node);
1914 return true;
1915 }
1916
1917 const char *p1 = c_getstr (str1);
1918 const char *p2 = c_getstr (str2);
1919
1920 /* For known strings, return an immediate value. */
1921 if (p1 && p2)
1922 {
1923 int r = 0;
1924 bool known_result = false;
1925
1926 switch (fcode)
1927 {
1928 case BUILT_IN_STRCMP:
1929 {
1930 r = strcmp (p1, p2);
1931 known_result = true;
1932 break;
1933 }
1934 case BUILT_IN_STRNCMP:
1935 {
1936 if (length == -1)
1937 break;
1938 r = strncmp (p1, p2, length);
1939 known_result = true;
1940 break;
1941 }
1942 /* The only handleable situation is when the strings are equal (result
1943 0), which is already covered by the operand_equal_p case above. */
1944 case BUILT_IN_STRCASECMP:
1945 break;
1946 case BUILT_IN_STRNCASECMP:
1947 {
1948 if (length == -1)
1949 break;
1950 r = strncmp (p1, p2, length);
1951 if (r == 0)
1952 known_result = true;
1953 break;
1954 }
1955 default:
1956 gcc_unreachable ();
1957 }
1958
1959 if (known_result)
1960 {
1961 replace_call_with_value (gsi, build_cmp_result (type, r));
1962 return true;
1963 }
1964 }
1965
1966 bool nonzero_length = length >= 1
1967 || fcode == BUILT_IN_STRCMP
1968 || fcode == BUILT_IN_STRCASECMP;
1969
1970 location_t loc = gimple_location (stmt);
1971
1972 /* If the second arg is "", return *(const unsigned char*)arg1. */
1973 if (p2 && *p2 == '\0' && nonzero_length)
1974 {
1975 gimple_seq stmts = NULL;
1976 tree var = gimple_load_first_char (loc, str1, &stmts);
1977 if (lhs)
1978 {
1979 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
1980 gimple_seq_add_stmt_without_update (&stmts, stmt);
1981 }
1982
1983 gsi_replace_with_seq_vops (gsi, stmts);
1984 return true;
1985 }
1986
1987 /* If the first arg is "", return -*(const unsigned char*)arg2. */
1988 if (p1 && *p1 == '\0' && nonzero_length)
1989 {
1990 gimple_seq stmts = NULL;
1991 tree var = gimple_load_first_char (loc, str2, &stmts);
1992
1993 if (lhs)
1994 {
1995 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
1996 stmt = gimple_build_assign (c, NOP_EXPR, var);
1997 gimple_seq_add_stmt_without_update (&stmts, stmt);
1998
1999 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2000 gimple_seq_add_stmt_without_update (&stmts, stmt);
2001 }
2002
2003 gsi_replace_with_seq_vops (gsi, stmts);
2004 return true;
2005 }
2006
2007 /* If the len parameter is one, return an expression corresponding to
2008 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2009 if (fcode == BUILT_IN_STRNCMP && length == 1)
2010 {
2011 gimple_seq stmts = NULL;
2012 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2013 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2014
2015 if (lhs)
2016 {
2017 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2018 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2019 gimple_seq_add_stmt_without_update (&stmts, convert1);
2020
2021 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2022 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2023 gimple_seq_add_stmt_without_update (&stmts, convert2);
2024
2025 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2026 gimple_seq_add_stmt_without_update (&stmts, stmt);
2027 }
2028
2029 gsi_replace_with_seq_vops (gsi, stmts);
2030 return true;
2031 }
2032
2033 return false;
2034 }
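
/* Summary of the folds above (an illustrative sketch; S, S1 and S2
   are hypothetical names):

     strncmp (s1, s2, 0)   =>  0
     strcmp (s, s)         =>  0
     strcmp ("ab", "ba")   =>  compile-time constant result
     strcmp (s, "")        =>  *(const unsigned char *) s
     strcmp ("", s)        =>  -*(const unsigned char *) s
     strncmp (s1, s2, 1)   =>  *(const unsigned char *) s1
                               - *(const unsigned char *) s2  */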
2035
2036 /* Fold a call to the memchr pointed by GSI iterator. */
2037
2038 static bool
2039 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2040 {
2041 gimple *stmt = gsi_stmt (*gsi);
2042 tree lhs = gimple_call_lhs (stmt);
2043 tree arg1 = gimple_call_arg (stmt, 0);
2044 tree arg2 = gimple_call_arg (stmt, 1);
2045 tree len = gimple_call_arg (stmt, 2);
2046
2047 /* If the LEN parameter is zero, return zero. */
2048 if (integer_zerop (len))
2049 {
2050 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2051 return true;
2052 }
2053
2054 char c;
2055 if (TREE_CODE (arg2) != INTEGER_CST
2056 || !tree_fits_uhwi_p (len)
2057 || !target_char_cst_p (arg2, &c))
2058 return false;
2059
2060 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2061 unsigned HOST_WIDE_INT string_length;
2062 const char *p1 = c_getstr (arg1, &string_length);
2063
2064 if (p1)
2065 {
2066 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2067 if (r == NULL)
2068 {
2069 if (length <= string_length)
2070 {
2071 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2072 return true;
2073 }
2074 }
2075 else
2076 {
2077 unsigned HOST_WIDE_INT offset = r - p1;
2078 gimple_seq stmts = NULL;
2079 if (lhs != NULL_TREE)
2080 {
2081 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2082 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2083 arg1, offset_cst);
2084 gimple_seq_add_stmt_without_update (&stmts, stmt);
2085 }
2086 else
2087 gimple_seq_add_stmt_without_update (&stmts,
2088 gimple_build_nop ());
2089
2090 gsi_replace_with_seq_vops (gsi, stmts);
2091 return true;
2092 }
2093 }
2094
2095 return false;
2096 }
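
/* For example (a sketch; P is hypothetical):

     memchr (p, c, 0)          =>  NULL
     memchr ("hello", 'l', 5)  =>  "hello" + 2
     memchr ("hello", 'z', 5)  =>  NULL

   No fold is attempted when the search could run past the known
   string constant without finding the character. */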
2097
2098 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2099 to the call. UNLOCKED is true if this is actually a call to
2100 fputs_unlocked. The transformation is only done when the return
2101 value of the call is ignored, as the replacement functions do not
2102 return the same value. Return false if no simplification
2103 was possible. */
2104
2105 static bool
2106 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2107 tree arg0, tree arg1,
2108 bool unlocked)
2109 {
2110 gimple *stmt = gsi_stmt (*gsi);
2111
2112 /* If we're using an unlocked function, assume the other unlocked
2113 functions exist explicitly. */
2114 tree const fn_fputc = (unlocked
2115 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2116 : builtin_decl_implicit (BUILT_IN_FPUTC));
2117 tree const fn_fwrite = (unlocked
2118 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2119 : builtin_decl_implicit (BUILT_IN_FWRITE));
2120
2121 /* If the return value is used, don't do the transformation. */
2122 if (gimple_call_lhs (stmt))
2123 return false;
2124
2125 /* Get the length of the string passed to fputs. If the length
2126 can't be determined, punt. */
2127 tree len = get_maxval_strlen (arg0, 0);
2128 if (!len
2129 || TREE_CODE (len) != INTEGER_CST)
2130 return false;
2131
2132 switch (compare_tree_int (len, 1))
2133 {
2134 case -1: /* length is 0, delete the call entirely. */
2135 replace_call_with_value (gsi, integer_zero_node);
2136 return true;
2137
2138 case 0: /* length is 1, call fputc. */
2139 {
2140 const char *p = c_getstr (arg0);
2141 if (p != NULL)
2142 {
2143 if (!fn_fputc)
2144 return false;
2145
2146 gimple *repl = gimple_build_call (fn_fputc, 2,
2147 build_int_cst
2148 (integer_type_node, p[0]), arg1);
2149 replace_call_with_call_and_fold (gsi, repl);
2150 return true;
2151 }
2152 }
2153 /* FALLTHROUGH */
2154 case 1: /* length is greater than 1, call fwrite. */
2155 {
2156 /* If optimizing for size, keep fputs. */
2157 if (optimize_function_for_size_p (cfun))
2158 return false;
2159 /* New argument list transforming fputs(string, stream) to
2160 fwrite(string, 1, len, stream). */
2161 if (!fn_fwrite)
2162 return false;
2163
2164 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2165 size_one_node, len, arg1);
2166 replace_call_with_call_and_fold (gsi, repl);
2167 return true;
2168 }
2169 default:
2170 gcc_unreachable ();
2171 }
2172 return false;
2173 }
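
/* The resulting transformations, sketched (illustrative; F is a
   hypothetical FILE * and the fputs return value must be unused):

     fputs ("", f);      =>  call removed
     fputs ("x", f);     =>  fputc ('x', f)
     fputs ("abc", f);   =>  fwrite ("abc", 1, 3, f)   (not at -Os)  */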
2174
2175 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2176 DEST, SRC, LEN, and SIZE are the arguments to the call.
2177 FCODE is the BUILT_IN_* code of the builtin. The fold is only applied
2178 when SIZE is unknown (all ones) or when LEN, or its maximum possible
2179 value, is known not to exceed SIZE. */
2180
2181 static bool
2182 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2183 tree dest, tree src, tree len, tree size,
2184 enum built_in_function fcode)
2185 {
2186 gimple *stmt = gsi_stmt (*gsi);
2187 location_t loc = gimple_location (stmt);
2188 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2189 tree fn;
2190
2191 /* If SRC and DEST are the same (and not volatile), return DEST
2192 (resp. DEST+LEN for __mempcpy_chk). */
2193 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2194 {
2195 if (fcode != BUILT_IN_MEMPCPY_CHK)
2196 {
2197 replace_call_with_value (gsi, dest);
2198 return true;
2199 }
2200 else
2201 {
2202 gimple_seq stmts = NULL;
2203 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2204 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2205 TREE_TYPE (dest), dest, len);
2206 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2207 replace_call_with_value (gsi, temp);
2208 return true;
2209 }
2210 }
2211
2212 if (! tree_fits_uhwi_p (size))
2213 return false;
2214
2215 tree maxlen = get_maxval_strlen (len, 2);
2216 if (! integer_all_onesp (size))
2217 {
2218 if (! tree_fits_uhwi_p (len))
2219 {
2220 /* If LEN is not constant, try MAXLEN too.
2221 For MAXLEN only allow optimizing into the non-_chk function
2222 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2223 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2224 {
2225 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2226 {
2227 /* (void) __mempcpy_chk () can be optimized into
2228 (void) __memcpy_chk (). */
2229 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2230 if (!fn)
2231 return false;
2232
2233 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2234 replace_call_with_call_and_fold (gsi, repl);
2235 return true;
2236 }
2237 return false;
2238 }
2239 }
2240 else
2241 maxlen = len;
2242
2243 if (tree_int_cst_lt (size, maxlen))
2244 return false;
2245 }
2246
2247 fn = NULL_TREE;
2248 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2249 mem{cpy,pcpy,move,set} is available. */
2250 switch (fcode)
2251 {
2252 case BUILT_IN_MEMCPY_CHK:
2253 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2254 break;
2255 case BUILT_IN_MEMPCPY_CHK:
2256 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2257 break;
2258 case BUILT_IN_MEMMOVE_CHK:
2259 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2260 break;
2261 case BUILT_IN_MEMSET_CHK:
2262 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2263 break;
2264 default:
2265 break;
2266 }
2267
2268 if (!fn)
2269 return false;
2270
2271 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2272 replace_call_with_call_and_fold (gsi, repl);
2273 return true;
2274 }
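
/* For instance (an illustrative sketch; D, S and N are hypothetical):

     __builtin___memcpy_chk (d, s, n, (size_t) -1)  =>  memcpy (d, s, n)
     __builtin___memcpy_chk (d, s, 8, 32)           =>  memcpy (d, s, 8)

   i.e. the check is dropped when the object size is unknown or when
   the copied length provably fits. */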
2275
2276 /* Fold a call to the __st[rp]cpy_chk builtin.
2277 DEST, SRC, and SIZE are the arguments to the call.
2278 FCODE is the BUILT_IN_* code of the builtin. The fold is only applied
2279 when SIZE is unknown (all ones) or when the length of SRC, or its
2280 maximum possible value, is known to be smaller than SIZE. */
2281
2282 static bool
2283 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2284 tree dest,
2285 tree src, tree size,
2286 enum built_in_function fcode)
2287 {
2288 gimple *stmt = gsi_stmt (*gsi);
2289 location_t loc = gimple_location (stmt);
2290 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2291 tree len, fn;
2292
2293 /* If SRC and DEST are the same (and not volatile), return DEST. */
2294 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2295 {
2296 replace_call_with_value (gsi, dest);
2297 return true;
2298 }
2299
2300 if (! tree_fits_uhwi_p (size))
2301 return false;
2302
2303 tree maxlen = get_maxval_strlen (src, 1);
2304 if (! integer_all_onesp (size))
2305 {
2306 len = c_strlen (src, 1);
2307 if (! len || ! tree_fits_uhwi_p (len))
2308 {
2309 /* If LEN is not constant, try MAXLEN too.
2310 For MAXLEN only allow optimizing into the non-_chk function
2311 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2312 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2313 {
2314 if (fcode == BUILT_IN_STPCPY_CHK)
2315 {
2316 if (! ignore)
2317 return false;
2318
2319 /* If return value of __stpcpy_chk is ignored,
2320 optimize into __strcpy_chk. */
2321 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2322 if (!fn)
2323 return false;
2324
2325 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2326 replace_call_with_call_and_fold (gsi, repl);
2327 return true;
2328 }
2329
2330 if (! len || TREE_SIDE_EFFECTS (len))
2331 return false;
2332
2333 /* If c_strlen returned something, but not a constant,
2334 transform __strcpy_chk into __memcpy_chk. */
2335 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2336 if (!fn)
2337 return false;
2338
2339 gimple_seq stmts = NULL;
2340 len = gimple_convert (&stmts, loc, size_type_node, len);
2341 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2342 build_int_cst (size_type_node, 1));
2343 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2344 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2345 replace_call_with_call_and_fold (gsi, repl);
2346 return true;
2347 }
2348 }
2349 else
2350 maxlen = len;
2351
2352 if (! tree_int_cst_lt (maxlen, size))
2353 return false;
2354 }
2355
2356 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2357 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2358 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2359 if (!fn)
2360 return false;
2361
2362 gimple *repl = gimple_build_call (fn, 2, dest, src);
2363 replace_call_with_call_and_fold (gsi, repl);
2364 return true;
2365 }
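
/* A sketch of the main cases (illustrative; D and S are hypothetical,
   OS stands for a known object-size constant):

     __builtin___strcpy_chk (d, s, (size_t) -1)  =>  strcpy (d, s)
     __builtin___strcpy_chk (d, "abc", 8)        =>  strcpy (d, "abc")
     (void) __builtin___stpcpy_chk (d, s, os)    =>  __builtin___strcpy_chk (d, s, os)

   the last one applying when the length of S is not known. */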
2366
2367 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2368 are the arguments to the call. FCODE is the BUILT_IN_* code of the
2369 builtin. The fold is only applied when SIZE is unknown (all ones) or
2370 when LEN, or its maximum possible value, does not exceed SIZE. */
2371
2372 static bool
2373 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2374 tree dest, tree src,
2375 tree len, tree size,
2376 enum built_in_function fcode)
2377 {
2378 gimple *stmt = gsi_stmt (*gsi);
2379 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2380 tree fn;
2381
2382 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2383 {
2384 /* If return value of __stpncpy_chk is ignored,
2385 optimize into __strncpy_chk. */
2386 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2387 if (fn)
2388 {
2389 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2390 replace_call_with_call_and_fold (gsi, repl);
2391 return true;
2392 }
2393 }
2394
2395 if (! tree_fits_uhwi_p (size))
2396 return false;
2397
2398 tree maxlen = get_maxval_strlen (len, 2);
2399 if (! integer_all_onesp (size))
2400 {
2401 if (! tree_fits_uhwi_p (len))
2402 {
2403 /* If LEN is not constant, try MAXLEN too.
2404 For MAXLEN only allow optimizing into the non-_chk function
2405 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2406 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2407 return false;
2408 }
2409 else
2410 maxlen = len;
2411
2412 if (tree_int_cst_lt (size, maxlen))
2413 return false;
2414 }
2415
2416 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2417 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2418 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2419 if (!fn)
2420 return false;
2421
2422 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2423 replace_call_with_call_and_fold (gsi, repl);
2424 return true;
2425 }
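
/* For example (a sketch; D and S are hypothetical, OS stands for a
   known object-size constant):

     (void) __builtin___stpncpy_chk (d, s, n, os)    =>  __builtin___strncpy_chk (d, s, n, os)
     __builtin___strncpy_chk (d, s, 8, 32)           =>  strncpy (d, s, 8)
     __builtin___strncpy_chk (d, s, n, (size_t) -1)  =>  strncpy (d, s, n)  */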
2426
2427 /* Fold a function call to the builtin stpcpy with arguments DEST and SRC.
2428 Return false if no simplification can be made. */
2429
2430 static bool
2431 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2432 {
2433 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2434 location_t loc = gimple_location (stmt);
2435 tree dest = gimple_call_arg (stmt, 0);
2436 tree src = gimple_call_arg (stmt, 1);
2437 tree fn, len, lenp1;
2438
2439 /* If the result is unused, replace stpcpy with strcpy. */
2440 if (gimple_call_lhs (stmt) == NULL_TREE)
2441 {
2442 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2443 if (!fn)
2444 return false;
2445 gimple_call_set_fndecl (stmt, fn);
2446 fold_stmt (gsi);
2447 return true;
2448 }
2449
2450 len = c_strlen (src, 1);
2451 if (!len
2452 || TREE_CODE (len) != INTEGER_CST)
2453 return false;
2454
2455 if (optimize_function_for_size_p (cfun)
2456 /* If length is zero it's small enough. */
2457 && !integer_zerop (len))
2458 return false;
2459
2460 /* If the source has a known length replace stpcpy with memcpy. */
2461 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2462 if (!fn)
2463 return false;
2464
2465 gimple_seq stmts = NULL;
2466 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2467 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2468 tem, build_int_cst (size_type_node, 1));
2469 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2470 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2471 gimple_set_vuse (repl, gimple_vuse (stmt));
2472 gimple_set_vdef (repl, gimple_vdef (stmt));
2473 if (gimple_vdef (repl)
2474 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2475 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2476 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2477 /* Replace the result with dest + len. */
2478 stmts = NULL;
2479 tem = gimple_convert (&stmts, loc, sizetype, len);
2480 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2481 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2482 POINTER_PLUS_EXPR, dest, tem);
2483 gsi_replace (gsi, ret, false);
2484 /* Finally fold the memcpy call. */
2485 gimple_stmt_iterator gsi2 = *gsi;
2486 gsi_prev (&gsi2);
2487 fold_stmt (&gsi2);
2488 return true;
2489 }
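
/* The net effect, sketched (illustrative; D is hypothetical and P is
   the used result):

     stpcpy (d, s);          =>  strcpy (d, s)         (result unused)
     p = stpcpy (d, "abc");  =>  memcpy (d, "abc", 4);  p = d + 3;  */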
2490
2491 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2492 Return false if a normal call should be emitted rather than expanding
2493 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2494 BUILT_IN_VSNPRINTF_CHK. */
2496
2497 static bool
2498 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2499 enum built_in_function fcode)
2500 {
2501 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2502 tree dest, size, len, fn, fmt, flag;
2503 const char *fmt_str;
2504
2505 /* Verify the required arguments in the original call. */
2506 if (gimple_call_num_args (stmt) < 5)
2507 return false;
2508
2509 dest = gimple_call_arg (stmt, 0);
2510 len = gimple_call_arg (stmt, 1);
2511 flag = gimple_call_arg (stmt, 2);
2512 size = gimple_call_arg (stmt, 3);
2513 fmt = gimple_call_arg (stmt, 4);
2514
2515 if (! tree_fits_uhwi_p (size))
2516 return false;
2517
2518 if (! integer_all_onesp (size))
2519 {
2520 tree maxlen = get_maxval_strlen (len, 2);
2521 if (! tree_fits_uhwi_p (len))
2522 {
2523 /* If LEN is not constant, try MAXLEN too.
2524 For MAXLEN only allow optimizing into the non-_chk function
2525 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2526 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2527 return false;
2528 }
2529 else
2530 maxlen = len;
2531
2532 if (tree_int_cst_lt (size, maxlen))
2533 return false;
2534 }
2535
2536 if (!init_target_chars ())
2537 return false;
2538
2539 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2540 or if format doesn't contain % chars or is "%s". */
2541 if (! integer_zerop (flag))
2542 {
2543 fmt_str = c_getstr (fmt);
2544 if (fmt_str == NULL)
2545 return false;
2546 if (strchr (fmt_str, target_percent) != NULL
2547 && strcmp (fmt_str, target_percent_s))
2548 return false;
2549 }
2550
2551 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2552 available. */
2553 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2554 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2555 if (!fn)
2556 return false;
2557
2558 /* Replace the called function and its first 5 arguments with 3, retaining
2559 the trailing varargs. */
2560 gimple_call_set_fndecl (stmt, fn);
2561 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2562 gimple_call_set_arg (stmt, 0, dest);
2563 gimple_call_set_arg (stmt, 1, len);
2564 gimple_call_set_arg (stmt, 2, fmt);
2565 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2566 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2567 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2568 fold_stmt (gsi);
2569 return true;
2570 }
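
/* For instance (a sketch; D and I are hypothetical):

     __builtin___snprintf_chk (d, n, 0, (size_t) -1, "%d", i)
       =>  snprintf (d, n, "%d", i)

   i.e. with flag 0 and an unknown object size the checking variant
   degenerates to the plain call. */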
2571
2572 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
2573 Return false if a normal call should be emitted rather than
2574 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2575 or BUILT_IN_VSPRINTF_CHK. */
2576
2577 static bool
2578 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2579 enum built_in_function fcode)
2580 {
2581 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2582 tree dest, size, len, fn, fmt, flag;
2583 const char *fmt_str;
2584 unsigned nargs = gimple_call_num_args (stmt);
2585
2586 /* Verify the required arguments in the original call. */
2587 if (nargs < 4)
2588 return false;
2589 dest = gimple_call_arg (stmt, 0);
2590 flag = gimple_call_arg (stmt, 1);
2591 size = gimple_call_arg (stmt, 2);
2592 fmt = gimple_call_arg (stmt, 3);
2593
2594 if (! tree_fits_uhwi_p (size))
2595 return false;
2596
2597 len = NULL_TREE;
2598
2599 if (!init_target_chars ())
2600 return false;
2601
2602 /* Check whether the format is a literal string constant. */
2603 fmt_str = c_getstr (fmt);
2604 if (fmt_str != NULL)
2605 {
2606 /* If the format doesn't contain % args or %%, we know the size. */
2607 if (strchr (fmt_str, target_percent) == 0)
2608 {
2609 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2610 len = build_int_cstu (size_type_node, strlen (fmt_str));
2611 }
2612 /* If the format is "%s" and the first variadic argument is a string
2613 literal, we know the size too. */
2614 else if (fcode == BUILT_IN_SPRINTF_CHK
2615 && strcmp (fmt_str, target_percent_s) == 0)
2616 {
2617 tree arg;
2618
2619 if (nargs == 5)
2620 {
2621 arg = gimple_call_arg (stmt, 4);
2622 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2623 {
2624 len = c_strlen (arg, 1);
2625 if (! len || ! tree_fits_uhwi_p (len))
2626 len = NULL_TREE;
2627 }
2628 }
2629 }
2630 }
2631
2632 if (! integer_all_onesp (size))
2633 {
2634 if (! len || ! tree_int_cst_lt (len, size))
2635 return false;
2636 }
2637
2638 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2639 or if format doesn't contain % chars or is "%s". */
2640 if (! integer_zerop (flag))
2641 {
2642 if (fmt_str == NULL)
2643 return false;
2644 if (strchr (fmt_str, target_percent) != NULL
2645 && strcmp (fmt_str, target_percent_s))
2646 return false;
2647 }
2648
2649 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2650 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2651 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2652 if (!fn)
2653 return false;
2654
2655 /* Replace the called function and its first 4 arguments with 2, retaining
2656 the trailing varargs. */
2657 gimple_call_set_fndecl (stmt, fn);
2658 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2659 gimple_call_set_arg (stmt, 0, dest);
2660 gimple_call_set_arg (stmt, 1, fmt);
2661 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2662 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2663 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2664 fold_stmt (gsi);
2665 return true;
2666 }
2667
2668 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2669 ORIG may be null if this is a 2-argument call. We don't attempt to
2670 simplify calls with more than 3 arguments.
2671
2672 Return false if no simplification was possible, otherwise return true
2673 after the call has been replaced with its simplified form. */
2675
2676 static bool
2677 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2678 {
2679 gimple *stmt = gsi_stmt (*gsi);
2680 tree dest = gimple_call_arg (stmt, 0);
2681 tree fmt = gimple_call_arg (stmt, 1);
2682 tree orig = NULL_TREE;
2683 const char *fmt_str = NULL;
2684
2685 /* Verify the required arguments in the original call. We deal with two
2686 types of sprintf() calls: 'sprintf (str, fmt)' and
2687 'sprintf (dest, "%s", orig)'. */
2688 if (gimple_call_num_args (stmt) > 3)
2689 return false;
2690
2691 if (gimple_call_num_args (stmt) == 3)
2692 orig = gimple_call_arg (stmt, 2);
2693
2694 /* Check whether the format is a literal string constant. */
2695 fmt_str = c_getstr (fmt);
2696 if (fmt_str == NULL)
2697 return false;
2698
2699 if (!init_target_chars ())
2700 return false;
2701
2702 /* If the format doesn't contain % args or %%, use strcpy. */
2703 if (strchr (fmt_str, target_percent) == NULL)
2704 {
2705 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2706
2707 if (!fn)
2708 return false;
2709
2710 /* Don't optimize sprintf (buf, "abc", ptr++). */
2711 if (orig)
2712 return false;
2713
2714 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2715 'format' is known to contain no % formats. */
2716 gimple_seq stmts = NULL;
2717 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2718 gimple_seq_add_stmt_without_update (&stmts, repl);
2719 if (gimple_call_lhs (stmt))
2720 {
2721 repl = gimple_build_assign (gimple_call_lhs (stmt),
2722 build_int_cst (integer_type_node,
2723 strlen (fmt_str)));
2724 gimple_seq_add_stmt_without_update (&stmts, repl);
2725 gsi_replace_with_seq_vops (gsi, stmts);
2726 /* gsi now points at the assignment to the lhs, get a
2727 stmt iterator to the strcpy call.
2728 ??? We can't use gsi_for_stmt as that doesn't work when the
2729 CFG isn't built yet. */
2730 gimple_stmt_iterator gsi2 = *gsi;
2731 gsi_prev (&gsi2);
2732 fold_stmt (&gsi2);
2733 }
2734 else
2735 {
2736 gsi_replace_with_seq_vops (gsi, stmts);
2737 fold_stmt (gsi);
2738 }
2739 return true;
2740 }
2741
2742 /* If the format is "%s", use strcpy; if the result is used, the length of ORIG must be known. */
2743 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2744 {
2745 tree fn;
2746 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2747
2748 if (!fn)
2749 return false;
2750
2751 /* Don't crash on sprintf (str1, "%s"). */
2752 if (!orig)
2753 return false;
2754
2755 tree orig_len = NULL_TREE;
2756 if (gimple_call_lhs (stmt))
2757 {
2758 orig_len = get_maxval_strlen (orig, 0);
2759 if (!orig_len)
2760 return false;
2761 }
2762
2763 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2764 gimple_seq stmts = NULL;
2765 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2766 gimple_seq_add_stmt_without_update (&stmts, repl);
2767 if (gimple_call_lhs (stmt))
2768 {
2769 if (!useless_type_conversion_p (integer_type_node,
2770 TREE_TYPE (orig_len)))
2771 orig_len = fold_convert (integer_type_node, orig_len);
2772 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2773 gimple_seq_add_stmt_without_update (&stmts, repl);
2774 gsi_replace_with_seq_vops (gsi, stmts);
2775 /* gsi now points at the assignment to the lhs, get a
2776 stmt iterator to the strcpy call.
2777 ??? We can't use gsi_for_stmt as that doesn't work when the
2778 CFG isn't built yet. */
2779 gimple_stmt_iterator gsi2 = *gsi;
2780 gsi_prev (&gsi2);
2781 fold_stmt (&gsi2);
2782 }
2783 else
2784 {
2785 gsi_replace_with_seq_vops (gsi, stmts);
2786 fold_stmt (gsi);
2787 }
2788 return true;
2789 }
2790 return false;
2791 }
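
/* For example (an illustrative sketch; D and S are hypothetical):

     sprintf (d, "hello");   =>  strcpy (d, "hello")   (result, if used, is 5)
     sprintf (d, "%s", s);   =>  strcpy (d, s)

   where the second form additionally needs the length of S to be
   known whenever the return value is used. */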
2792
2793 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2794 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2795 attempt to simplify calls with more than 4 arguments.
2796
2797 Return false if no simplification was possible, otherwise return true
2798 after the call has been replaced with its simplified form. */
2800
2801 static bool
2802 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2803 {
2804 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2805 tree dest = gimple_call_arg (stmt, 0);
2806 tree destsize = gimple_call_arg (stmt, 1);
2807 tree fmt = gimple_call_arg (stmt, 2);
2808 tree orig = NULL_TREE;
2809 const char *fmt_str = NULL;
2810
2811 if (gimple_call_num_args (stmt) > 4)
2812 return false;
2813
2814 if (gimple_call_num_args (stmt) == 4)
2815 orig = gimple_call_arg (stmt, 3);
2816
2817 if (!tree_fits_uhwi_p (destsize))
2818 return false;
2819 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2820
2821 /* Check whether the format is a literal string constant. */
2822 fmt_str = c_getstr (fmt);
2823 if (fmt_str == NULL)
2824 return false;
2825
2826 if (!init_target_chars ())
2827 return false;
2828
2829 /* If the format doesn't contain % args or %%, use strcpy. */
2830 if (strchr (fmt_str, target_percent) == NULL)
2831 {
2832 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2833 if (!fn)
2834 return false;
2835
2836 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2837 if (orig)
2838 return false;
2839
2840 /* We could expand this as
2841 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2842 or to
2843 memcpy (str, fmt_with_nul_at_cstm1, cst);
2844 but in the former case that might increase code size
2845 and in the latter case grow .rodata section too much.
2846 So punt for now. */
2847 size_t len = strlen (fmt_str);
2848 if (len >= destlen)
2849 return false;
2850
2851 gimple_seq stmts = NULL;
2852 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2853 gimple_seq_add_stmt_without_update (&stmts, repl);
2854 if (gimple_call_lhs (stmt))
2855 {
2856 repl = gimple_build_assign (gimple_call_lhs (stmt),
2857 build_int_cst (integer_type_node, len));
2858 gimple_seq_add_stmt_without_update (&stmts, repl);
2859 gsi_replace_with_seq_vops (gsi, stmts);
2860 /* gsi now points at the assignment to the lhs, get a
2861 stmt iterator to the strcpy call.
2862 ??? We can't use gsi_for_stmt as that doesn't work when the
2863 CFG isn't built yet. */
2864 gimple_stmt_iterator gsi2 = *gsi;
2865 gsi_prev (&gsi2);
2866 fold_stmt (&gsi2);
2867 }
2868 else
2869 {
2870 gsi_replace_with_seq_vops (gsi, stmts);
2871 fold_stmt (gsi);
2872 }
2873 return true;
2874 }
2875
2876 /* If the format is "%s", use strcpy if strlen (ORIG) is known and smaller than DESTSIZE. */
2877 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2878 {
2879 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2880 if (!fn)
2881 return false;
2882
2883 /* Don't crash on snprintf (str1, cst, "%s"). */
2884 if (!orig)
2885 return false;
2886
2887 tree orig_len = get_maxval_strlen (orig, 0);
2888 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2889 return false;
2890
2891 /* We could expand this as
2892 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2893 or to
2894 memcpy (str1, str2_with_nul_at_cstm1, cst);
2895 but in the former case that might increase code size
2896 and in the latter case grow .rodata section too much.
2897 So punt for now. */
2898 if (compare_tree_int (orig_len, destlen) >= 0)
2899 return false;
2900
2901 /* Convert snprintf (str1, cst, "%s", str2) into
2902 strcpy (str1, str2) if strlen (str2) < cst. */
2903 gimple_seq stmts = NULL;
2904 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2905 gimple_seq_add_stmt_without_update (&stmts, repl);
2906 if (gimple_call_lhs (stmt))
2907 {
2908 if (!useless_type_conversion_p (integer_type_node,
2909 TREE_TYPE (orig_len)))
2910 orig_len = fold_convert (integer_type_node, orig_len);
2911 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2912 gimple_seq_add_stmt_without_update (&stmts, repl);
2913 gsi_replace_with_seq_vops (gsi, stmts);
2914 /* gsi now points at the assignment to the lhs, get a
2915 stmt iterator to the strcpy call.
2916 ??? We can't use gsi_for_stmt as that doesn't work when the
2917 CFG isn't built yet. */
2918 gimple_stmt_iterator gsi2 = *gsi;
2919 gsi_prev (&gsi2);
2920 fold_stmt (&gsi2);
2921 }
2922 else
2923 {
2924 gsi_replace_with_seq_vops (gsi, stmts);
2925 fold_stmt (gsi);
2926 }
2927 return true;
2928 }
2929 return false;
2930 }
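
/* For example (a sketch; D is hypothetical and 32 is its size bound):

     snprintf (d, 32, "abc");       =>  strcpy (d, "abc")   (result 3)
     snprintf (d, 32, "%s", "xy");  =>  strcpy (d, "xy")    (result 2)

   both only because the copied length is provably smaller than the
   bound, so no truncation could have occurred. */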
2931
2932 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2933 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2934 more than 3 arguments, and ARG may be null in the 2-argument case.
2935
2936 Return false if no simplification was possible, otherwise return true
2937 after the call has been replaced. FCODE is the BUILT_IN_*
2938 code of the function to be simplified. */
2939
2940 static bool
2941 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2942 tree fp, tree fmt, tree arg,
2943 enum built_in_function fcode)
2944 {
2945 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2946 tree fn_fputc, fn_fputs;
2947 const char *fmt_str = NULL;
2948
2949 /* If the return value is used, don't do the transformation. */
2950 if (gimple_call_lhs (stmt) != NULL_TREE)
2951 return false;
2952
2953 /* Check whether the format is a literal string constant. */
2954 fmt_str = c_getstr (fmt);
2955 if (fmt_str == NULL)
2956 return false;
2957
2958 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2959 {
2960 /* If we're using an unlocked function, assume the other
2961 unlocked functions exist explicitly. */
2962 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2963 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2964 }
2965 else
2966 {
2967 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2968 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2969 }
2970
2971 if (!init_target_chars ())
2972 return false;
2973
2974 /* If the format doesn't contain % args or %%, use fputs, or drop the call for an empty format string. */
2975 if (strchr (fmt_str, target_percent) == NULL)
2976 {
2977 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2978 && arg)
2979 return false;
2980
2981 /* If the format specifier was "", fprintf does nothing. */
2982 if (fmt_str[0] == '\0')
2983 {
2984 replace_call_with_value (gsi, NULL_TREE);
2985 return true;
2986 }
2987
2988 /* When "string" doesn't contain %, replace all cases of
2989 fprintf (fp, string) with fputs (string, fp). The fputs
2990 builtin will take care of special cases like length == 1. */
2991 if (fn_fputs)
2992 {
2993 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2994 replace_call_with_call_and_fold (gsi, repl);
2995 return true;
2996 }
2997 }
2998
2999 /* The other optimizations can be done only on the non-va_list variants. */
3000 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3001 return false;
3002
3003 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3004 else if (strcmp (fmt_str, target_percent_s) == 0)
3005 {
3006 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3007 return false;
3008 if (fn_fputs)
3009 {
3010 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3011 replace_call_with_call_and_fold (gsi, repl);
3012 return true;
3013 }
3014 }
3015
3016 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3017 else if (strcmp (fmt_str, target_percent_c) == 0)
3018 {
3019 if (!arg
3020 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3021 return false;
3022 if (fn_fputc)
3023 {
3024 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3025 replace_call_with_call_and_fold (gsi, repl);
3026 return true;
3027 }
3028 }
3029
3030 return false;
3031 }
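
/* A sketch of the resulting calls (illustrative; F, S and C are
   hypothetical, and the fprintf return value must be unused):

     fprintf (f, "");        =>  call removed
     fprintf (f, "abc");     =>  fputs ("abc", f)
     fprintf (f, "%s", s);   =>  fputs (s, f)
     fprintf (f, "%c", c);   =>  fputc (c, f)  */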
3032
3033 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3034 FMT and ARG are the arguments to the call; we don't fold cases with
3035 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3036
3037 Return false if no simplification was possible, otherwise return true
3038 after the call has been replaced. FCODE is the BUILT_IN_*
3039 code of the function to be simplified. */
3040
3041 static bool
3042 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3043 tree arg, enum built_in_function fcode)
3044 {
3045 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3046 tree fn_putchar, fn_puts, newarg;
3047 const char *fmt_str = NULL;
3048
3049 /* If the return value is used, don't do the transformation. */
3050 if (gimple_call_lhs (stmt) != NULL_TREE)
3051 return false;
3052
3053 /* Check whether the format is a literal string constant. */
3054 fmt_str = c_getstr (fmt);
3055 if (fmt_str == NULL)
3056 return false;
3057
3058 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3059 {
3060 /* If we're using an unlocked function, assume the other
3061 unlocked functions exist explicitly. */
3062 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3063 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3064 }
3065 else
3066 {
3067 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3068 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3069 }
3070
3071 if (!init_target_chars ())
3072 return false;
3073
3074 if (strcmp (fmt_str, target_percent_s) == 0
3075 || strchr (fmt_str, target_percent) == NULL)
3076 {
3077 const char *str;
3078
3079 if (strcmp (fmt_str, target_percent_s) == 0)
3080 {
3081 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3082 return false;
3083
3084 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3085 return false;
3086
3087 str = c_getstr (arg);
3088 if (str == NULL)
3089 return false;
3090 }
3091 else
3092 {
3093 /* The format specifier doesn't contain any '%' characters. */
3094 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3095 && arg)
3096 return false;
3097 str = fmt_str;
3098 }
3099
3100 /* If the string was "", printf does nothing. */
3101 if (str[0] == '\0')
3102 {
3103 replace_call_with_value (gsi, NULL_TREE);
3104 return true;
3105 }
3106
3107 /* If the string has length of 1, call putchar. */
3108 if (str[1] == '\0')
3109 {
3110 /* Given printf ("c") (where c is any one character),
3111 convert "c"[0] to an int and pass that to the replacement
3112 function. */
3113 newarg = build_int_cst (integer_type_node, str[0]);
3114 if (fn_putchar)
3115 {
3116 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3117 replace_call_with_call_and_fold (gsi, repl);
3118 return true;
3119 }
3120 }
3121 else
3122 {
3123 /* If the string was "string\n", call puts("string"). */
3124 size_t len = strlen (str);
3125 if ((unsigned char)str[len - 1] == target_newline
3126 && (size_t) (int) len == len
3127 && (int) len > 0)
3128 {
3129 char *newstr;
3130 tree offset_node, string_cst;
3131
3132 /* Create a NUL-terminated string that's one char shorter
3133 than the original, stripping off the trailing '\n'. */
3134 newarg = build_string_literal (len, str);
3135 string_cst = string_constant (newarg, &offset_node);
3136 gcc_checking_assert (string_cst
3137 && (TREE_STRING_LENGTH (string_cst)
3138 == (int) len)
3139 && integer_zerop (offset_node)
3140 && (unsigned char)
3141 TREE_STRING_POINTER (string_cst)[len - 1]
3142 == target_newline);
3143 /* build_string_literal creates a new STRING_CST,
3144 modify it in place to avoid double copying. */
3145 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3146 newstr[len - 1] = '\0';
3147 if (fn_puts)
3148 {
3149 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3150 replace_call_with_call_and_fold (gsi, repl);
3151 return true;
3152 }
3153 }
3154 else
3155 /* We'd like to arrange to call fputs(string,stdout) here,
3156 but we need stdout and don't have a way to get it yet. */
3157 return false;
3158 }
3159 }
3160
3161 /* The other optimizations can be done only on the non-va_list variants. */
3162 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3163 return false;
3164
3165 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3166 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3167 {
3168 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3169 return false;
3170 if (fn_puts)
3171 {
3172 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3173 replace_call_with_call_and_fold (gsi, repl);
3174 return true;
3175 }
3176 }
3177
3178 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3179 else if (strcmp (fmt_str, target_percent_c) == 0)
3180 {
3181 if (!arg || ! useless_type_conversion_p (integer_type_node,
3182 TREE_TYPE (arg)))
3183 return false;
3184 if (fn_putchar)
3185 {
3186 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3187 replace_call_with_call_and_fold (gsi, repl);
3188 return true;
3189 }
3190 }
3191
3192 return false;
3193 }
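
/* A sketch of the resulting calls (illustrative; S and C are
   hypothetical, and the printf return value must be unused):

     printf ("");         =>  call removed
     printf ("x");        =>  putchar ('x')
     printf ("abc\n");    =>  puts ("abc")
     printf ("%s\n", s);  =>  puts (s)
     printf ("%c", c);    =>  putchar (c)  */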
3194
3197 /* Fold a call to __builtin_strlen when the maximum length of its argument is known. */
3198
3199 static bool
3200 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3201 {
3202 gimple *stmt = gsi_stmt (*gsi);
3203 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3204 if (!len)
3205 return false;
3206 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3207 replace_call_with_value (gsi, len);
3208 return true;
3209 }
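
/* E.g. strlen (s) where s is known to point to "abc" simply becomes 3
   (an illustrative sketch; the length is whatever get_maxval_strlen
   could determine for the argument). */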
3210
3211 /* Fold a call to __builtin_acc_on_device. */
3212
3213 static bool
3214 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3215 {
3216 /* Defer folding until we know which compiler we're in. */
3217 if (symtab->state != EXPANSION)
3218 return false;
3219
3220 unsigned val_host = GOMP_DEVICE_HOST;
3221 unsigned val_dev = GOMP_DEVICE_NONE;
3222
3223 #ifdef ACCEL_COMPILER
3224 val_host = GOMP_DEVICE_NOT_HOST;
3225 val_dev = ACCEL_COMPILER_acc_device;
3226 #endif
3227
3228 location_t loc = gimple_location (gsi_stmt (*gsi));
3229
3230 tree host_eq = make_ssa_name (boolean_type_node);
3231 gimple *host_ass = gimple_build_assign
3232 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3233 gimple_set_location (host_ass, loc);
3234 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3235
3236 tree dev_eq = make_ssa_name (boolean_type_node);
3237 gimple *dev_ass = gimple_build_assign
3238 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3239 gimple_set_location (dev_ass, loc);
3240 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3241
3242 tree result = make_ssa_name (boolean_type_node);
3243 gimple *result_ass = gimple_build_assign
3244 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3245 gimple_set_location (result_ass, loc);
3246 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3247
3248 replace_call_with_value (gsi, result);
3249
3250 return true;
3251 }
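
/* The call is thus lowered to two comparisons against compile-time
   constants (a GIMPLE sketch; the SSA names are illustrative):

     host_eq_1 = arg_2 == VAL_HOST;
     dev_eq_3 = arg_2 == VAL_DEV;
     result_4 = host_eq_1 | dev_eq_3;  */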
3252
3253 /* Fold the non-target builtin at *GSI and return whether any simplification
3254 was made. */
3255
3256 static bool
3257 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3258 {
3259 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3260 tree callee = gimple_call_fndecl (stmt);
3261
3262 /* Give up for always_inline inline builtins until they are
3263 inlined. */
3264 if (avoid_folding_inline_builtin (callee))
3265 return false;
3266
3267 unsigned n = gimple_call_num_args (stmt);
3268 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3269 switch (fcode)
3270 {
3271 case BUILT_IN_BZERO:
3272 return gimple_fold_builtin_memset (gsi, integer_zero_node,
3273 gimple_call_arg (stmt, 1));
3274 case BUILT_IN_MEMSET:
3275 return gimple_fold_builtin_memset (gsi,
3276 gimple_call_arg (stmt, 1),
3277 gimple_call_arg (stmt, 2));
3278 case BUILT_IN_BCOPY:
3279 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
3280 gimple_call_arg (stmt, 0), 3);
3281 case BUILT_IN_MEMCPY:
3282 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3283 gimple_call_arg (stmt, 1), 0);
3284 case BUILT_IN_MEMPCPY:
3285 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3286 gimple_call_arg (stmt, 1), 1);
3287 case BUILT_IN_MEMMOVE:
3288 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3289 gimple_call_arg (stmt, 1), 3);
3290 case BUILT_IN_SPRINTF_CHK:
3291 case BUILT_IN_VSPRINTF_CHK:
3292 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3293 case BUILT_IN_STRCAT_CHK:
3294 return gimple_fold_builtin_strcat_chk (gsi);
3295 case BUILT_IN_STRNCAT_CHK:
3296 return gimple_fold_builtin_strncat_chk (gsi);
3297 case BUILT_IN_STRLEN:
3298 return gimple_fold_builtin_strlen (gsi);
3299 case BUILT_IN_STRCPY:
3300 return gimple_fold_builtin_strcpy (gsi,
3301 gimple_call_arg (stmt, 0),
3302 gimple_call_arg (stmt, 1));
3303 case BUILT_IN_STRNCPY:
3304 return gimple_fold_builtin_strncpy (gsi,
3305 gimple_call_arg (stmt, 0),
3306 gimple_call_arg (stmt, 1),
3307 gimple_call_arg (stmt, 2));
3308 case BUILT_IN_STRCAT:
3309 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3310 gimple_call_arg (stmt, 1));
3311 case BUILT_IN_STRNCAT:
3312 return gimple_fold_builtin_strncat (gsi);
3313 case BUILT_IN_INDEX:
3314 case BUILT_IN_STRCHR:
3315 return gimple_fold_builtin_strchr (gsi, false);
3316 case BUILT_IN_RINDEX:
3317 case BUILT_IN_STRRCHR:
3318 return gimple_fold_builtin_strchr (gsi, true);
3319 case BUILT_IN_STRSTR:
3320 return gimple_fold_builtin_strstr (gsi);
3321 case BUILT_IN_STRCMP:
3322 case BUILT_IN_STRCASECMP:
3323 case BUILT_IN_STRNCMP:
3324 case BUILT_IN_STRNCASECMP:
3325 return gimple_fold_builtin_string_compare (gsi);
3326 case BUILT_IN_MEMCHR:
3327 return gimple_fold_builtin_memchr (gsi);
3328 case BUILT_IN_FPUTS:
3329 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3330 gimple_call_arg (stmt, 1), false);
3331 case BUILT_IN_FPUTS_UNLOCKED:
3332 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3333 gimple_call_arg (stmt, 1), true);
3334 case BUILT_IN_MEMCPY_CHK:
3335 case BUILT_IN_MEMPCPY_CHK:
3336 case BUILT_IN_MEMMOVE_CHK:
3337 case BUILT_IN_MEMSET_CHK:
3338 return gimple_fold_builtin_memory_chk (gsi,
3339 gimple_call_arg (stmt, 0),
3340 gimple_call_arg (stmt, 1),
3341 gimple_call_arg (stmt, 2),
3342 gimple_call_arg (stmt, 3),
3343 fcode);
3344 case BUILT_IN_STPCPY:
3345 return gimple_fold_builtin_stpcpy (gsi);
3346 case BUILT_IN_STRCPY_CHK:
3347 case BUILT_IN_STPCPY_CHK:
3348 return gimple_fold_builtin_stxcpy_chk (gsi,
3349 gimple_call_arg (stmt, 0),
3350 gimple_call_arg (stmt, 1),
3351 gimple_call_arg (stmt, 2),
3352 fcode);
3353 case BUILT_IN_STRNCPY_CHK:
3354 case BUILT_IN_STPNCPY_CHK:
3355 return gimple_fold_builtin_stxncpy_chk (gsi,
3356 gimple_call_arg (stmt, 0),
3357 gimple_call_arg (stmt, 1),
3358 gimple_call_arg (stmt, 2),
3359 gimple_call_arg (stmt, 3),
3360 fcode);
3361 case BUILT_IN_SNPRINTF_CHK:
3362 case BUILT_IN_VSNPRINTF_CHK:
3363 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3364 case BUILT_IN_SNPRINTF:
3365 return gimple_fold_builtin_snprintf (gsi);
3366 case BUILT_IN_SPRINTF:
3367 return gimple_fold_builtin_sprintf (gsi);
3368 case BUILT_IN_FPRINTF:
3369 case BUILT_IN_FPRINTF_UNLOCKED:
3370 case BUILT_IN_VFPRINTF:
3371 if (n == 2 || n == 3)
3372 return gimple_fold_builtin_fprintf (gsi,
3373 gimple_call_arg (stmt, 0),
3374 gimple_call_arg (stmt, 1),
3375 n == 3
3376 ? gimple_call_arg (stmt, 2)
3377 : NULL_TREE,
3378 fcode);
3379 break;
3380 case BUILT_IN_FPRINTF_CHK:
3381 case BUILT_IN_VFPRINTF_CHK:
3382 if (n == 3 || n == 4)
3383 return gimple_fold_builtin_fprintf (gsi,
3384 gimple_call_arg (stmt, 0),
3385 gimple_call_arg (stmt, 2),
3386 n == 4
3387 ? gimple_call_arg (stmt, 3)
3388 : NULL_TREE,
3389 fcode);
3390 break;
3391 case BUILT_IN_PRINTF:
3392 case BUILT_IN_PRINTF_UNLOCKED:
3393 case BUILT_IN_VPRINTF:
3394 if (n == 1 || n == 2)
3395 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3396 n == 2
3397 ? gimple_call_arg (stmt, 1)
3398 : NULL_TREE, fcode);
3399 break;
3400 case BUILT_IN_PRINTF_CHK:
3401 case BUILT_IN_VPRINTF_CHK:
3402 if (n == 2 || n == 3)
3403 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3404 n == 3
3405 ? gimple_call_arg (stmt, 2)
3406 : NULL_TREE, fcode);
3407 break;
3408 case BUILT_IN_ACC_ON_DEVICE:
3409 return gimple_fold_builtin_acc_on_device (gsi,
3410 gimple_call_arg (stmt, 0));
3411 default:;
3412 }
3413
3414 /* Try the generic builtin folder. */
3415 bool ignore = (gimple_call_lhs (stmt) == NULL);
3416 tree result = fold_call_stmt (stmt, ignore);
3417 if (result)
3418 {
3419 if (ignore)
3420 STRIP_NOPS (result);
3421 else
3422 result = fold_convert (gimple_call_return_type (stmt), result);
3423 if (!update_call_from_tree (gsi, result))
3424 gimplify_and_update_call_from_tree (gsi, result);
3425 return true;
3426 }
3427
3428 return false;
3429 }
3430
3431 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3432 function calls to constants, where possible. */
3433
3434 static tree
3435 fold_internal_goacc_dim (const gimple *call)
3436 {
3437 int axis = oacc_get_ifn_dim_arg (call);
3438 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3439 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3440 tree result = NULL_TREE;
3441
3442 /* If the size is 1, or we only want the size and it is not dynamic,
3443 we know the answer. */
3444 if (size == 1 || (!is_pos && size))
3445 {
3446 tree type = TREE_TYPE (gimple_call_lhs (call));
3447 result = build_int_cst (type, size - is_pos);
3448 }
3449
3450 return result;
3451 }
3452
3453 /* Return true if STMT is an __atomic_compare_exchange_N call which is
3454 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
3455 argument is &var, where var is only addressable because of such calls. */
3456
3457 bool
3458 optimize_atomic_compare_exchange_p (gimple *stmt)
3459 {
3460 if (gimple_call_num_args (stmt) != 6
3461 || !flag_inline_atomics
3462 || !optimize
3463 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3464 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3465 || !gimple_vdef (stmt)
3466 || !gimple_vuse (stmt))
3467 return false;
3468
3469 tree fndecl = gimple_call_fndecl (stmt);
3470 switch (DECL_FUNCTION_CODE (fndecl))
3471 {
3472 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3473 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3474 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3475 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3476 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3477 break;
3478 default:
3479 return false;
3480 }
3481
3482 tree expected = gimple_call_arg (stmt, 1);
3483 if (TREE_CODE (expected) != ADDR_EXPR
3484 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3485 return false;
3486
3487 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3488 if (!is_gimple_reg_type (etype)
3489 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3490 || TREE_THIS_VOLATILE (etype)
3491 || VECTOR_TYPE_P (etype)
3492 || TREE_CODE (etype) == COMPLEX_TYPE
3493 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3494 might not preserve all the bits. See PR71716. */
3495 || SCALAR_FLOAT_TYPE_P (etype)
3496 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3497 return false;
3498
3499 tree weak = gimple_call_arg (stmt, 3);
3500 if (!integer_zerop (weak) && !integer_onep (weak))
3501 return false;
3502
3503 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3504 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3505 machine_mode mode = TYPE_MODE (itype);
3506
3507 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3508 == CODE_FOR_nothing
3509 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3510 return false;
3511
3512 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3513 return false;
3514
3515 return true;
3516 }
3517
3518 /* Fold
3519 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3520 into
3521 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3522 i = IMAGPART_EXPR <t>;
3523 r = (_Bool) i;
3524 e = REALPART_EXPR <t>; */
3525
3526 void
3527 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3528 {
3529 gimple *stmt = gsi_stmt (*gsi);
3530 tree fndecl = gimple_call_fndecl (stmt);
3531 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3532 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3533 tree ctype = build_complex_type (itype);
3534 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3535 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3536 expected);
3537 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3538 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3539 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3540 {
3541 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3542 build1 (VIEW_CONVERT_EXPR, itype,
3543 gimple_assign_lhs (g)));
3544 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3545 }
3546 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3547 + int_size_in_bytes (itype);
3548 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3549 gimple_call_arg (stmt, 0),
3550 gimple_assign_lhs (g),
3551 gimple_call_arg (stmt, 2),
3552 build_int_cst (integer_type_node, flag),
3553 gimple_call_arg (stmt, 4),
3554 gimple_call_arg (stmt, 5));
3555 tree lhs = make_ssa_name (ctype);
3556 gimple_call_set_lhs (g, lhs);
3557 gimple_set_vdef (g, gimple_vdef (stmt));
3558 gimple_set_vuse (g, gimple_vuse (stmt));
3559 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3560 if (gimple_call_lhs (stmt))
3561 {
3562 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3563 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3564 build1 (IMAGPART_EXPR, itype, lhs));
3565 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3566 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3567 gimple_assign_lhs (g));
3568 }
3569 gsi_replace (gsi, g, true);
3570 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3571 build1 (REALPART_EXPR, itype, lhs));
3572 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3573 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3574 {
3575 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3576 VIEW_CONVERT_EXPR,
3577 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3578 gimple_assign_lhs (g)));
3579 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3580 }
3581 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3582 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3583 *gsi = gsiret;
3584 }
3585
3586 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
3587 signed precision, doesn't fit into TYPE. The test for overflow applies
3588 regardless of -fwrapv, and even for unsigned types. */
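/* Worked example (illustration): for TYPE unsigned char (precision 8),
   200 + 100 computes to 300 in the double-width type; 300 needs 9 bits
   as an unsigned value, so the function returns true.  Likewise for
   signed char, 100 + 100 = 200 needs 9 bits once the sign bit is
   accounted for, so it overflows as well.  */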
3589
3590 bool
3591 arith_overflowed_p (enum tree_code code, const_tree type,
3592 const_tree arg0, const_tree arg1)
3593 {
3594 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3595 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3596 widest2_int_cst;
3597 widest2_int warg0 = widest2_int_cst (arg0);
3598 widest2_int warg1 = widest2_int_cst (arg1);
3599 widest2_int wres;
3600 switch (code)
3601 {
3602 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3603 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3604 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3605 default: gcc_unreachable ();
3606 }
3607 signop sign = TYPE_SIGN (type);
3608 if (sign == UNSIGNED && wi::neg_p (wres))
3609 return true;
3610 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3611 }
3612
3613 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3614 The statement may be replaced by another statement, e.g., if the call
3615 simplifies to a constant value. Return true if any changes were made.
3616 It is assumed that the operands have been previously folded. */
3617
3618 static bool
3619 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3620 {
3621 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3622 tree callee;
3623 bool changed = false;
3624 unsigned i;
3625
3626 /* Fold *& in call arguments. */
3627 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3628 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3629 {
3630 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3631 if (tmp)
3632 {
3633 gimple_call_set_arg (stmt, i, tmp);
3634 changed = true;
3635 }
3636 }
3637
3638 /* Check for virtual calls that became direct calls. */
3639 callee = gimple_call_fn (stmt);
3640 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3641 {
3642 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3643 {
3644 if (dump_file && virtual_method_call_p (callee)
3645 && !possible_polymorphic_call_target_p
3646 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3647 (OBJ_TYPE_REF_EXPR (callee)))))
3648 {
3649 fprintf (dump_file,
3650 "Type inheritance inconsistent devirtualization of ");
3651 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3652 fprintf (dump_file, " to ");
3653 print_generic_expr (dump_file, callee, TDF_SLIM);
3654 fprintf (dump_file, "\n");
3655 }
3656
3657 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3658 changed = true;
3659 }
3660 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3661 {
3662 bool final;
3663 vec <cgraph_node *>targets
3664 = possible_polymorphic_call_targets (callee, stmt, &final);
3665 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3666 {
3667 tree lhs = gimple_call_lhs (stmt);
3668 if (dump_enabled_p ())
3669 {
3670 location_t loc = gimple_location_safe (stmt);
3671 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3672 "folding virtual function call to %s\n",
3673 targets.length () == 1
3674 ? targets[0]->name ()
3675 : "__builtin_unreachable");
3676 }
3677 if (targets.length () == 1)
3678 {
3679 tree fndecl = targets[0]->decl;
3680 gimple_call_set_fndecl (stmt, fndecl);
3681 changed = true;
3682 /* If changing the call to __cxa_pure_virtual
3683 or similar noreturn function, adjust gimple_call_fntype
3684 too. */
3685 if (gimple_call_noreturn_p (stmt)
3686 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3687 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3688 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3689 == void_type_node))
3690 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3691 /* If the call becomes noreturn, remove the lhs. */
3692 if (lhs
3693 && gimple_call_noreturn_p (stmt)
3694 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3695 || should_remove_lhs_p (lhs)))
3696 {
3697 if (TREE_CODE (lhs) == SSA_NAME)
3698 {
3699 tree var = create_tmp_var (TREE_TYPE (lhs));
3700 tree def = get_or_create_ssa_default_def (cfun, var);
3701 gimple *new_stmt = gimple_build_assign (lhs, def);
3702 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3703 }
3704 gimple_call_set_lhs (stmt, NULL_TREE);
3705 }
3706 maybe_remove_unused_call_args (cfun, stmt);
3707 }
3708 else
3709 {
3710 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3711 gimple *new_stmt = gimple_build_call (fndecl, 0);
3712 gimple_set_location (new_stmt, gimple_location (stmt));
3713 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3714 {
3715 tree var = create_tmp_var (TREE_TYPE (lhs));
3716 tree def = get_or_create_ssa_default_def (cfun, var);
3717
3718 /* To satisfy the condition for
3719 cgraph_update_edges_for_call_stmt_node,
3720 we need to preserve the GIMPLE_CALL statement
3721 at the position of the GSI iterator. */
3722 update_call_from_tree (gsi, def);
3723 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3724 }
3725 else
3726 {
3727 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3728 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3729 gsi_replace (gsi, new_stmt, false);
3730 }
3731 return true;
3732 }
3733 }
3734 }
3735 }
3736
3737 /* Check for indirect calls that became direct calls, and thus
3738 no longer require a static chain. */
3739 if (gimple_call_chain (stmt))
3740 {
3741 tree fn = gimple_call_fndecl (stmt);
3742 if (fn && !DECL_STATIC_CHAIN (fn))
3743 {
3744 gimple_call_set_chain (stmt, NULL);
3745 changed = true;
3746 }
3747 else
3748 {
3749 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3750 if (tmp)
3751 {
3752 gimple_call_set_chain (stmt, tmp);
3753 changed = true;
3754 }
3755 }
3756 }
3757
3758 if (inplace)
3759 return changed;
3760
3761 /* Check for builtins that CCP can handle using information not
3762 available in the generic fold routines. */
3763 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3764 {
3765 if (gimple_fold_builtin (gsi))
3766 changed = true;
3767 }
3768 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3769 {
3770 changed |= targetm.gimple_fold_builtin (gsi);
3771 }
3772 else if (gimple_call_internal_p (stmt))
3773 {
3774 enum tree_code subcode = ERROR_MARK;
3775 tree result = NULL_TREE;
3776 bool cplx_result = false;
3777 tree overflow = NULL_TREE;
3778 switch (gimple_call_internal_fn (stmt))
3779 {
3780 case IFN_BUILTIN_EXPECT:
3781 result = fold_builtin_expect (gimple_location (stmt),
3782 gimple_call_arg (stmt, 0),
3783 gimple_call_arg (stmt, 1),
3784 gimple_call_arg (stmt, 2));
3785 break;
3786 case IFN_UBSAN_OBJECT_SIZE:
3787 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3788 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3789 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3790 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3791 gimple_call_arg (stmt, 2))))
3792 {
3793 gsi_replace (gsi, gimple_build_nop (), false);
3794 unlink_stmt_vdef (stmt);
3795 release_defs (stmt);
3796 return true;
3797 }
3798 break;
3799 case IFN_GOACC_DIM_SIZE:
3800 case IFN_GOACC_DIM_POS:
3801 result = fold_internal_goacc_dim (stmt);
3802 break;
3803 case IFN_UBSAN_CHECK_ADD:
3804 subcode = PLUS_EXPR;
3805 break;
3806 case IFN_UBSAN_CHECK_SUB:
3807 subcode = MINUS_EXPR;
3808 break;
3809 case IFN_UBSAN_CHECK_MUL:
3810 subcode = MULT_EXPR;
3811 break;
3812 case IFN_ADD_OVERFLOW:
3813 subcode = PLUS_EXPR;
3814 cplx_result = true;
3815 break;
3816 case IFN_SUB_OVERFLOW:
3817 subcode = MINUS_EXPR;
3818 cplx_result = true;
3819 break;
3820 case IFN_MUL_OVERFLOW:
3821 subcode = MULT_EXPR;
3822 cplx_result = true;
3823 break;
3824 default:
3825 break;
3826 }
3827 if (subcode != ERROR_MARK)
3828 {
3829 tree arg0 = gimple_call_arg (stmt, 0);
3830 tree arg1 = gimple_call_arg (stmt, 1);
3831 tree type = TREE_TYPE (arg0);
3832 if (cplx_result)
3833 {
3834 tree lhs = gimple_call_lhs (stmt);
3835 if (lhs == NULL_TREE)
3836 type = NULL_TREE;
3837 else
3838 type = TREE_TYPE (TREE_TYPE (lhs));
3839 }
3840 if (type == NULL_TREE)
3841 ;
3842 /* x = y + 0; x = y - 0; x = y * 0; */
3843 else if (integer_zerop (arg1))
3844 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3845 /* x = 0 + y; x = 0 * y; */
3846 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3847 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3848 /* x = y - y; */
3849 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3850 result = integer_zero_node;
3851 /* x = y * 1; x = 1 * y; */
3852 else if (subcode == MULT_EXPR && integer_onep (arg1))
3853 result = arg0;
3854 else if (subcode == MULT_EXPR && integer_onep (arg0))
3855 result = arg1;
3856 else if (TREE_CODE (arg0) == INTEGER_CST
3857 && TREE_CODE (arg1) == INTEGER_CST)
3858 {
3859 if (cplx_result)
3860 result = int_const_binop (subcode, fold_convert (type, arg0),
3861 fold_convert (type, arg1));
3862 else
3863 result = int_const_binop (subcode, arg0, arg1);
3864 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3865 {
3866 if (cplx_result)
3867 overflow = build_one_cst (type);
3868 else
3869 result = NULL_TREE;
3870 }
3871 }
3872 if (result)
3873 {
3874 if (result == integer_zero_node)
3875 result = build_zero_cst (type);
3876 else if (cplx_result && TREE_TYPE (result) != type)
3877 {
3878 if (TREE_CODE (result) == INTEGER_CST)
3879 {
3880 if (arith_overflowed_p (PLUS_EXPR, type, result,
3881 integer_zero_node))
3882 overflow = build_one_cst (type);
3883 }
3884 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3885 && TYPE_UNSIGNED (type))
3886 || (TYPE_PRECISION (type)
3887 < (TYPE_PRECISION (TREE_TYPE (result))
3888 + (TYPE_UNSIGNED (TREE_TYPE (result))
3889 && !TYPE_UNSIGNED (type)))))
3890 result = NULL_TREE;
3891 if (result)
3892 result = fold_convert (type, result);
3893 }
3894 }
3895 }
3896
3897 if (result)
3898 {
3899 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3900 result = drop_tree_overflow (result);
3901 if (cplx_result)
3902 {
3903 if (overflow == NULL_TREE)
3904 overflow = build_zero_cst (TREE_TYPE (result));
3905 tree ctype = build_complex_type (TREE_TYPE (result));
3906 if (TREE_CODE (result) == INTEGER_CST
3907 && TREE_CODE (overflow) == INTEGER_CST)
3908 result = build_complex (ctype, result, overflow);
3909 else
3910 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3911 ctype, result, overflow);
3912 }
3913 if (!update_call_from_tree (gsi, result))
3914 gimplify_and_update_call_from_tree (gsi, result);
3915 changed = true;
3916 }
3917 }
3918
3919 return changed;
3920 }
3921
3922
3923 /* Return true if NAME has a use on statement STMT. */
3924
3925 static bool
3926 has_use_on_stmt (tree name, gimple *stmt)
3927 {
3928 imm_use_iterator iter;
3929 use_operand_p use_p;
3930 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3931 if (USE_STMT (use_p) == stmt)
3932 return true;
3933 return false;
3934 }
3935
3936 /* Worker for fold_stmt_1, dispatching to pattern-based folding with
3937 gimple_simplify.
3938
3939 Replaces *GSI with the simplification result in RCODE and OPS
3940 and the associated statements in *SEQ. Does the replacement
3941 according to INPLACE and returns true if the operation succeeded. */
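/* For example (illustration): if gimple_simplify reduces the condition
   of a GIMPLE_COND such as "if (x_1 != x_1)" for integral x_1 to the
   INTEGER_CST 0, the code below calls gimple_cond_make_false; if it
   reduces the condition to a bare SSA_NAME y_2, the condition is
   rewritten as y_2 != 0.  */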
3942
3943 static bool
3944 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3945 code_helper rcode, tree *ops,
3946 gimple_seq *seq, bool inplace)
3947 {
3948 gimple *stmt = gsi_stmt (*gsi);
3949
3950 /* Play safe and do not allow abnormals to be mentioned in
3951 newly created statements. See also maybe_push_res_to_seq.
3952 As an exception allow such uses if there was a use of the
3953 same SSA name on the old stmt. */
3954 if ((TREE_CODE (ops[0]) == SSA_NAME
3955 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3956 && !has_use_on_stmt (ops[0], stmt))
3957 || (ops[1]
3958 && TREE_CODE (ops[1]) == SSA_NAME
3959 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3960 && !has_use_on_stmt (ops[1], stmt))
3961 || (ops[2]
3962 && TREE_CODE (ops[2]) == SSA_NAME
3963 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3964 && !has_use_on_stmt (ops[2], stmt))
3965 || (COMPARISON_CLASS_P (ops[0])
3966 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3967 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3968 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3969 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3970 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3971 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3972 return false;
3973
3974 /* Don't insert new statements when INPLACE is true, even if we could
3975 reuse STMT for the final statement. */
3976 if (inplace && !gimple_seq_empty_p (*seq))
3977 return false;
3978
3979 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3980 {
3981 gcc_assert (rcode.is_tree_code ());
3982 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3983 /* GIMPLE_CONDs condition may not throw. */
3984 && (!flag_exceptions
3985 || !cfun->can_throw_non_call_exceptions
3986 || !operation_could_trap_p (rcode,
3987 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3988 false, NULL_TREE)))
3989 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3990 else if (rcode == SSA_NAME)
3991 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3992 build_zero_cst (TREE_TYPE (ops[0])));
3993 else if (rcode == INTEGER_CST)
3994 {
3995 if (integer_zerop (ops[0]))
3996 gimple_cond_make_false (cond_stmt);
3997 else
3998 gimple_cond_make_true (cond_stmt);
3999 }
4000 else if (!inplace)
4001 {
4002 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4003 ops, seq);
4004 if (!res)
4005 return false;
4006 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4007 build_zero_cst (TREE_TYPE (res)));
4008 }
4009 else
4010 return false;
4011 if (dump_file && (dump_flags & TDF_DETAILS))
4012 {
4013 fprintf (dump_file, "gimple_simplified to ");
4014 if (!gimple_seq_empty_p (*seq))
4015 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4016 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4017 0, TDF_SLIM);
4018 }
4019 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4020 return true;
4021 }
4022 else if (is_gimple_assign (stmt)
4023 && rcode.is_tree_code ())
4024 {
4025 if (!inplace
4026 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4027 {
4028 maybe_build_generic_op (rcode,
4029 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4030 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4031 if (dump_file && (dump_flags & TDF_DETAILS))
4032 {
4033 fprintf (dump_file, "gimple_simplified to ");
4034 if (!gimple_seq_empty_p (*seq))
4035 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4036 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4037 0, TDF_SLIM);
4038 }
4039 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4040 return true;
4041 }
4042 }
4043 else if (rcode.is_fn_code ()
4044 && gimple_call_combined_fn (stmt) == rcode)
4045 {
4046 unsigned i;
4047 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4048 {
4049 gcc_assert (ops[i] != NULL_TREE);
4050 gimple_call_set_arg (stmt, i, ops[i]);
4051 }
4052 if (i < 3)
4053 gcc_assert (ops[i] == NULL_TREE);
4054 if (dump_file && (dump_flags & TDF_DETAILS))
4055 {
4056 fprintf (dump_file, "gimple_simplified to ");
4057 if (!gimple_seq_empty_p (*seq))
4058 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4059 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4060 }
4061 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4062 return true;
4063 }
4064 else if (!inplace)
4065 {
4066 if (gimple_has_lhs (stmt))
4067 {
4068 tree lhs = gimple_get_lhs (stmt);
4069 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4070 ops, seq, lhs))
4071 return false;
4072 if (dump_file && (dump_flags & TDF_DETAILS))
4073 {
4074 fprintf (dump_file, "gimple_simplified to ");
4075 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4076 }
4077 gsi_replace_with_seq_vops (gsi, *seq);
4078 return true;
4079 }
4080 else
4081 gcc_unreachable ();
4082 }
4083
4084 return false;
4085 }
4086
4087 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4088
4089 static bool
4090 maybe_canonicalize_mem_ref_addr (tree *t)
4091 {
4092 bool res = false;
4093
4094 if (TREE_CODE (*t) == ADDR_EXPR)
4095 t = &TREE_OPERAND (*t, 0);
4096
4097 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4098 generic vector extension. The actual vector referenced is
4099 view-converted to an array type for this purpose. If the index
4100 is constant, the canonical representation in the middle-end is a
4101 BIT_FIELD_REF, so rewrite the former to the latter here. */
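/* For example (illustration), given
     typedef int v4si __attribute__ ((vector_size (16)));
   an access v[2] represented as VIEW_CONVERT_EXPR<int[4]>(v)[2]
   is rewritten below to BIT_FIELD_REF <v, 32, 64>, i.e. a 32-bit
   piece of v at bit offset 2 * 32 == 64.  */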
4102 if (TREE_CODE (*t) == ARRAY_REF
4103 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4104 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4105 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4106 {
4107 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4108 if (VECTOR_TYPE_P (vtype))
4109 {
4110 tree low = array_ref_low_bound (*t);
4111 if (TREE_CODE (low) == INTEGER_CST)
4112 {
4113 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4114 {
4115 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4116 wi::to_widest (low));
4117 idx = wi::mul (idx, wi::to_widest
4118 (TYPE_SIZE (TREE_TYPE (*t))));
4119 widest_int ext
4120 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4121 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4122 {
4123 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4124 TREE_TYPE (*t),
4125 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4126 TYPE_SIZE (TREE_TYPE (*t)),
4127 wide_int_to_tree (sizetype, idx));
4128 res = true;
4129 }
4130 }
4131 }
4132 }
4133 }
4134
4135 while (handled_component_p (*t))
4136 t = &TREE_OPERAND (*t, 0);
4137
4138 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4139 of invariant addresses into an SSA name MEM_REF address. */
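/* For example (illustration), MEM[&a.b, 8], where field b lives at
   byte offset 4 within a, is canonicalized to MEM[&a, 12] by folding
   the component's offset into the constant offset operand.  */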
4140 if (TREE_CODE (*t) == MEM_REF
4141 || TREE_CODE (*t) == TARGET_MEM_REF)
4142 {
4143 tree addr = TREE_OPERAND (*t, 0);
4144 if (TREE_CODE (addr) == ADDR_EXPR
4145 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4146 || handled_component_p (TREE_OPERAND (addr, 0))))
4147 {
4148 tree base;
4149 HOST_WIDE_INT coffset;
4150 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4151 &coffset);
4152 if (!base)
4153 gcc_unreachable ();
4154
4155 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4156 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4157 TREE_OPERAND (*t, 1),
4158 size_int (coffset));
4159 res = true;
4160 }
4161 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4162 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4163 }
4164
4165 /* Canonicalize back MEM_REFs to plain reference trees if the object
4166 accessed is a decl that has the same access semantics as the MEM_REF. */
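/* For example (illustration), MEM[(int *)&i, 0] for an int decl i with
   matching volatility, TBAA type and alignment is rewritten below to
   the plain reference "i".  */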
4167 if (TREE_CODE (*t) == MEM_REF
4168 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4169 && integer_zerop (TREE_OPERAND (*t, 1))
4170 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4171 {
4172 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4173 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4174 if (/* Same volatile qualification. */
4175 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4176 /* Same TBAA behavior with -fstrict-aliasing. */
4177 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4178 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4179 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4180 /* Same alignment. */
4181 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4182 /* We have to be careful here not to drop a required conversion
4183 from the rhs to the lhs if *t appears on the lhs, or vice-versa
4184 if it appears on the rhs. Thus require strict type
4185 compatibility. */
4186 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4187 {
4188 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4189 res = true;
4190 }
4191 }
4192
4193 /* Canonicalize TARGET_MEM_REF in particular with respect to
4194 the indexes becoming constant. */
4195 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4196 {
4197 tree tem = maybe_fold_tmr (*t);
4198 if (tem)
4199 {
4200 *t = tem;
4201 res = true;
4202 }
4203 }
4204
4205 return res;
4206 }
4207
4208 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4209 distinguishes both cases. */
4210
4211 static bool
4212 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4213 {
4214 bool changed = false;
4215 gimple *stmt = gsi_stmt (*gsi);
4216 bool nowarning = gimple_no_warning_p (stmt);
4217 unsigned i;
4218 fold_defer_overflow_warnings ();
4219
4220 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4221 after propagation.
4222 ??? This shouldn't be done in generic folding but in the
4223 propagation helpers which also know whether an address was
4224 propagated.
4225 Also canonicalize operand order. */
4226 switch (gimple_code (stmt))
4227 {
4228 case GIMPLE_ASSIGN:
4229 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4230 {
4231 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4232 if ((REFERENCE_CLASS_P (*rhs)
4233 || TREE_CODE (*rhs) == ADDR_EXPR)
4234 && maybe_canonicalize_mem_ref_addr (rhs))
4235 changed = true;
4236 tree *lhs = gimple_assign_lhs_ptr (stmt);
4237 if (REFERENCE_CLASS_P (*lhs)
4238 && maybe_canonicalize_mem_ref_addr (lhs))
4239 changed = true;
4240 }
4241 else
4242 {
4243 /* Canonicalize operand order. */
4244 enum tree_code code = gimple_assign_rhs_code (stmt);
4245 if (TREE_CODE_CLASS (code) == tcc_comparison
4246 || commutative_tree_code (code)
4247 || commutative_ternary_tree_code (code))
4248 {
4249 tree rhs1 = gimple_assign_rhs1 (stmt);
4250 tree rhs2 = gimple_assign_rhs2 (stmt);
4251 if (tree_swap_operands_p (rhs1, rhs2))
4252 {
4253 gimple_assign_set_rhs1 (stmt, rhs2);
4254 gimple_assign_set_rhs2 (stmt, rhs1);
4255 if (TREE_CODE_CLASS (code) == tcc_comparison)
4256 gimple_assign_set_rhs_code (stmt,
4257 swap_tree_comparison (code));
4258 changed = true;
4259 }
4260 }
4261 }
4262 break;
4263 case GIMPLE_CALL:
4264 {
4265 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4266 {
4267 tree *arg = gimple_call_arg_ptr (stmt, i);
4268 if (REFERENCE_CLASS_P (*arg)
4269 && maybe_canonicalize_mem_ref_addr (arg))
4270 changed = true;
4271 }
4272 tree *lhs = gimple_call_lhs_ptr (stmt);
4273 if (*lhs
4274 && REFERENCE_CLASS_P (*lhs)
4275 && maybe_canonicalize_mem_ref_addr (lhs))
4276 changed = true;
4277 break;
4278 }
4279 case GIMPLE_ASM:
4280 {
4281 gasm *asm_stmt = as_a <gasm *> (stmt);
4282 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4283 {
4284 tree link = gimple_asm_output_op (asm_stmt, i);
4285 tree op = TREE_VALUE (link);
4286 if (REFERENCE_CLASS_P (op)
4287 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4288 changed = true;
4289 }
4290 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4291 {
4292 tree link = gimple_asm_input_op (asm_stmt, i);
4293 tree op = TREE_VALUE (link);
4294 if ((REFERENCE_CLASS_P (op)
4295 || TREE_CODE (op) == ADDR_EXPR)
4296 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4297 changed = true;
4298 }
4299 }
4300 break;
4301 case GIMPLE_DEBUG:
4302 if (gimple_debug_bind_p (stmt))
4303 {
4304 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4305 if (*val
4306 && (REFERENCE_CLASS_P (*val)
4307 || TREE_CODE (*val) == ADDR_EXPR)
4308 && maybe_canonicalize_mem_ref_addr (val))
4309 changed = true;
4310 }
4311 break;
4312 case GIMPLE_COND:
4313 {
4314 /* Canonicalize operand order. */
4315 tree lhs = gimple_cond_lhs (stmt);
4316 tree rhs = gimple_cond_rhs (stmt);
4317 if (tree_swap_operands_p (lhs, rhs))
4318 {
4319 gcond *gc = as_a <gcond *> (stmt);
4320 gimple_cond_set_lhs (gc, rhs);
4321 gimple_cond_set_rhs (gc, lhs);
4322 gimple_cond_set_code (gc,
4323 swap_tree_comparison (gimple_cond_code (gc)));
4324 changed = true;
4325 }
4326 }
4327 default:;
4328 }
4329
4330 /* Dispatch to pattern-based folding. */
4331 if (!inplace
4332 || is_gimple_assign (stmt)
4333 || gimple_code (stmt) == GIMPLE_COND)
4334 {
4335 gimple_seq seq = NULL;
4336 code_helper rcode;
4337 tree ops[3] = {};
4338 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4339 valueize, valueize))
4340 {
4341 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4342 changed = true;
4343 else
4344 gimple_seq_discard (seq);
4345 }
4346 }
4347
4348 stmt = gsi_stmt (*gsi);
4349
4350 /* Fold the main computation performed by the statement. */
4351 switch (gimple_code (stmt))
4352 {
4353 case GIMPLE_ASSIGN:
4354 {
4355 /* Try to canonicalize for boolean-typed X the comparisons
4356 X == 0, X == 1, X != 0, and X != 1. */
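/* For example (illustration), for boolean-typed x_1:
     r = (x_1 == 1) becomes r = x_1;
     r = (x_1 == 0) becomes r = ~x_1 when x_1 has one-bit precision,
     and r = x_1 ^ 1 otherwise.  */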
4357 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4358 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4359 {
4360 tree lhs = gimple_assign_lhs (stmt);
4361 tree op1 = gimple_assign_rhs1 (stmt);
4362 tree op2 = gimple_assign_rhs2 (stmt);
4363 tree type = TREE_TYPE (op1);
4364
4365 /* Check whether the comparison operands are of the same boolean
4366 type as the result type.
4367 Check that the second operand is an integer constant with value
4368 one or zero. */
4369 if (TREE_CODE (op2) == INTEGER_CST
4370 && (integer_zerop (op2) || integer_onep (op2))
4371 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4372 {
4373 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4374 bool is_logical_not = false;
4375
4376 /* X == 0 and X != 1 is a logical-not of X;
4377 X == 1 and X != 0 is X. */
4378 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4379 || (cmp_code == NE_EXPR && integer_onep (op2)))
4380 is_logical_not = true;
4381
4382 if (is_logical_not == false)
4383 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4384 /* Only for X of one-bit precision type is the transformation
4385 !X -> ~X valid. */
4386 else if (TYPE_PRECISION (type) == 1)
4387 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4388 /* Otherwise we use !X -> X ^ 1. */
4389 else
4390 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4391 build_int_cst (type, 1));
4392 changed = true;
4393 break;
4394 }
4395 }
4396
4397 unsigned old_num_ops = gimple_num_ops (stmt);
4398 tree lhs = gimple_assign_lhs (stmt);
4399 tree new_rhs = fold_gimple_assign (gsi);
4400 if (new_rhs
4401 && !useless_type_conversion_p (TREE_TYPE (lhs),
4402 TREE_TYPE (new_rhs)))
4403 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4404 if (new_rhs
4405 && (!inplace
4406 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4407 {
4408 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4409 changed = true;
4410 }
4411 break;
4412 }
4413
4414 case GIMPLE_CALL:
4415 changed |= gimple_fold_call (gsi, inplace);
4416 break;
4417
4418 case GIMPLE_ASM:
4419 /* Fold *& in asm operands. */
4420 {
4421 gasm *asm_stmt = as_a <gasm *> (stmt);
4422 size_t noutputs;
4423 const char **oconstraints;
4424 const char *constraint;
4425 bool allows_mem, allows_reg;
4426
4427 noutputs = gimple_asm_noutputs (asm_stmt);
4428 oconstraints = XALLOCAVEC (const char *, noutputs);
4429
4430 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4431 {
4432 tree link = gimple_asm_output_op (asm_stmt, i);
4433 tree op = TREE_VALUE (link);
4434 oconstraints[i]
4435 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4436 if (REFERENCE_CLASS_P (op)
4437 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4438 {
4439 TREE_VALUE (link) = op;
4440 changed = true;
4441 }
4442 }
4443 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4444 {
4445 tree link = gimple_asm_input_op (asm_stmt, i);
4446 tree op = TREE_VALUE (link);
4447 constraint
4448 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4449 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4450 oconstraints, &allows_mem, &allows_reg);
4451 if (REFERENCE_CLASS_P (op)
4452 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4453 != NULL_TREE)
4454 {
4455 TREE_VALUE (link) = op;
4456 changed = true;
4457 }
4458 }
4459 }
4460 break;
4461
4462 case GIMPLE_DEBUG:
4463 if (gimple_debug_bind_p (stmt))
4464 {
4465 tree val = gimple_debug_bind_get_value (stmt);
4466 if (val
4467 && REFERENCE_CLASS_P (val))
4468 {
4469 tree tem = maybe_fold_reference (val, false);
4470 if (tem)
4471 {
4472 gimple_debug_bind_set_value (stmt, tem);
4473 changed = true;
4474 }
4475 }
4476 else if (val
4477 && TREE_CODE (val) == ADDR_EXPR)
4478 {
4479 tree ref = TREE_OPERAND (val, 0);
4480 tree tem = maybe_fold_reference (ref, false);
4481 if (tem)
4482 {
4483 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4484 gimple_debug_bind_set_value (stmt, tem);
4485 changed = true;
4486 }
4487 }
4488 }
4489 break;
4490
4491 case GIMPLE_RETURN:
4492 {
4493 greturn *ret_stmt = as_a<greturn *> (stmt);
4494 tree ret = gimple_return_retval(ret_stmt);
4495
4496 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4497 {
4498 tree val = valueize (ret);
4499 if (val && val != ret
4500 && may_propagate_copy (ret, val))
4501 {
4502 gimple_return_set_retval (ret_stmt, val);
4503 changed = true;
4504 }
4505 }
4506 }
4507 break;
4508
4509 default:;
4510 }
4511
4512 stmt = gsi_stmt (*gsi);
4513
4514 /* Fold *& on the lhs. */
4515 if (gimple_has_lhs (stmt))
4516 {
4517 tree lhs = gimple_get_lhs (stmt);
4518 if (lhs && REFERENCE_CLASS_P (lhs))
4519 {
4520 tree new_lhs = maybe_fold_reference (lhs, true);
4521 if (new_lhs)
4522 {
4523 gimple_set_lhs (stmt, new_lhs);
4524 changed = true;
4525 }
4526 }
4527 }
4528
4529 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4530 return changed;
4531 }
4532
4533 /* Valueization callback that ends up not following SSA edges. */
4534
4535 tree
4536 no_follow_ssa_edges (tree)
4537 {
4538 return NULL_TREE;
4539 }
4540
4541 /* Valueization callback that ends up following single-use SSA edges only. */
4542
4543 tree
4544 follow_single_use_edges (tree val)
4545 {
4546 if (TREE_CODE (val) == SSA_NAME
4547 && !has_single_use (val))
4548 return NULL_TREE;
4549 return val;
4550 }
4551
4552 /* Fold the statement pointed to by GSI. In some cases, this function may
4553 replace the whole statement with a new one. Returns true iff folding
4554 makes any changes.
4555 The statement pointed to by GSI should be in valid gimple form but may
4556 be in unfolded state as resulting from for example constant propagation
4557 which can produce *&x = 0. */
4558
4559 bool
4560 fold_stmt (gimple_stmt_iterator *gsi)
4561 {
4562 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4563 }
4564
4565 bool
4566 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4567 {
4568 return fold_stmt_1 (gsi, false, valueize);
4569 }
4570
4571 /* Perform the minimal folding on statement *GSI. Only operations like
4572 *&x created by constant propagation are handled. The statement cannot
4573 be replaced with a new one. Return true if the statement was
4574 changed, false otherwise.
4575 The statement *GSI should be in valid gimple form but may
4576 be in unfolded state as resulting from for example constant propagation
4577 which can produce *&x = 0. */
4578
4579 bool
4580 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4581 {
4582 gimple *stmt = gsi_stmt (*gsi);
4583 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4584 gcc_assert (gsi_stmt (*gsi) == stmt);
4585 return changed;
4586 }
4587
4588 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4589 if EXPR is null or we don't know how.
4590 If non-null, the result always has boolean type. */
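/* For example (illustration): canonicalize_bool (a < b, false) yields
   a < b of boolean type; canonicalize_bool (a < b, true) yields the
   inverted comparison a >= b; and for an SSA_NAME x, INVERT == false
   yields x != 0 while INVERT == true yields x == 0.  */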
4591
4592 static tree
4593 canonicalize_bool (tree expr, bool invert)
4594 {
4595 if (!expr)
4596 return NULL_TREE;
4597 else if (invert)
4598 {
4599 if (integer_nonzerop (expr))
4600 return boolean_false_node;
4601 else if (integer_zerop (expr))
4602 return boolean_true_node;
4603 else if (TREE_CODE (expr) == SSA_NAME)
4604 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4605 build_int_cst (TREE_TYPE (expr), 0));
4606 else if (COMPARISON_CLASS_P (expr))
4607 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4608 boolean_type_node,
4609 TREE_OPERAND (expr, 0),
4610 TREE_OPERAND (expr, 1));
4611 else
4612 return NULL_TREE;
4613 }
4614 else
4615 {
4616 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4617 return expr;
4618 if (integer_nonzerop (expr))
4619 return boolean_true_node;
4620 else if (integer_zerop (expr))
4621 return boolean_false_node;
4622 else if (TREE_CODE (expr) == SSA_NAME)
4623 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4624 build_int_cst (TREE_TYPE (expr), 0));
4625 else if (COMPARISON_CLASS_P (expr))
4626 return fold_build2 (TREE_CODE (expr),
4627 boolean_type_node,
4628 TREE_OPERAND (expr, 0),
4629 TREE_OPERAND (expr, 1));
4630 else
4631 return NULL_TREE;
4632 }
4633 }
4634
4635 /* Check to see if a boolean expression EXPR is logically equivalent to the
4636 comparison (OP1 CODE OP2). Check for various identities involving
4637 SSA_NAMEs. */
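/* For example (illustration): if t_1 is a boolean SSA_NAME defined by
   t_1 = x < y, then same_bool_comparison_p (t_1, LT_EXPR, x, y)
   returns true via the SSA_NAME_DEF_STMT check below.  */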
4638
4639 static bool
4640 same_bool_comparison_p (const_tree expr, enum tree_code code,
4641 const_tree op1, const_tree op2)
4642 {
4643 gimple *s;
4644
4645 /* The obvious case. */
4646 if (TREE_CODE (expr) == code
4647 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4648 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4649 return true;
4650
4651 /* Check for the equivalence of "name" and "name != 0", and the case
4652 where expr is an SSA_NAME with a definition matching the comparison. */
4653 if (TREE_CODE (expr) == SSA_NAME
4654 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4655 {
4656 if (operand_equal_p (expr, op1, 0))
4657 return ((code == NE_EXPR && integer_zerop (op2))
4658 || (code == EQ_EXPR && integer_nonzerop (op2)));
4659 s = SSA_NAME_DEF_STMT (expr);
4660 if (is_gimple_assign (s)
4661 && gimple_assign_rhs_code (s) == code
4662 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4663 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4664 return true;
4665 }
4666
4667 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4668 of name is a comparison, recurse. */
4669 if (TREE_CODE (op1) == SSA_NAME
4670 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4671 {
4672 s = SSA_NAME_DEF_STMT (op1);
4673 if (is_gimple_assign (s)
4674 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4675 {
4676 enum tree_code c = gimple_assign_rhs_code (s);
4677 if ((c == NE_EXPR && integer_zerop (op2))
4678 || (c == EQ_EXPR && integer_nonzerop (op2)))
4679 return same_bool_comparison_p (expr, c,
4680 gimple_assign_rhs1 (s),
4681 gimple_assign_rhs2 (s));
4682 if ((c == EQ_EXPR && integer_zerop (op2))
4683 || (c == NE_EXPR && integer_nonzerop (op2)))
4684 return same_bool_comparison_p (expr,
4685 invert_tree_comparison (c, false),
4686 gimple_assign_rhs1 (s),
4687 gimple_assign_rhs2 (s));
4688 }
4689 }
4690 return false;
4691 }
4692
4693 /* Check to see if two boolean expressions OP1 and OP2 are logically
4694 equivalent. */
4695
4696 static bool
4697 same_bool_result_p (const_tree op1, const_tree op2)
4698 {
4699 /* Simple cases first. */
4700 if (operand_equal_p (op1, op2, 0))
4701 return true;
4702
4703 /* Check the cases where at least one of the operands is a comparison.
4704 These are a bit smarter than operand_equal_p in that they apply some
4705 identities on SSA_NAMEs. */
4706 if (COMPARISON_CLASS_P (op2)
4707 && same_bool_comparison_p (op1, TREE_CODE (op2),
4708 TREE_OPERAND (op2, 0),
4709 TREE_OPERAND (op2, 1)))
4710 return true;
4711 if (COMPARISON_CLASS_P (op1)
4712 && same_bool_comparison_p (op2, TREE_CODE (op1),
4713 TREE_OPERAND (op1, 0),
4714 TREE_OPERAND (op1, 1)))
4715 return true;
4716
4717 /* Default case. */
4718 return false;
4719 }
4720
4721 /* Forward declarations for some mutually recursive functions. */
4722
4723 static tree
4724 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4725 enum tree_code code2, tree op2a, tree op2b);
4726 static tree
4727 and_var_with_comparison (tree var, bool invert,
4728 enum tree_code code2, tree op2a, tree op2b);
4729 static tree
4730 and_var_with_comparison_1 (gimple *stmt,
4731 enum tree_code code2, tree op2a, tree op2b);
4732 static tree
4733 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4734 enum tree_code code2, tree op2a, tree op2b);
4735 static tree
4736 or_var_with_comparison (tree var, bool invert,
4737 enum tree_code code2, tree op2a, tree op2b);
4738 static tree
4739 or_var_with_comparison_1 (gimple *stmt,
4740 enum tree_code code2, tree op2a, tree op2b);
4741
4742 /* Helper function for and_comparisons_1: try to simplify the AND of the
4743 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4744 If INVERT is true, invert the value of VAR before doing the AND.
4745 Return NULL_TREE if we can't simplify this to a single expression. */
4746
4747 static tree
4748 and_var_with_comparison (tree var, bool invert,
4749 enum tree_code code2, tree op2a, tree op2b)
4750 {
4751 tree t;
4752 gimple *stmt = SSA_NAME_DEF_STMT (var);
4753
4754 /* We can only deal with variables whose definitions are assignments. */
4755 if (!is_gimple_assign (stmt))
4756 return NULL_TREE;
4757
4758 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4759 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4760 Then we only have to consider the simpler non-inverted cases. */
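  /* For example (illustration): !v_1 AND (a < b) is computed below as
     the canonicalized inversion of v_1 OR (a >= b).  */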
4761 if (invert)
4762 t = or_var_with_comparison_1 (stmt,
4763 invert_tree_comparison (code2, false),
4764 op2a, op2b);
4765 else
4766 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4767 return canonicalize_bool (t, invert);
4768 }
4769
4770 /* Try to simplify the AND of the ssa variable defined by the assignment
4771 STMT with the comparison specified by (OP2A CODE2 OP2B).
4772 Return NULL_TREE if we can't simplify this to a single expression. */
4773
4774 static tree
4775 and_var_with_comparison_1 (gimple *stmt,
4776 enum tree_code code2, tree op2a, tree op2b)
4777 {
4778 tree var = gimple_assign_lhs (stmt);
4779 tree true_test_var = NULL_TREE;
4780 tree false_test_var = NULL_TREE;
4781 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4782
4783 /* Check for identities like (var AND (var == 0)) => false. */
4784 if (TREE_CODE (op2a) == SSA_NAME
4785 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4786 {
4787 if ((code2 == NE_EXPR && integer_zerop (op2b))
4788 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4789 {
4790 true_test_var = op2a;
4791 if (var == true_test_var)
4792 return var;
4793 }
4794 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4795 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4796 {
4797 false_test_var = op2a;
4798 if (var == false_test_var)
4799 return boolean_false_node;
4800 }
4801 }
4802
4803 /* If the definition is a comparison, recurse on it. */
4804 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4805 {
4806 tree t = and_comparisons_1 (innercode,
4807 gimple_assign_rhs1 (stmt),
4808 gimple_assign_rhs2 (stmt),
4809 code2,
4810 op2a,
4811 op2b);
4812 if (t)
4813 return t;
4814 }
4815
4816 /* If the definition is an AND or OR expression, we may be able to
4817 simplify by reassociating. */
4818 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4819 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4820 {
4821 tree inner1 = gimple_assign_rhs1 (stmt);
4822 tree inner2 = gimple_assign_rhs2 (stmt);
4823 gimple *s;
4824 tree t;
4825 tree partial = NULL_TREE;
4826 bool is_and = (innercode == BIT_AND_EXPR);
4827
4828 /* Check for boolean identities that don't require recursive examination
4829 of inner1/inner2:
4830 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4831 inner1 AND (inner1 OR inner2) => inner1
4832 !inner1 AND (inner1 AND inner2) => false
4833 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4834 Likewise for similar cases involving inner2. */
4835 if (inner1 == true_test_var)
4836 return (is_and ? var : inner1);
4837 else if (inner2 == true_test_var)
4838 return (is_and ? var : inner2);
4839 else if (inner1 == false_test_var)
4840 return (is_and
4841 ? boolean_false_node
4842 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4843 else if (inner2 == false_test_var)
4844 return (is_and
4845 ? boolean_false_node
4846 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4847
4848 /* Next, redistribute/reassociate the AND across the inner tests.
4849 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4850 if (TREE_CODE (inner1) == SSA_NAME
4851 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4852 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4853 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4854 gimple_assign_rhs1 (s),
4855 gimple_assign_rhs2 (s),
4856 code2, op2a, op2b)))
4857 {
4858 /* Handle the AND case, where we are reassociating:
4859 (inner1 AND inner2) AND (op2a code2 op2b)
4860 => (t AND inner2)
4861 If the partial result t is a constant, we win. Otherwise
4862 continue on to try reassociating with the other inner test. */
4863 if (is_and)
4864 {
4865 if (integer_onep (t))
4866 return inner2;
4867 else if (integer_zerop (t))
4868 return boolean_false_node;
4869 }
4870
4871 /* Handle the OR case, where we are redistributing:
4872 (inner1 OR inner2) AND (op2a code2 op2b)
4873 => (t OR (inner2 AND (op2a code2 op2b))) */
4874 else if (integer_onep (t))
4875 return boolean_true_node;
4876
4877 /* Save partial result for later. */
4878 partial = t;
4879 }
4880
4881 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4882 if (TREE_CODE (inner2) == SSA_NAME
4883 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4884 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4885 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4886 gimple_assign_rhs1 (s),
4887 gimple_assign_rhs2 (s),
4888 code2, op2a, op2b)))
4889 {
4890 /* Handle the AND case, where we are reassociating:
4891 (inner1 AND inner2) AND (op2a code2 op2b)
4892 => (inner1 AND t) */
4893 if (is_and)
4894 {
4895 if (integer_onep (t))
4896 return inner1;
4897 else if (integer_zerop (t))
4898 return boolean_false_node;
4899 /* If both are the same, we can apply the identity
4900 (x AND x) == x. */
4901 else if (partial && same_bool_result_p (t, partial))
4902 return t;
4903 }
4904
4905 /* Handle the OR case, where we are redistributing:
4906 (inner1 OR inner2) AND (op2a code2 op2b)
4907 => (t OR (inner1 AND (op2a code2 op2b)))
4908 => (t OR partial) */
4909 else
4910 {
4911 if (integer_onep (t))
4912 return boolean_true_node;
4913 else if (partial)
4914 {
4915 /* We already got a simplification for the other
4916 operand to the redistributed OR expression. The
4917 interesting case is when at least one is false.
4918 Or, if both are the same, we can apply the identity
4919 (x OR x) == x. */
4920 if (integer_zerop (partial))
4921 return t;
4922 else if (integer_zerop (t))
4923 return partial;
4924 else if (same_bool_result_p (t, partial))
4925 return t;
4926 }
4927 }
4928 }
4929 }
4930 return NULL_TREE;
4931 }
4932
4933 /* Try to simplify the AND of two comparisons defined by
4934 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4935 If this can be done without constructing an intermediate value,
4936 return the resulting tree; otherwise NULL_TREE is returned.
4937 This function is deliberately asymmetric as it recurses on SSA_DEFs
4938 in the first comparison but not the second. */
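/* For example (illustration): and_comparisons_1 (GT_EXPR, x, 0,
   GT_EXPR, x, 2) folds (x > 0) AND (x > 2) to the more restrictive
   x > 2 via the constant-merging code below.  */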
4939
4940 static tree
4941 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4942 enum tree_code code2, tree op2a, tree op2b)
4943 {
4944 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4945
4946 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4947 if (operand_equal_p (op1a, op2a, 0)
4948 && operand_equal_p (op1b, op2b, 0))
4949 {
4950 /* Result will be either NULL_TREE, or a combined comparison. */
4951 tree t = combine_comparisons (UNKNOWN_LOCATION,
4952 TRUTH_ANDIF_EXPR, code1, code2,
4953 truth_type, op1a, op1b);
4954 if (t)
4955 return t;
4956 }
4957
4958 /* Likewise the swapped case of the above. */
4959 if (operand_equal_p (op1a, op2b, 0)
4960 && operand_equal_p (op1b, op2a, 0))
4961 {
4962 /* Result will be either NULL_TREE, or a combined comparison. */
4963 tree t = combine_comparisons (UNKNOWN_LOCATION,
4964 TRUTH_ANDIF_EXPR, code1,
4965 swap_tree_comparison (code2),
4966 truth_type, op1a, op1b);
4967 if (t)
4968 return t;
4969 }
4970
4971 /* If both comparisons are of the same value against constants, we might
4972 be able to merge them. */
4973 if (operand_equal_p (op1a, op2a, 0)
4974 && TREE_CODE (op1b) == INTEGER_CST
4975 && TREE_CODE (op2b) == INTEGER_CST)
4976 {
4977 int cmp = tree_int_cst_compare (op1b, op2b);
4978
4979 /* If we have (op1a == op1b), we should either be able to
4980 return that or FALSE, depending on whether the constant op1b
4981 also satisfies the other comparison against op2b. */
4982 if (code1 == EQ_EXPR)
4983 {
4984 bool done = true;
4985 bool val;
4986 switch (code2)
4987 {
4988 case EQ_EXPR: val = (cmp == 0); break;
4989 case NE_EXPR: val = (cmp != 0); break;
4990 case LT_EXPR: val = (cmp < 0); break;
4991 case GT_EXPR: val = (cmp > 0); break;
4992 case LE_EXPR: val = (cmp <= 0); break;
4993 case GE_EXPR: val = (cmp >= 0); break;
4994 default: done = false;
4995 }
4996 if (done)
4997 {
4998 if (val)
4999 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5000 else
5001 return boolean_false_node;
5002 }
5003 }
5004 /* Likewise if the second comparison is an == comparison. */
5005 else if (code2 == EQ_EXPR)
5006 {
5007 bool done = true;
5008 bool val;
5009 switch (code1)
5010 {
5011 case EQ_EXPR: val = (cmp == 0); break;
5012 case NE_EXPR: val = (cmp != 0); break;
5013 case LT_EXPR: val = (cmp > 0); break;
5014 case GT_EXPR: val = (cmp < 0); break;
5015 case LE_EXPR: val = (cmp >= 0); break;
5016 case GE_EXPR: val = (cmp <= 0); break;
5017 default: done = false;
5018 }
5019 if (done)
5020 {
5021 if (val)
5022 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5023 else
5024 return boolean_false_node;
5025 }
5026 }
5027
5028 /* Same business with inequality tests. */
5029 else if (code1 == NE_EXPR)
5030 {
5031 bool val;
5032 switch (code2)
5033 {
5034 case EQ_EXPR: val = (cmp != 0); break;
5035 case NE_EXPR: val = (cmp == 0); break;
5036 case LT_EXPR: val = (cmp >= 0); break;
5037 case GT_EXPR: val = (cmp <= 0); break;
5038 case LE_EXPR: val = (cmp > 0); break;
5039 case GE_EXPR: val = (cmp < 0); break;
5040 default:
5041 val = false;
5042 }
5043 if (val)
5044 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5045 }
5046 else if (code2 == NE_EXPR)
5047 {
5048 bool val;
5049 switch (code1)
5050 {
5051 case EQ_EXPR: val = (cmp == 0); break;
5052 case NE_EXPR: val = (cmp != 0); break;
5053 case LT_EXPR: val = (cmp <= 0); break;
5054 case GT_EXPR: val = (cmp >= 0); break;
5055 case LE_EXPR: val = (cmp < 0); break;
5056 case GE_EXPR: val = (cmp > 0); break;
5057 default:
5058 val = false;
5059 }
5060 if (val)
5061 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5062 }
5063
5064 /* Choose the more restrictive of two < or <= comparisons. */
5065 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5066 && (code2 == LT_EXPR || code2 == LE_EXPR))
5067 {
5068 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5069 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5070 else
5071 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5072 }
5073
5074 /* Likewise choose the more restrictive of two > or >= comparisons. */
5075 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5076 && (code2 == GT_EXPR || code2 == GE_EXPR))
5077 {
5078 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5079 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5080 else
5081 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5082 }
5083
5084 /* Check for singleton ranges. */
5085 else if (cmp == 0
5086 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5087 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5088 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5089
5090 /* Check for disjoint ranges. */
5091 else if (cmp <= 0
5092 && (code1 == LT_EXPR || code1 == LE_EXPR)
5093 && (code2 == GT_EXPR || code2 == GE_EXPR))
5094 return boolean_false_node;
5095 else if (cmp >= 0
5096 && (code1 == GT_EXPR || code1 == GE_EXPR)
5097 && (code2 == LT_EXPR || code2 == LE_EXPR))
5098 return boolean_false_node;
5099 }
5100
5101 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5102 NAME's definition is a truth value. See if there are any simplifications
5103 that can be done against NAME's definition. */
5104 if (TREE_CODE (op1a) == SSA_NAME
5105 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5106 && (integer_zerop (op1b) || integer_onep (op1b)))
5107 {
5108 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5109 || (code1 == NE_EXPR && integer_onep (op1b)));
5110 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5111 switch (gimple_code (stmt))
5112 {
5113 case GIMPLE_ASSIGN:
5114 /* Try to simplify by copy-propagating the definition. */
5115 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5116
5117 case GIMPLE_PHI:
5118 /* If every argument to the PHI produces the same result when
5119 ANDed with the second comparison, we win.
5120 Do not do this unless the type is bool since we need a bool
5121 result here anyway. */
5122 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5123 {
5124 tree result = NULL_TREE;
5125 unsigned i;
5126 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5127 {
5128 tree arg = gimple_phi_arg_def (stmt, i);
5129
5130 /* If this PHI has itself as an argument, ignore it.
5131 If all the other args produce the same result,
5132 we're still OK. */
5133 if (arg == gimple_phi_result (stmt))
5134 continue;
5135 else if (TREE_CODE (arg) == INTEGER_CST)
5136 {
5137 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5138 {
5139 if (!result)
5140 result = boolean_false_node;
5141 else if (!integer_zerop (result))
5142 return NULL_TREE;
5143 }
5144 else if (!result)
5145 result = fold_build2 (code2, boolean_type_node,
5146 op2a, op2b);
5147 else if (!same_bool_comparison_p (result,
5148 code2, op2a, op2b))
5149 return NULL_TREE;
5150 }
5151 else if (TREE_CODE (arg) == SSA_NAME
5152 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5153 {
5154 tree temp;
5155 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5156 /* In simple cases we can look through PHI nodes,
5157 but we have to be careful with loops.
5158 See PR49073. */
5159 if (! dom_info_available_p (CDI_DOMINATORS)
5160 || gimple_bb (def_stmt) == gimple_bb (stmt)
5161 || dominated_by_p (CDI_DOMINATORS,
5162 gimple_bb (def_stmt),
5163 gimple_bb (stmt)))
5164 return NULL_TREE;
5165 temp = and_var_with_comparison (arg, invert, code2,
5166 op2a, op2b);
5167 if (!temp)
5168 return NULL_TREE;
5169 else if (!result)
5170 result = temp;
5171 else if (!same_bool_result_p (result, temp))
5172 return NULL_TREE;
5173 }
5174 else
5175 return NULL_TREE;
5176 }
5177 return result;
5178 }
5179
5180 default:
5181 break;
5182 }
5183 }
5184 return NULL_TREE;
5185 }
5186
5187 /* Try to simplify the AND of two comparisons, specified by
5188 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5189 If this can be simplified to a single expression (without requiring
5190 introducing more SSA variables to hold intermediate values),
5191 return the resulting tree. Otherwise return NULL_TREE.
5192 If the result expression is non-null, it has boolean type. */
5193
5194 tree
5195 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5196 enum tree_code code2, tree op2a, tree op2b)
5197 {
5198 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5199 if (t)
5200 return t;
5201 else
5202 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5203 }
5204
5205 /* Helper function for or_comparisons_1: try to simplify the OR of the
5206 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5207 If INVERT is true, invert the value of VAR before doing the OR.
5208 Return NULL_TREE if we can't simplify this to a single expression. */
5209
5210 static tree
5211 or_var_with_comparison (tree var, bool invert,
5212 enum tree_code code2, tree op2a, tree op2b)
5213 {
5214 tree t;
5215 gimple *stmt = SSA_NAME_DEF_STMT (var);
5216
5217 /* We can only deal with variables whose definitions are assignments. */
5218 if (!is_gimple_assign (stmt))
5219 return NULL_TREE;
5220
5221 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5222 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5223 Then we only have to consider the simpler non-inverted cases. */
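  /* For example (illustration): !v_1 OR (a < b) is computed below as
     the canonicalized inversion of v_1 AND (a >= b).  */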
5224 if (invert)
5225 t = and_var_with_comparison_1 (stmt,
5226 invert_tree_comparison (code2, false),
5227 op2a, op2b);
5228 else
5229 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5230 return canonicalize_bool (t, invert);
5231 }
5232
5233 /* Try to simplify the OR of the ssa variable defined by the assignment
5234 STMT with the comparison specified by (OP2A CODE2 OP2B).
5235 Return NULL_TREE if we can't simplify this to a single expression. */
5236
5237 static tree
5238 or_var_with_comparison_1 (gimple *stmt,
5239 enum tree_code code2, tree op2a, tree op2b)
5240 {
5241 tree var = gimple_assign_lhs (stmt);
5242 tree true_test_var = NULL_TREE;
5243 tree false_test_var = NULL_TREE;
5244 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5245
5246 /* Check for identities like (var OR (var != 0)) => true. */
5247 if (TREE_CODE (op2a) == SSA_NAME
5248 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5249 {
5250 if ((code2 == NE_EXPR && integer_zerop (op2b))
5251 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5252 {
5253 true_test_var = op2a;
5254 if (var == true_test_var)
5255 return var;
5256 }
5257 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5258 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5259 {
5260 false_test_var = op2a;
5261 if (var == false_test_var)
5262 return boolean_true_node;
5263 }
5264 }
5265
5266 /* If the definition is a comparison, recurse on it. */
5267 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5268 {
5269 tree t = or_comparisons_1 (innercode,
5270 gimple_assign_rhs1 (stmt),
5271 gimple_assign_rhs2 (stmt),
5272 code2,
5273 op2a,
5274 op2b);
5275 if (t)
5276 return t;
5277 }
5278
5279 /* If the definition is an AND or OR expression, we may be able to
5280 simplify by reassociating. */
5281 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5282 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5283 {
5284 tree inner1 = gimple_assign_rhs1 (stmt);
5285 tree inner2 = gimple_assign_rhs2 (stmt);
5286 gimple *s;
5287 tree t;
5288 tree partial = NULL_TREE;
5289 bool is_or = (innercode == BIT_IOR_EXPR);
5290
5291 /* Check for boolean identities that don't require recursive examination
5292 of inner1/inner2:
5293 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5294 inner1 OR (inner1 AND inner2) => inner1
5295 !inner1 OR (inner1 OR inner2) => true
5296 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5297 */
5298 if (inner1 == true_test_var)
5299 return (is_or ? var : inner1);
5300 else if (inner2 == true_test_var)
5301 return (is_or ? var : inner2);
5302 else if (inner1 == false_test_var)
5303 return (is_or
5304 ? boolean_true_node
5305 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5306 else if (inner2 == false_test_var)
5307 return (is_or
5308 ? boolean_true_node
5309 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5310
5311 /* Next, redistribute/reassociate the OR across the inner tests.
5312 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5313 if (TREE_CODE (inner1) == SSA_NAME
5314 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5315 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5316 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5317 gimple_assign_rhs1 (s),
5318 gimple_assign_rhs2 (s),
5319 code2, op2a, op2b)))
5320 {
5321 /* Handle the OR case, where we are reassociating:
5322 (inner1 OR inner2) OR (op2a code2 op2b)
5323 => (t OR inner2)
5324 If the partial result t is a constant, we win. Otherwise
5325 continue on to try reassociating with the other inner test. */
5326 if (is_or)
5327 {
5328 if (integer_onep (t))
5329 return boolean_true_node;
5330 else if (integer_zerop (t))
5331 return inner2;
5332 }
5333
5334 /* Handle the AND case, where we are redistributing:
5335 (inner1 AND inner2) OR (op2a code2 op2b)
5336 => (t AND (inner2 OR (op2a code op2b))) */
5337 else if (integer_zerop (t))
5338 return boolean_false_node;
5339
5340 /* Save partial result for later. */
5341 partial = t;
5342 }
5343
5344 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5345 if (TREE_CODE (inner2) == SSA_NAME
5346 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5347 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5348 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5349 gimple_assign_rhs1 (s),
5350 gimple_assign_rhs2 (s),
5351 code2, op2a, op2b)))
5352 {
5353 /* Handle the OR case, where we are reassociating:
5354 (inner1 OR inner2) OR (op2a code2 op2b)
5355 => (inner1 OR t)
5356 => (t OR partial) */
5357 if (is_or)
5358 {
5359 if (integer_zerop (t))
5360 return inner1;
5361 else if (integer_onep (t))
5362 return boolean_true_node;
5363 /* If both are the same, we can apply the identity
5364 (x OR x) == x. */
5365 else if (partial && same_bool_result_p (t, partial))
5366 return t;
5367 }
5368
5369 /* Handle the AND case, where we are redistributing:
5370 (inner1 AND inner2) OR (op2a code2 op2b)
5371 => (t AND (inner1 OR (op2a code2 op2b)))
5372 => (t AND partial) */
5373 else
5374 {
5375 if (integer_zerop (t))
5376 return boolean_false_node;
5377 else if (partial)
5378 {
5379 /* We already got a simplification for the other
5380 operand to the redistributed AND expression. The
5381 interesting case is when at least one is true.
5382 Or, if both are the same, we can apply the identity
5383 (x AND x) == x. */
5384 if (integer_onep (partial))
5385 return t;
5386 else if (integer_onep (t))
5387 return partial;
5388 else if (same_bool_result_p (t, partial))
5389 return t;
5390 }
5391 }
5392 }
5393 }
5394 return NULL_TREE;
5395 }
5396
5397 /* Try to simplify the OR of two comparisons defined by
5398 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5399 If this can be done without constructing an intermediate value,
5400 return the resulting tree; otherwise NULL_TREE is returned.
5401 This function is deliberately asymmetric as it recurses on SSA_DEFs
5402 in the first comparison but not the second. */
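/* For instance, for integer operands (x < 3) OR (x == 3) combines via
   combine_comparisons into (x <= 3), and (x < 3) OR (x > 3) into
   (x != 3); x and 3 are illustrative operands, not from this code.  */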
5403
5404 static tree
5405 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5406 enum tree_code code2, tree op2a, tree op2b)
5407 {
5408 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5409
5410 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5411 if (operand_equal_p (op1a, op2a, 0)
5412 && operand_equal_p (op1b, op2b, 0))
5413 {
5414 /* Result will be either NULL_TREE or a combined comparison. */
5415 tree t = combine_comparisons (UNKNOWN_LOCATION,
5416 TRUTH_ORIF_EXPR, code1, code2,
5417 truth_type, op1a, op1b);
5418 if (t)
5419 return t;
5420 }
5421
5422 /* Likewise the swapped case of the above. */
5423 if (operand_equal_p (op1a, op2b, 0)
5424 && operand_equal_p (op1b, op2a, 0))
5425 {
5426 /* Result will be either NULL_TREE or a combined comparison. */
5427 tree t = combine_comparisons (UNKNOWN_LOCATION,
5428 TRUTH_ORIF_EXPR, code1,
5429 swap_tree_comparison (code2),
5430 truth_type, op1a, op1b);
5431 if (t)
5432 return t;
5433 }
5434
5435 /* If both comparisons are of the same value against constants, we might
5436 be able to merge them. */
5437 if (operand_equal_p (op1a, op2a, 0)
5438 && TREE_CODE (op1b) == INTEGER_CST
5439 && TREE_CODE (op2b) == INTEGER_CST)
5440 {
5441 int cmp = tree_int_cst_compare (op1b, op2b);
5442
5443 /* If we have (op1a != op1b), we should either be able to
5444 return that or TRUE, depending on whether the constant op1b
5445 also satisfies the other comparison against op2b. */
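/* For example, with op1b == 3: (x != 3) OR (x > 2) is always true
   because 3 > 2 holds, whereas (x != 3) OR (x > 5) simplifies to
   (x != 3) because 3 > 5 does not (illustrative constants).  */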
5446 if (code1 == NE_EXPR)
5447 {
5448 bool done = true;
5449 bool val;
5450 switch (code2)
5451 {
5452 case EQ_EXPR: val = (cmp == 0); break;
5453 case NE_EXPR: val = (cmp != 0); break;
5454 case LT_EXPR: val = (cmp < 0); break;
5455 case GT_EXPR: val = (cmp > 0); break;
5456 case LE_EXPR: val = (cmp <= 0); break;
5457 case GE_EXPR: val = (cmp >= 0); break;
5458 default: done = false;
5459 }
5460 if (done)
5461 {
5462 if (val)
5463 return boolean_true_node;
5464 else
5465 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5466 }
5467 }
5468 /* Likewise if the second comparison is a != comparison. */
5469 else if (code2 == NE_EXPR)
5470 {
5471 bool done = true;
5472 bool val;
5473 switch (code1)
5474 {
5475 case EQ_EXPR: val = (cmp == 0); break;
5476 case NE_EXPR: val = (cmp != 0); break;
5477 case LT_EXPR: val = (cmp > 0); break;
5478 case GT_EXPR: val = (cmp < 0); break;
5479 case LE_EXPR: val = (cmp >= 0); break;
5480 case GE_EXPR: val = (cmp <= 0); break;
5481 default: done = false;
5482 }
5483 if (done)
5484 {
5485 if (val)
5486 return boolean_true_node;
5487 else
5488 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5489 }
5490 }
5491
5492 /* See if an equality test is redundant with the other comparison. */
5493 else if (code1 == EQ_EXPR)
5494 {
5495 bool val;
5496 switch (code2)
5497 {
5498 case EQ_EXPR: val = (cmp == 0); break;
5499 case NE_EXPR: val = (cmp != 0); break;
5500 case LT_EXPR: val = (cmp < 0); break;
5501 case GT_EXPR: val = (cmp > 0); break;
5502 case LE_EXPR: val = (cmp <= 0); break;
5503 case GE_EXPR: val = (cmp >= 0); break;
5504 default:
5505 val = false;
5506 }
5507 if (val)
5508 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5509 }
5510 else if (code2 == EQ_EXPR)
5511 {
5512 bool val;
5513 switch (code1)
5514 {
5515 case EQ_EXPR: val = (cmp == 0); break;
5516 case NE_EXPR: val = (cmp != 0); break;
5517 case LT_EXPR: val = (cmp > 0); break;
5518 case GT_EXPR: val = (cmp < 0); break;
5519 case LE_EXPR: val = (cmp >= 0); break;
5520 case GE_EXPR: val = (cmp <= 0); break;
5521 default:
5522 val = false;
5523 }
5524 if (val)
5525 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5526 }
5527
5528 /* Choose the less restrictive of two < or <= comparisons. */
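/* E.g. (x < 3) OR (x <= 5) simplifies to the weaker bound (x <= 5)
   (illustrative constants).  */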
5529 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5530 && (code2 == LT_EXPR || code2 == LE_EXPR))
5531 {
5532 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5533 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5534 else
5535 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5536 }
5537
5538 /* Likewise choose the less restrictive of two > or >= comparisons. */
5539 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5540 && (code2 == GT_EXPR || code2 == GE_EXPR))
5541 {
5542 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5543 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5544 else
5545 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5546 }
5547
5548 /* Check for singleton ranges. */
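/* E.g. (x < 5) OR (x > 5) excludes only x == 5 and so folds to
   (x != 5) (illustrative constants).  */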
5549 else if (cmp == 0
5550 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5551 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5552 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5553
5554 /* Check for less/greater pairs that don't restrict the range at all. */
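/* E.g. (x <= 5) OR (x >= 3): the two half-ranges overlap and together
   cover the whole domain, so the result is always true.  */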
5555 else if (cmp >= 0
5556 && (code1 == LT_EXPR || code1 == LE_EXPR)
5557 && (code2 == GT_EXPR || code2 == GE_EXPR))
5558 return boolean_true_node;
5559 else if (cmp <= 0
5560 && (code1 == GT_EXPR || code1 == GE_EXPR)
5561 && (code2 == LT_EXPR || code2 == LE_EXPR))
5562 return boolean_true_node;
5563 }
5564
5565 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5566 NAME's definition is a truth value. See if there are any simplifications
5567 that can be done against the NAME's definition. */
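/* For instance (illustrative SSA names): given t_1 = a_2 > b_3 and the
   first comparison (t_1 != 0), we recurse into the definition of t_1
   and try to combine (a_2 > b_3) with the second comparison.  */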
5568 if (TREE_CODE (op1a) == SSA_NAME
5569 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5570 && (integer_zerop (op1b) || integer_onep (op1b)))
5571 {
5572 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5573 || (code1 == NE_EXPR && integer_onep (op1b)));
5574 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5575 switch (gimple_code (stmt))
5576 {
5577 case GIMPLE_ASSIGN:
5578 /* Try to simplify by copy-propagating the definition. */
5579 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5580
5581 case GIMPLE_PHI:
5582 /* If every argument to the PHI produces the same result when
5583 ORed with the second comparison, we win.
5584 Do not do this unless the type is bool since we need a bool
5585 result here anyway. */
5586 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5587 {
5588 tree result = NULL_TREE;
5589 unsigned i;
5590 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5591 {
5592 tree arg = gimple_phi_arg_def (stmt, i);
5593
5594 /* If this PHI has itself as an argument, ignore it.
5595 If all the other args produce the same result,
5596 we're still OK. */
5597 if (arg == gimple_phi_result (stmt))
5598 continue;
5599 else if (TREE_CODE (arg) == INTEGER_CST)
5600 {
5601 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5602 {
5603 if (!result)
5604 result = boolean_true_node;
5605 else if (!integer_onep (result))
5606 return NULL_TREE;
5607 }
5608 else if (!result)
5609 result = fold_build2 (code2, boolean_type_node,
5610 op2a, op2b);
5611 else if (!same_bool_comparison_p (result,
5612 code2, op2a, op2b))
5613 return NULL_TREE;
5614 }
5615 else if (TREE_CODE (arg) == SSA_NAME
5616 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5617 {
5618 tree temp;
5619 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5620 /* In simple cases we can look through PHI nodes,
5621 but we have to be careful with loops.
5622 See PR49073. */
5623 if (! dom_info_available_p (CDI_DOMINATORS)
5624 || gimple_bb (def_stmt) == gimple_bb (stmt)
5625 || dominated_by_p (CDI_DOMINATORS,
5626 gimple_bb (def_stmt),
5627 gimple_bb (stmt)))
5628 return NULL_TREE;
5629 temp = or_var_with_comparison (arg, invert, code2,
5630 op2a, op2b);
5631 if (!temp)
5632 return NULL_TREE;
5633 else if (!result)
5634 result = temp;
5635 else if (!same_bool_result_p (result, temp))
5636 return NULL_TREE;
5637 }
5638 else
5639 return NULL_TREE;
5640 }
5641 return result;
5642 }
5643
5644 default:
5645 break;
5646 }
5647 }
5648 return NULL_TREE;
5649 }
5650
5651 /* Try to simplify the OR of two comparisons, specified by
5652 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5653 If this can be simplified to a single expression (without
5654 introducing more SSA variables to hold intermediate values),
5655 return the resulting tree. Otherwise return NULL_TREE.
5656 If the result expression is non-null, it has boolean type. */
5657
5658 tree
5659 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5660 enum tree_code code2, tree op2a, tree op2b)
5661 {
5662 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5663 if (t)
5664 return t;
5665 else
5666 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5667 }
5668
5669
5670 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5671
5672 Either NULL_TREE, a simplified but non-constant value, or a constant
5673 is returned.
5674
5675 ??? This should go into a gimple-fold-inline.h file to be eventually
5676 privatized with the single valueize function used in the various TUs
5677 to avoid the indirect function call overhead. */
5678
5679 tree
5680 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5681 tree (*gvalueize) (tree))
5682 {
5683 code_helper rcode;
5684 tree ops[3] = {};
5685 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5686 edges if there are intermediate VARYING defs. For this reason
5687 do not follow SSA edges here even though SCCVN can technically
5688 deal just fine with that. */
5689 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5690 {
5691 tree res = NULL_TREE;
5692 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5693 res = ops[0];
5694 else if (mprts_hook)
5695 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5696 if (res)
5697 {
5698 if (dump_file && dump_flags & TDF_DETAILS)
5699 {
5700 fprintf (dump_file, "Match-and-simplified ");
5701 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5702 fprintf (dump_file, " to ");
5703 print_generic_expr (dump_file, res, 0);
5704 fprintf (dump_file, "\n");
5705 }
5706 return res;
5707 }
5708 }
5709
5710 location_t loc = gimple_location (stmt);
5711 switch (gimple_code (stmt))
5712 {
5713 case GIMPLE_ASSIGN:
5714 {
5715 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5716
5717 switch (get_gimple_rhs_class (subcode))
5718 {
5719 case GIMPLE_SINGLE_RHS:
5720 {
5721 tree rhs = gimple_assign_rhs1 (stmt);
5722 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5723
5724 if (TREE_CODE (rhs) == SSA_NAME)
5725 {
5726 /* If the RHS is an SSA_NAME, return its known constant value,
5727 if any. */
5728 return (*valueize) (rhs);
5729 }
5730 /* Handle propagating invariant addresses into address
5731 operations. */
5732 else if (TREE_CODE (rhs) == ADDR_EXPR
5733 && !is_gimple_min_invariant (rhs))
5734 {
5735 HOST_WIDE_INT offset = 0;
5736 tree base;
5737 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5738 &offset,
5739 valueize);
5740 if (base
5741 && (CONSTANT_CLASS_P (base)
5742 || decl_address_invariant_p (base)))
5743 return build_invariant_address (TREE_TYPE (rhs),
5744 base, offset);
5745 }
5746 else if (TREE_CODE (rhs) == CONSTRUCTOR
5747 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5748 && (CONSTRUCTOR_NELTS (rhs)
5749 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5750 {
5751 unsigned i;
5752 tree val, *vec;
5753
5754 vec = XALLOCAVEC (tree,
5755 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5756 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5757 {
5758 val = (*valueize) (val);
5759 if (TREE_CODE (val) == INTEGER_CST
5760 || TREE_CODE (val) == REAL_CST
5761 || TREE_CODE (val) == FIXED_CST)
5762 vec[i] = val;
5763 else
5764 return NULL_TREE;
5765 }
5766
5767 return build_vector (TREE_TYPE (rhs), vec);
5768 }
5769 if (subcode == OBJ_TYPE_REF)
5770 {
5771 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5772 /* If callee is constant, we can fold away the wrapper. */
5773 if (is_gimple_min_invariant (val))
5774 return val;
5775 }
5776
5777 if (kind == tcc_reference)
5778 {
5779 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5780 || TREE_CODE (rhs) == REALPART_EXPR
5781 || TREE_CODE (rhs) == IMAGPART_EXPR)
5782 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5783 {
5784 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5785 return fold_unary_loc (EXPR_LOCATION (rhs),
5786 TREE_CODE (rhs),
5787 TREE_TYPE (rhs), val);
5788 }
5789 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5790 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5791 {
5792 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5793 return fold_ternary_loc (EXPR_LOCATION (rhs),
5794 TREE_CODE (rhs),
5795 TREE_TYPE (rhs), val,
5796 TREE_OPERAND (rhs, 1),
5797 TREE_OPERAND (rhs, 2));
5798 }
5799 else if (TREE_CODE (rhs) == MEM_REF
5800 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5801 {
5802 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5803 if (TREE_CODE (val) == ADDR_EXPR
5804 && is_gimple_min_invariant (val))
5805 {
5806 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5807 unshare_expr (val),
5808 TREE_OPERAND (rhs, 1));
5809 if (tem)
5810 rhs = tem;
5811 }
5812 }
5813 return fold_const_aggregate_ref_1 (rhs, valueize);
5814 }
5815 else if (kind == tcc_declaration)
5816 return get_symbol_constant_value (rhs);
5817 return rhs;
5818 }
5819
5820 case GIMPLE_UNARY_RHS:
5821 return NULL_TREE;
5822
5823 case GIMPLE_BINARY_RHS:
5824 /* Translate &x + CST into an invariant form suitable for
5825 further propagation. */
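/* E.g. when op0 valueizes to &a and op1 to 4, this produces the
   invariant address &MEM[&a + 4B] (illustrative dump syntax).  */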
5826 if (subcode == POINTER_PLUS_EXPR)
5827 {
5828 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5829 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5830 if (TREE_CODE (op0) == ADDR_EXPR
5831 && TREE_CODE (op1) == INTEGER_CST)
5832 {
5833 tree off = fold_convert (ptr_type_node, op1);
5834 return build_fold_addr_expr_loc
5835 (loc,
5836 fold_build2 (MEM_REF,
5837 TREE_TYPE (TREE_TYPE (op0)),
5838 unshare_expr (op0), off));
5839 }
5840 }
5841 /* Canonicalize bool != 0 and bool == 0 appearing after
5842 valueization. While gimple_simplify handles this,
5843 it can get confused by the ~X == 1 -> X == 0 transform,
5844 which we can't reduce to an SSA name or a constant
5845 (and we have no way to tell gimple_simplify not to
5846 consider those transforms in the first place). */
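/* E.g. b_1 = x_2 != 0, where x_2 is itself boolean, valueizes to x_2
   directly (illustrative SSA names).  */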
5847 else if (subcode == EQ_EXPR
5848 || subcode == NE_EXPR)
5849 {
5850 tree lhs = gimple_assign_lhs (stmt);
5851 tree op0 = gimple_assign_rhs1 (stmt);
5852 if (useless_type_conversion_p (TREE_TYPE (lhs),
5853 TREE_TYPE (op0)))
5854 {
5855 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5856 op0 = (*valueize) (op0);
5857 if (TREE_CODE (op0) == INTEGER_CST)
5858 std::swap (op0, op1);
5859 if (TREE_CODE (op1) == INTEGER_CST
5860 && ((subcode == NE_EXPR && integer_zerop (op1))
5861 || (subcode == EQ_EXPR && integer_onep (op1))))
5862 return op0;
5863 }
5864 }
5865 return NULL_TREE;
5866
5867 case GIMPLE_TERNARY_RHS:
5868 {
5869 /* Handle ternary operators that can appear in GIMPLE form. */
5870 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5871 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5872 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5873 return fold_ternary_loc (loc, subcode,
5874 gimple_expr_type (stmt), op0, op1, op2);
5875 }
5876
5877 default:
5878 gcc_unreachable ();
5879 }
5880 }
5881
5882 case GIMPLE_CALL:
5883 {
5884 tree fn;
5885 gcall *call_stmt = as_a <gcall *> (stmt);
5886
5887 if (gimple_call_internal_p (stmt))
5888 {
5889 enum tree_code subcode = ERROR_MARK;
5890 switch (gimple_call_internal_fn (stmt))
5891 {
5892 case IFN_UBSAN_CHECK_ADD:
5893 subcode = PLUS_EXPR;
5894 break;
5895 case IFN_UBSAN_CHECK_SUB:
5896 subcode = MINUS_EXPR;
5897 break;
5898 case IFN_UBSAN_CHECK_MUL:
5899 subcode = MULT_EXPR;
5900 break;
5901 case IFN_BUILTIN_EXPECT:
5902 {
5903 tree arg0 = gimple_call_arg (stmt, 0);
5904 tree op0 = (*valueize) (arg0);
5905 if (TREE_CODE (op0) == INTEGER_CST)
5906 return op0;
5907 return NULL_TREE;
5908 }
5909 default:
5910 return NULL_TREE;
5911 }
5912 tree arg0 = gimple_call_arg (stmt, 0);
5913 tree arg1 = gimple_call_arg (stmt, 1);
5914 tree op0 = (*valueize) (arg0);
5915 tree op1 = (*valueize) (arg1);
5916
5917 if (TREE_CODE (op0) != INTEGER_CST
5918 || TREE_CODE (op1) != INTEGER_CST)
5919 {
5920 switch (subcode)
5921 {
5922 case MULT_EXPR:
5923 /* x * 0 = 0 * x = 0 without overflow. */
5924 if (integer_zerop (op0) || integer_zerop (op1))
5925 return build_zero_cst (TREE_TYPE (arg0));
5926 break;
5927 case MINUS_EXPR:
5928 /* y - y = 0 without overflow. */
5929 if (operand_equal_p (op0, op1, 0))
5930 return build_zero_cst (TREE_TYPE (arg0));
5931 break;
5932 default:
5933 break;
5934 }
5935 }
5936 tree res
5937 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5938 if (res
5939 && TREE_CODE (res) == INTEGER_CST
5940 && !TREE_OVERFLOW (res))
5941 return res;
5942 return NULL_TREE;
5943 }
5944
5945 fn = (*valueize) (gimple_call_fn (stmt));
5946 if (TREE_CODE (fn) == ADDR_EXPR
5947 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5948 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5949 && gimple_builtin_call_types_compatible_p (stmt,
5950 TREE_OPERAND (fn, 0)))
5951 {
5952 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5953 tree retval;
5954 unsigned i;
5955 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5956 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5957 retval = fold_builtin_call_array (loc,
5958 gimple_call_return_type (call_stmt),
5959 fn, gimple_call_num_args (stmt), args);
5960 if (retval)
5961 {
5962 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5963 STRIP_NOPS (retval);
5964 retval = fold_convert (gimple_call_return_type (call_stmt),
5965 retval);
5966 }
5967 return retval;
5968 }
5969 return NULL_TREE;
5970 }
5971
5972 default:
5973 return NULL_TREE;
5974 }
5975 }
5976
5977 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5978 Returns NULL_TREE if folding to a constant is not possible, otherwise
5979 returns a constant according to is_gimple_min_invariant. */
5980
5981 tree
5982 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5983 {
5984 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5985 if (res && is_gimple_min_invariant (res))
5986 return res;
5987 return NULL_TREE;
5988 }
5989
5990
5991 /* The following set of functions are supposed to fold references using
5992 their constant initializers. */
5993
5994 /* See if we can find the constructor defining the value of BASE.
5995 When we know the constructor at a constant offset (such as when
5996 BASE is array[40] and we know the constructor of array), then
5997 BIT_OFFSET is adjusted accordingly.
5998
5999 As a special case, return error_mark_node when the constructor
6000 is not explicitly available but is known to be zero,
6001 such as for 'static const int a;'. */
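/* For instance (an illustrative case), given 'static const int a[2] =
   {1, 2};' and BASE a[1], we return the CONSTRUCTOR of a and add the
   bit offset of a[1] (32 with 32-bit int) to BIT_OFFSET.  */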
6002 static tree
6003 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6004 tree (*valueize)(tree))
6005 {
6006 HOST_WIDE_INT bit_offset2, size, max_size;
6007 bool reverse;
6008
6009 if (TREE_CODE (base) == MEM_REF)
6010 {
6011 if (!integer_zerop (TREE_OPERAND (base, 1)))
6012 {
6013 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6014 return NULL_TREE;
6015 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6016 * BITS_PER_UNIT);
6017 }
6018
6019 if (valueize
6020 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6021 base = valueize (TREE_OPERAND (base, 0));
6022 if (!base || TREE_CODE (base) != ADDR_EXPR)
6023 return NULL_TREE;
6024 base = TREE_OPERAND (base, 0);
6025 }
6026 else if (valueize
6027 && TREE_CODE (base) == SSA_NAME)
6028 base = valueize (base);
6029
6030 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6031 DECL_INITIAL. If BASE is a nested reference into another
6032 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6033 the inner reference. */
6034 switch (TREE_CODE (base))
6035 {
6036 case VAR_DECL:
6037 case CONST_DECL:
6038 {
6039 tree init = ctor_for_folding (base);
6040
6041 /* Our semantics are the exact opposite of ctor_for_folding's:
6042 NULL means unknown, while error_mark_node means 0. */
6043 if (init == error_mark_node)
6044 return NULL_TREE;
6045 if (!init)
6046 return error_mark_node;
6047 return init;
6048 }
6049
6050 case VIEW_CONVERT_EXPR:
6051 return get_base_constructor (TREE_OPERAND (base, 0),
6052 bit_offset, valueize);
6053
6054 case ARRAY_REF:
6055 case COMPONENT_REF:
6056 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6057 &reverse);
6058 if (max_size == -1 || size != max_size)
6059 return NULL_TREE;
6060 *bit_offset += bit_offset2;
6061 return get_base_constructor (base, bit_offset, valueize);
6062
6063 case CONSTRUCTOR:
6064 return base;
6065
6066 default:
6067 if (CONSTANT_CLASS_P (base))
6068 return base;
6069
6070 return NULL_TREE;
6071 }
6072 }
6073
6074 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6075 SIZE to the memory at bit OFFSET. */
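/* For example, with 'const int a[4] = {10, 20, 30, 40};' and 32-bit
   int, a 32-bit read at bit OFFSET 64 computes access_index
   64/8/4 == 2 and inner_offset 0, yielding 30 (illustrative data).  */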
6076
6077 static tree
6078 fold_array_ctor_reference (tree type, tree ctor,
6079 unsigned HOST_WIDE_INT offset,
6080 unsigned HOST_WIDE_INT size,
6081 tree from_decl)
6082 {
6083 offset_int low_bound;
6084 offset_int elt_size;
6085 offset_int access_index;
6086 tree domain_type = NULL_TREE;
6087 HOST_WIDE_INT inner_offset;
6088
6089 /* Compute low bound and elt size. */
6090 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6091 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6092 if (domain_type && TYPE_MIN_VALUE (domain_type))
6093 {
6094 /* Static constructors for variably sized objects make no sense. */
6095 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6096 return NULL_TREE;
6097 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6098 }
6099 else
6100 low_bound = 0;
6101 /* Static constructors for variably sized objects make no sense. */
6102 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6103 return NULL_TREE;
6104 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6105
6106 /* We can handle only constant-sized accesses that are known not to
6107 be larger than the size of the array element. */
6108 if (!TYPE_SIZE_UNIT (type)
6109 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6110 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6111 || elt_size == 0)
6112 return NULL_TREE;
6113
6114 /* Compute the array index we look for. */
6115 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6116 elt_size);
6117 access_index += low_bound;
6118
6119 /* And offset within the access. */
6120 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6121
6122 /* See if the array field is large enough to span the whole access. We do
6123 not care to fold accesses spanning multiple array indices. */
6124 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6125 return NULL_TREE;
6126 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6127 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6128
6129 /* When memory is not explicitly mentioned in the constructor,
6130 it is 0 (or out of range). */
6131 return build_zero_cst (type);
6132 }
6133
6134 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6135 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
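/* For example, with 'struct { int x; int y; } c = {1, 2};' and 32-bit
   int, a 32-bit read at bit OFFSET 32 falls entirely within the field
   y and folds to 2 (illustrative data).  */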
6136
6137 static tree
6138 fold_nonarray_ctor_reference (tree type, tree ctor,
6139 unsigned HOST_WIDE_INT offset,
6140 unsigned HOST_WIDE_INT size,
6141 tree from_decl)
6142 {
6143 unsigned HOST_WIDE_INT cnt;
6144 tree cfield, cval;
6145
6146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6147 cval)
6148 {
6149 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6150 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6151 tree field_size = DECL_SIZE (cfield);
6152 offset_int bitoffset;
6153 offset_int bitoffset_end, access_end;
6154
6155 /* Variable-sized objects in static constructors make no sense,
6156 but field_size can be NULL for flexible array members. */
6157 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6158 && TREE_CODE (byte_offset) == INTEGER_CST
6159 && (field_size != NULL_TREE
6160 ? TREE_CODE (field_size) == INTEGER_CST
6161 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6162
6163 /* Compute bit offset of the field. */
6164 bitoffset = (wi::to_offset (field_offset)
6165 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6166 /* Compute bit offset where the field ends. */
6167 if (field_size != NULL_TREE)
6168 bitoffset_end = bitoffset + wi::to_offset (field_size);
6169 else
6170 bitoffset_end = 0;
6171
6172 access_end = offset_int (offset) + size;
6173
6174 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6175 [BITOFFSET, BITOFFSET_END)? */
6176 if (wi::cmps (access_end, bitoffset) > 0
6177 && (field_size == NULL_TREE
6178 || wi::lts_p (offset, bitoffset_end)))
6179 {
6180 offset_int inner_offset = offset_int (offset) - bitoffset;
6181 /* We do have overlap. Now see if the field is large enough to
6182 cover the access. Give up for accesses spanning multiple
6183 fields. */
6184 if (wi::cmps (access_end, bitoffset_end) > 0)
6185 return NULL_TREE;
6186 if (offset < bitoffset)
6187 return NULL_TREE;
6188 return fold_ctor_reference (type, cval,
6189 inner_offset.to_uhwi (), size,
6190 from_decl);
6191 }
6192 }
6193 /* When memory is not explicitly mentioned in the constructor, it is 0. */
6194 return build_zero_cst (type);
6195 }
6196
6197 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6198 to the memory at bit OFFSET. */
6199
6200 tree
6201 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6202 unsigned HOST_WIDE_INT size, tree from_decl)
6203 {
6204 tree ret;
6205
6206 /* We found the field with an exact match. */
6207 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6208 && !offset)
6209 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6210
6211 /* We are at the end of the walk; see if we can view-convert the
6212 result. */
6213 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6214 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6215 && !compare_tree_int (TYPE_SIZE (type), size)
6216 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6217 {
6218 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6219 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6220 if (ret)
6221 STRIP_USELESS_TYPE_CONVERSION (ret);
6222 return ret;
6223 }
6224 /* For constants and byte-aligned/sized reads try to go through
6225 native_encode/interpret. */
6226 if (CONSTANT_CLASS_P (ctor)
6227 && BITS_PER_UNIT == 8
6228 && offset % BITS_PER_UNIT == 0
6229 && size % BITS_PER_UNIT == 0
6230 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6231 {
6232 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6233 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6234 offset / BITS_PER_UNIT);
6235 if (len > 0)
6236 return native_interpret_expr (type, buf, len);
6237 }
6238 if (TREE_CODE (ctor) == CONSTRUCTOR)
6239 {
6240
6241 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6242 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6243 return fold_array_ctor_reference (type, ctor, offset, size,
6244 from_decl);
6245 else
6246 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6247 from_decl);
6248 }
6249
6250 return NULL_TREE;
6251 }
6252
6253 /* Return the tree representing the element referenced by T if T is an
6254 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6255 names using VALUEIZE. Return NULL_TREE otherwise. */
6256
6257 tree
6258 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6259 {
6260 tree ctor, idx, base;
6261 HOST_WIDE_INT offset, size, max_size;
6262 tree tem;
6263 bool reverse;
6264
6265 if (TREE_THIS_VOLATILE (t))
6266 return NULL_TREE;
6267
6268 if (DECL_P (t))
6269 return get_symbol_constant_value (t);
6270
6271 tem = fold_read_from_constant_string (t);
6272 if (tem)
6273 return tem;
6274
6275 switch (TREE_CODE (t))
6276 {
6277 case ARRAY_REF:
6278 case ARRAY_RANGE_REF:
6279 /* Constant indices are handled well by get_base_constructor.
6280 Only special-case variable offsets.
6281 FIXME: This code can't handle nested references with variable indices
6282 (they will be handled only by iteration of ccp). Perhaps we can bring
6283 get_ref_base_and_extent here and make it use a valueize callback. */
6284 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6285 && valueize
6286 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6287 && TREE_CODE (idx) == INTEGER_CST)
6288 {
6289 tree low_bound, unit_size;
6290
6291 /* If the resulting bit-offset is constant, track it. */
6292 if ((low_bound = array_ref_low_bound (t),
6293 TREE_CODE (low_bound) == INTEGER_CST)
6294 && (unit_size = array_ref_element_size (t),
6295 tree_fits_uhwi_p (unit_size)))
6296 {
6297 offset_int woffset
6298 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6299 TYPE_PRECISION (TREE_TYPE (idx)));
6300
6301 if (wi::fits_shwi_p (woffset))
6302 {
6303 offset = woffset.to_shwi ();
6304 /* TODO: This code seems wrong; it should multiply first and
6305 then check whether the result fits. */
6306 offset *= tree_to_uhwi (unit_size);
6307 offset *= BITS_PER_UNIT;
6308
6309 base = TREE_OPERAND (t, 0);
6310 ctor = get_base_constructor (base, &offset, valueize);
6311 /* Empty constructor. Always fold to 0. */
6312 if (ctor == error_mark_node)
6313 return build_zero_cst (TREE_TYPE (t));
6314 /* Out-of-bounds array access. The value is undefined,
6315 but don't fold. */
6316 if (offset < 0)
6317 return NULL_TREE;
6318 /* We cannot determine the ctor. */
6319 if (!ctor)
6320 return NULL_TREE;
6321 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6322 tree_to_uhwi (unit_size)
6323 * BITS_PER_UNIT,
6324 base);
6325 }
6326 }
6327 }
6328 /* Fallthru. */
6329
6330 case COMPONENT_REF:
6331 case BIT_FIELD_REF:
6332 case TARGET_MEM_REF:
6333 case MEM_REF:
6334 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6335 ctor = get_base_constructor (base, &offset, valueize);
6336
6337 /* Empty constructor. Always fold to 0. */
6338 if (ctor == error_mark_node)
6339 return build_zero_cst (TREE_TYPE (t));
6340 /* We do not know precise address. */
6341 if (max_size == -1 || max_size != size)
6342 return NULL_TREE;
6343 /* We cannot determine the ctor. */
6344 if (!ctor)
6345 return NULL_TREE;
6346
6347 /* Out-of-bounds array access. The value is undefined, but don't fold. */
6348 if (offset < 0)
6349 return NULL_TREE;
6350
6351 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6352 base);
6353
6354 case REALPART_EXPR:
6355 case IMAGPART_EXPR:
6356 {
6357 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6358 if (c && TREE_CODE (c) == COMPLEX_CST)
6359 return fold_build1_loc (EXPR_LOCATION (t),
6360 TREE_CODE (t), TREE_TYPE (t), c);
6361 break;
6362 }
6363
6364 default:
6365 break;
6366 }
6367
6368 return NULL_TREE;
6369 }
6370
6371 tree
6372 fold_const_aggregate_ref (tree t)
6373 {
6374 return fold_const_aggregate_ref_1 (t, NULL);
6375 }
6376
6377 /* Look up the virtual method with index TOKEN in virtual table V
6378 at OFFSET.
6379 Set CAN_REFER, if non-NULL, to false if the method
6380 is not referable or if the virtual table is ill-formed (such as rewritten
6381 by a non-C++-produced symbol); otherwise just return NULL in that case. */
6382
6383 tree
6384 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6385 tree v,
6386 unsigned HOST_WIDE_INT offset,
6387 bool *can_refer)
6388 {
6389 tree vtable = v, init, fn;
6390 unsigned HOST_WIDE_INT size;
6391 unsigned HOST_WIDE_INT elt_size, access_index;
6392 tree domain_type;
6393
6394 if (can_refer)
6395 *can_refer = true;
6396
6397 /* First of all, double-check that we have a virtual table. */
6398 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6399 {
6400 /* Pass down that we lost track of the target. */
6401 if (can_refer)
6402 *can_refer = false;
6403 return NULL_TREE;
6404 }
6405
6406 init = ctor_for_folding (v);
6407
6408 /* The virtual tables should always be born with constructors,
6409 and we should always assume that they are available for
6410 folding. At the moment we do not stream them in all cases,
6411 but it should never happen that the ctor seems unreachable. */
6412 gcc_assert (init);
6413 if (init == error_mark_node)
6414 {
6415 gcc_assert (in_lto_p);
6416 /* Pass down that we lost track of the target. */
6417 if (can_refer)
6418 *can_refer = false;
6419 return NULL_TREE;
6420 }
6421 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6422 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6423 offset *= BITS_PER_UNIT;
6424 offset += token * size;
6425
6426 /* Look up the value in the constructor, which is assumed to be an array.
6427 This is equivalent to
6428 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6429 offset, size, NULL);
6430 but in constant time. We expect that the frontend produced a simple
6431 array without indexed initializers. */
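/* As a worked example (entry size is target-dependent): with 64-bit
   vtable entries, TOKEN 3 and byte OFFSET 16, we get
   offset = 16*8 + 3*64 = 320 bits and hence access_index = 5.  */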
6432
6433 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6434 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6435 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6436 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6437
6438 access_index = offset / BITS_PER_UNIT / elt_size;
6439 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6440
6441 /* This code assumes that there are no
6442 indexed fields produced by the C++ FE, so we can directly index the array. */
6443 if (access_index < CONSTRUCTOR_NELTS (init))
6444 {
6445 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6446 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6447 STRIP_NOPS (fn);
6448 }
6449 else
6450 fn = NULL;
6451
6452 /* For a type-inconsistent program we may end up looking up a virtual method
6453 in a virtual table that does not contain TOKEN entries. We may overrun
6454 the virtual table and pick up a constant or an RTTI info pointer.
6455 In any case the call is undefined. */
6456 if (!fn
6457 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6458 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6459 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6460 else
6461 {
6462 fn = TREE_OPERAND (fn, 0);
6463
6464 /* When the cgraph node is missing and the function is not public, we cannot
6465 devirtualize. This can happen in WHOPR when the actual method
6466 ends up in another partition because we found the devirtualization
6467 possibility too late. */
6468 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6469 {
6470 if (can_refer)
6471 {
6472 *can_refer = false;
6473 return fn;
6474 }
6475 return NULL_TREE;
6476 }
6477 }
6478
6479 /* Make sure we create a cgraph node for functions we'll reference.
6480 They can be non-existent if the reference comes from an entry
6481 of an external vtable for example. */
6482 cgraph_node::get_create (fn);
6483
6484 return fn;
6485 }
6486
6487 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6488 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6489 KNOWN_BINFO carries the binfo describing the true type of
6490 OBJ_TYPE_REF_OBJECT(REF).
6491 Set CAN_REFER, if non-NULL, to false if the method
6492 is not referable or if the virtual table is ill-formed (such as rewritten
6493 by a non-C++-produced symbol); otherwise just return NULL in that case. */
6494
6495 tree
6496 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6497 bool *can_refer)
6498 {
6499 unsigned HOST_WIDE_INT offset;
6500 tree v;
6501
6502 v = BINFO_VTABLE (known_binfo);
6503 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6504 if (!v)
6505 return NULL_TREE;
6506
6507 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6508 {
6509 if (can_refer)
6510 *can_refer = false;
6511 return NULL_TREE;
6512 }
6513 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6514 }
6515
6516 /* Given a pointer value T, return a simplified version of an
6517 indirection through T, or NULL_TREE if no simplification is
6518 possible. Note that the resulting type may differ from
6519 the type pointed to, in the sense that it is still compatible
6520 from the langhooks point of view. */
6521
6522 tree
6523 gimple_fold_indirect_ref (tree t)
6524 {
6525 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6526 tree sub = t;
6527 tree subtype;
6528
6529 STRIP_NOPS (sub);
6530 subtype = TREE_TYPE (sub);
6531 if (!POINTER_TYPE_P (subtype))
6532 return NULL_TREE;
6533
6534 if (TREE_CODE (sub) == ADDR_EXPR)
6535 {
6536 tree op = TREE_OPERAND (sub, 0);
6537 tree optype = TREE_TYPE (op);
6538 /* *&p => p */
6539 if (useless_type_conversion_p (type, optype))
6540 return op;
6541
6542 /* *(foo *)&fooarray => fooarray[0] */
6543 if (TREE_CODE (optype) == ARRAY_TYPE
6544 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6545 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6546 {
6547 tree type_domain = TYPE_DOMAIN (optype);
6548 tree min_val = size_zero_node;
6549 if (type_domain && TYPE_MIN_VALUE (type_domain))
6550 min_val = TYPE_MIN_VALUE (type_domain);
6551 if (TREE_CODE (min_val) == INTEGER_CST)
6552 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6553 }
6554 /* *(foo *)&complexfoo => __real__ complexfoo */
6555 else if (TREE_CODE (optype) == COMPLEX_TYPE
6556 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6557 return fold_build1 (REALPART_EXPR, type, op);
6558 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6559 else if (TREE_CODE (optype) == VECTOR_TYPE
6560 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6561 {
6562 tree part_width = TYPE_SIZE (type);
6563 tree index = bitsize_int (0);
6564 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6565 }
6566 }
6567
6568 /* *(p + CST) -> ... */
6569 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6570 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6571 {
6572 tree addr = TREE_OPERAND (sub, 0);
6573 tree off = TREE_OPERAND (sub, 1);
6574 tree addrtype;
6575
6576 STRIP_NOPS (addr);
6577 addrtype = TREE_TYPE (addr);
6578
6579 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6580 if (TREE_CODE (addr) == ADDR_EXPR
6581 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6582 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6583 && tree_fits_uhwi_p (off))
6584 {
6585 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6586 tree part_width = TYPE_SIZE (type);
6587 unsigned HOST_WIDE_INT part_widthi
6588 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6589 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6590 tree index = bitsize_int (indexi);
6591 if (offset / part_widthi
6592 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6593 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6594 part_width, index);
6595 }
6596
6597 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6598 if (TREE_CODE (addr) == ADDR_EXPR
6599 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6600 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6601 {
6602 tree size = TYPE_SIZE_UNIT (type);
6603 if (tree_int_cst_equal (size, off))
6604 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6605 }
6606
6607 /* *(p + CST) -> MEM_REF <p, CST>. */
6608 if (TREE_CODE (addr) != ADDR_EXPR
6609 || DECL_P (TREE_OPERAND (addr, 0)))
6610 return fold_build2 (MEM_REF, type,
6611 addr,
6612 wide_int_to_tree (ptype, off));
6613 }
6614
6615 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6616 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6617 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6618 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6619 {
6620 tree type_domain;
6621 tree min_val = size_zero_node;
6622 tree osub = sub;
6623 sub = gimple_fold_indirect_ref (sub);
6624 if (! sub)
6625 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6626 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6627 if (type_domain && TYPE_MIN_VALUE (type_domain))
6628 min_val = TYPE_MIN_VALUE (type_domain);
6629 if (TREE_CODE (min_val) == INTEGER_CST)
6630 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6631 }
6632
6633 return NULL_TREE;
6634 }
6635
6636 /* Return true if CODE is an operation that, when operating on signed
6637 integer types, involves undefined behavior on overflow and the
6638 operation can be expressed with unsigned arithmetic. */
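/* Note that signed division is deliberately not listed: INT_MIN / -1
   does overflow, but the division cannot simply be carried out in the
   corresponding unsigned type.  */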
6639
6640 bool
6641 arith_code_with_undefined_signed_overflow (tree_code code)
6642 {
6643 switch (code)
6644 {
6645 case PLUS_EXPR:
6646 case MINUS_EXPR:
6647 case MULT_EXPR:
6648 case NEGATE_EXPR:
6649 case POINTER_PLUS_EXPR:
6650 return true;
6651 default:
6652 return false;
6653 }
6654 }
6655
6656 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6657 operation that can be transformed to unsigned arithmetic by converting
6658 its operands, carrying out the operation in the corresponding unsigned
6659 type and converting the result back to the original type.
6660
6661 Returns a sequence of statements that replace STMT and also contain
6662 a modified form of STMT itself. */
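/* A rough sketch of the effect (illustrative SSA names): the signed
     a_1 = b_2 + c_3;
   becomes
     u_4 = (unsigned int) b_2 + (unsigned int) c_3;
     a_1 = (int) u_4;
   with both statements contained in the returned sequence.  */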
6663
6664 gimple_seq
6665 rewrite_to_defined_overflow (gimple *stmt)
6666 {
6667 if (dump_file && (dump_flags & TDF_DETAILS))
6668 {
6669 fprintf (dump_file, "rewriting stmt with undefined signed "
6670 "overflow ");
6671 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6672 }
6673
6674 tree lhs = gimple_assign_lhs (stmt);
6675 tree type = unsigned_type_for (TREE_TYPE (lhs));
6676 gimple_seq stmts = NULL;
6677 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6678 {
6679 tree op = gimple_op (stmt, i);
6680 op = gimple_convert (&stmts, type, op);
6681 gimple_set_op (stmt, i, op);
6682 }
6683 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6684 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6685 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6686 gimple_seq_add_stmt (&stmts, stmt);
6687 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6688 gimple_seq_add_stmt (&stmts, cvt);
6689
6690 return stmts;
6691 }
6692
6693
6694 /* The valueization hook we use for the gimple_build API simplification.
6695 This makes us match fold_buildN behavior by only combining with
6696 statements in the sequence(s) we are currently building. */
6697
6698 static tree
6699 gimple_build_valueize (tree op)
6700 {
6701 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6702 return op;
6703 return NULL_TREE;
6704 }
6705
6706 /* Build the expression CODE OP0 of type TYPE with location LOC,
6707 simplifying it first if possible. Returns the built
6708 expression value and appends statements possibly defining it
6709 to SEQ. */
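/* A hypothetical use, assuming type, op0 and loc are in scope:

     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op0);

   If the negation folds (e.g. op0 is a constant), seq stays empty;
   otherwise a statement defining res is appended to it.  */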
6710
6711 tree
6712 gimple_build (gimple_seq *seq, location_t loc,
6713 enum tree_code code, tree type, tree op0)
6714 {
6715 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6716 if (!res)
6717 {
6718 res = create_tmp_reg_or_ssa_name (type);
6719 gimple *stmt;
6720 if (code == REALPART_EXPR
6721 || code == IMAGPART_EXPR
6722 || code == VIEW_CONVERT_EXPR)
6723 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6724 else
6725 stmt = gimple_build_assign (res, code, op0);
6726 gimple_set_location (stmt, loc);
6727 gimple_seq_add_stmt_without_update (seq, stmt);
6728 }
6729 return res;
6730 }
6731
6732 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6733 simplifying it first if possible. Returns the built
6734 expression value and appends statements possibly defining it
6735 to SEQ. */
6736
6737 tree
6738 gimple_build (gimple_seq *seq, location_t loc,
6739 enum tree_code code, tree type, tree op0, tree op1)
6740 {
6741 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6742 if (!res)
6743 {
6744 res = create_tmp_reg_or_ssa_name (type);
6745 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6746 gimple_set_location (stmt, loc);
6747 gimple_seq_add_stmt_without_update (seq, stmt);
6748 }
6749 return res;
6750 }
6751
6752 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6753 simplifying it first if possible. Returns the built
6754 expression value and appends statements possibly defining it
6755 to SEQ. */
6756
6757 tree
6758 gimple_build (gimple_seq *seq, location_t loc,
6759 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6760 {
6761 tree res = gimple_simplify (code, type, op0, op1, op2,
6762 seq, gimple_build_valueize);
6763 if (!res)
6764 {
6765 res = create_tmp_reg_or_ssa_name (type);
6766 gimple *stmt;
6767 if (code == BIT_FIELD_REF)
6768 stmt = gimple_build_assign (res, code,
6769 build3 (code, type, op0, op1, op2));
6770 else
6771 stmt = gimple_build_assign (res, code, op0, op1, op2);
6772 gimple_set_location (stmt, loc);
6773 gimple_seq_add_stmt_without_update (seq, stmt);
6774 }
6775 return res;
6776 }
6777
6778 /* Build the call FN (ARG0) with a result of type TYPE
6779 (or no result if TYPE is void) with location LOC,
6780 simplifying it first if possible. Returns the built
6781 expression value (or NULL_TREE if TYPE is void) and appends
6782 statements possibly defining it to SEQ. */
6783
6784 tree
6785 gimple_build (gimple_seq *seq, location_t loc,
6786 enum built_in_function fn, tree type, tree arg0)
6787 {
6788 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6789 if (!res)
6790 {
6791 tree decl = builtin_decl_implicit (fn);
6792 gimple *stmt = gimple_build_call (decl, 1, arg0);
6793 if (!VOID_TYPE_P (type))
6794 {
6795 res = create_tmp_reg_or_ssa_name (type);
6796 gimple_call_set_lhs (stmt, res);
6797 }
6798 gimple_set_location (stmt, loc);
6799 gimple_seq_add_stmt_without_update (seq, stmt);
6800 }
6801 return res;
6802 }
6803
6804 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6805 (or no result if TYPE is void) with location LOC,
6806 simplifying it first if possible. Returns the built
6807 expression value (or NULL_TREE if TYPE is void) and appends
6808 statements possibly defining it to SEQ. */
6809
6810 tree
6811 gimple_build (gimple_seq *seq, location_t loc,
6812 enum built_in_function fn, tree type, tree arg0, tree arg1)
6813 {
6814 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6815 if (!res)
6816 {
6817 tree decl = builtin_decl_implicit (fn);
6818 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6819 if (!VOID_TYPE_P (type))
6820 {
6821 res = create_tmp_reg_or_ssa_name (type);
6822 gimple_call_set_lhs (stmt, res);
6823 }
6824 gimple_set_location (stmt, loc);
6825 gimple_seq_add_stmt_without_update (seq, stmt);
6826 }
6827 return res;
6828 }
6829
6830 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6831 (or no result if TYPE is void) with location LOC,
6832 simplifying it first if possible. Returns the built
6833 expression value (or NULL_TREE if TYPE is void) and appends
6834 statements possibly defining it to SEQ. */
6835
6836 tree
6837 gimple_build (gimple_seq *seq, location_t loc,
6838 enum built_in_function fn, tree type,
6839 tree arg0, tree arg1, tree arg2)
6840 {
6841 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6842 seq, gimple_build_valueize);
6843 if (!res)
6844 {
6845 tree decl = builtin_decl_implicit (fn);
6846 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6847 if (!VOID_TYPE_P (type))
6848 {
6849 res = create_tmp_reg_or_ssa_name (type);
6850 gimple_call_set_lhs (stmt, res);
6851 }
6852 gimple_set_location (stmt, loc);
6853 gimple_seq_add_stmt_without_update (seq, stmt);
6854 }
6855 return res;
6856 }
6857
6858 /* Build the conversion (TYPE) OP with a result of type TYPE
6859 with location LOC if such conversion is necessary in GIMPLE,
6860 simplifying it first.
6861 Returns the built expression value and appends
6862 statements possibly defining it to SEQ. */
6863
6864 tree
6865 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6866 {
6867 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6868 return op;
6869 return gimple_build (seq, loc, NOP_EXPR, type, op);
6870 }
6871
6872 /* Build the conversion (ptrofftype) OP with a result of a type
6873 compatible with ptrofftype with location LOC if such conversion
6874 is necessary in GIMPLE, simplifying it first.
6875 Returns the built expression value and appends
6876 statements possibly defining it to SEQ. */
6877
6878 tree
6879 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6880 {
6881 if (ptrofftype_p (TREE_TYPE (op)))
6882 return op;
6883 return gimple_convert (seq, loc, sizetype, op);
6884 }
6885
6886 /* Return true if the result of assignment STMT is known to be non-negative.
6887 If the return value is based on the assumption that signed overflow is
6888 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6889 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6890
6891 static bool
6892 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6893 int depth)
6894 {
6895 enum tree_code code = gimple_assign_rhs_code (stmt);
6896 switch (get_gimple_rhs_class (code))
6897 {
6898 case GIMPLE_UNARY_RHS:
6899 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6900 gimple_expr_type (stmt),
6901 gimple_assign_rhs1 (stmt),
6902 strict_overflow_p, depth);
6903 case GIMPLE_BINARY_RHS:
6904 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6905 gimple_expr_type (stmt),
6906 gimple_assign_rhs1 (stmt),
6907 gimple_assign_rhs2 (stmt),
6908 strict_overflow_p, depth);
6909 case GIMPLE_TERNARY_RHS:
6910 return false;
6911 case GIMPLE_SINGLE_RHS:
6912 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6913 strict_overflow_p, depth);
6914 case GIMPLE_INVALID_RHS:
6915 break;
6916 }
6917 gcc_unreachable ();
6918 }
6919
6920 /* Return true if the return value of call STMT is known to be non-negative.
6921 If the return value is based on the assumption that signed overflow is
6922 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6923 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6924
6925 static bool
6926 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6927 int depth)
6928 {
6929 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6930 gimple_call_arg (stmt, 0) : NULL_TREE;
6931 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6932 gimple_call_arg (stmt, 1) : NULL_TREE;
6933
6934 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6935 gimple_call_combined_fn (stmt),
6936 arg0,
6937 arg1,
6938 strict_overflow_p, depth);
6939 }
6940
6941 /* Return true if the result of PHI node STMT is known to be non-negative.
6942 If the return value is based on the assumption that signed overflow is
6943 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6944 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6945
6946 static bool
6947 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6948 int depth)
6949 {
6950 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6951 {
6952 tree arg = gimple_phi_arg_def (stmt, i);
6953 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6954 return false;
6955 }
6956 return true;
6957 }
6958
6959 /* Return true if STMT is known to compute a non-negative value.
6960 If the return value is based on the assumption that signed overflow is
6961 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6962 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6963
6964 bool
6965 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6966 int depth)
6967 {
6968 switch (gimple_code (stmt))
6969 {
6970 case GIMPLE_ASSIGN:
6971 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6972 depth);
6973 case GIMPLE_CALL:
6974 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6975 depth);
6976 case GIMPLE_PHI:
6977 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6978 depth);
6979 default:
6980 return false;
6981 }
6982 }
6983
6984 /* Return true if the floating-point value computed by assignment STMT
6985 is known to have an integer value. We also allow +Inf, -Inf and NaN
6986 to be considered integer values. Return false for signaling NaN.
6987
6988 DEPTH is the current nesting depth of the query. */
6989
6990 static bool
6991 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6992 {
6993 enum tree_code code = gimple_assign_rhs_code (stmt);
6994 switch (get_gimple_rhs_class (code))
6995 {
6996 case GIMPLE_UNARY_RHS:
6997 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6998 gimple_assign_rhs1 (stmt), depth);
6999 case GIMPLE_BINARY_RHS:
7000 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7001 gimple_assign_rhs1 (stmt),
7002 gimple_assign_rhs2 (stmt), depth);
7003 case GIMPLE_TERNARY_RHS:
7004 return false;
7005 case GIMPLE_SINGLE_RHS:
7006 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7007 case GIMPLE_INVALID_RHS:
7008 break;
7009 }
7010 gcc_unreachable ();
7011 }
7012
7013 /* Return true if the floating-point value computed by call STMT is known
7014 to have an integer value. We also allow +Inf, -Inf and NaN to be
7015 considered integer values. Return false for signaling NaN.
7016
7017 DEPTH is the current nesting depth of the query. */
7018
7019 static bool
7020 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7021 {
7022 tree arg0 = (gimple_call_num_args (stmt) > 0
7023 ? gimple_call_arg (stmt, 0)
7024 : NULL_TREE);
7025 tree arg1 = (gimple_call_num_args (stmt) > 1
7026 ? gimple_call_arg (stmt, 1)
7027 : NULL_TREE);
7028 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7029 arg0, arg1, depth);
7030 }
7031
7032 /* Return true if the floating-point result of phi STMT is known to have
7033 an integer value. We also allow +Inf, -Inf and NaN to be considered
7034 integer values. Return false for signaling NaN.
7035
7036 DEPTH is the current nesting depth of the query. */
7037
7038 static bool
7039 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7040 {
7041 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7042 {
7043 tree arg = gimple_phi_arg_def (stmt, i);
7044 if (!integer_valued_real_single_p (arg, depth + 1))
7045 return false;
7046 }
7047 return true;
7048 }
7049
7050 /* Return true if the floating-point value computed by STMT is known
7051 to have an integer value. We also allow +Inf, -Inf and NaN to be
7052 considered integer values. Return false for signaling NaN.
7053
7054 DEPTH is the current nesting depth of the query. */
7055
7056 bool
7057 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7058 {
7059 switch (gimple_code (stmt))
7060 {
7061 case GIMPLE_ASSIGN:
7062 return gimple_assign_integer_valued_real_p (stmt, depth);
7063 case GIMPLE_CALL:
7064 return gimple_call_integer_valued_real_p (stmt, depth);
7065 case GIMPLE_PHI:
7066 return gimple_phi_integer_valued_real_p (stmt, depth);
7067 default:
7068 return false;
7069 }
7070 }