Make tree_expr_nonnegative_warnv_p recurse into SSA names
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "predict.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "flags.h"
33 #include "insn-config.h"
34 #include "expmed.h"
35 #include "dojump.h"
36 #include "explow.h"
37 #include "calls.h"
38 #include "emit-rtl.h"
39 #include "varasm.h"
40 #include "stmt.h"
41 #include "expr.h"
42 #include "stor-layout.h"
43 #include "dumpfile.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
50 #include "tree-dfa.h"
51 #include "tree-ssa.h"
52 #include "tree-ssa-propagate.h"
53 #include "target.h"
54 #include "cgraph.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
60 #include "dbgcnt.h"
61 #include "builtins.h"
62 #include "output.h"
63 #include "tree-eh.h"
64 #include "gimple-match.h"
65 #include "gomp-constants.h"
66
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. debug-only) decls never have a body to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body inlined into another function cannot be
	 referenced on its own anymore.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
166
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if the address cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      /* Rewrite invariant PTR p+ CST as &MEM[PTR, CST] which is the
	 form is_gimple_min_invariant accepts.  */
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its backing decl (mutates
	     CVAL in place).  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address below makes the variable addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid gimple invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
227
228 /* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
230
231 tree
232 get_symbol_constant_value (tree sym)
233 {
234 tree val = ctor_for_folding (sym);
235 if (val != error_mark_node)
236 {
237 if (val)
238 {
239 val = canonicalize_constructor_val (unshare_expr (val), sym);
240 if (val && is_gimple_min_invariant (val))
241 return val;
242 else
243 return NULL_TREE;
244 }
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
248 if (!val
249 && is_gimple_reg_type (TREE_TYPE (sym)))
250 return build_zero_cst (TREE_TYPE (sym));
251 }
252
253 return NULL_TREE;
254 }
255
256
257
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
262
263 static tree
264 maybe_fold_reference (tree expr, bool is_lhs)
265 {
266 tree result;
267
268 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
269 || TREE_CODE (expr) == REALPART_EXPR
270 || TREE_CODE (expr) == IMAGPART_EXPR)
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_unary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0));
276 else if (TREE_CODE (expr) == BIT_FIELD_REF
277 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
278 return fold_ternary_loc (EXPR_LOCATION (expr),
279 TREE_CODE (expr),
280 TREE_TYPE (expr),
281 TREE_OPERAND (expr, 0),
282 TREE_OPERAND (expr, 1),
283 TREE_OPERAND (expr, 2));
284
285 if (!is_lhs
286 && (result = fold_const_aggregate_ref (expr))
287 && is_gimple_min_invariant (result))
288 return result;
289
290 return NULL_TREE;
291 }
292
293
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are markers, not values; never fold them.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete (final) target list of at most one entry
		   the virtual reference resolves to a direct address.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p itself (modulo conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE(review): unary and binary RHS folding appears to be handled
	 elsewhere (presumably via gimple-match); nothing done here.  */
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
440
441
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it stores to memory (a single
	 assignment with a non-register lhs) or is a call that may
	 clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the replaced statement's
	     VDEF so downstream uses stay valid; earlier stores get fresh
	     virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
513
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result unused: gimplify EXPR only for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary, then copy it into the call's
	 original lhs as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, preserving the virtual operand chain.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
569
570
571 /* Replace the call at *GSI with the gimple value VAL. */
572
573 static void
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575 {
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
578 gimple *repl;
579 if (lhs)
580 {
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
593 gsi_replace (gsi, repl, false);
594 }
595
596 /* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599 static void
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
601 {
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
612 gsi_replace (gsi, repl, false);
613 fold_stmt (gsi);
614 }
615
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
617
618 static bool
619 var_decl_component_p (tree var)
620 {
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625 }
626
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  /* Only power-of-two lengths map to a single integer mode.  */
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    /* Unaligned load would be slow: give up on this path.  */
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      /* Load the source into a register first, then store.  */
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      /* get_ref_base_and_extent works in bits; convert to bytes.  */
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      /* For pointer-to-array whose size differs from LEN, use the
	 element type instead.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      /* Try to express the destination as a single MEM_REF covering
	 exactly LEN bytes of a variable (component); else NULL.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      /* Likewise for the source.  */
      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      /* If only one side matched, synthesize the other as a MEM_REF
	 of the matched side's type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Emit the copy as a plain (possibly register-mediated) assignment,
	 inheriting the call's virtual operands.  */
      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Compute the return value per ENDP and assign it to the call's lhs.  */
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1055
1056 /* Fold function call to builtin memset or bzero at *GSI setting the
1057 memory of size LEN to VAL. Return whether a simplification was made. */
1058
1059 static bool
1060 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1061 {
1062 gimple *stmt = gsi_stmt (*gsi);
1063 tree etype;
1064 unsigned HOST_WIDE_INT length, cval;
1065
1066 /* If the LEN parameter is zero, return DEST. */
1067 if (integer_zerop (len))
1068 {
1069 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1070 return true;
1071 }
1072
1073 if (! tree_fits_uhwi_p (len))
1074 return false;
1075
1076 if (TREE_CODE (c) != INTEGER_CST)
1077 return false;
1078
1079 tree dest = gimple_call_arg (stmt, 0);
1080 tree var = dest;
1081 if (TREE_CODE (var) != ADDR_EXPR)
1082 return false;
1083
1084 var = TREE_OPERAND (var, 0);
1085 if (TREE_THIS_VOLATILE (var))
1086 return false;
1087
1088 etype = TREE_TYPE (var);
1089 if (TREE_CODE (etype) == ARRAY_TYPE)
1090 etype = TREE_TYPE (etype);
1091
1092 if (!INTEGRAL_TYPE_P (etype)
1093 && !POINTER_TYPE_P (etype))
1094 return NULL_TREE;
1095
1096 if (! var_decl_component_p (var))
1097 return NULL_TREE;
1098
1099 length = tree_to_uhwi (len);
1100 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1101 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1102 return NULL_TREE;
1103
1104 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1105 return NULL_TREE;
1106
1107 if (integer_zerop (c))
1108 cval = 0;
1109 else
1110 {
1111 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1112 return NULL_TREE;
1113
1114 cval = TREE_INT_CST_LOW (c);
1115 cval &= 0xff;
1116 cval |= cval << 8;
1117 cval |= cval << 16;
1118 cval |= (cval << 31) << 1;
1119 }
1120
1121 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1122 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1123 gimple_set_vuse (store, gimple_vuse (stmt));
1124 tree vdef = gimple_vdef (stmt);
1125 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1126 {
1127 gimple_set_vdef (store, gimple_vdef (stmt));
1128 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1129 }
1130 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1131 if (gimple_call_lhs (stmt))
1132 {
1133 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1134 gsi_replace (gsi, asgn, false);
1135 }
1136 else
1137 {
1138 gimple_stmt_iterator gsi2 = *gsi;
1139 gsi_prev (gsi);
1140 gsi_remove (&gsi2, true);
1141 }
1142
1143 return true;
1144 }
1145
1146
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value queries only accept non-negative integer
	     constants.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* For maximum queries (TYPE > 0) keep the larger of the two
	     constants; for an exact length query both must agree.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
	  /* Both arms of a conditional must yield a usable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
1272
1273 tree
1274 get_maxval_strlen (tree arg, int type)
1275 {
1276 bitmap visited = NULL;
1277 tree len = NULL_TREE;
1278 if (!get_maxval_strlen (arg, &len, &visited, type))
1279 len = NULL_TREE;
1280 if (visited)
1281 BITMAP_FREE (visited);
1282
1283 return len;
1284 }
1285
1286
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   The call is replaced by memcpy of length + 1 bytes when the length
   of SRC is a known constant.  Return true if a simplification was
   made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy form may be longer; don't do it when optimizing for
     size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1324
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  The call is turned into memcpy when LEN is a constant that is
   known not to exceed the source length plus one (so no zero padding
   would be required).  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* Account for the terminating nul in the source length.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1373
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   If SRC is the empty string the call folds to DST.  Otherwise, when
   optimizing for speed and the length of SRC is a known constant, the
   strcat is split into a strlen (DST) followed by a memcpy into
   DST + strlen (DST).  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is larger than the call; only do it
     when optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* The result of strcat is its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1468
1469 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1470 are the arguments to the call. */
1471
1472 static bool
1473 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1474 {
1475 gimple *stmt = gsi_stmt (*gsi);
1476 tree dest = gimple_call_arg (stmt, 0);
1477 tree src = gimple_call_arg (stmt, 1);
1478 tree size = gimple_call_arg (stmt, 2);
1479 tree fn;
1480 const char *p;
1481
1482
1483 p = c_getstr (src);
1484 /* If the SRC parameter is "", return DEST. */
1485 if (p && *p == '\0')
1486 {
1487 replace_call_with_value (gsi, dest);
1488 return true;
1489 }
1490
1491 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1492 return false;
1493
1494 /* If __builtin_strcat_chk is used, assume strcat is available. */
1495 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1496 if (!fn)
1497 return false;
1498
1499 gimple *repl = gimple_build_call (fn, 2, dest, src);
1500 replace_call_with_call_and_fold (gsi, repl);
1501 return true;
1502 }
1503
1504 /* Simplify a call to the strncat builtin. */
1505
1506 static bool
1507 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1508 {
1509 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1510 tree dst = gimple_call_arg (stmt, 0);
1511 tree src = gimple_call_arg (stmt, 1);
1512 tree len = gimple_call_arg (stmt, 2);
1513
1514 const char *p = c_getstr (src);
1515
1516 /* If the requested length is zero, or the src parameter string
1517 length is zero, return the dst parameter. */
1518 if (integer_zerop (len) || (p && *p == '\0'))
1519 {
1520 replace_call_with_value (gsi, dst);
1521 return true;
1522 }
1523
1524 /* If the requested len is greater than or equal to the string
1525 length, call strcat. */
1526 if (TREE_CODE (len) == INTEGER_CST && p
1527 && compare_tree_int (len, strlen (p)) >= 0)
1528 {
1529 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1530
1531 /* If the replacement _DECL isn't initialized, don't do the
1532 transformation. */
1533 if (!fn)
1534 return false;
1535
1536 gcall *repl = gimple_build_call (fn, 2, dst, src);
1537 replace_call_with_call_and_fold (gsi, repl);
1538 return true;
1539 }
1540
1541 return false;
1542 }
1543
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  The call may be folded to DEST, to __strcat_chk, or
   to plain strncat.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* SIZE must be a compile-time constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* SIZE is all ones, i.e. the object size is unknown, so the check
     can be dropped entirely.  */
  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1599
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call (the string and the stream).  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the known
   constant length of the string the call is removed, turned into
   fputc, or turned into fwrite.  Return true if a simplification was
   made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1676
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin.  DEST, SRC,
   LEN, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  The call is replaced by the
   corresponding unchecked function when the object-size check can be
   proven redundant.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* SIZE must be a compile-time constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for LEN derived from its SSA def chain.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* SIZE < MAXLEN means the check could fire at run time; keep
	 the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1776
/* Fold a call to a __st[rp]cpy_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  The call may be turned into __strcpy_chk, __memcpy_chk,
   or the plain st[rp]cpy function.  Return true if a simplification
   was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* SIZE must be a compile-time constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for the length of SRC.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy (MAXLEN chars + nul) must fit in SIZE, i.e.
	 MAXLEN < SIZE, for the check to be droppable.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1867
/* Fold a call to a __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code
   of the builtin.  The call may be turned into __strncpy_chk or into
   the plain st{r,p}ncpy function.  Return true if a simplification
   was made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* SIZE must be a compile-time constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for LEN derived from its SSA def chain.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* SIZE < MAXLEN means the check could fire at run time; keep
	 the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1927
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused the call becomes strcpy; otherwise, when
   the source length is a known constant, it becomes memcpy plus a
   pointer adjustment.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      /* NOTE: this FN shadows the outer declaration; harmless.  */
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the call being replaced.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1991
/* Fold a call to __{,v}snprintf_chk at *GSI into a call to
   {,v}snprintf when the size, flag, and format arguments permit.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   Return true if a simplification was made.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* SIZE must be a compile-time constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* MAXLEN is an upper bound for LEN derived from its SSA def
	 chain.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2072
/* Fold the __{,v}sprintf_chk call at *GSI into a plain {,v}sprintf
   call when the output length is provably within the destination's
   object size.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  Returns true iff the statement was changed;
   false means a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN, when computable, is the length of the string this call writes.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "object size unknown" -- no check needed.
     Otherwise require a known LEN strictly smaller than SIZE (strictly,
     to leave room for the terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs (shift them down by 2 and shrink the op count).  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2168
/* Simplify the sprintf call at *GSI, whose arguments are DEST, FMT, and
   optionally ORIG: 'sprintf (dest, fmt)' or 'sprintf (dest, "%s", orig)'.
   Calls with more than 3 arguments are not handled.

   Returns true iff the call was simplified (replaced by a strcpy call,
   plus an assignment of the known result length when the lhs is used).  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* The result is the format's length, known at compile time.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the lhs is used we must also know the copied string's
	 length so we can assign it to the lhs.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2293
/* Simplify the snprintf call at *GSI, whose arguments are DEST, DESTSIZE,
   FMT, and optionally ORIG: 'snprintf (dest, n, fmt)' or
   'snprintf (dest, n, "%s", orig)'.  Calls with more than 4 arguments
   are not handled.

   Returns true iff the call was simplified (replaced by a strcpy call,
   plus an assignment of the known result length when the lhs is used).  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The transformation needs a constant destination size.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* The result is the format's length, known at compile time.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The copied string's length must be a compile-time constant to
	 prove it fits and to assign the result to the lhs.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2432
/* Fold the {,v}fprintf{,_unlocked} or __{,v}fprintf_chk call at *GSI.
   FP, FMT, and ARG are the arguments to the call; ARG may be null in
   the 2-argument case, and calls with more than 3 arguments are not
   handled.  FCODE is the BUILT_IN_* code of the function being folded.

   Returns true iff the call was replaced by an fputs/fputc call.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation
     (fputs/fputc do not return the character count).  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2533
/* Fold the {,v}printf{,_unlocked} or __{,v}printf_chk call at *GSI.
   FMT and ARG are the arguments to the call; ARG may be null in the
   1-argument case, and calls with more than 2 arguments are not
   handled.  FCODE is the BUILT_IN_* code of the function being folded.

   Returns true iff the call was replaced by a puts/putchar call (or
   removed entirely for an empty format).  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation
     (puts/putchar do not return the character count).  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle the "%s" format and %-free formats together: in both cases
     STR below is the literal string that would actually be printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2695
2696
2697
2698 /* Fold a call to __builtin_strlen with known length LEN. */
2699
2700 static bool
2701 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2702 {
2703 gimple *stmt = gsi_stmt (*gsi);
2704 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2705 if (!len)
2706 return false;
2707 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2708 replace_call_with_value (gsi, len);
2709 return true;
2710 }
2711
2712 /* Fold a call to __builtin_acc_on_device. */
2713
2714 static bool
2715 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2716 {
2717 /* Defer folding until we know which compiler we're in. */
2718 if (symtab->state != EXPANSION)
2719 return false;
2720
2721 unsigned val_host = GOMP_DEVICE_HOST;
2722 unsigned val_dev = GOMP_DEVICE_NONE;
2723
2724 #ifdef ACCEL_COMPILER
2725 val_host = GOMP_DEVICE_NOT_HOST;
2726 val_dev = ACCEL_COMPILER_acc_device;
2727 #endif
2728
2729 location_t loc = gimple_location (gsi_stmt (*gsi));
2730
2731 tree host_eq = make_ssa_name (boolean_type_node);
2732 gimple *host_ass = gimple_build_assign
2733 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2734 gimple_set_location (host_ass, loc);
2735 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2736
2737 tree dev_eq = make_ssa_name (boolean_type_node);
2738 gimple *dev_ass = gimple_build_assign
2739 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2740 gimple_set_location (dev_ass, loc);
2741 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2742
2743 tree result = make_ssa_name (boolean_type_node);
2744 gimple *result_ass = gimple_build_assign
2745 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2746 gimple_set_location (result_ass, loc);
2747 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2748
2749 replace_call_with_value (gsi, result);
2750
2751 return true;
2752 }
2753
/* Fold the non-target builtin call at *GSI and return whether any
   simplification was made.  Dispatches to the per-builtin folders
   above; builtins with no dedicated folder fall through to the generic
   tree-level folder (fold_call_stmt).  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      /* bzero (s, n) is memset (s, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* bcopy (src, dst, n) is memmove (dst, src, n) -- note the
	 swapped argument order.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _chk variants carry an extra flag argument at index 1,
	 hence the shifted argument positions.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The _chk variants carry a flag argument at index 0, hence the
	 shifted argument positions.  */
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
2916
2917 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2918 doesn't fit into TYPE. The test for overflow should be regardless of
2919 -fwrapv, and even for unsigned types. */
2920
2921 bool
2922 arith_overflowed_p (enum tree_code code, const_tree type,
2923 const_tree arg0, const_tree arg1)
2924 {
2925 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2926 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2927 widest2_int_cst;
2928 widest2_int warg0 = widest2_int_cst (arg0);
2929 widest2_int warg1 = widest2_int_cst (arg1);
2930 widest2_int wres;
2931 switch (code)
2932 {
2933 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2934 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2935 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2936 default: gcc_unreachable ();
2937 }
2938 signop sign = TYPE_SIGN (type);
2939 if (sign == UNSIGNED && wi::neg_p (wres))
2940 return true;
2941 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2942 }
2943
2944 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2945 The statement may be replaced by another statement, e.g., if the call
2946 simplifies to a constant value. Return true if any changes were made.
2947 It is assumed that the operands have been previously folded. */
2948
2949 static bool
2950 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2951 {
2952 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2953 tree callee;
2954 bool changed = false;
2955 unsigned i;
2956
2957 /* Fold *& in call arguments. */
2958 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2959 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2960 {
2961 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2962 if (tmp)
2963 {
2964 gimple_call_set_arg (stmt, i, tmp);
2965 changed = true;
2966 }
2967 }
2968
2969 /* Check for virtual calls that became direct calls. */
2970 callee = gimple_call_fn (stmt);
2971 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2972 {
2973 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2974 {
2975 if (dump_file && virtual_method_call_p (callee)
2976 && !possible_polymorphic_call_target_p
2977 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2978 (OBJ_TYPE_REF_EXPR (callee)))))
2979 {
2980 fprintf (dump_file,
2981 "Type inheritance inconsistent devirtualization of ");
2982 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2983 fprintf (dump_file, " to ");
2984 print_generic_expr (dump_file, callee, TDF_SLIM);
2985 fprintf (dump_file, "\n");
2986 }
2987
2988 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2989 changed = true;
2990 }
2991 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2992 {
2993 bool final;
2994 vec <cgraph_node *>targets
2995 = possible_polymorphic_call_targets (callee, stmt, &final);
2996 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2997 {
2998 tree lhs = gimple_call_lhs (stmt);
2999 if (dump_enabled_p ())
3000 {
3001 location_t loc = gimple_location_safe (stmt);
3002 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3003 "folding virtual function call to %s\n",
3004 targets.length () == 1
3005 ? targets[0]->name ()
3006 : "__builtin_unreachable");
3007 }
3008 if (targets.length () == 1)
3009 {
3010 gimple_call_set_fndecl (stmt, targets[0]->decl);
3011 changed = true;
3012 /* If the call becomes noreturn, remove the lhs. */
3013 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3014 {
3015 if (TREE_CODE (lhs) == SSA_NAME)
3016 {
3017 tree var = create_tmp_var (TREE_TYPE (lhs));
3018 tree def = get_or_create_ssa_default_def (cfun, var);
3019 gimple *new_stmt = gimple_build_assign (lhs, def);
3020 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3021 }
3022 gimple_call_set_lhs (stmt, NULL_TREE);
3023 }
3024 maybe_remove_unused_call_args (cfun, stmt);
3025 }
3026 else
3027 {
3028 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3029 gimple *new_stmt = gimple_build_call (fndecl, 0);
3030 gimple_set_location (new_stmt, gimple_location (stmt));
3031 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3032 {
3033 tree var = create_tmp_var (TREE_TYPE (lhs));
3034 tree def = get_or_create_ssa_default_def (cfun, var);
3035
3036 /* To satisfy condition for
3037 cgraph_update_edges_for_call_stmt_node,
3038 we need to preserve GIMPLE_CALL statement
3039 at position of GSI iterator. */
3040 update_call_from_tree (gsi, def);
3041 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3042 }
3043 else
3044 {
3045 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3046 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3047 gsi_replace (gsi, new_stmt, false);
3048 }
3049 return true;
3050 }
3051 }
3052 }
3053 }
3054
3055 /* Check for indirect calls that became direct calls, and then
3056 no longer require a static chain. */
3057 if (gimple_call_chain (stmt))
3058 {
3059 tree fn = gimple_call_fndecl (stmt);
3060 if (fn && !DECL_STATIC_CHAIN (fn))
3061 {
3062 gimple_call_set_chain (stmt, NULL);
3063 changed = true;
3064 }
3065 else
3066 {
3067 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3068 if (tmp)
3069 {
3070 gimple_call_set_chain (stmt, tmp);
3071 changed = true;
3072 }
3073 }
3074 }
3075
3076 if (inplace)
3077 return changed;
3078
3079 /* Check for builtins that CCP can handle using information not
3080 available in the generic fold routines. */
3081 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3082 {
3083 if (gimple_fold_builtin (gsi))
3084 changed = true;
3085 }
3086 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3087 {
3088 changed |= targetm.gimple_fold_builtin (gsi);
3089 }
3090 else if (gimple_call_internal_p (stmt))
3091 {
3092 enum tree_code subcode = ERROR_MARK;
3093 tree result = NULL_TREE;
3094 bool cplx_result = false;
3095 tree overflow = NULL_TREE;
3096 switch (gimple_call_internal_fn (stmt))
3097 {
3098 case IFN_BUILTIN_EXPECT:
3099 result = fold_builtin_expect (gimple_location (stmt),
3100 gimple_call_arg (stmt, 0),
3101 gimple_call_arg (stmt, 1),
3102 gimple_call_arg (stmt, 2));
3103 break;
3104 case IFN_UBSAN_OBJECT_SIZE:
3105 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3106 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3107 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3108 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3109 gimple_call_arg (stmt, 2))))
3110 {
3111 gsi_replace (gsi, gimple_build_nop (), false);
3112 unlink_stmt_vdef (stmt);
3113 release_defs (stmt);
3114 return true;
3115 }
3116 break;
3117 case IFN_UBSAN_CHECK_ADD:
3118 subcode = PLUS_EXPR;
3119 break;
3120 case IFN_UBSAN_CHECK_SUB:
3121 subcode = MINUS_EXPR;
3122 break;
3123 case IFN_UBSAN_CHECK_MUL:
3124 subcode = MULT_EXPR;
3125 break;
3126 case IFN_ADD_OVERFLOW:
3127 subcode = PLUS_EXPR;
3128 cplx_result = true;
3129 break;
3130 case IFN_SUB_OVERFLOW:
3131 subcode = MINUS_EXPR;
3132 cplx_result = true;
3133 break;
3134 case IFN_MUL_OVERFLOW:
3135 subcode = MULT_EXPR;
3136 cplx_result = true;
3137 break;
3138 default:
3139 break;
3140 }
3141 if (subcode != ERROR_MARK)
3142 {
3143 tree arg0 = gimple_call_arg (stmt, 0);
3144 tree arg1 = gimple_call_arg (stmt, 1);
3145 tree type = TREE_TYPE (arg0);
3146 if (cplx_result)
3147 {
3148 tree lhs = gimple_call_lhs (stmt);
3149 if (lhs == NULL_TREE)
3150 type = NULL_TREE;
3151 else
3152 type = TREE_TYPE (TREE_TYPE (lhs));
3153 }
3154 if (type == NULL_TREE)
3155 ;
3156 /* x = y + 0; x = y - 0; x = y * 0; */
3157 else if (integer_zerop (arg1))
3158 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3159 /* x = 0 + y; x = 0 * y; */
3160 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3161 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3162 /* x = y - y; */
3163 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3164 result = integer_zero_node;
3165 /* x = y * 1; x = 1 * y; */
3166 else if (subcode == MULT_EXPR && integer_onep (arg1))
3167 result = arg0;
3168 else if (subcode == MULT_EXPR && integer_onep (arg0))
3169 result = arg1;
3170 else if (TREE_CODE (arg0) == INTEGER_CST
3171 && TREE_CODE (arg1) == INTEGER_CST)
3172 {
3173 if (cplx_result)
3174 result = int_const_binop (subcode, fold_convert (type, arg0),
3175 fold_convert (type, arg1));
3176 else
3177 result = int_const_binop (subcode, arg0, arg1);
3178 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3179 {
3180 if (cplx_result)
3181 overflow = build_one_cst (type);
3182 else
3183 result = NULL_TREE;
3184 }
3185 }
3186 if (result)
3187 {
3188 if (result == integer_zero_node)
3189 result = build_zero_cst (type);
3190 else if (cplx_result && TREE_TYPE (result) != type)
3191 {
3192 if (TREE_CODE (result) == INTEGER_CST)
3193 {
3194 if (arith_overflowed_p (PLUS_EXPR, type, result,
3195 integer_zero_node))
3196 overflow = build_one_cst (type);
3197 }
3198 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3199 && TYPE_UNSIGNED (type))
3200 || (TYPE_PRECISION (type)
3201 < (TYPE_PRECISION (TREE_TYPE (result))
3202 + (TYPE_UNSIGNED (TREE_TYPE (result))
3203 && !TYPE_UNSIGNED (type)))))
3204 result = NULL_TREE;
3205 if (result)
3206 result = fold_convert (type, result);
3207 }
3208 }
3209 }
3210
3211 if (result)
3212 {
3213 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3214 result = drop_tree_overflow (result);
3215 if (cplx_result)
3216 {
3217 if (overflow == NULL_TREE)
3218 overflow = build_zero_cst (TREE_TYPE (result));
3219 tree ctype = build_complex_type (TREE_TYPE (result));
3220 if (TREE_CODE (result) == INTEGER_CST
3221 && TREE_CODE (overflow) == INTEGER_CST)
3222 result = build_complex (ctype, result, overflow);
3223 else
3224 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3225 ctype, result, overflow);
3226 }
3227 if (!update_call_from_tree (gsi, result))
3228 gimplify_and_update_call_from_tree (gsi, result);
3229 changed = true;
3230 }
3231 }
3232
3233 return changed;
3234 }
3235
3236
3237 /* Return true whether NAME has a use on STMT. */
3238
3239 static bool
3240 has_use_on_stmt (tree name, gimple *stmt)
3241 {
3242 imm_use_iterator iter;
3243 use_operand_p use_p;
3244 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3245 if (USE_STMT (use_p) == stmt)
3246 return true;
3247 return false;
3248 }
3249
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt)))
    return false;

  /* GIMPLE_CONDs are updated in place, distinguishing the result forms
     a comparison, a plain SSA name, a constant, or something that has
     to be pushed to a new statement first.  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Re-express a bare SSA name result as name != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant condition folds the branch statically.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result in SEQ and compare it against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For assignments, rewrite the RHS in place when allowed; in-place
	 mode additionally requires the new RHS to not need more operand
	 slots than the old statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_builtin_p (stmt, rcode))
    {
      /* The simplification is a call to the same builtin with (possibly)
	 simplified arguments; update the arguments in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      gcc_assert (gimple_seq_empty_p (*seq));
      return true;
    }
  else if (!inplace)
    {
      /* Anything else: build a fresh sequence computing the LHS and
	 replace the statement wholesale, preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3382
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference (possibly wrapped in an ADDR_EXPR or handled
   components) to canonicalize in place.  Returns true if *T changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components so *T points
     at the innermost base reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  /* Fold the component path into a base plus constant offset and
	     fold the offset into the MEM_REF's offset operand.  */
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3465
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  GSI points at the statement to fold,
   VALUEIZE is the SSA-name valueization callback handed down to
   gimple_simplify.  Returns true if the statement changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Single-RHS assigns: canonicalize memory references on both
	     sides of the assignment.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  /* Canonicalize the bound value of a debug bind.  */
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through into the empty default — no break needed.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement as the replacement above may have changed it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Fold the RHS; in-place mode only accepts a replacement RHS
	   that needs strictly fewer operand slots.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Whether an input may be folded to a memory reference depends
	       on its constraint, taking the outputs into account.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and rebuild the
		 address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3769
/* Valueization callback that ends up not following SSA edges:
   it always returns NULL_TREE.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3777
3778 /* Valueization callback that ends up following single-use SSA edges only. */
3779
3780 tree
3781 follow_single_use_edges (tree val)
3782 {
3783 if (TREE_CODE (val) == SSA_NAME
3784 && !has_single_use (val))
3785 return NULL_TREE;
3786 return val;
3787 }
3788
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* This overload does not follow SSA use-def edges when valueizing.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3801
/* As above, but valueize operands with the caller-provided VALUEIZE
   callback while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3807
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3824
3825 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3826 if EXPR is null or we don't know how.
3827 If non-null, the result always has boolean type. */
3828
3829 static tree
3830 canonicalize_bool (tree expr, bool invert)
3831 {
3832 if (!expr)
3833 return NULL_TREE;
3834 else if (invert)
3835 {
3836 if (integer_nonzerop (expr))
3837 return boolean_false_node;
3838 else if (integer_zerop (expr))
3839 return boolean_true_node;
3840 else if (TREE_CODE (expr) == SSA_NAME)
3841 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3842 build_int_cst (TREE_TYPE (expr), 0));
3843 else if (COMPARISON_CLASS_P (expr))
3844 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3845 boolean_type_node,
3846 TREE_OPERAND (expr, 0),
3847 TREE_OPERAND (expr, 1));
3848 else
3849 return NULL_TREE;
3850 }
3851 else
3852 {
3853 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3854 return expr;
3855 if (integer_nonzerop (expr))
3856 return boolean_true_node;
3857 else if (integer_zerop (expr))
3858 return boolean_false_node;
3859 else if (TREE_CODE (expr) == SSA_NAME)
3860 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3861 build_int_cst (TREE_TYPE (expr), 0));
3862 else if (COMPARISON_CLASS_P (expr))
3863 return fold_build2 (TREE_CODE (expr),
3864 boolean_type_node,
3865 TREE_OPERAND (expr, 0),
3866 TREE_OPERAND (expr, 1));
3867 else
3868 return NULL_TREE;
3869 }
3870 }
3871
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name is equivalent to name != 0 and to name == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look through expr's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero) is just name's comparison.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != nonzero) is the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3929
3930 /* Check to see if two boolean expressions OP1 and OP2 are logically
3931 equivalent. */
3932
3933 static bool
3934 same_bool_result_p (const_tree op1, const_tree op2)
3935 {
3936 /* Simple cases first. */
3937 if (operand_equal_p (op1, op2, 0))
3938 return true;
3939
3940 /* Check the cases where at least one of the operands is a comparison.
3941 These are a bit smarter than operand_equal_p in that they apply some
3942 identifies on SSA_NAMEs. */
3943 if (COMPARISON_CLASS_P (op2)
3944 && same_bool_comparison_p (op1, TREE_CODE (op2),
3945 TREE_OPERAND (op2, 0),
3946 TREE_OPERAND (op2, 1)))
3947 return true;
3948 if (COMPARISON_CLASS_P (op1)
3949 && same_bool_comparison_p (op2, TREE_CODE (op1),
3950 TREE_OPERAND (op1, 0),
3951 TREE_OPERAND (op1, 1)))
3952 return true;
3953
3954 /* Default case. */
3955 return false;
3956 }
3957
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers recurse into each other via DeMorgan
   rewrites, so all six prototypes are needed up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3978
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* canonicalize_bool gives the result boolean type and, in the INVERT
     case, applies the outer negation that completes DeMorgan's rewrite.  */
  return canonicalize_bool (t, invert);
}
4006
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (OP2A CODE2 OP2B) degenerates to a truth test of OP2A itself,
     one of these records which sense (true/false) is being tested, for
     use by the identities below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is equivalent to OP2A being true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is equivalent to OP2A being false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Result of ANDing the first matching inner operand with the
	 second comparison, saved so the second operand's result can be
	 combined with it.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4169
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: negative, zero, or positive
	 according to whether op1b is less than, equal to, or greater
	 than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* VAL is true iff the second comparison already implies
	     op1a != op1b, so the AND reduces to the second test.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  /* VAL is true iff the first comparison already implies
	     op2a != op2b, so the AND reduces to the first test.  */
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true iff the first comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This argument makes the first test false, so
			     the AND is false on this path.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4423
4424 /* Try to simplify the AND of two comparisons, specified by
4425 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4426 If this can be simplified to a single expression (without requiring
4427 introducing more SSA variables to hold intermediate values),
4428 return the resulting tree. Otherwise return NULL_TREE.
4429 If the result expression is non-null, it has boolean type. */
4430
4431 tree
4432 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4433 enum tree_code code2, tree op2a, tree op2b)
4434 {
4435 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4436 if (t)
4437 return t;
4438 else
4439 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4440 }
4441
4442 /* Helper function for or_comparisons_1: try to simplify the OR of the
4443 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4444 If INVERT is true, invert the value of VAR before doing the OR.
4445 Return NULL_EXPR if we can't simplify this to a single expression. */
4446
4447 static tree
4448 or_var_with_comparison (tree var, bool invert,
4449 enum tree_code code2, tree op2a, tree op2b)
4450 {
4451 tree t;
4452 gimple *stmt = SSA_NAME_DEF_STMT (var);
4453
4454 /* We can only deal with variables whose definitions are assignments. */
4455 if (!is_gimple_assign (stmt))
4456 return NULL_TREE;
4457
4458 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4459 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4460 Then we only have to consider the simpler non-inverted cases. */
4461 if (invert)
4462 t = and_var_with_comparison_1 (stmt,
4463 invert_tree_comparison (code2, false),
4464 op2a, op2b);
4465 else
4466 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4467 return canonicalize_bool (t, invert);
4468 }
4469
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (OP2A CODE2 OP2B) degenerates to a truth test of OP2A itself,
     one of these records which sense (true/false) is being tested, for
     use by the identities below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is equivalent to OP2A being true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison is equivalent to OP2A being false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Result of ORing the first matching inner operand with the
	 second comparison, saved so the second operand's result can be
	 combined with it.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4633
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: negative, zero, or positive
	 according to whether op1b is less than, equal to, or greater
	 than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* VAL is true iff the second comparison subsumes op1a == op1b,
	     so the OR reduces to the second test.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  /* VAL is true iff the first comparison subsumes op2a == op2b,
	     so the OR reduces to the first test.  */
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true iff the first comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This argument makes the first test true, so
			     the OR is true on this path.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4887
4888 /* Try to simplify the OR of two comparisons, specified by
4889 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4890 If this can be simplified to a single expression (without requiring
4891 introducing more SSA variables to hold intermediate values),
4892 return the resulting tree. Otherwise return NULL_TREE.
4893 If the result expression is non-null, it has boolean type. */
4894
4895 tree
4896 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4897 enum tree_code code2, tree op2a, tree op2b)
4898 {
4899 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4900 if (t)
4901 return t;
4902 else
4903 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4904 }
4905
4906
4907 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4908
4909 Either NULL_TREE, a simplified but non-constant or a constant
4910 is returned.
4911
4912 ??? This should go into a gimple-fold-inline.h file to be eventually
4913 privatized with the single valueize function used in the various TUs
4914 to avoid the indirect function call overhead. */
4915
4916 tree
4917 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4918 tree (*gvalueize) (tree))
4919 {
4920 code_helper rcode;
4921 tree ops[3] = {};
4922 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4923 edges if there are intermediate VARYING defs. For this reason
4924 do not follow SSA edges here even though SCCVN can technically
4925 just deal fine with that. */
4926 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4927 {
4928 tree res = NULL_TREE;
4929 if (rcode.is_tree_code ()
4930 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4931 || ((tree_code) rcode) == ADDR_EXPR)
4932 && is_gimple_val (ops[0]))
4933 res = ops[0];
4934 else if (mprts_hook)
4935 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4936 if (res)
4937 {
4938 if (dump_file && dump_flags & TDF_DETAILS)
4939 {
4940 fprintf (dump_file, "Match-and-simplified ");
4941 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4942 fprintf (dump_file, " to ");
4943 print_generic_expr (dump_file, res, 0);
4944 fprintf (dump_file, "\n");
4945 }
4946 return res;
4947 }
4948 }
4949
4950 location_t loc = gimple_location (stmt);
4951 switch (gimple_code (stmt))
4952 {
4953 case GIMPLE_ASSIGN:
4954 {
4955 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4956
4957 switch (get_gimple_rhs_class (subcode))
4958 {
4959 case GIMPLE_SINGLE_RHS:
4960 {
4961 tree rhs = gimple_assign_rhs1 (stmt);
4962 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4963
4964 if (TREE_CODE (rhs) == SSA_NAME)
4965 {
4966 /* If the RHS is an SSA_NAME, return its known constant value,
4967 if any. */
4968 return (*valueize) (rhs);
4969 }
4970 /* Handle propagating invariant addresses into address
4971 operations. */
4972 else if (TREE_CODE (rhs) == ADDR_EXPR
4973 && !is_gimple_min_invariant (rhs))
4974 {
4975 HOST_WIDE_INT offset = 0;
4976 tree base;
4977 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4978 &offset,
4979 valueize);
4980 if (base
4981 && (CONSTANT_CLASS_P (base)
4982 || decl_address_invariant_p (base)))
4983 return build_invariant_address (TREE_TYPE (rhs),
4984 base, offset);
4985 }
4986 else if (TREE_CODE (rhs) == CONSTRUCTOR
4987 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4988 && (CONSTRUCTOR_NELTS (rhs)
4989 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4990 {
4991 unsigned i;
4992 tree val, *vec;
4993
4994 vec = XALLOCAVEC (tree,
4995 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4997 {
4998 val = (*valueize) (val);
4999 if (TREE_CODE (val) == INTEGER_CST
5000 || TREE_CODE (val) == REAL_CST
5001 || TREE_CODE (val) == FIXED_CST)
5002 vec[i] = val;
5003 else
5004 return NULL_TREE;
5005 }
5006
5007 return build_vector (TREE_TYPE (rhs), vec);
5008 }
5009 if (subcode == OBJ_TYPE_REF)
5010 {
5011 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5012 /* If callee is constant, we can fold away the wrapper. */
5013 if (is_gimple_min_invariant (val))
5014 return val;
5015 }
5016
5017 if (kind == tcc_reference)
5018 {
5019 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5020 || TREE_CODE (rhs) == REALPART_EXPR
5021 || TREE_CODE (rhs) == IMAGPART_EXPR)
5022 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5023 {
5024 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5025 return fold_unary_loc (EXPR_LOCATION (rhs),
5026 TREE_CODE (rhs),
5027 TREE_TYPE (rhs), val);
5028 }
5029 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5030 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5031 {
5032 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5033 return fold_ternary_loc (EXPR_LOCATION (rhs),
5034 TREE_CODE (rhs),
5035 TREE_TYPE (rhs), val,
5036 TREE_OPERAND (rhs, 1),
5037 TREE_OPERAND (rhs, 2));
5038 }
5039 else if (TREE_CODE (rhs) == MEM_REF
5040 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5041 {
5042 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5043 if (TREE_CODE (val) == ADDR_EXPR
5044 && is_gimple_min_invariant (val))
5045 {
5046 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5047 unshare_expr (val),
5048 TREE_OPERAND (rhs, 1));
5049 if (tem)
5050 rhs = tem;
5051 }
5052 }
5053 return fold_const_aggregate_ref_1 (rhs, valueize);
5054 }
5055 else if (kind == tcc_declaration)
5056 return get_symbol_constant_value (rhs);
5057 return rhs;
5058 }
5059
5060 case GIMPLE_UNARY_RHS:
5061 return NULL_TREE;
5062
5063 case GIMPLE_BINARY_RHS:
5064 /* Translate &x + CST into an invariant form suitable for
5065 further propagation. */
5066 if (subcode == POINTER_PLUS_EXPR)
5067 {
5068 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5069 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5070 if (TREE_CODE (op0) == ADDR_EXPR
5071 && TREE_CODE (op1) == INTEGER_CST)
5072 {
5073 tree off = fold_convert (ptr_type_node, op1);
5074 return build_fold_addr_expr_loc
5075 (loc,
5076 fold_build2 (MEM_REF,
5077 TREE_TYPE (TREE_TYPE (op0)),
5078 unshare_expr (op0), off));
5079 }
5080 }
5081 /* Canonicalize bool != 0 and bool == 0 appearing after
5082 valueization. While gimple_simplify handles this
5083 it can get confused by the ~X == 1 -> X == 0 transform
5084 which we cant reduce to a SSA name or a constant
5085 (and we have no way to tell gimple_simplify to not
5086 consider those transforms in the first place). */
5087 else if (subcode == EQ_EXPR
5088 || subcode == NE_EXPR)
5089 {
5090 tree lhs = gimple_assign_lhs (stmt);
5091 tree op0 = gimple_assign_rhs1 (stmt);
5092 if (useless_type_conversion_p (TREE_TYPE (lhs),
5093 TREE_TYPE (op0)))
5094 {
5095 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5096 op0 = (*valueize) (op0);
5097 if (TREE_CODE (op0) == INTEGER_CST)
5098 std::swap (op0, op1);
5099 if (TREE_CODE (op1) == INTEGER_CST
5100 && ((subcode == NE_EXPR && integer_zerop (op1))
5101 || (subcode == EQ_EXPR && integer_onep (op1))))
5102 return op0;
5103 }
5104 }
5105 return NULL_TREE;
5106
5107 case GIMPLE_TERNARY_RHS:
5108 {
5109 /* Handle ternary operators that can appear in GIMPLE form. */
5110 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5111 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5112 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5113 return fold_ternary_loc (loc, subcode,
5114 gimple_expr_type (stmt), op0, op1, op2);
5115 }
5116
5117 default:
5118 gcc_unreachable ();
5119 }
5120 }
5121
5122 case GIMPLE_CALL:
5123 {
5124 tree fn;
5125 gcall *call_stmt = as_a <gcall *> (stmt);
5126
5127 if (gimple_call_internal_p (stmt))
5128 {
5129 enum tree_code subcode = ERROR_MARK;
5130 switch (gimple_call_internal_fn (stmt))
5131 {
5132 case IFN_UBSAN_CHECK_ADD:
5133 subcode = PLUS_EXPR;
5134 break;
5135 case IFN_UBSAN_CHECK_SUB:
5136 subcode = MINUS_EXPR;
5137 break;
5138 case IFN_UBSAN_CHECK_MUL:
5139 subcode = MULT_EXPR;
5140 break;
5141 default:
5142 return NULL_TREE;
5143 }
5144 tree arg0 = gimple_call_arg (stmt, 0);
5145 tree arg1 = gimple_call_arg (stmt, 1);
5146 tree op0 = (*valueize) (arg0);
5147 tree op1 = (*valueize) (arg1);
5148
5149 if (TREE_CODE (op0) != INTEGER_CST
5150 || TREE_CODE (op1) != INTEGER_CST)
5151 {
5152 switch (subcode)
5153 {
5154 case MULT_EXPR:
5155 /* x * 0 = 0 * x = 0 without overflow. */
5156 if (integer_zerop (op0) || integer_zerop (op1))
5157 return build_zero_cst (TREE_TYPE (arg0));
5158 break;
5159 case MINUS_EXPR:
5160 /* y - y = 0 without overflow. */
5161 if (operand_equal_p (op0, op1, 0))
5162 return build_zero_cst (TREE_TYPE (arg0));
5163 break;
5164 default:
5165 break;
5166 }
5167 }
5168 tree res
5169 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5170 if (res
5171 && TREE_CODE (res) == INTEGER_CST
5172 && !TREE_OVERFLOW (res))
5173 return res;
5174 return NULL_TREE;
5175 }
5176
5177 fn = (*valueize) (gimple_call_fn (stmt));
5178 if (TREE_CODE (fn) == ADDR_EXPR
5179 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5180 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5181 && gimple_builtin_call_types_compatible_p (stmt,
5182 TREE_OPERAND (fn, 0)))
5183 {
5184 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5185 tree retval;
5186 unsigned i;
5187 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5188 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5189 retval = fold_builtin_call_array (loc,
5190 gimple_call_return_type (call_stmt),
5191 fn, gimple_call_num_args (stmt), args);
5192 if (retval)
5193 {
5194 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5195 STRIP_NOPS (retval);
5196 retval = fold_convert (gimple_call_return_type (call_stmt),
5197 retval);
5198 }
5199 return retval;
5200 }
5201 return NULL_TREE;
5202 }
5203
5204 default:
5205 return NULL_TREE;
5206 }
5207 }
5208
5209 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5210 Returns NULL_TREE if folding to a constant is not possible, otherwise
5211 returns a constant according to is_gimple_min_invariant. */
5212
5213 tree
5214 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5215 {
5216 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5217 if (res && is_gimple_min_invariant (res))
5218 return res;
5219 return NULL_TREE;
5220 }
5221
5222
5223 /* The following set of functions are supposed to fold references using
5224 their constant initializers. */
5225
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant byte offset of the MEM_REF into *BIT_OFFSET.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      /* Look through an SSA name pointer to the address it is known
	 to hold, if the caller supplied a valueization hook.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only fold references with a constant, exactly known extent.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
5293
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  Returns the folded value, a zero
   constant for memory not covered by the constructor, or NULL_TREE when
   the access cannot be resolved.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the precision of the domain, matching how the
     constructor indexes are represented.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the "next index" rule below
     yields the low bound for the first unindexed element.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5393
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   Returns the folded value, a zero constant for memory not covered by
   the constructor, or NULL_TREE when the access cannot be resolved.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  /* Recurse into the matching field's initializer value.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5457
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  Returns the folded constant, or NULL_TREE
   when no strategy applies.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the aggregate kind: arrays and vectors are indexed
	 by element, everything else by field.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5512
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5629
5630 tree
5631 fold_const_aggregate_ref (tree t)
5632 {
5633 return fold_const_aggregate_ref_1 (t, NULL);
5634 }
5635
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5746
5747 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5748 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5749 KNOWN_BINFO carries the binfo describing the true type of
5750 OBJ_TYPE_REF_OBJECT(REF).
5751 Set CAN_REFER if non-NULL to false if method
5752 is not referable or if the virtual table is ill-formed (such as rewriten
5753 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5754
5755 tree
5756 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5757 bool *can_refer)
5758 {
5759 unsigned HOST_WIDE_INT offset;
5760 tree v;
5761
5762 v = BINFO_VTABLE (known_binfo);
5763 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5764 if (!v)
5765 return NULL_TREE;
5766
5767 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5768 {
5769 if (can_refer)
5770 *can_refer = false;
5771 return NULL_TREE;
5772 }
5773 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5774 }
5775
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
5895
5896 /* Return true if CODE is an operation that when operating on signed
5897 integer types involves undefined behavior on overflow and the
5898 operation can be expressed with unsigned arithmetic. */
5899
5900 bool
5901 arith_code_with_undefined_signed_overflow (tree_code code)
5902 {
5903 switch (code)
5904 {
5905 case PLUS_EXPR:
5906 case MINUS_EXPR:
5907 case MULT_EXPR:
5908 case NEGATE_EXPR:
5909 case POINTER_PLUS_EXPR:
5910 return true;
5911 default:
5912 return false;
5913 }
5914 }
5915
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned type, accumulating
     the conversion statements into STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Compute the result into a fresh unsigned SSA name.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR does not apply to unsigned integers; carry out
     the addition as a plain PLUS_EXPR instead.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
5955
5956
5957 /* The valueization hook we use for the gimple_build API simplification.
5958 This makes us match fold_buildN behavior by only combining with
5959 statements in the sequence(s) we are currently building. */
5960
5961 static tree
5962 gimple_build_valueize (tree op)
5963 {
5964 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5965 return op;
5966 return NULL_TREE;
5967 }
5968
5969 /* Build the expression CODE OP0 of type TYPE with location LOC,
5970 simplifying it first if possible. Returns the built
5971 expression value and appends statements possibly defining it
5972 to SEQ. */
5973
5974 tree
5975 gimple_build (gimple_seq *seq, location_t loc,
5976 enum tree_code code, tree type, tree op0)
5977 {
5978 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5979 if (!res)
5980 {
5981 if (gimple_in_ssa_p (cfun))
5982 res = make_ssa_name (type);
5983 else
5984 res = create_tmp_reg (type);
5985 gimple *stmt;
5986 if (code == REALPART_EXPR
5987 || code == IMAGPART_EXPR
5988 || code == VIEW_CONVERT_EXPR)
5989 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
5990 else
5991 stmt = gimple_build_assign (res, code, op0);
5992 gimple_set_location (stmt, loc);
5993 gimple_seq_add_stmt_without_update (seq, stmt);
5994 }
5995 return res;
5996 }
5997
5998 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5999 simplifying it first if possible. Returns the built
6000 expression value and appends statements possibly defining it
6001 to SEQ. */
6002
6003 tree
6004 gimple_build (gimple_seq *seq, location_t loc,
6005 enum tree_code code, tree type, tree op0, tree op1)
6006 {
6007 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6008 if (!res)
6009 {
6010 if (gimple_in_ssa_p (cfun))
6011 res = make_ssa_name (type);
6012 else
6013 res = create_tmp_reg (type);
6014 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6015 gimple_set_location (stmt, loc);
6016 gimple_seq_add_stmt_without_update (seq, stmt);
6017 }
6018 return res;
6019 }
6020
6021 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6022 simplifying it first if possible. Returns the built
6023 expression value and appends statements possibly defining it
6024 to SEQ. */
6025
6026 tree
6027 gimple_build (gimple_seq *seq, location_t loc,
6028 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6029 {
6030 tree res = gimple_simplify (code, type, op0, op1, op2,
6031 seq, gimple_build_valueize);
6032 if (!res)
6033 {
6034 if (gimple_in_ssa_p (cfun))
6035 res = make_ssa_name (type);
6036 else
6037 res = create_tmp_reg (type);
6038 gimple *stmt;
6039 if (code == BIT_FIELD_REF)
6040 stmt = gimple_build_assign (res, code,
6041 build3 (code, type, op0, op1, op2));
6042 else
6043 stmt = gimple_build_assign (res, code, op0, op1, op2);
6044 gimple_set_location (stmt, loc);
6045 gimple_seq_add_stmt_without_update (seq, stmt);
6046 }
6047 return res;
6048 }
6049
6050 /* Build the call FN (ARG0) with a result of type TYPE
6051 (or no result if TYPE is void) with location LOC,
6052 simplifying it first if possible. Returns the built
6053 expression value (or NULL_TREE if TYPE is void) and appends
6054 statements possibly defining it to SEQ. */
6055
6056 tree
6057 gimple_build (gimple_seq *seq, location_t loc,
6058 enum built_in_function fn, tree type, tree arg0)
6059 {
6060 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6061 if (!res)
6062 {
6063 tree decl = builtin_decl_implicit (fn);
6064 gimple *stmt = gimple_build_call (decl, 1, arg0);
6065 if (!VOID_TYPE_P (type))
6066 {
6067 if (gimple_in_ssa_p (cfun))
6068 res = make_ssa_name (type);
6069 else
6070 res = create_tmp_reg (type);
6071 gimple_call_set_lhs (stmt, res);
6072 }
6073 gimple_set_location (stmt, loc);
6074 gimple_seq_add_stmt_without_update (seq, stmt);
6075 }
6076 return res;
6077 }
6078
6079 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6080 (or no result if TYPE is void) with location LOC,
6081 simplifying it first if possible. Returns the built
6082 expression value (or NULL_TREE if TYPE is void) and appends
6083 statements possibly defining it to SEQ. */
6084
6085 tree
6086 gimple_build (gimple_seq *seq, location_t loc,
6087 enum built_in_function fn, tree type, tree arg0, tree arg1)
6088 {
6089 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6090 if (!res)
6091 {
6092 tree decl = builtin_decl_implicit (fn);
6093 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6094 if (!VOID_TYPE_P (type))
6095 {
6096 if (gimple_in_ssa_p (cfun))
6097 res = make_ssa_name (type);
6098 else
6099 res = create_tmp_reg (type);
6100 gimple_call_set_lhs (stmt, res);
6101 }
6102 gimple_set_location (stmt, loc);
6103 gimple_seq_add_stmt_without_update (seq, stmt);
6104 }
6105 return res;
6106 }
6107
6108 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6109 (or no result if TYPE is void) with location LOC,
6110 simplifying it first if possible. Returns the built
6111 expression value (or NULL_TREE if TYPE is void) and appends
6112 statements possibly defining it to SEQ. */
6113
6114 tree
6115 gimple_build (gimple_seq *seq, location_t loc,
6116 enum built_in_function fn, tree type,
6117 tree arg0, tree arg1, tree arg2)
6118 {
6119 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6120 seq, gimple_build_valueize);
6121 if (!res)
6122 {
6123 tree decl = builtin_decl_implicit (fn);
6124 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6125 if (!VOID_TYPE_P (type))
6126 {
6127 if (gimple_in_ssa_p (cfun))
6128 res = make_ssa_name (type);
6129 else
6130 res = create_tmp_reg (type);
6131 gimple_call_set_lhs (stmt, res);
6132 }
6133 gimple_set_location (stmt, loc);
6134 gimple_seq_add_stmt_without_update (seq, stmt);
6135 }
6136 return res;
6137 }
6138
6139 /* Build the conversion (TYPE) OP with a result of type TYPE
6140 with location LOC if such conversion is neccesary in GIMPLE,
6141 simplifying it first.
6142 Returns the built expression value and appends
6143 statements possibly defining it to SEQ. */
6144
6145 tree
6146 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6147 {
6148 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6149 return op;
6150 return gimple_build (seq, loc, NOP_EXPR, type, op);
6151 }
6152
6153 /* Return true if the result of assignment STMT is known to be non-negative.
6154 If the return value is based on the assumption that signed overflow is
6155 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6156 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6157
6158 static bool
6159 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6160 int depth)
6161 {
6162 enum tree_code code = gimple_assign_rhs_code (stmt);
6163 switch (get_gimple_rhs_class (code))
6164 {
6165 case GIMPLE_UNARY_RHS:
6166 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6167 gimple_expr_type (stmt),
6168 gimple_assign_rhs1 (stmt),
6169 strict_overflow_p, depth);
6170 case GIMPLE_BINARY_RHS:
6171 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6172 gimple_expr_type (stmt),
6173 gimple_assign_rhs1 (stmt),
6174 gimple_assign_rhs2 (stmt),
6175 strict_overflow_p, depth);
6176 case GIMPLE_TERNARY_RHS:
6177 return false;
6178 case GIMPLE_SINGLE_RHS:
6179 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6180 strict_overflow_p, depth);
6181 case GIMPLE_INVALID_RHS:
6182 break;
6183 }
6184 gcc_unreachable ();
6185 }
6186
6187 /* Return true if return value of call STMT is known to be non-negative.
6188 If the return value is based on the assumption that signed overflow is
6189 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6190 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6191
6192 static bool
6193 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6194 int depth)
6195 {
6196 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6197 gimple_call_arg (stmt, 0) : NULL_TREE;
6198 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6199 gimple_call_arg (stmt, 1) : NULL_TREE;
6200
6201 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6202 gimple_call_fndecl (stmt),
6203 arg0,
6204 arg1,
6205 strict_overflow_p, depth);
6206 }
6207
6208 /* Return true if STMT is known to compute a non-negative value.
6209 If the return value is based on the assumption that signed overflow is
6210 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6211 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6212
6213 bool
6214 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6215 int depth)
6216 {
6217 switch (gimple_code (stmt))
6218 {
6219 case GIMPLE_ASSIGN:
6220 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6221 depth);
6222 case GIMPLE_CALL:
6223 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6224 depth);
6225 default:
6226 return false;
6227 }
6228 }