re PR tree-optimization/80171 (ICE (Segmentation fault) with optimization)
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58 #include "fold-const-call.h"
59
60 /* Return true when DECL can be referenced from the current unit.
61 FROM_DECL (if non-null) specifies the constructor the variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
63 reasons:
64
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to another compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
70 set.
71 2) In WHOPR mode devirtualization might lead to a reference
72 to a method that was partitioned elsewhere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
75 declaring the body.
76 3) COMDAT functions referred to by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
80 directly. */
81
82 static bool
83 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 {
85 varpool_node *vnode;
86 struct cgraph_node *node;
87 symtab_node *snode;
88
89 if (DECL_ABSTRACT_P (decl))
90 return false;
91
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
94 || !VAR_OR_FUNCTION_DECL_P (decl))
95 return true;
96
97 /* Static objects can be referred to only if they were not optimized out yet. */
98 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 {
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab->function_flags_ready)
103 return true;
104 snode = symtab_node::get (decl);
105 if (!snode || !snode->definition)
106 return false;
107 node = dyn_cast <cgraph_node *> (snode);
108 return !node || !node->global.inlined_to;
109 }
110
111 /* We will later output the initializer, so we can refer to it.
112 Thus we are concerned only when DECL comes from the initializer of
113 an external var or a var that has been optimized out. */
114 if (!from_decl
115 || !VAR_P (from_decl)
116 || (!DECL_EXTERNAL (from_decl)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->definition)
119 || (flag_ltrans
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->in_other_partition))
122 return true;
123 /* We are folding a reference from an external vtable. The vtable may refer
124 to a symbol keyed to another compilation unit. The other compilation
125 unit may be in a separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl)
127 && DECL_EXTERNAL (decl)
128 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
129 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 return false;
131 /* When the function is public, we can always introduce a new reference.
132 The exception is COMDAT functions, where introducing a direct
133 reference implies the need to include the function body in the current unit. */
134 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 return true;
136 /* We have COMDAT. We are going to check if we still have a definition
137 or if the definition is going to be output in another partition.
138 Bypass this when gimplifying; all needed functions will be produced.
139
140 As observed in PR20991 for already optimized out comdat virtual functions,
141 it may be tempting not to give up, because the copy will be
142 output elsewhere when the corresponding vtable is output.
143 This is, however, not possible - the ABI specifies that COMDATs are output
144 in units where they are used, and when the other unit was compiled with LTO
145 it is possible that the vtable was kept public while the function itself
146 was privatized. */
147 if (!symtab->function_flags_ready)
148 return true;
149
150 snode = symtab_node::get (decl);
151 if (!snode
152 || ((!snode->definition || DECL_EXTERNAL (decl))
153 && (!snode->in_other_partition
154 || (!snode->forced_by_abi && !snode->force_output))))
155 return false;
156 node = dyn_cast <cgraph_node *> (snode);
157 return !node || !node->global.inlined_to;
158 }
159
160 /* Create a temporary for TYPE for a statement STMT. If the current function
161 is in SSA form, an SSA name is created. Otherwise a temporary register
162 is made. */
163
164 static tree
165 create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
166 {
167 if (gimple_in_ssa_p (cfun))
168 return make_ssa_name (type, stmt);
169 else
170 return create_tmp_reg (type);
171 }
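/* For example, the memory-op foldings below use this helper in the
   following pattern (a sketch, assuming the surrounding GIMPLE context):

     gimple *load = gimple_build_assign (NULL_TREE, srcmem);
     tree tmp = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), load);
     gimple_assign_set_lhs (load, tmp);

   so that in SSA form the new name is tied to its defining statement.  */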
172
173 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
174 it into an acceptable form for is_gimple_min_invariant.
175 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
176
177 tree
178 canonicalize_constructor_val (tree cval, tree from_decl)
179 {
180 tree orig_cval = cval;
181 STRIP_NOPS (cval);
182 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
183 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
184 {
185 tree ptr = TREE_OPERAND (cval, 0);
186 if (is_gimple_min_invariant (ptr))
187 cval = build1_loc (EXPR_LOCATION (cval),
188 ADDR_EXPR, TREE_TYPE (ptr),
189 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
190 ptr,
191 fold_convert (ptr_type_node,
192 TREE_OPERAND (cval, 1))));
193 }
194 if (TREE_CODE (cval) == ADDR_EXPR)
195 {
196 tree base = NULL_TREE;
197 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
198 {
199 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
200 if (base)
201 TREE_OPERAND (cval, 0) = base;
202 }
203 else
204 base = get_base_address (TREE_OPERAND (cval, 0));
205 if (!base)
206 return NULL_TREE;
207
208 if (VAR_OR_FUNCTION_DECL_P (base)
209 && !can_refer_decl_in_current_unit_p (base, from_decl))
210 return NULL_TREE;
211 if (TREE_TYPE (base) == error_mark_node)
212 return NULL_TREE;
213 if (VAR_P (base))
214 TREE_ADDRESSABLE (base) = 1;
215 else if (TREE_CODE (base) == FUNCTION_DECL)
216 {
217 /* Make sure we create a cgraph node for functions we'll reference.
218 They can be non-existent if the reference comes from an entry
219 of an external vtable for example. */
220 cgraph_node::get_create (base);
221 }
222 /* Fixup types in global initializers. */
223 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
224 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
225
226 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
227 cval = fold_convert (TREE_TYPE (orig_cval), cval);
228 return cval;
229 }
230 if (TREE_OVERFLOW_P (cval))
231 return drop_tree_overflow (cval);
232 return orig_cval;
233 }
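/* As an illustration (hypothetical declarations), a global initializer
   such as

     char a[8];
     char *p = &a[0] + 4;

   reaches us as a POINTER_PLUS_EXPR of &a and 4; the code above rewrites
   it to the equivalent &MEM[&a + 4] form so that is_gimple_min_invariant
   accepts it.  */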
234
235 /* If SYM is a constant variable with known value, return the value.
236 NULL_TREE is returned otherwise. */
237
238 tree
239 get_symbol_constant_value (tree sym)
240 {
241 tree val = ctor_for_folding (sym);
242 if (val != error_mark_node)
243 {
244 if (val)
245 {
246 val = canonicalize_constructor_val (unshare_expr (val), sym);
247 if (val && is_gimple_min_invariant (val))
248 return val;
249 else
250 return NULL_TREE;
251 }
252 /* Variables declared 'const' without an initializer
253 have zero as the initializer if they may not be
254 overridden at link or run time. */
255 if (!val
256 && is_gimple_reg_type (TREE_TYPE (sym)))
257 return build_zero_cst (TREE_TYPE (sym));
258 }
259
260 return NULL_TREE;
261 }
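/* For instance (hypothetical declaration), given

     static const int answer = 42;

   a load of answer folds to 42; a 'const'-qualified scalar without an
   initializer that cannot be overridden folds to zero, as above.  */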
262
263
264
265 /* Subroutine of fold_stmt. We perform several simplifications of the
266 memory reference tree EXPR and make sure to re-gimplify them properly
267 after propagation of constant addresses. IS_LHS is true if the
268 reference is supposed to be an lvalue. */
269
270 static tree
271 maybe_fold_reference (tree expr, bool is_lhs)
272 {
273 tree result;
274
275 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
276 || TREE_CODE (expr) == REALPART_EXPR
277 || TREE_CODE (expr) == IMAGPART_EXPR)
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_unary_loc (EXPR_LOCATION (expr),
280 TREE_CODE (expr),
281 TREE_TYPE (expr),
282 TREE_OPERAND (expr, 0));
283 else if (TREE_CODE (expr) == BIT_FIELD_REF
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_ternary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0),
289 TREE_OPERAND (expr, 1),
290 TREE_OPERAND (expr, 2));
291
292 if (!is_lhs
293 && (result = fold_const_aggregate_ref (expr))
294 && is_gimple_min_invariant (result))
295 return result;
296
297 return NULL_TREE;
298 }
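/* A source-level sketch of the fold_const_aggregate_ref case
   (hypothetical declarations):

     static const int t[4] = { 1, 2, 3, 4 };
     int f (void) { return t[2]; }

   Here the load from t[2] is folded to the constant 3.  */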
299
300
301 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
302 replacement rhs for the statement or NULL_TREE if no simplification
303 could be made. It is assumed that the operands have been previously
304 folded. */
305
306 static tree
307 fold_gimple_assign (gimple_stmt_iterator *si)
308 {
309 gimple *stmt = gsi_stmt (*si);
310 enum tree_code subcode = gimple_assign_rhs_code (stmt);
311 location_t loc = gimple_location (stmt);
312
313 tree result = NULL_TREE;
314
315 switch (get_gimple_rhs_class (subcode))
316 {
317 case GIMPLE_SINGLE_RHS:
318 {
319 tree rhs = gimple_assign_rhs1 (stmt);
320
321 if (TREE_CLOBBER_P (rhs))
322 return NULL_TREE;
323
324 if (REFERENCE_CLASS_P (rhs))
325 return maybe_fold_reference (rhs, false);
326
327 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
328 {
329 tree val = OBJ_TYPE_REF_EXPR (rhs);
330 if (is_gimple_min_invariant (val))
331 return val;
332 else if (flag_devirtualize && virtual_method_call_p (rhs))
333 {
334 bool final;
335 vec <cgraph_node *>targets
336 = possible_polymorphic_call_targets (rhs, stmt, &final);
337 if (final && targets.length () <= 1 && dbg_cnt (devirt))
338 {
339 if (dump_enabled_p ())
340 {
341 location_t loc = gimple_location_safe (stmt);
342 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
343 "resolving virtual function address "
344 "reference to function %s\n",
345 targets.length () == 1
346 ? targets[0]->name ()
347 : "NULL");
348 }
349 if (targets.length () == 1)
350 {
351 val = fold_convert (TREE_TYPE (val),
352 build_fold_addr_expr_loc
353 (loc, targets[0]->decl));
354 STRIP_USELESS_TYPE_CONVERSION (val);
355 }
356 else
357 /* We cannot use __builtin_unreachable here because it
358 cannot have its address taken. */
359 val = build_int_cst (TREE_TYPE (val), 0);
360 return val;
361 }
362 }
363 }
364
365 else if (TREE_CODE (rhs) == ADDR_EXPR)
366 {
367 tree ref = TREE_OPERAND (rhs, 0);
368 tree tem = maybe_fold_reference (ref, true);
369 if (tem
370 && TREE_CODE (tem) == MEM_REF
371 && integer_zerop (TREE_OPERAND (tem, 1)))
372 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
373 else if (tem)
374 result = fold_convert (TREE_TYPE (rhs),
375 build_fold_addr_expr_loc (loc, tem));
376 else if (TREE_CODE (ref) == MEM_REF
377 && integer_zerop (TREE_OPERAND (ref, 1)))
378 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
379
380 if (result)
381 {
382 /* Strip away useless type conversions. Both the
383 NON_LVALUE_EXPR that may have been added by fold, and
384 "useless" type conversions that might now be apparent
385 due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
387
388 if (result != rhs && valid_gimple_rhs_p (result))
389 return result;
390 }
391 }
392
393 else if (TREE_CODE (rhs) == CONSTRUCTOR
394 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
395 {
396 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
397 unsigned i;
398 tree val;
399
400 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
401 if (! CONSTANT_CLASS_P (val))
402 return NULL_TREE;
403
404 return build_vector_from_ctor (TREE_TYPE (rhs),
405 CONSTRUCTOR_ELTS (rhs));
406 }
407
408 else if (DECL_P (rhs))
409 return get_symbol_constant_value (rhs);
410 }
411 break;
412
413 case GIMPLE_UNARY_RHS:
414 break;
415
416 case GIMPLE_BINARY_RHS:
417 break;
418
419 case GIMPLE_TERNARY_RHS:
420 result = fold_ternary_loc (loc, subcode,
421 TREE_TYPE (gimple_assign_lhs (stmt)),
422 gimple_assign_rhs1 (stmt),
423 gimple_assign_rhs2 (stmt),
424 gimple_assign_rhs3 (stmt));
425
426 if (result)
427 {
428 STRIP_USELESS_TYPE_CONVERSION (result);
429 if (valid_gimple_rhs_p (result))
430 return result;
431 }
432 break;
433
434 case GIMPLE_INVALID_RHS:
435 gcc_unreachable ();
436 }
437
438 return NULL_TREE;
439 }
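/* For the constant-vector CONSTRUCTOR case above, a source-level sketch
   using the GNU vector extension (hypothetical declarations):

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si f (void) { return (v4si) { 1, 2, 3, 4 }; }

   The CONSTRUCTOR of four constants becomes a single VECTOR_CST.  */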
440
441
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts' locations and virtual operands.
444 If the statement has a lhs, the last stmt in the sequence is expected
445 to assign to that lhs. */
446
447 static void
448 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
449 {
450 gimple *stmt = gsi_stmt (*si_p);
451
452 if (gimple_has_location (stmt))
453 annotate_all_with_location (stmts, gimple_location (stmt));
454
455 /* First, iterate over the replacement statements backward, assigning
456 virtual operands to their defining statements. */
457 gimple *laststore = NULL;
458 for (gimple_stmt_iterator i = gsi_last (stmts);
459 !gsi_end_p (i); gsi_prev (&i))
460 {
461 gimple *new_stmt = gsi_stmt (i);
462 if ((gimple_assign_single_p (new_stmt)
463 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
464 || (is_gimple_call (new_stmt)
465 && (gimple_call_flags (new_stmt)
466 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
467 {
468 tree vdef;
469 if (!laststore)
470 vdef = gimple_vdef (stmt);
471 else
472 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
473 gimple_set_vdef (new_stmt, vdef);
474 if (vdef && TREE_CODE (vdef) == SSA_NAME)
475 SSA_NAME_DEF_STMT (vdef) = new_stmt;
476 laststore = new_stmt;
477 }
478 }
479
480 /* Second, iterate over the statements forward, assigning virtual
481 operands to their uses. */
482 tree reaching_vuse = gimple_vuse (stmt);
483 for (gimple_stmt_iterator i = gsi_start (stmts);
484 !gsi_end_p (i); gsi_next (&i))
485 {
486 gimple *new_stmt = gsi_stmt (i);
487 /* If the new statement possibly has a VUSE, update it with the exact SSA
488 name we know will reach this one. */
489 if (gimple_has_mem_ops (new_stmt))
490 gimple_set_vuse (new_stmt, reaching_vuse);
491 gimple_set_modified (new_stmt, true);
492 if (gimple_vdef (new_stmt))
493 reaching_vuse = gimple_vdef (new_stmt);
494 }
495
496 /* If the new sequence does not do a store, release the virtual
497 definition of the original statement. */
498 if (reaching_vuse
499 && reaching_vuse == gimple_vuse (stmt))
500 {
501 tree vdef = gimple_vdef (stmt);
502 if (vdef
503 && TREE_CODE (vdef) == SSA_NAME)
504 {
505 unlink_stmt_vdef (stmt);
506 release_ssa_name (vdef);
507 }
508 }
509
510 /* Finally replace the original statement with the sequence. */
511 gsi_replace_with_seq (si_p, stmts, false);
512 }
513
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
517 is replaced. If the call is expected to produce a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
523
524 void
525 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
526 {
527 tree lhs;
528 gimple *stmt, *new_stmt;
529 gimple_stmt_iterator i;
530 gimple_seq stmts = NULL;
531
532 stmt = gsi_stmt (*si_p);
533
534 gcc_assert (is_gimple_call (stmt));
535
536 push_gimplify_context (gimple_in_ssa_p (cfun));
537
538 lhs = gimple_call_lhs (stmt);
539 if (lhs == NULL_TREE)
540 {
541 gimplify_and_add (expr, &stmts);
542 /* We can end up folding a memcpy of an empty class assignment,
543 which gets optimized away by C++ gimplification. */
544 if (gimple_seq_empty_p (stmts))
545 {
546 pop_gimplify_context (NULL);
547 if (gimple_in_ssa_p (cfun))
548 {
549 unlink_stmt_vdef (stmt);
550 release_defs (stmt);
551 }
552 gsi_replace (si_p, gimple_build_nop (), false);
553 return;
554 }
555 }
556 else
557 {
558 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
559 new_stmt = gimple_build_assign (lhs, tmp);
560 i = gsi_last (stmts);
561 gsi_insert_after_without_update (&i, new_stmt,
562 GSI_CONTINUE_LINKING);
563 }
564
565 pop_gimplify_context (NULL);
566
567 gsi_replace_with_seq_vops (si_p, stmts);
568 }
569
570
571 /* Replace the call at *GSI with the gimple value VAL. */
572
573 static void
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575 {
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
578 gimple *repl;
579 if (lhs)
580 {
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
593 gsi_replace (gsi, repl, false);
594 }
595
596 /* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599 static void
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
601 {
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
612 gsi_replace (gsi, repl, false);
613 fold_stmt (gsi);
614 }
615
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
617
618 static bool
619 var_decl_component_p (tree var)
620 {
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625 }
626
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
634
635 static bool
636 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
638 {
639 gimple *stmt = gsi_stmt (*gsi);
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
644
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
647 {
648 gimple *repl;
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
651 else
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
655 {
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
658 }
659 gsi_replace (gsi, repl, false);
660 return true;
661 }
662
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
666 {
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
670 if (!lhs)
671 {
672 gsi_replace (gsi, gimple_build_nop (), false);
673 return true;
674 }
675 goto done;
676 }
677 else
678 {
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
681 tree off0;
682
683 /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
684 pointers as wide integers) and may also result in huge function
685 size because of the inlined bounds copy. Thus don't inline for
686 functions we want to instrument. */
687 if (flag_check_pointer_bounds
688 && chkp_instrumentable_p (cfun->decl)
689 /* Even if the data may contain pointers we can inline if we copy
690 less than a pointer size. */
691 && (!tree_fits_uhwi_p (len)
692 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
693 return false;
694
695 /* Build accesses at offset zero with a ref-all character type. */
696 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
697 ptr_mode, true), 0);
698
699 /* If we can perform the copy efficiently by first doing all loads
700 and then all stores, inline it that way. Currently "efficiently"
701 means that we can load all the memory into a single integer
702 register, which is what MOVE_MAX gives us. */
703 src_align = get_pointer_alignment (src);
704 dest_align = get_pointer_alignment (dest);
705 if (tree_fits_uhwi_p (len)
706 && compare_tree_int (len, MOVE_MAX) <= 0
707 /* ??? Don't transform copies from strings with known length, as this
708 confuses tree-ssa-strlen.c. This doesn't handle
709 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
710 reason. */
711 && !c_strlen (src, 2))
712 {
713 unsigned ilen = tree_to_uhwi (len);
714 if (pow2p_hwi (ilen))
715 {
716 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
717 if (type
718 && TYPE_MODE (type) != BLKmode
719 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
720 == ilen * 8)
721 /* If the destination pointer is not aligned we must be able
722 to emit an unaligned store. */
723 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
725 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
726 != CODE_FOR_nothing)))
727 {
728 tree srctype = type;
729 tree desttype = type;
730 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 srctype = build_aligned_type (type, src_align);
732 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
733 tree tem = fold_const_aggregate_ref (srcmem);
734 if (tem)
735 srcmem = tem;
736 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
737 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
738 src_align)
739 && (optab_handler (movmisalign_optab,
740 TYPE_MODE (type))
741 == CODE_FOR_nothing))
742 srcmem = NULL_TREE;
743 if (srcmem)
744 {
745 gimple *new_stmt;
746 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
747 {
748 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
749 srcmem
750 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
751 new_stmt);
752 gimple_assign_set_lhs (new_stmt, srcmem);
753 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
754 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
755 }
756 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
757 desttype = build_aligned_type (type, dest_align);
758 new_stmt
759 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
760 dest, off0),
761 srcmem);
762 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
763 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
764 if (gimple_vdef (new_stmt)
765 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
766 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
767 if (!lhs)
768 {
769 gsi_replace (gsi, new_stmt, false);
770 return true;
771 }
772 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
773 goto done;
774 }
775 }
776 }
777 }
778
779 if (endp == 3)
780 {
781 /* Both DEST and SRC must be pointer types.
782 ??? This is what old code did. Is the testing for pointer types
783 really mandatory?
784
785 If either SRC is readonly or length is 1, we can use memcpy. */
786 if (!dest_align || !src_align)
787 return false;
788 if (readonly_data_expr (src)
789 || (tree_fits_uhwi_p (len)
790 && (MIN (src_align, dest_align) / BITS_PER_UNIT
791 >= tree_to_uhwi (len))))
792 {
793 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
794 if (!fn)
795 return false;
796 gimple_call_set_fndecl (stmt, fn);
797 gimple_call_set_arg (stmt, 0, dest);
798 gimple_call_set_arg (stmt, 1, src);
799 fold_stmt (gsi);
800 return true;
801 }
802
803 /* If *src and *dest can't overlap, optimize into memcpy as well. */
804 if (TREE_CODE (src) == ADDR_EXPR
805 && TREE_CODE (dest) == ADDR_EXPR)
806 {
807 tree src_base, dest_base, fn;
808 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
809 HOST_WIDE_INT maxsize;
810
811 srcvar = TREE_OPERAND (src, 0);
812 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
813 if (src_base == NULL)
814 src_base = srcvar;
815 destvar = TREE_OPERAND (dest, 0);
816 dest_base = get_addr_base_and_unit_offset (destvar,
817 &dest_offset);
818 if (dest_base == NULL)
819 dest_base = destvar;
820 if (tree_fits_uhwi_p (len))
821 maxsize = tree_to_uhwi (len);
822 else
823 maxsize = -1;
824 if (SSA_VAR_P (src_base)
825 && SSA_VAR_P (dest_base))
826 {
827 if (operand_equal_p (src_base, dest_base, 0)
828 && ranges_overlap_p (src_offset, maxsize,
829 dest_offset, maxsize))
830 return false;
831 }
832 else if (TREE_CODE (src_base) == MEM_REF
833 && TREE_CODE (dest_base) == MEM_REF)
834 {
835 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
836 TREE_OPERAND (dest_base, 0), 0))
837 return false;
838 offset_int off = mem_ref_offset (src_base) + src_offset;
839 if (!wi::fits_shwi_p (off))
840 return false;
841 src_offset = off.to_shwi ();
842
843 off = mem_ref_offset (dest_base) + dest_offset;
844 if (!wi::fits_shwi_p (off))
845 return false;
846 dest_offset = off.to_shwi ();
847 if (ranges_overlap_p (src_offset, maxsize,
848 dest_offset, maxsize))
849 return false;
850 }
851 else
852 return false;
853
854 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
855 if (!fn)
856 return false;
857 gimple_call_set_fndecl (stmt, fn);
858 gimple_call_set_arg (stmt, 0, dest);
859 gimple_call_set_arg (stmt, 1, src);
860 fold_stmt (gsi);
861 return true;
862 }
863
864 /* If the destination and source do not alias, optimize into
865 memcpy as well. */
866 if ((is_gimple_min_invariant (dest)
867 || TREE_CODE (dest) == SSA_NAME)
868 && (is_gimple_min_invariant (src)
869 || TREE_CODE (src) == SSA_NAME))
870 {
871 ao_ref destr, srcr;
872 ao_ref_init_from_ptr_and_size (&destr, dest, len);
873 ao_ref_init_from_ptr_and_size (&srcr, src, len);
874 if (!refs_may_alias_p_1 (&destr, &srcr, false))
875 {
876 tree fn;
877 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
878 if (!fn)
879 return false;
880 gimple_call_set_fndecl (stmt, fn);
881 gimple_call_set_arg (stmt, 0, dest);
882 gimple_call_set_arg (stmt, 1, src);
883 fold_stmt (gsi);
884 return true;
885 }
886 }
887
888 return false;
889 }
890
891 if (!tree_fits_shwi_p (len))
892 return false;
893 /* FIXME:
894 This logic loses for arguments like (type *)malloc (sizeof (type)),
895 since we strip the casts off the VOID return value from malloc.
896 Perhaps we ought to inherit the type from the non-VOID argument here? */
897 STRIP_NOPS (src);
898 STRIP_NOPS (dest);
899 if (!POINTER_TYPE_P (TREE_TYPE (src))
900 || !POINTER_TYPE_P (TREE_TYPE (dest)))
901 return false;
902 /* In the following, try to find a type that is most natural to be
903 used for the memcpy source and destination and that allows
904 the most optimization when memcpy is turned into a plain assignment
905 using that type. In theory we could always use a char[len] type,
906 but that only gains us that the destination and source possibly
907 will no longer have their address taken. */
908 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
909 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
910 {
911 tree tem = TREE_OPERAND (src, 0);
912 STRIP_NOPS (tem);
913 if (tem != TREE_OPERAND (src, 0))
914 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
915 }
916 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
917 {
918 tree tem = TREE_OPERAND (dest, 0);
919 STRIP_NOPS (tem);
920 if (tem != TREE_OPERAND (dest, 0))
921 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
922 }
923 srctype = TREE_TYPE (TREE_TYPE (src));
924 if (TREE_CODE (srctype) == ARRAY_TYPE
925 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
926 {
927 srctype = TREE_TYPE (srctype);
928 STRIP_NOPS (src);
929 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
930 }
931 desttype = TREE_TYPE (TREE_TYPE (dest));
932 if (TREE_CODE (desttype) == ARRAY_TYPE
933 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
934 {
935 desttype = TREE_TYPE (desttype);
936 STRIP_NOPS (dest);
937 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
938 }
939 if (TREE_ADDRESSABLE (srctype)
940 || TREE_ADDRESSABLE (desttype))
941 return false;
942
943 /* Make sure we are not copying using a floating-point mode or
944 a type whose size possibly does not match its precision. */
945 if (FLOAT_MODE_P (TYPE_MODE (desttype))
946 || TREE_CODE (desttype) == BOOLEAN_TYPE
947 || TREE_CODE (desttype) == ENUMERAL_TYPE)
948 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
949 if (FLOAT_MODE_P (TYPE_MODE (srctype))
950 || TREE_CODE (srctype) == BOOLEAN_TYPE
951 || TREE_CODE (srctype) == ENUMERAL_TYPE)
952 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
953 if (!srctype)
954 srctype = desttype;
955 if (!desttype)
956 desttype = srctype;
957 if (!srctype)
958 return false;
959
960 src_align = get_pointer_alignment (src);
961 dest_align = get_pointer_alignment (dest);
962 if (dest_align < TYPE_ALIGN (desttype)
963 || src_align < TYPE_ALIGN (srctype))
964 return false;
965
966 destvar = dest;
967 STRIP_NOPS (destvar);
968 if (TREE_CODE (destvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (destvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
972 else
973 destvar = NULL_TREE;
974
975 srcvar = src;
976 STRIP_NOPS (srcvar);
977 if (TREE_CODE (srcvar) == ADDR_EXPR
978 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 {
981 if (!destvar
982 || src_align >= TYPE_ALIGN (desttype))
983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
984 srcvar, off0);
985 else if (!STRICT_ALIGNMENT)
986 {
987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
988 src_align);
989 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
990 }
991 else
992 srcvar = NULL_TREE;
993 }
994 else
995 srcvar = NULL_TREE;
996
997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
998 return false;
999
1000 if (srcvar == NULL_TREE)
1001 {
1002 STRIP_NOPS (src);
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1006 {
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1012 }
1013 }
1014 else if (destvar == NULL_TREE)
1015 {
1016 STRIP_NOPS (dest);
1017 if (dest_align >= TYPE_ALIGN (srctype))
1018 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1019 else
1020 {
1021 if (STRICT_ALIGNMENT)
1022 return false;
1023 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1024 dest_align);
1025 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1026 }
1027 }
1028
1029 gimple *new_stmt;
1030 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 {
1032 tree tem = fold_const_aggregate_ref (srcvar);
1033 if (tem)
1034 srcvar = tem;
1035 if (! is_gimple_min_invariant (srcvar))
1036 {
1037 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1038 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1039 new_stmt);
1040 gimple_assign_set_lhs (new_stmt, srcvar);
1041 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 }
1044 }
1045 new_stmt = gimple_build_assign (destvar, srcvar);
1046 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1047 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1048 if (gimple_vdef (new_stmt)
1049 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1050 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1051 if (!lhs)
1052 {
1053 gsi_replace (gsi, new_stmt, false);
1054 return true;
1055 }
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1057 }
1058
1059 done:
1060 gimple_seq stmts = NULL;
1061 if (endp == 0 || endp == 3)
1062 len = NULL_TREE;
1063 else if (endp == 2)
1064 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1065 ssize_int (1));
1066 if (endp == 2 || endp == 1)
1067 {
1068 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1069 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1070 TREE_TYPE (dest), dest, len);
1071 }
1072
1073 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1074 gimple *repl = gimple_build_assign (lhs, dest);
1075 gsi_replace (gsi, repl, false);
1076 return true;
1077 }
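/* A source-level sketch of the single load/store inlining above,
   assuming a 4-byte int and MOVE_MAX >= 4 (hypothetical declarations):

     #include <string.h>
     struct s { int i; };
     void f (struct s *d, const struct s *x)
     {
       memcpy (d, x, sizeof (struct s));  // becomes one load, one store
     }

   The call is replaced by an assignment through MEM_REFs of an integer
   type of the copied width, with alignment handled as checked above.  */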
1078
1079 /* Fold function call to builtin memset or bzero at *GSI setting the
1080 memory of size LEN to the value C. Return whether a simplification was made. */
1081
1082 static bool
1083 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1084 {
1085 gimple *stmt = gsi_stmt (*gsi);
1086 tree etype;
1087 unsigned HOST_WIDE_INT length, cval;
1088
1089 /* If the LEN parameter is zero, return DEST. */
1090 if (integer_zerop (len))
1091 {
1092 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1093 return true;
1094 }
1095
1096 if (! tree_fits_uhwi_p (len))
1097 return false;
1098
1099 if (TREE_CODE (c) != INTEGER_CST)
1100 return false;
1101
1102 tree dest = gimple_call_arg (stmt, 0);
1103 tree var = dest;
1104 if (TREE_CODE (var) != ADDR_EXPR)
1105 return false;
1106
1107 var = TREE_OPERAND (var, 0);
1108 if (TREE_THIS_VOLATILE (var))
1109 return false;
1110
1111 etype = TREE_TYPE (var);
1112 if (TREE_CODE (etype) == ARRAY_TYPE)
1113 etype = TREE_TYPE (etype);
1114
1115 if (!INTEGRAL_TYPE_P (etype)
1116 && !POINTER_TYPE_P (etype))
1117 return false;
1118
1119 if (! var_decl_component_p (var))
1120 return false;
1121
1122 length = tree_to_uhwi (len);
1123 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1124 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1125 return false;
1126
1127 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1128 return false;
1129
1130 if (integer_zerop (c))
1131 cval = 0;
1132 else
1133 {
1134 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1135 return false;
1136
1137 cval = TREE_INT_CST_LOW (c);
1138 cval &= 0xff;
1139 cval |= cval << 8;
1140 cval |= cval << 16;
1141 cval |= (cval << 31) << 1;
1142 }
1143
1144 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1145 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1146 gimple_set_vuse (store, gimple_vuse (stmt));
1147 tree vdef = gimple_vdef (stmt);
1148 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1149 {
1150 gimple_set_vdef (store, gimple_vdef (stmt));
1151 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1152 }
1153 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1154 if (gimple_call_lhs (stmt))
1155 {
1156 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1157 gsi_replace (gsi, asgn, false);
1158 }
1159 else
1160 {
1161 gimple_stmt_iterator gsi2 = *gsi;
1162 gsi_prev (gsi);
1163 gsi_remove (&gsi2, true);
1164 }
1165
1166 return true;
1167 }
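/* The byte replication above, as a standalone sketch:

     #include <stdint.h>
     static uint64_t
     splat_byte (uint64_t cval)
     {
       cval &= 0xff;
       cval |= cval << 8;
       cval |= cval << 16;
       cval |= (cval << 31) << 1;  // i.e. << 32, split so the shift
                                   // count stays below the type width
       return cval;
     }

   So memset (&x, 0xab, 8) stores the constant 0xabababababababab.  */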
1168
1169
1170 /* Obtain the minimum and maximum string length or minimum and maximum
1171 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1172 If ARG is an SSA name variable, follow its use-def chains. When
1173 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1174 if we are unable to determine the length or value, return false.
1175 VISITED is a bitmap of visited variables.
1176 TYPE is 0 if string length should be obtained, 1 for maximum string
1177 length and 2 for maximum value ARG can have.
1178 When FUZZY is set and the length of a string cannot be determined,
1179 the function instead uses the size of a character array it may refer
1180 to as the maximum possible length.
1181 Set *FLEXP to true if the range of the string lengths has been
1182 obtained from the upper bound of an array at the end of a struct.
1183 Such an array may hold a string that's longer than its upper bound
1184 due to it being used as a poor-man's flexible array member. */
1185
1186 static bool
1187 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1188 bool fuzzy, bool *flexp)
1189 {
1190 tree var, val;
1191 gimple *def_stmt;
1192
1193 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1194 but MINLEN may be cleared during the execution of the function. */
1195 tree *minlen = length;
1196 tree *const maxlen = length + 1;
1197
1198 if (TREE_CODE (arg) != SSA_NAME)
1199 {
1200 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1201 if (TREE_CODE (arg) == ADDR_EXPR
1202 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1203 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1204 {
1205 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1206 if (TREE_CODE (aop0) == INDIRECT_REF
1207 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1208 return get_range_strlen (TREE_OPERAND (aop0, 0),
1209 length, visited, type, fuzzy, flexp);
1210 }
1211
1212 if (type == 2)
1213 {
1214 val = arg;
1215 if (TREE_CODE (val) != INTEGER_CST
1216 || tree_int_cst_sgn (val) < 0)
1217 return false;
1218 }
1219 else
1220 val = c_strlen (arg, 1);
1221
1222 if (!val && fuzzy)
1223 {
1224 if (TREE_CODE (arg) == ADDR_EXPR)
1225 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1226 visited, type, fuzzy, flexp);
1227
1228 if (TREE_CODE (arg) == COMPONENT_REF
1229 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1230 {
1231 /* Use the type of the member array to determine the upper
1232 bound on the length of the array. This may be overly
1233 optimistic if the array itself isn't NUL-terminated and
1234 the caller relies on the subsequent member to contain
1235 the NUL.
1236 Set *FLEXP to true if the array whose bound is being
1237 used is at the end of a struct. */
1238 if (array_at_struct_end_p (arg, true))
1239 *flexp = true;
1240
1241 arg = TREE_OPERAND (arg, 1);
1242 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1243 if (!val || integer_zerop (val))
1244 return false;
1245 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1246 integer_one_node);
1247 /* Set the minimum size to zero since the string in
1248 the array could have zero length. */
1249 *minlen = ssize_int (0);
1250 }
1251 }
1252
1253 if (!val)
1254 return false;
1255
1256 if (minlen
1257 && (!*minlen
1258 || (type > 0
1259 && TREE_CODE (*minlen) == INTEGER_CST
1260 && TREE_CODE (val) == INTEGER_CST
1261 && tree_int_cst_lt (val, *minlen))))
1262 *minlen = val;
1263
1264 if (*maxlen)
1265 {
1266 if (type > 0)
1267 {
1268 if (TREE_CODE (*maxlen) != INTEGER_CST
1269 || TREE_CODE (val) != INTEGER_CST)
1270 return false;
1271
1272 if (tree_int_cst_lt (*maxlen, val))
1273 *maxlen = val;
1274 return true;
1275 }
1276 else if (simple_cst_equal (val, *maxlen) != 1)
1277 return false;
1278 }
1279
1280 *maxlen = val;
1281 return true;
1282 }
1283
1284 /* If ARG is registered for SSA update we cannot look at its defining
1285 statement. */
1286 if (name_registered_for_update_p (arg))
1287 return false;
1288
1289 /* If we were already here, break the infinite cycle. */
1290 if (!*visited)
1291 *visited = BITMAP_ALLOC (NULL);
1292 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1293 return true;
1294
1295 var = arg;
1296 def_stmt = SSA_NAME_DEF_STMT (var);
1297
1298 switch (gimple_code (def_stmt))
1299 {
1300 case GIMPLE_ASSIGN:
1301 /* The RHS of the statement defining VAR must either have a
1302 constant length or come from another SSA_NAME with a constant
1303 length. */
1304 if (gimple_assign_single_p (def_stmt)
1305 || gimple_assign_unary_nop_p (def_stmt))
1306 {
1307 tree rhs = gimple_assign_rhs1 (def_stmt);
1308 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1309 }
1310 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1311 {
1312 tree op2 = gimple_assign_rhs2 (def_stmt);
1313 tree op3 = gimple_assign_rhs3 (def_stmt);
1314 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1315 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1316 }
1317 return false;
1318
1319 case GIMPLE_PHI:
1320 {
1321 /* All the arguments of the PHI node must have the same constant
1322 length. */
1323 unsigned i;
1324
1325 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1326 {
1327 tree arg = gimple_phi_arg (def_stmt, i)->def;
1328
1329 /* If this PHI has itself as an argument, we cannot
1330 determine the string length of this argument. However,
1331 if we can find a constant string length for the other
1332 PHI args then we can still be sure that this is a
1333 constant string length. So be optimistic and just
1334 continue with the next argument. */
1335 if (arg == gimple_phi_result (def_stmt))
1336 continue;
1337
1338 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1339 {
1340 if (fuzzy)
1341 *maxlen = build_all_ones_cst (size_type_node);
1342 else
1343 return false;
1344 }
1345 }
1346 }
1347 return true;
1348
1349 default:
1350 return false;
1351 }
1352 }
1353
1354 /* Determine the minimum and maximum value or string length that ARG
1355 refers to and store each in the first two elements of MINMAXLEN.
1356 For expressions that point to strings of unknown lengths that are
1357 character arrays, use the upper bound of the array as the maximum
1358 length. For example, given an expression like 'x ? array : "xyz"'
1359 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1360 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1361 stored in array.
1362 Return true if the range of the string lengths has been obtained
1363 from the upper bound of an array at the end of a struct. Such
1364 an array may hold a string that's longer than its upper bound
1365 due to it being used as a poor-man's flexible array member. */
1366
1367 bool
1368 get_range_strlen (tree arg, tree minmaxlen[2])
1369 {
1370 bitmap visited = NULL;
1371
1372 minmaxlen[0] = NULL_TREE;
1373 minmaxlen[1] = NULL_TREE;
1374
1375 bool flexarray = false;
1376 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1377
1378 if (visited)
1379 BITMAP_FREE (visited);
1380
1381 return flexarray;
1382 }
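/* The example from the comment above, as source code (hypothetical
   declarations):

     char array[8];
     const char *pick (int x) { return x ? array : "xyz"; }

   For the returned pointer, MINMAXLEN[0] is 3 (the length of "xyz")
   and MINMAXLEN[1] is 7 (the longest string array could hold).  */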
1383
1384 tree
1385 get_maxval_strlen (tree arg, int type)
1386 {
1387 bitmap visited = NULL;
1388 tree len[2] = { NULL_TREE, NULL_TREE };
1389
1390 bool dummy;
1391 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1392 len[1] = NULL_TREE;
1393 if (visited)
1394 BITMAP_FREE (visited);
1395
1396 return len[1];
1397 }
1398
1399
1400 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1401 Return false if no simplification can be made; otherwise replace the
1402 call (typically with a call to memcpy) and return true. */
1403
1404 static bool
1405 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1406 tree dest, tree src)
1407 {
1408 location_t loc = gimple_location (gsi_stmt (*gsi));
1409 tree fn;
1410
1411 /* If SRC and DEST are the same (and not volatile), return DEST. */
1412 if (operand_equal_p (src, dest, 0))
1413 {
1414 replace_call_with_value (gsi, dest);
1415 return true;
1416 }
1417
1418 if (optimize_function_for_size_p (cfun))
1419 return false;
1420
1421 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1422 if (!fn)
1423 return false;
1424
1425 tree len = get_maxval_strlen (src, 0);
1426 if (!len)
1427 return false;
1428
1429 len = fold_convert_loc (loc, size_type_node, len);
1430 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1431 len = force_gimple_operand_gsi (gsi, len, true,
1432 NULL_TREE, true, GSI_SAME_STMT);
1433 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1434 replace_call_with_call_and_fold (gsi, repl);
1435 return true;
1436 }
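/* A source-level sketch, when the source length is known (hypothetical
   declarations):

     #include <string.h>
     char d[8];
     void f (void) { strcpy (d, "abc"); }

   becomes memcpy (d, "abc", 4), copying the terminating NUL as well.  */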
1437
1438 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1439 Return false if no simplification can be made; otherwise replace the
1440 call (typically with a call to memcpy) and return true. */
1441
1442 static bool
1443 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1444 tree dest, tree src, tree len)
1445 {
1446 location_t loc = gimple_location (gsi_stmt (*gsi));
1447 tree fn;
1448
1449 /* If the LEN parameter is zero, return DEST. */
1450 if (integer_zerop (len))
1451 {
1452 replace_call_with_value (gsi, dest);
1453 return true;
1454 }
1455
1456 /* We can't compare slen with len as constants below if len is not a
1457 constant. */
1458 if (TREE_CODE (len) != INTEGER_CST)
1459 return false;
1460
1461 /* Now, we must be passed a constant src ptr parameter. */
1462 tree slen = get_maxval_strlen (src, 0);
1463 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1464 return false;
1465
1466 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1467
1468 /* We do not support simplification of this case, though we do
1469 support it when expanding trees into RTL. */
1470 /* FIXME: generate a call to __builtin_memset. */
1471 if (tree_int_cst_lt (slen, len))
1472 return false;
1473
1474 /* OK transform into builtin memcpy. */
1475 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1476 if (!fn)
1477 return false;
1478
1479 len = fold_convert_loc (loc, size_type_node, len);
1480 len = force_gimple_operand_gsi (gsi, len, true,
1481 NULL_TREE, true, GSI_SAME_STMT);
1482 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1483 replace_call_with_call_and_fold (gsi, repl);
1484 return true;
1485 }
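/* A source-level sketch (hypothetical declarations): with a constant
   LEN no larger than the source length plus one, e.g.

     #include <string.h>
     char d[8];
     void g (void) { strncpy (d, "abcd", 3); }

   the call becomes memcpy (d, "abcd", 3); no NUL padding is needed
   since strncpy copies exactly LEN bytes in this case.  */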
1486
1487 /* Fold function call to builtin strchr or strrchr.
1488 If both arguments are constant, evaluate and fold the result,
1489 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1490 In general strlen is significantly faster than strchr
1491 due to being a simpler operation. */
1492 static bool
1493 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1494 {
1495 gimple *stmt = gsi_stmt (*gsi);
1496 tree str = gimple_call_arg (stmt, 0);
1497 tree c = gimple_call_arg (stmt, 1);
1498 location_t loc = gimple_location (stmt);
1499 const char *p;
1500 char ch;
1501
1502 if (!gimple_call_lhs (stmt))
1503 return false;
1504
1505 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1506 {
1507 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1508
1509 if (p1 == NULL)
1510 {
1511 replace_call_with_value (gsi, integer_zero_node);
1512 return true;
1513 }
1514
1515 tree len = build_int_cst (size_type_node, p1 - p);
1516 gimple_seq stmts = NULL;
1517 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1518 POINTER_PLUS_EXPR, str, len);
1519 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1520 gsi_replace_with_seq_vops (gsi, stmts);
1521 return true;
1522 }
1523
1524 if (!integer_zerop (c))
1525 return false;
1526
1527 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1528 if (is_strrchr && optimize_function_for_size_p (cfun))
1529 {
1530 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1531
1532 if (strchr_fn)
1533 {
1534 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1535 replace_call_with_call_and_fold (gsi, repl);
1536 return true;
1537 }
1538
1539 return false;
1540 }
1541
1542 tree len;
1543 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1544
1545 if (!strlen_fn)
1546 return false;
1547
1548 /* Create newstr = strlen (str). */
1549 gimple_seq stmts = NULL;
1550 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1551 gimple_set_location (new_stmt, loc);
1552 len = create_tmp_reg_or_ssa_name (size_type_node);
1553 gimple_call_set_lhs (new_stmt, len);
1554 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1555
1556 /* Create (str p+ strlen (str)). */
1557 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1558 POINTER_PLUS_EXPR, str, len);
1559 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1560 gsi_replace_with_seq_vops (gsi, stmts);
1561 /* gsi now points at the assignment to the lhs, get a
1562 stmt iterator to the strlen.
1563 ??? We can't use gsi_for_stmt as that doesn't work when the
1564 CFG isn't built yet. */
1565 gimple_stmt_iterator gsi2 = *gsi;
1566 gsi_prev (&gsi2);
1567 fold_stmt (&gsi2);
1568 return true;
1569 }
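/* A source-level sketch of the strlen-based rewrite (hypothetical
   declaration):

     #include <string.h>
     char *f (char *s) { return strchr (s, 0); }

   becomes s + strlen (s), since the terminating NUL is always found.  */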
1570
1571 /* Fold function call to builtin strstr.
1572 If both arguments are constant, evaluate and fold the result,
1573 additionally fold strstr (x, "") into x and strstr (x, "c")
1574 into strchr (x, 'c'). */
1575 static bool
1576 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1577 {
1578 gimple *stmt = gsi_stmt (*gsi);
1579 tree haystack = gimple_call_arg (stmt, 0);
1580 tree needle = gimple_call_arg (stmt, 1);
1581 const char *p, *q;
1582
1583 if (!gimple_call_lhs (stmt))
1584 return false;
1585
1586 q = c_getstr (needle);
1587 if (q == NULL)
1588 return false;
1589
1590 if ((p = c_getstr (haystack)))
1591 {
1592 const char *r = strstr (p, q);
1593
1594 if (r == NULL)
1595 {
1596 replace_call_with_value (gsi, integer_zero_node);
1597 return true;
1598 }
1599
1600 tree len = build_int_cst (size_type_node, r - p);
1601 gimple_seq stmts = NULL;
1602 gimple *new_stmt
1603 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1604 haystack, len);
1605 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1606 gsi_replace_with_seq_vops (gsi, stmts);
1607 return true;
1608 }
1609
1610 /* For strstr (x, "") return x. */
1611 if (q[0] == '\0')
1612 {
1613 replace_call_with_value (gsi, haystack);
1614 return true;
1615 }
1616
1617 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1618 if (q[1] == '\0')
1619 {
1620 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1621 if (strchr_fn)
1622 {
1623 tree c = build_int_cst (integer_type_node, q[0]);
1624 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1625 replace_call_with_call_and_fold (gsi, repl);
1626 return true;
1627 }
1628 }
1629
1630 return false;
1631 }
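/* Source-level sketches of the non-constant cases (hypothetical
   declarations):

     #include <string.h>
     char *f (char *x) { return strstr (x, ""); }   // folds to x
     char *g (char *x) { return strstr (x, "c"); }  // folds to strchr (x, 'c')
   */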
1632
1633 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1634 to the call.
1635 
1636 Return false if no simplification was possible; otherwise replace the
1637 call and return true.
1638 
1639 The replacement computes the same value but in a more efficient
1640 manner, possibly using calls to other builtin functions such as
1641 strlen and memcpy. */
1650
1651 static bool
1652 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1653 {
1654 gimple *stmt = gsi_stmt (*gsi);
1655 location_t loc = gimple_location (stmt);
1656
1657 const char *p = c_getstr (src);
1658
1659 /* If the string length is zero, return the dst parameter. */
1660 if (p && *p == '\0')
1661 {
1662 replace_call_with_value (gsi, dst);
1663 return true;
1664 }
1665
1666 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1667 return false;
1668
1669 /* See if we can store by pieces into (dst + strlen(dst)). */
1670 tree newdst;
1671 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1672 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1673
1674 if (!strlen_fn || !memcpy_fn)
1675 return false;
1676
1677 /* If the length of the source string isn't computable, don't
1678 split strcat into strlen and memcpy. */
1679 tree len = get_maxval_strlen (src, 0);
1680 if (! len)
1681 return false;
1682
1683 /* Create strlen (dst). */
1684 gimple_seq stmts = NULL, stmts2;
1685 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1686 gimple_set_location (repl, loc);
1687 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1688 gimple_call_set_lhs (repl, newdst);
1689 gimple_seq_add_stmt_without_update (&stmts, repl);
1690
1691 /* Create (dst p+ strlen (dst)). */
1692 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1693 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1694 gimple_seq_add_seq_without_update (&stmts, stmts2);
1695
1696 len = fold_convert_loc (loc, size_type_node, len);
1697 len = size_binop_loc (loc, PLUS_EXPR, len,
1698 build_int_cst (size_type_node, 1));
1699 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1700 gimple_seq_add_seq_without_update (&stmts, stmts2);
1701
1702 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1703 gimple_seq_add_stmt_without_update (&stmts, repl);
1704 if (gimple_call_lhs (stmt))
1705 {
1706 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1707 gimple_seq_add_stmt_without_update (&stmts, repl);
1708 gsi_replace_with_seq_vops (gsi, stmts);
1709 /* gsi now points at the assignment to the lhs, get a
1710 stmt iterator to the memcpy call.
1711 ??? We can't use gsi_for_stmt as that doesn't work when the
1712 CFG isn't built yet. */
1713 gimple_stmt_iterator gsi2 = *gsi;
1714 gsi_prev (&gsi2);
1715 fold_stmt (&gsi2);
1716 }
1717 else
1718 {
1719 gsi_replace_with_seq_vops (gsi, stmts);
1720 fold_stmt (gsi);
1721 }
1722 return true;
1723 }
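/* A source-level sketch of the strlen/memcpy split, when the source
   length is known and we optimize for speed (hypothetical declarations):

     #include <string.h>
     char d[16];
     void f (void) { strcat (d, "xy"); }

   becomes, in effect:

     size_t n = strlen (d);
     memcpy (d + n, "xy", 3);  // 3 == strlen ("xy") + 1, copies the NUL
   */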
1724
1725 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
1726 are the arguments to the call. */
1727
1728 static bool
1729 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1730 {
1731 gimple *stmt = gsi_stmt (*gsi);
1732 tree dest = gimple_call_arg (stmt, 0);
1733 tree src = gimple_call_arg (stmt, 1);
1734 tree size = gimple_call_arg (stmt, 2);
1735 tree fn;
1736 const char *p;
1737
1738
1739 p = c_getstr (src);
1740 /* If the SRC parameter is "", return DEST. */
1741 if (p && *p == '\0')
1742 {
1743 replace_call_with_value (gsi, dest);
1744 return true;
1745 }
1746
1747 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1748 return false;
1749
1750 /* If __builtin_strcat_chk is used, assume strcat is available. */
1751 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1752 if (!fn)
1753 return false;
1754
1755 gimple *repl = gimple_build_call (fn, 2, dest, src);
1756 replace_call_with_call_and_fold (gsi, repl);
1757 return true;
1758 }
1759
1760 /* Simplify a call to the strncat builtin. */
1761
1762 static bool
1763 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1764 {
1765 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1766 tree dst = gimple_call_arg (stmt, 0);
1767 tree src = gimple_call_arg (stmt, 1);
1768 tree len = gimple_call_arg (stmt, 2);
1769
1770 const char *p = c_getstr (src);
1771
1772 /* If the requested length is zero, or the src parameter string
1773 length is zero, return the dst parameter. */
1774 if (integer_zerop (len) || (p && *p == '\0'))
1775 {
1776 replace_call_with_value (gsi, dst);
1777 return true;
1778 }
1779
1780 /* If the requested len is greater than or equal to the string
1781 length, call strcat. */
1782 if (TREE_CODE (len) == INTEGER_CST && p
1783 && compare_tree_int (len, strlen (p)) >= 0)
1784 {
1785 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1786
1787 /* If the replacement _DECL isn't initialized, don't do the
1788 transformation. */
1789 if (!fn)
1790 return false;
1791
1792 gcall *repl = gimple_build_call (fn, 2, dst, src);
1793 replace_call_with_call_and_fold (gsi, repl);
1794 return true;
1795 }
1796
1797 return false;
1798 }
1799
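/* For example, with the string length of SRC known, the fold above
   rewrites

     strncat (dest, "abc", 5)  ->  strcat (dest, "abc")

   since 5 >= strlen ("abc"), while strncat (dest, src, 0) is
   replaced by DEST directly.  */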
1800 /* Fold a call to the __strncat_chk builtin pointed to by GSI. The
1801 DEST, SRC, LEN, and SIZE arguments are extracted from the call. */
1802
1803 static bool
1804 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1805 {
1806 gimple *stmt = gsi_stmt (*gsi);
1807 tree dest = gimple_call_arg (stmt, 0);
1808 tree src = gimple_call_arg (stmt, 1);
1809 tree len = gimple_call_arg (stmt, 2);
1810 tree size = gimple_call_arg (stmt, 3);
1811 tree fn;
1812 const char *p;
1813
1814 p = c_getstr (src);
1815 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1816 if ((p && *p == '\0')
1817 || integer_zerop (len))
1818 {
1819 replace_call_with_value (gsi, dest);
1820 return true;
1821 }
1822
1823 if (! tree_fits_uhwi_p (size))
1824 return false;
1825
1826 if (! integer_all_onesp (size))
1827 {
1828 tree src_len = c_strlen (src, 1);
1829 if (src_len
1830 && tree_fits_uhwi_p (src_len)
1831 && tree_fits_uhwi_p (len)
1832 && ! tree_int_cst_lt (len, src_len))
1833 {
1834 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1835 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1836 if (!fn)
1837 return false;
1838
1839 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1840 replace_call_with_call_and_fold (gsi, repl);
1841 return true;
1842 }
1843 return false;
1844 }
1845
1846 /* If __builtin_strncat_chk is used, assume strncat is available. */
1847 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1848 if (!fn)
1849 return false;
1850
1851 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1852 replace_call_with_call_and_fold (gsi, repl);
1853 return true;
1854 }
1855
1856 /* Build and append gimple statements to STMTS that load the first
1857 character of the memory location identified by STR. LOC is the
1858 location of the statement. */
1859
1860 static tree
1861 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1862 {
1863 tree var;
1864
1865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1866 tree cst_uchar_ptr_node
1867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1868 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1869
1870 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1871 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1872 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1873
1874 gimple_assign_set_lhs (stmt, var);
1875 gimple_seq_add_stmt_without_update (stmts, stmt);
1876
1877 return var;
1878 }
1879
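/* For example, for STR pointing at a string object, the sequence
   appended to STMTS corresponds to

     var = *(const unsigned char *) str;

   i.e. a MEM_REF load of the first byte through a constant unsigned
   char pointer type.  */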
1880 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
1881 iterator. The BUILT_IN_* code is taken from the call's fndecl. */
1882
1883 static bool
1884 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1885 {
1886 gimple *stmt = gsi_stmt (*gsi);
1887 tree callee = gimple_call_fndecl (stmt);
1888 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1889
1890 tree type = integer_type_node;
1891 tree str1 = gimple_call_arg (stmt, 0);
1892 tree str2 = gimple_call_arg (stmt, 1);
1893 tree lhs = gimple_call_lhs (stmt);
1894 HOST_WIDE_INT length = -1;
1895
1896 /* Handle strncmp and strncasecmp functions. */
1897 if (gimple_call_num_args (stmt) == 3)
1898 {
1899 tree len = gimple_call_arg (stmt, 2);
1900 if (tree_fits_uhwi_p (len))
1901 length = tree_to_uhwi (len);
1902 }
1903
1904 /* If the LEN parameter is zero, return zero. */
1905 if (length == 0)
1906 {
1907 replace_call_with_value (gsi, integer_zero_node);
1908 return true;
1909 }
1910
1911 /* If STR1 and STR2 are the same (and not volatile), return zero. */
1912 if (operand_equal_p (str1, str2, 0))
1913 {
1914 replace_call_with_value (gsi, integer_zero_node);
1915 return true;
1916 }
1917
1918 const char *p1 = c_getstr (str1);
1919 const char *p2 = c_getstr (str2);
1920
1921 /* For known strings, return an immediate value. */
1922 if (p1 && p2)
1923 {
1924 int r = 0;
1925 bool known_result = false;
1926
1927 switch (fcode)
1928 {
1929 case BUILT_IN_STRCMP:
1930 {
1931 r = strcmp (p1, p2);
1932 known_result = true;
1933 break;
1934 }
1935 case BUILT_IN_STRNCMP:
1936 {
1937 if (length == -1)
1938 break;
1939 r = strncmp (p1, p2, length);
1940 known_result = true;
1941 break;
1942 }
1943 /* The only situation we can handle is when the strings are equal
1944 (result 0), and that is already covered by the operand_equal_p case. */
1945 case BUILT_IN_STRCASECMP:
1946 break;
1947 case BUILT_IN_STRNCASECMP:
1948 {
1949 if (length == -1)
1950 break;
1951 r = strncmp (p1, p2, length);
1952 if (r == 0)
1953 known_result = true;
1954 break;
1955 }
1956 default:
1957 gcc_unreachable ();
1958 }
1959
1960 if (known_result)
1961 {
1962 replace_call_with_value (gsi, build_cmp_result (type, r));
1963 return true;
1964 }
1965 }
1966
1967 bool nonzero_length = length >= 1
1968 || fcode == BUILT_IN_STRCMP
1969 || fcode == BUILT_IN_STRCASECMP;
1970
1971 location_t loc = gimple_location (stmt);
1972
1973 /* If the second arg is "", return *(const unsigned char*)arg1. */
1974 if (p2 && *p2 == '\0' && nonzero_length)
1975 {
1976 gimple_seq stmts = NULL;
1977 tree var = gimple_load_first_char (loc, str1, &stmts);
1978 if (lhs)
1979 {
1980 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
1981 gimple_seq_add_stmt_without_update (&stmts, stmt);
1982 }
1983
1984 gsi_replace_with_seq_vops (gsi, stmts);
1985 return true;
1986 }
1987
1988 /* If the first arg is "", return -*(const unsigned char*)arg2. */
1989 if (p1 && *p1 == '\0' && nonzero_length)
1990 {
1991 gimple_seq stmts = NULL;
1992 tree var = gimple_load_first_char (loc, str2, &stmts);
1993
1994 if (lhs)
1995 {
1996 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
1997 stmt = gimple_build_assign (c, NOP_EXPR, var);
1998 gimple_seq_add_stmt_without_update (&stmts, stmt);
1999
2000 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2001 gimple_seq_add_stmt_without_update (&stmts, stmt);
2002 }
2003
2004 gsi_replace_with_seq_vops (gsi, stmts);
2005 return true;
2006 }
2007
2008 /* If the len parameter is one, return an expression corresponding to
2009 (*(const unsigned char*)str1 - *(const unsigned char*)str2). */
2010 if (fcode == BUILT_IN_STRNCMP && length == 1)
2011 {
2012 gimple_seq stmts = NULL;
2013 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2014 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2015
2016 if (lhs)
2017 {
2018 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2019 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2020 gimple_seq_add_stmt_without_update (&stmts, convert1);
2021
2022 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2023 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2024 gimple_seq_add_stmt_without_update (&stmts, convert2);
2025
2026 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2027 gimple_seq_add_stmt_without_update (&stmts, stmt);
2028 }
2029
2030 gsi_replace_with_seq_vops (gsi, stmts);
2031 return true;
2032 }
2033
2034 return false;
2035 }
2036
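/* A few illustrative instances of the folds above, assuming the
   string contents are known at compile time:

     strcmp (s, s)            ->  0
     strcmp ("abc", "abd")    ->  a negative constant (build_cmp_result)
     strncmp (s1, s2, 0)      ->  0
     strcmp (s, "")           ->  *(const unsigned char *) s
     strncmp (s1, s2, 1)      ->  *(const unsigned char *) s1
                                  - *(const unsigned char *) s2  */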
2037 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2038
2039 static bool
2040 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2041 {
2042 gimple *stmt = gsi_stmt (*gsi);
2043 tree lhs = gimple_call_lhs (stmt);
2044 tree arg1 = gimple_call_arg (stmt, 0);
2045 tree arg2 = gimple_call_arg (stmt, 1);
2046 tree len = gimple_call_arg (stmt, 2);
2047
2048 /* If the LEN parameter is zero, return a null pointer. */
2049 if (integer_zerop (len))
2050 {
2051 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2052 return true;
2053 }
2054
2055 char c;
2056 if (TREE_CODE (arg2) != INTEGER_CST
2057 || !tree_fits_uhwi_p (len)
2058 || !target_char_cst_p (arg2, &c))
2059 return false;
2060
2061 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2062 unsigned HOST_WIDE_INT string_length;
2063 const char *p1 = c_getstr (arg1, &string_length);
2064
2065 if (p1)
2066 {
2067 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2068 if (r == NULL)
2069 {
2070 if (length <= string_length)
2071 {
2072 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2073 return true;
2074 }
2075 }
2076 else
2077 {
2078 unsigned HOST_WIDE_INT offset = r - p1;
2079 gimple_seq stmts = NULL;
2080 if (lhs != NULL_TREE)
2081 {
2082 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2083 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2084 arg1, offset_cst);
2085 gimple_seq_add_stmt_without_update (&stmts, stmt);
2086 }
2087 else
2088 gimple_seq_add_stmt_without_update (&stmts,
2089 gimple_build_nop ());
2090
2091 gsi_replace_with_seq_vops (gsi, stmts);
2092 return true;
2093 }
2094 }
2095
2096 return false;
2097 }
2098
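/* For example, with a constant haystack the fold above gives

     memchr ("hello", 'l', 5)  ->  "hello" + 2
     memchr ("hello", 'z', 5)  ->  NULL

   while memchr (s, c, 0) is always folded to NULL.  */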
2099 /* Fold a call to the fputs builtin at the iterator GSI. ARG0 and
2100 ARG1 are the arguments to the call: the string to write and the
2101 stream. UNLOCKED is true if this is actually a call to
2102 fputs_unlocked. The known length of the string is computed via
2103 get_maxval_strlen. Return false if no simplification
2104 was made. */
2105
2106 static bool
2107 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2108 tree arg0, tree arg1,
2109 bool unlocked)
2110 {
2111 gimple *stmt = gsi_stmt (*gsi);
2112
2113 /* If we're using an unlocked function, assume the other unlocked
2114 functions exist explicitly. */
2115 tree const fn_fputc = (unlocked
2116 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2117 : builtin_decl_implicit (BUILT_IN_FPUTC));
2118 tree const fn_fwrite = (unlocked
2119 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2120 : builtin_decl_implicit (BUILT_IN_FWRITE));
2121
2122 /* If the return value is used, don't do the transformation. */
2123 if (gimple_call_lhs (stmt))
2124 return false;
2125
2126 /* Get the length of the string passed to fputs. If the length
2127 can't be determined, punt. */
2128 tree len = get_maxval_strlen (arg0, 0);
2129 if (!len
2130 || TREE_CODE (len) != INTEGER_CST)
2131 return false;
2132
2133 switch (compare_tree_int (len, 1))
2134 {
2135 case -1: /* length is 0, delete the call entirely. */
2136 replace_call_with_value (gsi, integer_zero_node);
2137 return true;
2138
2139 case 0: /* length is 1, call fputc. */
2140 {
2141 const char *p = c_getstr (arg0);
2142 if (p != NULL)
2143 {
2144 if (!fn_fputc)
2145 return false;
2146
2147 gimple *repl = gimple_build_call (fn_fputc, 2,
2148 build_int_cst
2149 (integer_type_node, p[0]), arg1);
2150 replace_call_with_call_and_fold (gsi, repl);
2151 return true;
2152 }
2153 }
2154 /* FALLTHROUGH */
2155 case 1: /* length is greater than 1, call fwrite. */
2156 {
2157 /* If optimizing for size, keep fputs. */
2158 if (optimize_function_for_size_p (cfun))
2159 return false;
2160 /* New argument list transforming fputs(string, stream) to
2161 fwrite(string, 1, len, stream). */
2162 if (!fn_fwrite)
2163 return false;
2164
2165 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2166 size_one_node, len, arg1);
2167 replace_call_with_call_and_fold (gsi, repl);
2168 return true;
2169 }
2170 default:
2171 gcc_unreachable ();
2172 }
2173 return false;
2174 }
2175
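/* Illustrative instances of the fputs fold above (only applied when
   the return value is unused):

     fputs ("", f)     ->  dropped entirely
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)

   the last one only when not optimizing for size.  */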
2176 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2177 DEST, SRC, LEN, and SIZE are the arguments to the call.
2178 FCODE is the BUILT_IN_* code of the builtin. The return value is
2179 ignored when the call statement has no lhs; a maximum length
2180 for LEN is derived via get_maxval_strlen. */
2181
2182 static bool
2183 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2184 tree dest, tree src, tree len, tree size,
2185 enum built_in_function fcode)
2186 {
2187 gimple *stmt = gsi_stmt (*gsi);
2188 location_t loc = gimple_location (stmt);
2189 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2190 tree fn;
2191
2192 /* If SRC and DEST are the same (and not volatile), return DEST
2193 (resp. DEST+LEN for __mempcpy_chk). */
2194 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2195 {
2196 if (fcode != BUILT_IN_MEMPCPY_CHK)
2197 {
2198 replace_call_with_value (gsi, dest);
2199 return true;
2200 }
2201 else
2202 {
2203 gimple_seq stmts = NULL;
2204 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2205 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2206 TREE_TYPE (dest), dest, len);
2207 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2208 replace_call_with_value (gsi, temp);
2209 return true;
2210 }
2211 }
2212
2213 if (! tree_fits_uhwi_p (size))
2214 return false;
2215
2216 tree maxlen = get_maxval_strlen (len, 2);
2217 if (! integer_all_onesp (size))
2218 {
2219 if (! tree_fits_uhwi_p (len))
2220 {
2221 /* If LEN is not constant, try MAXLEN too.
2222 For MAXLEN only allow optimizing into the non-checking function
2223 if SIZE is >= MAXLEN; never convert to the __chk_fail () path. */
2224 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2225 {
2226 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2227 {
2228 /* (void) __mempcpy_chk () can be optimized into
2229 (void) __memcpy_chk (). */
2230 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2231 if (!fn)
2232 return false;
2233
2234 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2235 replace_call_with_call_and_fold (gsi, repl);
2236 return true;
2237 }
2238 return false;
2239 }
2240 }
2241 else
2242 maxlen = len;
2243
2244 if (tree_int_cst_lt (size, maxlen))
2245 return false;
2246 }
2247
2248 fn = NULL_TREE;
2249 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2250 mem{cpy,pcpy,move,set} is available. */
2251 switch (fcode)
2252 {
2253 case BUILT_IN_MEMCPY_CHK:
2254 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2255 break;
2256 case BUILT_IN_MEMPCPY_CHK:
2257 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2258 break;
2259 case BUILT_IN_MEMMOVE_CHK:
2260 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2261 break;
2262 case BUILT_IN_MEMSET_CHK:
2263 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2264 break;
2265 default:
2266 break;
2267 }
2268
2269 if (!fn)
2270 return false;
2271
2272 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2273 replace_call_with_call_and_fold (gsi, repl);
2274 return true;
2275 }
2276
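/* For example, when the object size is known to be large enough,
   the fold above rewrites

     __memcpy_chk (dest, src, 32, 64)  ->  memcpy (dest, src, 32)

   whereas with SIZE < LEN the checking call is kept so the runtime
   diagnostics still trigger.  */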
2277 /* Fold a call to the __st{r,p}cpy_chk builtin.
2278 DEST, SRC, and SIZE are the arguments to the call.
2279 FCODE is the BUILT_IN_* code of the builtin. The return value is
2280 ignored when the call statement has no lhs; the maximum length of
2281 the SRC string is derived via get_maxval_strlen. */
2282
2283 static bool
2284 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2285 tree dest,
2286 tree src, tree size,
2287 enum built_in_function fcode)
2288 {
2289 gimple *stmt = gsi_stmt (*gsi);
2290 location_t loc = gimple_location (stmt);
2291 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2292 tree len, fn;
2293
2294 /* If SRC and DEST are the same (and not volatile), return DEST. */
2295 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2296 {
2297 replace_call_with_value (gsi, dest);
2298 return true;
2299 }
2300
2301 if (! tree_fits_uhwi_p (size))
2302 return false;
2303
2304 tree maxlen = get_maxval_strlen (src, 1);
2305 if (! integer_all_onesp (size))
2306 {
2307 len = c_strlen (src, 1);
2308 if (! len || ! tree_fits_uhwi_p (len))
2309 {
2310 /* If LEN is not constant, try MAXLEN too.
2311 For MAXLEN only allow optimizing into the non-checking function
2312 if SIZE is >= MAXLEN; never convert to the __chk_fail () path. */
2313 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2314 {
2315 if (fcode == BUILT_IN_STPCPY_CHK)
2316 {
2317 if (! ignore)
2318 return false;
2319
2320 /* If return value of __stpcpy_chk is ignored,
2321 optimize into __strcpy_chk. */
2322 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2323 if (!fn)
2324 return false;
2325
2326 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2327 replace_call_with_call_and_fold (gsi, repl);
2328 return true;
2329 }
2330
2331 if (! len || TREE_SIDE_EFFECTS (len))
2332 return false;
2333
2334 /* If c_strlen returned something, but not a constant,
2335 transform __strcpy_chk into __memcpy_chk. */
2336 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2337 if (!fn)
2338 return false;
2339
2340 gimple_seq stmts = NULL;
2341 len = gimple_convert (&stmts, loc, size_type_node, len);
2342 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2343 build_int_cst (size_type_node, 1));
2344 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2345 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2346 replace_call_with_call_and_fold (gsi, repl);
2347 return true;
2348 }
2349 }
2350 else
2351 maxlen = len;
2352
2353 if (! tree_int_cst_lt (maxlen, size))
2354 return false;
2355 }
2356
2357 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2358 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2359 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2360 if (!fn)
2361 return false;
2362
2363 gimple *repl = gimple_build_call (fn, 2, dest, src);
2364 replace_call_with_call_and_fold (gsi, repl);
2365 return true;
2366 }
2367
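/* Illustrative instances of the __st{r,p}cpy_chk fold above, with
   SRC a literal of length 3 and SIZE a known object size of 8:

     __strcpy_chk (dest, "abc", 8)  ->  strcpy (dest, "abc")

   and, when c_strlen gives a non-constant length expression LEN
   (and no better bound is known),

     __strcpy_chk (dest, src, 8)    ->  __memcpy_chk (dest, src,
                                                      LEN + 1, 8)  */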
2368 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2369 are the arguments to the call. A maximum length for LEN is derived
2370 via get_maxval_strlen. The return value is ignored when the call
2371 statement has no lhs. FCODE is the BUILT_IN_* code of the builtin. */
2372
2373 static bool
2374 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2375 tree dest, tree src,
2376 tree len, tree size,
2377 enum built_in_function fcode)
2378 {
2379 gimple *stmt = gsi_stmt (*gsi);
2380 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2381 tree fn;
2382
2383 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2384 {
2385 /* If return value of __stpncpy_chk is ignored,
2386 optimize into __strncpy_chk. */
2387 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2388 if (fn)
2389 {
2390 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2391 replace_call_with_call_and_fold (gsi, repl);
2392 return true;
2393 }
2394 }
2395
2396 if (! tree_fits_uhwi_p (size))
2397 return false;
2398
2399 tree maxlen = get_maxval_strlen (len, 2);
2400 if (! integer_all_onesp (size))
2401 {
2402 if (! tree_fits_uhwi_p (len))
2403 {
2404 /* If LEN is not constant, try MAXLEN too.
2405 For MAXLEN only allow optimizing into the non-checking function
2406 if SIZE is >= MAXLEN; never convert to the __chk_fail () path. */
2407 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2408 return false;
2409 }
2410 else
2411 maxlen = len;
2412
2413 if (tree_int_cst_lt (size, maxlen))
2414 return false;
2415 }
2416
2417 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2418 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2419 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2420 if (!fn)
2421 return false;
2422
2423 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2424 replace_call_with_call_and_fold (gsi, repl);
2425 return true;
2426 }
2427
2428 /* Fold a function call to the builtin stpcpy with arguments DEST and
2429 SRC. Return false if no simplification can be made. */
2430
2431 static bool
2432 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2433 {
2434 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2435 location_t loc = gimple_location (stmt);
2436 tree dest = gimple_call_arg (stmt, 0);
2437 tree src = gimple_call_arg (stmt, 1);
2438 tree fn, len, lenp1;
2439
2440 /* If the result is unused, replace stpcpy with strcpy. */
2441 if (gimple_call_lhs (stmt) == NULL_TREE)
2442 {
2443 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2444 if (!fn)
2445 return false;
2446 gimple_call_set_fndecl (stmt, fn);
2447 fold_stmt (gsi);
2448 return true;
2449 }
2450
2451 len = c_strlen (src, 1);
2452 if (!len
2453 || TREE_CODE (len) != INTEGER_CST)
2454 return false;
2455
2456 if (optimize_function_for_size_p (cfun)
2457 /* If length is zero it's small enough. */
2458 && !integer_zerop (len))
2459 return false;
2460
2461 /* If the source has a known length replace stpcpy with memcpy. */
2462 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2463 if (!fn)
2464 return false;
2465
2466 gimple_seq stmts = NULL;
2467 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2468 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2469 tem, build_int_cst (size_type_node, 1));
2470 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2471 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2472 gimple_set_vuse (repl, gimple_vuse (stmt));
2473 gimple_set_vdef (repl, gimple_vdef (stmt));
2474 if (gimple_vdef (repl)
2475 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2476 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2477 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2478 /* Replace the result with dest + len. */
2479 stmts = NULL;
2480 tem = gimple_convert (&stmts, loc, sizetype, len);
2481 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2482 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2483 POINTER_PLUS_EXPR, dest, tem);
2484 gsi_replace (gsi, ret, false);
2485 /* Finally fold the memcpy call. */
2486 gimple_stmt_iterator gsi2 = *gsi;
2487 gsi_prev (&gsi2);
2488 fold_stmt (&gsi2);
2489 return true;
2490 }
2491
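/* For example, with strlen (src) known to be 3, the fold above
   rewrites

     lhs = stpcpy (dest, src);

   into the equivalent of

     memcpy (dest, src, 4);
     lhs = dest + 3;

   and with an unused result it simply becomes strcpy (dest, src).  */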
2492 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2493 Return false if a normal call should be emitted rather than
2494 simplifying the call inline. FCODE is either BUILT_IN_SNPRINTF_CHK
2495 or BUILT_IN_VSNPRINTF_CHK. A maximum length for the LEN argument
2496 is derived via get_maxval_strlen. */
2497
2498 static bool
2499 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2500 enum built_in_function fcode)
2501 {
2502 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2503 tree dest, size, len, fn, fmt, flag;
2504 const char *fmt_str;
2505
2506 /* Verify the required arguments in the original call. */
2507 if (gimple_call_num_args (stmt) < 5)
2508 return false;
2509
2510 dest = gimple_call_arg (stmt, 0);
2511 len = gimple_call_arg (stmt, 1);
2512 flag = gimple_call_arg (stmt, 2);
2513 size = gimple_call_arg (stmt, 3);
2514 fmt = gimple_call_arg (stmt, 4);
2515
2516 if (! tree_fits_uhwi_p (size))
2517 return false;
2518
2519 if (! integer_all_onesp (size))
2520 {
2521 tree maxlen = get_maxval_strlen (len, 2);
2522 if (! tree_fits_uhwi_p (len))
2523 {
2524 /* If LEN is not constant, try MAXLEN too.
2525 For MAXLEN only allow optimizing into the non-checking function
2526 if SIZE is >= MAXLEN; never convert to the __chk_fail () path. */
2527 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2528 return false;
2529 }
2530 else
2531 maxlen = len;
2532
2533 if (tree_int_cst_lt (size, maxlen))
2534 return false;
2535 }
2536
2537 if (!init_target_chars ())
2538 return false;
2539
2540 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2541 or if format doesn't contain % chars or is "%s". */
2542 if (! integer_zerop (flag))
2543 {
2544 fmt_str = c_getstr (fmt);
2545 if (fmt_str == NULL)
2546 return false;
2547 if (strchr (fmt_str, target_percent) != NULL
2548 && strcmp (fmt_str, target_percent_s))
2549 return false;
2550 }
2551
2552 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2553 available. */
2554 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2555 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2556 if (!fn)
2557 return false;
2558
2559 /* Replace the called function and the first 5 arguments by 3,
2560 retaining the trailing varargs. */
2561 gimple_call_set_fndecl (stmt, fn);
2562 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2563 gimple_call_set_arg (stmt, 0, dest);
2564 gimple_call_set_arg (stmt, 1, len);
2565 gimple_call_set_arg (stmt, 2, fmt);
2566 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2567 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2568 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2569 fold_stmt (gsi);
2570 return true;
2571 }
2572
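/* For example, with a known object size of 64 the fold above
   rewrites

     __snprintf_chk (dest, 32, 0, 64, "%d", x)

   into

     snprintf (dest, 32, "%d", x)

   dropping the flag and size arguments while keeping the varargs.  */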
2573 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
2574 Return false if a normal call should be emitted rather than
2575 simplifying the call inline. FCODE is either BUILT_IN_SPRINTF_CHK
2576 or BUILT_IN_VSPRINTF_CHK. */
2577
2578 static bool
2579 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2580 enum built_in_function fcode)
2581 {
2582 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2583 tree dest, size, len, fn, fmt, flag;
2584 const char *fmt_str;
2585 unsigned nargs = gimple_call_num_args (stmt);
2586
2587 /* Verify the required arguments in the original call. */
2588 if (nargs < 4)
2589 return false;
2590 dest = gimple_call_arg (stmt, 0);
2591 flag = gimple_call_arg (stmt, 1);
2592 size = gimple_call_arg (stmt, 2);
2593 fmt = gimple_call_arg (stmt, 3);
2594
2595 if (! tree_fits_uhwi_p (size))
2596 return false;
2597
2598 len = NULL_TREE;
2599
2600 if (!init_target_chars ())
2601 return false;
2602
2603 /* Check whether the format is a literal string constant. */
2604 fmt_str = c_getstr (fmt);
2605 if (fmt_str != NULL)
2606 {
2607 /* If the format doesn't contain % args or %%, we know the size. */
2608 if (strchr (fmt_str, target_percent) == 0)
2609 {
2610 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2611 len = build_int_cstu (size_type_node, strlen (fmt_str));
2612 }
2613 /* If the format is "%s" and first ... argument is a string literal,
2614 we know the size too. */
2615 else if (fcode == BUILT_IN_SPRINTF_CHK
2616 && strcmp (fmt_str, target_percent_s) == 0)
2617 {
2618 tree arg;
2619
2620 if (nargs == 5)
2621 {
2622 arg = gimple_call_arg (stmt, 4);
2623 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2624 {
2625 len = c_strlen (arg, 1);
2626 if (! len || ! tree_fits_uhwi_p (len))
2627 len = NULL_TREE;
2628 }
2629 }
2630 }
2631 }
2632
2633 if (! integer_all_onesp (size))
2634 {
2635 if (! len || ! tree_int_cst_lt (len, size))
2636 return false;
2637 }
2638
2639 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2640 or if format doesn't contain % chars or is "%s". */
2641 if (! integer_zerop (flag))
2642 {
2643 if (fmt_str == NULL)
2644 return false;
2645 if (strchr (fmt_str, target_percent) != NULL
2646 && strcmp (fmt_str, target_percent_s))
2647 return false;
2648 }
2649
2650 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2651 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2652 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2653 if (!fn)
2654 return false;
2655
2656 /* Replace the called function and the first 4 arguments by 2,
2657 retaining the trailing varargs. */
2658 gimple_call_set_fndecl (stmt, fn);
2659 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2660 gimple_call_set_arg (stmt, 0, dest);
2661 gimple_call_set_arg (stmt, 1, fmt);
2662 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2663 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2664 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2665 fold_stmt (gsi);
2666 return true;
2667 }
2668
2669 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2670 ORIG may be null if this is a 2-argument call. We don't attempt to
2671 simplify calls with more than 3 arguments.
2672
2673 Return true if the call was simplified in place, false otherwise.
2674 When the call's return value is used, the lhs is replaced by the
2675 known length of the output string. */
2676
2677 static bool
2678 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2679 {
2680 gimple *stmt = gsi_stmt (*gsi);
2681 tree dest = gimple_call_arg (stmt, 0);
2682 tree fmt = gimple_call_arg (stmt, 1);
2683 tree orig = NULL_TREE;
2684 const char *fmt_str = NULL;
2685
2686 /* Verify the required arguments in the original call. We deal with two
2687 types of sprintf() calls: 'sprintf (str, fmt)' and
2688 'sprintf (dest, "%s", orig)'. */
2689 if (gimple_call_num_args (stmt) > 3)
2690 return false;
2691
2692 if (gimple_call_num_args (stmt) == 3)
2693 orig = gimple_call_arg (stmt, 2);
2694
2695 /* Check whether the format is a literal string constant. */
2696 fmt_str = c_getstr (fmt);
2697 if (fmt_str == NULL)
2698 return false;
2699
2700 if (!init_target_chars ())
2701 return false;
2702
2703 /* If the format doesn't contain % args or %%, use strcpy. */
2704 if (strchr (fmt_str, target_percent) == NULL)
2705 {
2706 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2707
2708 if (!fn)
2709 return false;
2710
2711 /* Don't optimize sprintf (buf, "abc", ptr++). */
2712 if (orig)
2713 return false;
2714
2715 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2716 'format' is known to contain no % formats. */
2717 gimple_seq stmts = NULL;
2718 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2719 gimple_seq_add_stmt_without_update (&stmts, repl);
2720 if (gimple_call_lhs (stmt))
2721 {
2722 repl = gimple_build_assign (gimple_call_lhs (stmt),
2723 build_int_cst (integer_type_node,
2724 strlen (fmt_str)));
2725 gimple_seq_add_stmt_without_update (&stmts, repl);
2726 gsi_replace_with_seq_vops (gsi, stmts);
2727 /* gsi now points at the assignment to the lhs, get a
2728 stmt iterator to the strcpy call.
2729 ??? We can't use gsi_for_stmt as that doesn't work when the
2730 CFG isn't built yet. */
2731 gimple_stmt_iterator gsi2 = *gsi;
2732 gsi_prev (&gsi2);
2733 fold_stmt (&gsi2);
2734 }
2735 else
2736 {
2737 gsi_replace_with_seq_vops (gsi, stmts);
2738 fold_stmt (gsi);
2739 }
2740 return true;
2741 }
2742
2743 /* If the format is "%s", use strcpy; a known length of ORIG is needed if the result is used. */
2744 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2745 {
2746 tree fn;
2747 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2748
2749 if (!fn)
2750 return false;
2751
2752 /* Don't crash on sprintf (str1, "%s"). */
2753 if (!orig)
2754 return false;
2755
2756 tree orig_len = NULL_TREE;
2757 if (gimple_call_lhs (stmt))
2758 {
2759 orig_len = get_maxval_strlen (orig, 0);
2760 if (!orig_len)
2761 return false;
2762 }
2763
2764 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2765 gimple_seq stmts = NULL;
2766 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2767 gimple_seq_add_stmt_without_update (&stmts, repl);
2768 if (gimple_call_lhs (stmt))
2769 {
2770 if (!useless_type_conversion_p (integer_type_node,
2771 TREE_TYPE (orig_len)))
2772 orig_len = fold_convert (integer_type_node, orig_len);
2773 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2774 gimple_seq_add_stmt_without_update (&stmts, repl);
2775 gsi_replace_with_seq_vops (gsi, stmts);
2776 /* gsi now points at the assignment to the lhs, get a
2777 stmt iterator to the strcpy call.
2778 ??? We can't use gsi_for_stmt as that doesn't work when the
2779 CFG isn't built yet. */
2780 gimple_stmt_iterator gsi2 = *gsi;
2781 gsi_prev (&gsi2);
2782 fold_stmt (&gsi2);
2783 }
2784 else
2785 {
2786 gsi_replace_with_seq_vops (gsi, stmts);
2787 fold_stmt (gsi);
2788 }
2789 return true;
2790 }
2791 return false;
2792 }
2793
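/* Illustrative instances of the sprintf folds above:

     sprintf (dest, "hello")    ->  strcpy (dest, "hello"),
                                    lhs (if any) = 5
     sprintf (dest, "%s", src)  ->  strcpy (dest, src),
                                    lhs (if any) = length of src

   where the second form requires get_maxval_strlen to succeed on
   SRC when the result is used.  */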
2794 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2795 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2796 attempt to simplify calls with more than 4 arguments.
2797
2798 Return true if the call was simplified in place, false otherwise.
2799 When the call's return value is used, the lhs is replaced by the
2800 known length of the output string. */
2801
2802 static bool
2803 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2804 {
2805 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2806 tree dest = gimple_call_arg (stmt, 0);
2807 tree destsize = gimple_call_arg (stmt, 1);
2808 tree fmt = gimple_call_arg (stmt, 2);
2809 tree orig = NULL_TREE;
2810 const char *fmt_str = NULL;
2811
2812 if (gimple_call_num_args (stmt) > 4)
2813 return false;
2814
2815 if (gimple_call_num_args (stmt) == 4)
2816 orig = gimple_call_arg (stmt, 3);
2817
2818 if (!tree_fits_uhwi_p (destsize))
2819 return false;
2820 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2821
2822 /* Check whether the format is a literal string constant. */
2823 fmt_str = c_getstr (fmt);
2824 if (fmt_str == NULL)
2825 return false;
2826
2827 if (!init_target_chars ())
2828 return false;
2829
2830 /* If the format doesn't contain % args or %%, use strcpy. */
2831 if (strchr (fmt_str, target_percent) == NULL)
2832 {
2833 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2834 if (!fn)
2835 return false;
2836
2837 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2838 if (orig)
2839 return false;
2840
2841 /* We could expand this as
2842 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2843 or to
2844 memcpy (str, fmt_with_nul_at_cstm1, cst);
2845 but in the former case that might increase code size
2846 and in the latter case grow .rodata section too much.
2847 So punt for now. */
2848 size_t len = strlen (fmt_str);
2849 if (len >= destlen)
2850 return false;
2851
2852 gimple_seq stmts = NULL;
2853 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2854 gimple_seq_add_stmt_without_update (&stmts, repl);
2855 if (gimple_call_lhs (stmt))
2856 {
2857 repl = gimple_build_assign (gimple_call_lhs (stmt),
2858 build_int_cst (integer_type_node, len));
2859 gimple_seq_add_stmt_without_update (&stmts, repl);
2860 gsi_replace_with_seq_vops (gsi, stmts);
2861 /* gsi now points at the assignment to the lhs, get a
2862 stmt iterator to the strcpy call.
2863 ??? We can't use gsi_for_stmt as that doesn't work when the
2864 CFG isn't built yet. */
2865 gimple_stmt_iterator gsi2 = *gsi;
2866 gsi_prev (&gsi2);
2867 fold_stmt (&gsi2);
2868 }
2869 else
2870 {
2871 gsi_replace_with_seq_vops (gsi, stmts);
2872 fold_stmt (gsi);
2873 }
2874 return true;
2875 }
2876
2877 /* If the format is "%s", use strcpy when strlen (ORIG) is known and smaller than DESTSIZE. */
2878 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2879 {
2880 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2881 if (!fn)
2882 return false;
2883
2884 /* Don't crash on snprintf (str1, cst, "%s"). */
2885 if (!orig)
2886 return false;
2887
2888 tree orig_len = get_maxval_strlen (orig, 0);
2889 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2890 return false;
2891
2892 /* We could expand this as
2893 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2894 or to
2895 memcpy (str1, str2_with_nul_at_cstm1, cst);
2896 but in the former case that might increase code size
2897 and in the latter case grow .rodata section too much.
2898 So punt for now. */
2899 if (compare_tree_int (orig_len, destlen) >= 0)
2900 return false;
2901
2902 /* Convert snprintf (str1, cst, "%s", str2) into
2903 strcpy (str1, str2) if strlen (str2) < cst. */
2904 gimple_seq stmts = NULL;
2905 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2906 gimple_seq_add_stmt_without_update (&stmts, repl);
2907 if (gimple_call_lhs (stmt))
2908 {
2909 if (!useless_type_conversion_p (integer_type_node,
2910 TREE_TYPE (orig_len)))
2911 orig_len = fold_convert (integer_type_node, orig_len);
2912 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2913 gimple_seq_add_stmt_without_update (&stmts, repl);
2914 gsi_replace_with_seq_vops (gsi, stmts);
2915 /* gsi now points at the assignment to the lhs, get a
2916 stmt iterator to the strcpy call.
2917 ??? We can't use gsi_for_stmt as that doesn't work when the
2918 CFG isn't built yet. */
2919 gimple_stmt_iterator gsi2 = *gsi;
2920 gsi_prev (&gsi2);
2921 fold_stmt (&gsi2);
2922 }
2923 else
2924 {
2925 gsi_replace_with_seq_vops (gsi, stmts);
2926 fold_stmt (gsi);
2927 }
2928 return true;
2929 }
2930 return false;
2931 }
2932
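/* For example, with DESTSIZE == 16 the folds above rewrite

     snprintf (dest, 16, "hi")       ->  strcpy (dest, "hi"), lhs = 2
     snprintf (dest, 16, "%s", src)  ->  strcpy (dest, src)

   the latter only when strlen (src) is known to be smaller
   than 16.  */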
2933 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2934 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2935 more than 3 arguments, and ARG may be null in the 2-argument case.
2936
2937 Return true if the call was simplified in place, false otherwise.
2938 FCODE is the BUILT_IN_* code of the function to be
2939 simplified. */
2940
2941 static bool
2942 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2943 tree fp, tree fmt, tree arg,
2944 enum built_in_function fcode)
2945 {
2946 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2947 tree fn_fputc, fn_fputs;
2948 const char *fmt_str = NULL;
2949
2950 /* If the return value is used, don't do the transformation. */
2951 if (gimple_call_lhs (stmt) != NULL_TREE)
2952 return false;
2953
2954 /* Check whether the format is a literal string constant. */
2955 fmt_str = c_getstr (fmt);
2956 if (fmt_str == NULL)
2957 return false;
2958
2959 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2960 {
2961 /* If we're using an unlocked function, assume the other
2962 unlocked functions exist explicitly. */
2963 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2964 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2965 }
2966 else
2967 {
2968 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2969 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2970 }
2971
2972 if (!init_target_chars ())
2973 return false;
2974
2975 /* If the format doesn't contain % args or %%, use fputs. */
2976 if (strchr (fmt_str, target_percent) == NULL)
2977 {
2978 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2979 && arg)
2980 return false;
2981
2982 /* If the format specifier was "", fprintf does nothing. */
2983 if (fmt_str[0] == '\0')
2984 {
2985 replace_call_with_value (gsi, NULL_TREE);
2986 return true;
2987 }
2988
2989 /* When "string" doesn't contain %, replace all cases of
2990 fprintf (fp, string) with fputs (string, fp). The fputs
2991 builtin will take care of special cases like length == 1. */
2992 if (fn_fputs)
2993 {
2994 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2995 replace_call_with_call_and_fold (gsi, repl);
2996 return true;
2997 }
2998 }
2999
3000 /* The other optimizations can be done only on the non-va_list variants. */
3001 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3002 return false;
3003
3004 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3005 else if (strcmp (fmt_str, target_percent_s) == 0)
3006 {
3007 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3008 return false;
3009 if (fn_fputs)
3010 {
3011 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3012 replace_call_with_call_and_fold (gsi, repl);
3013 return true;
3014 }
3015 }
3016
3017 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3018 else if (strcmp (fmt_str, target_percent_c) == 0)
3019 {
3020 if (!arg
3021 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3022 return false;
3023 if (fn_fputc)
3024 {
3025 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3026 replace_call_with_call_and_fold (gsi, repl);
3027 return true;
3028 }
3029 }
3030
3031 return false;
3032 }
3033
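/* Illustrative instances of the fprintf folds above (the return
   value must be unused):

     fprintf (fp, "")         ->  dropped entirely
     fprintf (fp, "hello")    ->  fputs ("hello", fp)
     fprintf (fp, "%s", str)  ->  fputs (str, fp)
     fprintf (fp, "%c", c)    ->  fputc (c, fp)  */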
3034 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3035 FMT and ARG are the arguments to the call; we don't fold cases with
3036 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3037
3038 Return true if the call was simplified in place, false otherwise.
3039 FCODE is the BUILT_IN_* code of the function to be
3040 simplified. */
3041
3042 static bool
3043 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3044 tree arg, enum built_in_function fcode)
3045 {
3046 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3047 tree fn_putchar, fn_puts, newarg;
3048 const char *fmt_str = NULL;
3049
3050 /* If the return value is used, don't do the transformation. */
3051 if (gimple_call_lhs (stmt) != NULL_TREE)
3052 return false;
3053
3054 /* Check whether the format is a literal string constant. */
3055 fmt_str = c_getstr (fmt);
3056 if (fmt_str == NULL)
3057 return false;
3058
3059 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3060 {
3061 /* If we're using an unlocked function, assume the other
3062 unlocked functions exist explicitly. */
3063 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3064 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3065 }
3066 else
3067 {
3068 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3069 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3070 }
3071
3072 if (!init_target_chars ())
3073 return false;
3074
3075 if (strcmp (fmt_str, target_percent_s) == 0
3076 || strchr (fmt_str, target_percent) == NULL)
3077 {
3078 const char *str;
3079
3080 if (strcmp (fmt_str, target_percent_s) == 0)
3081 {
3082 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3083 return false;
3084
3085 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3086 return false;
3087
3088 str = c_getstr (arg);
3089 if (str == NULL)
3090 return false;
3091 }
3092 else
3093 {
3094 /* The format specifier doesn't contain any '%' characters. */
3095 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3096 && arg)
3097 return false;
3098 str = fmt_str;
3099 }
3100
3101 /* If the string was "", printf does nothing. */
3102 if (str[0] == '\0')
3103 {
3104 replace_call_with_value (gsi, NULL_TREE);
3105 return true;
3106 }
3107
3108 /* If the string has length of 1, call putchar. */
3109 if (str[1] == '\0')
3110 {
3111 /* Given printf ("c"), where c is any single character,
3112 convert "c"[0] to an int and pass that to the replacement
3113 function. */
3114 newarg = build_int_cst (integer_type_node, str[0]);
3115 if (fn_putchar)
3116 {
3117 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3118 replace_call_with_call_and_fold (gsi, repl);
3119 return true;
3120 }
3121 }
3122 else
3123 {
3124 /* If the string was "string\n", call puts("string"). */
3125 size_t len = strlen (str);
3126 if ((unsigned char)str[len - 1] == target_newline
3127 && (size_t) (int) len == len
3128 && (int) len > 0)
3129 {
3130 char *newstr;
3131 tree offset_node, string_cst;
3132
3133 /* Create a NUL-terminated string that's one char shorter
3134 than the original, stripping off the trailing '\n'. */
3135 newarg = build_string_literal (len, str);
3136 string_cst = string_constant (newarg, &offset_node);
3137 gcc_checking_assert (string_cst
3138 && (TREE_STRING_LENGTH (string_cst)
3139 == (int) len)
3140 && integer_zerop (offset_node)
3141 && (unsigned char)
3142 TREE_STRING_POINTER (string_cst)[len - 1]
3143 == target_newline);
3144 /* build_string_literal creates a new STRING_CST,
3145 modify it in place to avoid double copying. */
3146 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3147 newstr[len - 1] = '\0';
3148 if (fn_puts)
3149 {
3150 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3151 replace_call_with_call_and_fold (gsi, repl);
3152 return true;
3153 }
3154 }
3155 else
3156 /* We'd like to arrange to call fputs(string,stdout) here,
3157 but we need stdout and don't have a way to get it yet. */
3158 return false;
3159 }
3160 }
3161
3162 /* The other optimizations can be done only on the non-va_list variants. */
3163 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3164 return false;
3165
3166 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3167 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3168 {
3169 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3170 return false;
3171 if (fn_puts)
3172 {
3173 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3174 replace_call_with_call_and_fold (gsi, repl);
3175 return true;
3176 }
3177 }
3178
3179 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3180 else if (strcmp (fmt_str, target_percent_c) == 0)
3181 {
3182 if (!arg || ! useless_type_conversion_p (integer_type_node,
3183 TREE_TYPE (arg)))
3184 return false;
3185 if (fn_putchar)
3186 {
3187 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3188 replace_call_with_call_and_fold (gsi, repl);
3189 return true;
3190 }
3191 }
3192
3193 return false;
3194 }
3195
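/* Illustrative instances of the printf folds above (the return
   value must be unused):

     printf ("")          ->  dropped entirely
     printf ("x")         ->  putchar ('x')
     printf ("hello\n")   ->  puts ("hello")
     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)  */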
3196
3197
3198 /* Fold a call to __builtin_strlen when the string length can be determined. */
3199
3200 static bool
3201 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3202 {
3203 gimple *stmt = gsi_stmt (*gsi);
3204 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3205 if (!len)
3206 return false;
3207 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3208 replace_call_with_value (gsi, len);
3209 return true;
3210 }
3211
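/* For example, if get_maxval_strlen determines that the argument is
   the literal "abc", the call

     n = strlen ("abc");

   is folded to n = 3.  */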
3212 /* Fold a call to __builtin_acc_on_device. */
3213
3214 static bool
3215 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3216 {
3217 /* Defer folding until we know which compiler we're in. */
3218 if (symtab->state != EXPANSION)
3219 return false;
3220
3221 unsigned val_host = GOMP_DEVICE_HOST;
3222 unsigned val_dev = GOMP_DEVICE_NONE;
3223
3224 #ifdef ACCEL_COMPILER
3225 val_host = GOMP_DEVICE_NOT_HOST;
3226 val_dev = ACCEL_COMPILER_acc_device;
3227 #endif
3228
3229 location_t loc = gimple_location (gsi_stmt (*gsi));
3230
3231 tree host_eq = make_ssa_name (boolean_type_node);
3232 gimple *host_ass = gimple_build_assign
3233 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3234 gimple_set_location (host_ass, loc);
3235 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3236
3237 tree dev_eq = make_ssa_name (boolean_type_node);
3238 gimple *dev_ass = gimple_build_assign
3239 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3240 gimple_set_location (dev_ass, loc);
3241 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3242
3243 tree result = make_ssa_name (boolean_type_node);
3244 gimple *result_ass = gimple_build_assign
3245 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3246 gimple_set_location (result_ass, loc);
3247 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3248
3249 replace_call_with_value (gsi, result);
3250
3251 return true;
3252 }
3253
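/* The expansion above computes, in the host compiler for instance,

     result = (arg0 == GOMP_DEVICE_HOST) | (arg0 == GOMP_DEVICE_NONE)

   so the call collapses to a pair of comparisons whose constants
   differ between the host and the accelerator compiler.  */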
3254 /* Fold the non-target builtin at *GSI and return whether any simplification
3255 was made. */
3256
3257 static bool
3258 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3259 {
3260 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3261 tree callee = gimple_call_fndecl (stmt);
3262
3263 /* Give up for always_inline inline builtins until they are
3264 inlined. */
3265 if (avoid_folding_inline_builtin (callee))
3266 return false;
3267
3268 unsigned n = gimple_call_num_args (stmt);
3269 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3270 switch (fcode)
3271 {
3272 case BUILT_IN_BZERO:
3273 return gimple_fold_builtin_memset (gsi, integer_zero_node,
3274 gimple_call_arg (stmt, 1));
3275 case BUILT_IN_MEMSET:
3276 return gimple_fold_builtin_memset (gsi,
3277 gimple_call_arg (stmt, 1),
3278 gimple_call_arg (stmt, 2));
3279 case BUILT_IN_BCOPY:
3280 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
3281 gimple_call_arg (stmt, 0), 3);
3282 case BUILT_IN_MEMCPY:
3283 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3284 gimple_call_arg (stmt, 1), 0);
3285 case BUILT_IN_MEMPCPY:
3286 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3287 gimple_call_arg (stmt, 1), 1);
3288 case BUILT_IN_MEMMOVE:
3289 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3290 gimple_call_arg (stmt, 1), 3);
3291 case BUILT_IN_SPRINTF_CHK:
3292 case BUILT_IN_VSPRINTF_CHK:
3293 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3294 case BUILT_IN_STRCAT_CHK:
3295 return gimple_fold_builtin_strcat_chk (gsi);
3296 case BUILT_IN_STRNCAT_CHK:
3297 return gimple_fold_builtin_strncat_chk (gsi);
3298 case BUILT_IN_STRLEN:
3299 return gimple_fold_builtin_strlen (gsi);
3300 case BUILT_IN_STRCPY:
3301 return gimple_fold_builtin_strcpy (gsi,
3302 gimple_call_arg (stmt, 0),
3303 gimple_call_arg (stmt, 1));
3304 case BUILT_IN_STRNCPY:
3305 return gimple_fold_builtin_strncpy (gsi,
3306 gimple_call_arg (stmt, 0),
3307 gimple_call_arg (stmt, 1),
3308 gimple_call_arg (stmt, 2));
3309 case BUILT_IN_STRCAT:
3310 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3311 gimple_call_arg (stmt, 1));
3312 case BUILT_IN_STRNCAT:
3313 return gimple_fold_builtin_strncat (gsi);
3314 case BUILT_IN_INDEX:
3315 case BUILT_IN_STRCHR:
3316 return gimple_fold_builtin_strchr (gsi, false);
3317 case BUILT_IN_RINDEX:
3318 case BUILT_IN_STRRCHR:
3319 return gimple_fold_builtin_strchr (gsi, true);
3320 case BUILT_IN_STRSTR:
3321 return gimple_fold_builtin_strstr (gsi);
3322 case BUILT_IN_STRCMP:
3323 case BUILT_IN_STRCASECMP:
3324 case BUILT_IN_STRNCMP:
3325 case BUILT_IN_STRNCASECMP:
3326 return gimple_fold_builtin_string_compare (gsi);
3327 case BUILT_IN_MEMCHR:
3328 return gimple_fold_builtin_memchr (gsi);
3329 case BUILT_IN_FPUTS:
3330 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3331 gimple_call_arg (stmt, 1), false);
3332 case BUILT_IN_FPUTS_UNLOCKED:
3333 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3334 gimple_call_arg (stmt, 1), true);
3335 case BUILT_IN_MEMCPY_CHK:
3336 case BUILT_IN_MEMPCPY_CHK:
3337 case BUILT_IN_MEMMOVE_CHK:
3338 case BUILT_IN_MEMSET_CHK:
3339 return gimple_fold_builtin_memory_chk (gsi,
3340 gimple_call_arg (stmt, 0),
3341 gimple_call_arg (stmt, 1),
3342 gimple_call_arg (stmt, 2),
3343 gimple_call_arg (stmt, 3),
3344 fcode);
3345 case BUILT_IN_STPCPY:
3346 return gimple_fold_builtin_stpcpy (gsi);
3347 case BUILT_IN_STRCPY_CHK:
3348 case BUILT_IN_STPCPY_CHK:
3349 return gimple_fold_builtin_stxcpy_chk (gsi,
3350 gimple_call_arg (stmt, 0),
3351 gimple_call_arg (stmt, 1),
3352 gimple_call_arg (stmt, 2),
3353 fcode);
3354 case BUILT_IN_STRNCPY_CHK:
3355 case BUILT_IN_STPNCPY_CHK:
3356 return gimple_fold_builtin_stxncpy_chk (gsi,
3357 gimple_call_arg (stmt, 0),
3358 gimple_call_arg (stmt, 1),
3359 gimple_call_arg (stmt, 2),
3360 gimple_call_arg (stmt, 3),
3361 fcode);
3362 case BUILT_IN_SNPRINTF_CHK:
3363 case BUILT_IN_VSNPRINTF_CHK:
3364 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3365 case BUILT_IN_SNPRINTF:
3366 return gimple_fold_builtin_snprintf (gsi);
3367 case BUILT_IN_SPRINTF:
3368 return gimple_fold_builtin_sprintf (gsi);
3369 case BUILT_IN_FPRINTF:
3370 case BUILT_IN_FPRINTF_UNLOCKED:
3371 case BUILT_IN_VFPRINTF:
3372 if (n == 2 || n == 3)
3373 return gimple_fold_builtin_fprintf (gsi,
3374 gimple_call_arg (stmt, 0),
3375 gimple_call_arg (stmt, 1),
3376 n == 3
3377 ? gimple_call_arg (stmt, 2)
3378 : NULL_TREE,
3379 fcode);
3380 break;
3381 case BUILT_IN_FPRINTF_CHK:
3382 case BUILT_IN_VFPRINTF_CHK:
3383 if (n == 3 || n == 4)
3384 return gimple_fold_builtin_fprintf (gsi,
3385 gimple_call_arg (stmt, 0),
3386 gimple_call_arg (stmt, 2),
3387 n == 4
3388 ? gimple_call_arg (stmt, 3)
3389 : NULL_TREE,
3390 fcode);
3391 break;
3392 case BUILT_IN_PRINTF:
3393 case BUILT_IN_PRINTF_UNLOCKED:
3394 case BUILT_IN_VPRINTF:
3395 if (n == 1 || n == 2)
3396 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3397 n == 2
3398 ? gimple_call_arg (stmt, 1)
3399 : NULL_TREE, fcode);
3400 break;
3401 case BUILT_IN_PRINTF_CHK:
3402 case BUILT_IN_VPRINTF_CHK:
3403 if (n == 2 || n == 3)
3404 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3405 n == 3
3406 ? gimple_call_arg (stmt, 2)
3407 : NULL_TREE, fcode);
3408 break;
3409 case BUILT_IN_ACC_ON_DEVICE:
3410 return gimple_fold_builtin_acc_on_device (gsi,
3411 gimple_call_arg (stmt, 0));
3412 default:;
3413 }
3414
3415 /* Try the generic builtin folder. */
3416 bool ignore = (gimple_call_lhs (stmt) == NULL);
3417 tree result = fold_call_stmt (stmt, ignore);
3418 if (result)
3419 {
3420 if (ignore)
3421 STRIP_NOPS (result);
3422 else
3423 result = fold_convert (gimple_call_return_type (stmt), result);
3424 if (!update_call_from_tree (gsi, result))
3425 gimplify_and_update_call_from_tree (gsi, result);
3426 return true;
3427 }
3428
3429 return false;
3430 }
3431
3432 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3433 function calls to constants, where possible. */
3434
3435 static tree
3436 fold_internal_goacc_dim (const gimple *call)
3437 {
3438 int axis = oacc_get_ifn_dim_arg (call);
3439 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3440 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3441 tree result = NULL_TREE;
3442
3443 /* If the size is 1, or we only want the size and it is not dynamic,
3444 we know the answer. */
3445 if (size == 1 || (!is_pos && size))
3446 {
3447 tree type = TREE_TYPE (gimple_call_lhs (call));
3448 result = build_int_cst (type, size - is_pos);
3449 }
3450
3451 return result;
3452 }
3453
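/* For example, if an axis of the current OpenACC function is known
   to have size 1, both IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS for
   that axis fold to constants: size 1 and position 0 respectively.  */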
3454 /* Return true if STMT is a __atomic_compare_exchange_N call suitable
3455 for conversion into IFN_ATOMIC_COMPARE_EXCHANGE, i.e. when its second
3456 argument is &VAR where VAR is only addressable because of such calls. */
3457
3458 bool
3459 optimize_atomic_compare_exchange_p (gimple *stmt)
3460 {
3461 if (gimple_call_num_args (stmt) != 6
3462 || !flag_inline_atomics
3463 || !optimize
3464 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3465 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3466 || !gimple_vdef (stmt)
3467 || !gimple_vuse (stmt))
3468 return false;
3469
3470 tree fndecl = gimple_call_fndecl (stmt);
3471 switch (DECL_FUNCTION_CODE (fndecl))
3472 {
3473 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3474 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3475 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3476 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3477 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3478 break;
3479 default:
3480 return false;
3481 }
3482
3483 tree expected = gimple_call_arg (stmt, 1);
3484 if (TREE_CODE (expected) != ADDR_EXPR
3485 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3486 return false;
3487
3488 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3489 if (!is_gimple_reg_type (etype)
3490 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3491 || TREE_THIS_VOLATILE (etype)
3492 || VECTOR_TYPE_P (etype)
3493 || TREE_CODE (etype) == COMPLEX_TYPE
3494 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3495 might not preserve all the bits. See PR71716. */
3496 || SCALAR_FLOAT_TYPE_P (etype)
3497 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3498 return false;
3499
3500 tree weak = gimple_call_arg (stmt, 3);
3501 if (!integer_zerop (weak) && !integer_onep (weak))
3502 return false;
3503
3504 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3505 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3506 machine_mode mode = TYPE_MODE (itype);
3507
3508 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3509 == CODE_FOR_nothing
3510 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3511 return false;
3512
3513 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3514 return false;
3515
3516 return true;
3517 }
3518
3519 /* Fold
3520 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3521 into
3522 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3523 i = IMAGPART_EXPR <t>;
3524 r = (_Bool) i;
3525 e = REALPART_EXPR <t>; */
3526
3527 void
3528 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3529 {
3530 gimple *stmt = gsi_stmt (*gsi);
3531 tree fndecl = gimple_call_fndecl (stmt);
3532 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3533 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3534 tree ctype = build_complex_type (itype);
3535 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3536 bool throws = false;
3537 edge e = NULL;
3538 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3539 expected);
3540 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3541 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3542 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3543 {
3544 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3545 build1 (VIEW_CONVERT_EXPR, itype,
3546 gimple_assign_lhs (g)));
3547 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3548 }
3549 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3550 + int_size_in_bytes (itype);
3551 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3552 gimple_call_arg (stmt, 0),
3553 gimple_assign_lhs (g),
3554 gimple_call_arg (stmt, 2),
3555 build_int_cst (integer_type_node, flag),
3556 gimple_call_arg (stmt, 4),
3557 gimple_call_arg (stmt, 5));
3558 tree lhs = make_ssa_name (ctype);
3559 gimple_call_set_lhs (g, lhs);
3560 gimple_set_vdef (g, gimple_vdef (stmt));
3561 gimple_set_vuse (g, gimple_vuse (stmt));
3562 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3563 tree oldlhs = gimple_call_lhs (stmt);
3564 if (stmt_can_throw_internal (stmt))
3565 {
3566 throws = true;
3567 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3568 }
3569 gimple_call_set_nothrow (as_a <gcall *> (g),
3570 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3571 gimple_call_set_lhs (stmt, NULL_TREE);
3572 gsi_replace (gsi, g, true);
3573 if (oldlhs)
3574 {
3575 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3576 build1 (IMAGPART_EXPR, itype, lhs));
3577 if (throws)
3578 {
3579 gsi_insert_on_edge_immediate (e, g);
3580 *gsi = gsi_for_stmt (g);
3581 }
3582 else
3583 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3584 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3585 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3586 }
3587 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3588 build1 (REALPART_EXPR, itype, lhs));
3589 if (throws && oldlhs == NULL_TREE)
3590 {
3591 gsi_insert_on_edge_immediate (e, g);
3592 *gsi = gsi_for_stmt (g);
3593 }
3594 else
3595 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3596 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3597 {
3598 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3599 VIEW_CONVERT_EXPR,
3600 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3601 gimple_assign_lhs (g)));
3602 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3603 }
3604 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3605 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3606 *gsi = gsiret;
3607 }
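
/* Editorial example (not from the original source): for a 4-byte int,
   the fold implemented above turns

     int e = ...;
     _Bool r = __atomic_compare_exchange_n (p, &e, d, 0, __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   into GIMPLE along the lines of

     _Complex unsigned int t;
     t = ATOMIC_COMPARE_EXCHANGE (p, e, d, 4, 5, 5);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;

   where the flag argument 4 is w * 256 + N with w == 0 (strong CAS)
   and N == sizeof (int); the memory-order values shown are the usual
   __ATOMIC_SEQ_CST encoding and are given only for illustration.  */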
3608
3609 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
3610 doesn't fit into TYPE. The overflow test is done regardless of
3611 -fwrapv, and even for unsigned types. */
3612
3613 bool
3614 arith_overflowed_p (enum tree_code code, const_tree type,
3615 const_tree arg0, const_tree arg1)
3616 {
3617 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3618 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3619 widest2_int_cst;
3620 widest2_int warg0 = widest2_int_cst (arg0);
3621 widest2_int warg1 = widest2_int_cst (arg1);
3622 widest2_int wres;
3623 switch (code)
3624 {
3625 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3626 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3627 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3628 default: gcc_unreachable ();
3629 }
3630 signop sign = TYPE_SIGN (type);
3631 if (sign == UNSIGNED && wi::neg_p (wres))
3632 return true;
3633 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3634 }
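
/* Editorial sketch of the check performed by arith_overflowed_p above,
   specialized to 32-bit int and PLUS_EXPR, using a wider type to model
   the "infinite" precision (assumes __int128; not part of GCC):

     bool
     add_overflows_int32 (int32_t a, int32_t b)
     {
       __int128 w = (__int128) a + b;   // exact result, cannot wrap
       return w < INT32_MIN || w > INT32_MAX;
     }

   The real function instead computes in wide ints of twice
   WIDE_INT_MAX_PRECISION, so it covers any scalar integer type, and
   for unsigned TYPE it additionally treats a negative exact result as
   overflow.  */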
3635
3636 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3637 The statement may be replaced by another statement, e.g., if the call
3638 simplifies to a constant value. Return true if any changes were made.
3639 It is assumed that the operands have been previously folded. */
3640
3641 static bool
3642 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3643 {
3644 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3645 tree callee;
3646 bool changed = false;
3647 unsigned i;
3648
3649 /* Fold *& in call arguments. */
3650 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3651 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3652 {
3653 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3654 if (tmp)
3655 {
3656 gimple_call_set_arg (stmt, i, tmp);
3657 changed = true;
3658 }
3659 }
3660
3661 /* Check for virtual calls that became direct calls. */
3662 callee = gimple_call_fn (stmt);
3663 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3664 {
3665 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3666 {
3667 if (dump_file && virtual_method_call_p (callee)
3668 && !possible_polymorphic_call_target_p
3669 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3670 (OBJ_TYPE_REF_EXPR (callee)))))
3671 {
3672 fprintf (dump_file,
3673 "Type inheritance inconsistent devirtualization of ");
3674 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3675 fprintf (dump_file, " to ");
3676 print_generic_expr (dump_file, callee, TDF_SLIM);
3677 fprintf (dump_file, "\n");
3678 }
3679
3680 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3681 changed = true;
3682 }
3683 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3684 {
3685 bool final;
3686 vec <cgraph_node *>targets
3687 = possible_polymorphic_call_targets (callee, stmt, &final);
3688 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3689 {
3690 tree lhs = gimple_call_lhs (stmt);
3691 if (dump_enabled_p ())
3692 {
3693 location_t loc = gimple_location_safe (stmt);
3694 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3695 "folding virtual function call to %s\n",
3696 targets.length () == 1
3697 ? targets[0]->name ()
3698 : "__builtin_unreachable");
3699 }
3700 if (targets.length () == 1)
3701 {
3702 tree fndecl = targets[0]->decl;
3703 gimple_call_set_fndecl (stmt, fndecl);
3704 changed = true;
3705 /* If changing the call to __cxa_pure_virtual
3706 or similar noreturn function, adjust gimple_call_fntype
3707 too. */
3708 if (gimple_call_noreturn_p (stmt)
3709 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3710 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3711 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3712 == void_type_node))
3713 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3714 /* If the call becomes noreturn, remove the lhs. */
3715 if (lhs
3716 && gimple_call_noreturn_p (stmt)
3717 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3718 || should_remove_lhs_p (lhs)))
3719 {
3720 if (TREE_CODE (lhs) == SSA_NAME)
3721 {
3722 tree var = create_tmp_var (TREE_TYPE (lhs));
3723 tree def = get_or_create_ssa_default_def (cfun, var);
3724 gimple *new_stmt = gimple_build_assign (lhs, def);
3725 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3726 }
3727 gimple_call_set_lhs (stmt, NULL_TREE);
3728 }
3729 maybe_remove_unused_call_args (cfun, stmt);
3730 }
3731 else
3732 {
3733 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3734 gimple *new_stmt = gimple_build_call (fndecl, 0);
3735 gimple_set_location (new_stmt, gimple_location (stmt));
3736 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3737 {
3738 tree var = create_tmp_var (TREE_TYPE (lhs));
3739 tree def = get_or_create_ssa_default_def (cfun, var);
3740
3741 /* To satisfy the condition for
3742 cgraph_update_edges_for_call_stmt_node,
3743 we need to preserve the GIMPLE_CALL statement
3744 at the position of the GSI iterator. */
3745 update_call_from_tree (gsi, def);
3746 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3747 }
3748 else
3749 {
3750 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3751 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3752 gsi_replace (gsi, new_stmt, false);
3753 }
3754 return true;
3755 }
3756 }
3757 }
3758 }
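
/* Editorial example of the devirtualization handled above (assumed
   source, for illustration only):

     struct B { virtual int f () { return 1; } };
     struct D final : B { int f () { return 2; } };
     int g (D *d) { B *b = d; return b->f (); }

   Once the type-inheritance analysis proves D::f is the only possible
   target, the OBJ_TYPE_REF call is rewritten into a direct call to
   D::f; if the target set is provably empty, the call is replaced by
   __builtin_unreachable instead.  */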
3759
3760 /* Check for indirect calls that became direct calls, and then
3761 no longer require a static chain. */
3762 if (gimple_call_chain (stmt))
3763 {
3764 tree fn = gimple_call_fndecl (stmt);
3765 if (fn && !DECL_STATIC_CHAIN (fn))
3766 {
3767 gimple_call_set_chain (stmt, NULL);
3768 changed = true;
3769 }
3770 else
3771 {
3772 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3773 if (tmp)
3774 {
3775 gimple_call_set_chain (stmt, tmp);
3776 changed = true;
3777 }
3778 }
3779 }
3780
3781 if (inplace)
3782 return changed;
3783
3784 /* Check for builtins that CCP can handle using information not
3785 available in the generic fold routines. */
3786 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3787 {
3788 if (gimple_fold_builtin (gsi))
3789 changed = true;
3790 }
3791 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3792 {
3793 changed |= targetm.gimple_fold_builtin (gsi);
3794 }
3795 else if (gimple_call_internal_p (stmt))
3796 {
3797 enum tree_code subcode = ERROR_MARK;
3798 tree result = NULL_TREE;
3799 bool cplx_result = false;
3800 tree overflow = NULL_TREE;
3801 switch (gimple_call_internal_fn (stmt))
3802 {
3803 case IFN_BUILTIN_EXPECT:
3804 result = fold_builtin_expect (gimple_location (stmt),
3805 gimple_call_arg (stmt, 0),
3806 gimple_call_arg (stmt, 1),
3807 gimple_call_arg (stmt, 2));
3808 break;
3809 case IFN_UBSAN_OBJECT_SIZE:
3810 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3811 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3812 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3813 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3814 gimple_call_arg (stmt, 2))))
3815 {
3816 gsi_replace (gsi, gimple_build_nop (), false);
3817 unlink_stmt_vdef (stmt);
3818 release_defs (stmt);
3819 return true;
3820 }
3821 break;
3822 case IFN_GOACC_DIM_SIZE:
3823 case IFN_GOACC_DIM_POS:
3824 result = fold_internal_goacc_dim (stmt);
3825 break;
3826 case IFN_UBSAN_CHECK_ADD:
3827 subcode = PLUS_EXPR;
3828 break;
3829 case IFN_UBSAN_CHECK_SUB:
3830 subcode = MINUS_EXPR;
3831 break;
3832 case IFN_UBSAN_CHECK_MUL:
3833 subcode = MULT_EXPR;
3834 break;
3835 case IFN_ADD_OVERFLOW:
3836 subcode = PLUS_EXPR;
3837 cplx_result = true;
3838 break;
3839 case IFN_SUB_OVERFLOW:
3840 subcode = MINUS_EXPR;
3841 cplx_result = true;
3842 break;
3843 case IFN_MUL_OVERFLOW:
3844 subcode = MULT_EXPR;
3845 cplx_result = true;
3846 break;
3847 default:
3848 break;
3849 }
3850 if (subcode != ERROR_MARK)
3851 {
3852 tree arg0 = gimple_call_arg (stmt, 0);
3853 tree arg1 = gimple_call_arg (stmt, 1);
3854 tree type = TREE_TYPE (arg0);
3855 if (cplx_result)
3856 {
3857 tree lhs = gimple_call_lhs (stmt);
3858 if (lhs == NULL_TREE)
3859 type = NULL_TREE;
3860 else
3861 type = TREE_TYPE (TREE_TYPE (lhs));
3862 }
3863 if (type == NULL_TREE)
3864 ;
3865 /* x = y + 0; x = y - 0; x = y * 0; */
3866 else if (integer_zerop (arg1))
3867 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3868 /* x = 0 + y; x = 0 * y; */
3869 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3870 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3871 /* x = y - y; */
3872 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3873 result = integer_zero_node;
3874 /* x = y * 1; x = 1 * y; */
3875 else if (subcode == MULT_EXPR && integer_onep (arg1))
3876 result = arg0;
3877 else if (subcode == MULT_EXPR && integer_onep (arg0))
3878 result = arg1;
3879 else if (TREE_CODE (arg0) == INTEGER_CST
3880 && TREE_CODE (arg1) == INTEGER_CST)
3881 {
3882 if (cplx_result)
3883 result = int_const_binop (subcode, fold_convert (type, arg0),
3884 fold_convert (type, arg1));
3885 else
3886 result = int_const_binop (subcode, arg0, arg1);
3887 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3888 {
3889 if (cplx_result)
3890 overflow = build_one_cst (type);
3891 else
3892 result = NULL_TREE;
3893 }
3894 }
3895 if (result)
3896 {
3897 if (result == integer_zero_node)
3898 result = build_zero_cst (type);
3899 else if (cplx_result && TREE_TYPE (result) != type)
3900 {
3901 if (TREE_CODE (result) == INTEGER_CST)
3902 {
3903 if (arith_overflowed_p (PLUS_EXPR, type, result,
3904 integer_zero_node))
3905 overflow = build_one_cst (type);
3906 }
3907 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3908 && TYPE_UNSIGNED (type))
3909 || (TYPE_PRECISION (type)
3910 < (TYPE_PRECISION (TREE_TYPE (result))
3911 + (TYPE_UNSIGNED (TREE_TYPE (result))
3912 && !TYPE_UNSIGNED (type)))))
3913 result = NULL_TREE;
3914 if (result)
3915 result = fold_convert (type, result);
3916 }
3917 }
3918 }
3919
3920 if (result)
3921 {
3922 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3923 result = drop_tree_overflow (result);
3924 if (cplx_result)
3925 {
3926 if (overflow == NULL_TREE)
3927 overflow = build_zero_cst (TREE_TYPE (result));
3928 tree ctype = build_complex_type (TREE_TYPE (result));
3929 if (TREE_CODE (result) == INTEGER_CST
3930 && TREE_CODE (overflow) == INTEGER_CST)
3931 result = build_complex (ctype, result, overflow);
3932 else
3933 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3934 ctype, result, overflow);
3935 }
3936 if (!update_call_from_tree (gsi, result))
3937 gimplify_and_update_call_from_tree (gsi, result);
3938 changed = true;
3939 }
3940 }
3941
3942 return changed;
3943 }
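
/* Editorial example for the internal-function folding above: given

     int r;
     _Bool o = __builtin_add_overflow (x, 0, &r);

   the IFN_ADD_OVERFLOW call has arg1 == 0, so RESULT becomes x and
   OVERFLOW stays zero; the call is replaced by COMPLEX_EXPR <x, 0>
   (or a complex constant when both operands are constants), from
   which r and o are then extracted.  Likewise x - x folds to 0 and
   x * 1 to x, with overflow known to be false.  */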
3944
3945
3946 /* Return true if NAME has a use on STMT. */
3947
3948 static bool
3949 has_use_on_stmt (tree name, gimple *stmt)
3950 {
3951 imm_use_iterator iter;
3952 use_operand_p use_p;
3953 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3954 if (USE_STMT (use_p) == stmt)
3955 return true;
3956 return false;
3957 }
3958
3959 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3960 gimple_simplify.
3961
3962 Replaces *GSI with the simplification result in RCODE and OPS
3963 and the associated statements in *SEQ. Does the replacement
3964 according to INPLACE and returns true if the operation succeeded. */
3965
3966 static bool
3967 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3968 code_helper rcode, tree *ops,
3969 gimple_seq *seq, bool inplace)
3970 {
3971 gimple *stmt = gsi_stmt (*gsi);
3972
3973 /* Play safe and do not allow abnormals to be mentioned in
3974 newly created statements. See also maybe_push_res_to_seq.
3975 As an exception allow such uses if there was a use of the
3976 same SSA name on the old stmt. */
3977 if ((TREE_CODE (ops[0]) == SSA_NAME
3978 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3979 && !has_use_on_stmt (ops[0], stmt))
3980 || (ops[1]
3981 && TREE_CODE (ops[1]) == SSA_NAME
3982 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3983 && !has_use_on_stmt (ops[1], stmt))
3984 || (ops[2]
3985 && TREE_CODE (ops[2]) == SSA_NAME
3986 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3987 && !has_use_on_stmt (ops[2], stmt))
3988 || (COMPARISON_CLASS_P (ops[0])
3989 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3990 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3991 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3992 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3993 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3994 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3995 return false;
3996
3997 /* Don't insert new statements when INPLACE is true, even if we could
3998 reuse STMT for the final statement. */
3999 if (inplace && !gimple_seq_empty_p (*seq))
4000 return false;
4001
4002 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4003 {
4004 gcc_assert (rcode.is_tree_code ());
4005 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4006 /* GIMPLE_CONDs condition may not throw. */
4007 && (!flag_exceptions
4008 || !cfun->can_throw_non_call_exceptions
4009 || !operation_could_trap_p (rcode,
4010 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4011 false, NULL_TREE)))
4012 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4013 else if (rcode == SSA_NAME)
4014 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4015 build_zero_cst (TREE_TYPE (ops[0])));
4016 else if (rcode == INTEGER_CST)
4017 {
4018 if (integer_zerop (ops[0]))
4019 gimple_cond_make_false (cond_stmt);
4020 else
4021 gimple_cond_make_true (cond_stmt);
4022 }
4023 else if (!inplace)
4024 {
4025 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4026 ops, seq);
4027 if (!res)
4028 return false;
4029 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4030 build_zero_cst (TREE_TYPE (res)));
4031 }
4032 else
4033 return false;
4034 if (dump_file && (dump_flags & TDF_DETAILS))
4035 {
4036 fprintf (dump_file, "gimple_simplified to ");
4037 if (!gimple_seq_empty_p (*seq))
4038 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4039 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4040 0, TDF_SLIM);
4041 }
4042 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4043 return true;
4044 }
4045 else if (is_gimple_assign (stmt)
4046 && rcode.is_tree_code ())
4047 {
4048 if (!inplace
4049 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4050 {
4051 maybe_build_generic_op (rcode,
4052 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4053 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4054 if (dump_file && (dump_flags & TDF_DETAILS))
4055 {
4056 fprintf (dump_file, "gimple_simplified to ");
4057 if (!gimple_seq_empty_p (*seq))
4058 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4059 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4060 0, TDF_SLIM);
4061 }
4062 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4063 return true;
4064 }
4065 }
4066 else if (rcode.is_fn_code ()
4067 && gimple_call_combined_fn (stmt) == rcode)
4068 {
4069 unsigned i;
4070 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4071 {
4072 gcc_assert (ops[i] != NULL_TREE);
4073 gimple_call_set_arg (stmt, i, ops[i]);
4074 }
4075 if (i < 3)
4076 gcc_assert (ops[i] == NULL_TREE);
4077 if (dump_file && (dump_flags & TDF_DETAILS))
4078 {
4079 fprintf (dump_file, "gimple_simplified to ");
4080 if (!gimple_seq_empty_p (*seq))
4081 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4082 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4083 }
4084 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4085 return true;
4086 }
4087 else if (!inplace)
4088 {
4089 if (gimple_has_lhs (stmt))
4090 {
4091 tree lhs = gimple_get_lhs (stmt);
4092 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4093 ops, seq, lhs))
4094 return false;
4095 if (dump_file && (dump_flags & TDF_DETAILS))
4096 {
4097 fprintf (dump_file, "gimple_simplified to ");
4098 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4099 }
4100 gsi_replace_with_seq_vops (gsi, *seq);
4101 return true;
4102 }
4103 else
4104 gcc_unreachable ();
4105 }
4106
4107 return false;
4108 }
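
/* Editorial example for the GIMPLE_COND handling above: if
   gimple_simplify reduces the condition of

     if (a_1 != a_1) goto ...;

   to the integer constant 0 (for integral a_1), the branch is
   rewritten via gimple_cond_make_false; if it instead simplifies to
   an SSA name b_2, the condition becomes b_2 != 0.  */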
4109
4110 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4111
4112 static bool
4113 maybe_canonicalize_mem_ref_addr (tree *t)
4114 {
4115 bool res = false;
4116
4117 if (TREE_CODE (*t) == ADDR_EXPR)
4118 t = &TREE_OPERAND (*t, 0);
4119
4120 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4121 generic vector extension. The actual vector referenced is
4122 view-converted to an array type for this purpose. If the index
4123 is constant the canonical representation in the middle-end is a
4124 BIT_FIELD_REF so re-write the former to the latter here. */
4125 if (TREE_CODE (*t) == ARRAY_REF
4126 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4127 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4128 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4129 {
4130 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4131 if (VECTOR_TYPE_P (vtype))
4132 {
4133 tree low = array_ref_low_bound (*t);
4134 if (TREE_CODE (low) == INTEGER_CST)
4135 {
4136 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4137 {
4138 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4139 wi::to_widest (low));
4140 idx = wi::mul (idx, wi::to_widest
4141 (TYPE_SIZE (TREE_TYPE (*t))));
4142 widest_int ext
4143 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4144 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4145 {
4146 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4147 TREE_TYPE (*t),
4148 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4149 TYPE_SIZE (TREE_TYPE (*t)),
4150 wide_int_to_tree (sizetype, idx));
4151 res = true;
4152 }
4153 }
4154 }
4155 }
4156 }
4157
4158 while (handled_component_p (*t))
4159 t = &TREE_OPERAND (*t, 0);
4160
4161 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
4162 of invariant addresses into a SSA name MEM_REF address. */
4163 if (TREE_CODE (*t) == MEM_REF
4164 || TREE_CODE (*t) == TARGET_MEM_REF)
4165 {
4166 tree addr = TREE_OPERAND (*t, 0);
4167 if (TREE_CODE (addr) == ADDR_EXPR
4168 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4169 || handled_component_p (TREE_OPERAND (addr, 0))))
4170 {
4171 tree base;
4172 HOST_WIDE_INT coffset;
4173 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4174 &coffset);
4175 if (!base)
4176 gcc_unreachable ();
4177
4178 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4179 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4180 TREE_OPERAND (*t, 1),
4181 size_int (coffset));
4182 res = true;
4183 }
4184 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4185 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4186 }
4187
4188 /* Canonicalize back MEM_REFs to plain reference trees if the object
4189 accessed is a decl that has the same access semantics as the MEM_REF. */
4190 if (TREE_CODE (*t) == MEM_REF
4191 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4192 && integer_zerop (TREE_OPERAND (*t, 1))
4193 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4194 {
4195 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4196 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4197 if (/* Same volatile qualification. */
4198 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4199 /* Same TBAA behavior with -fstrict-aliasing. */
4200 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4201 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4202 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4203 /* Same alignment. */
4204 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4205 /* We have to take care here not to drop a required conversion
4206 from the rhs to the lhs if *t appears on the lhs, or vice-versa
4207 if it appears on the rhs. Thus require strict type
4208 compatibility. */
4209 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4210 {
4211 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4212 res = true;
4213 }
4214 }
4215
4216 /* Canonicalize TARGET_MEM_REF in particular with respect to
4217 the indexes becoming constant. */
4218 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4219 {
4220 tree tem = maybe_fold_tmr (*t);
4221 if (tem)
4222 {
4223 *t = tem;
4224 res = true;
4225 }
4226 }
4227
4228 return res;
4229 }
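
/* Editorial examples of the canonicalizations above (illustrative):
   MEM[&a.b, 0] with matching type, alignment and volatility becomes
   the plain reference a.b, and for a 4-element vector of 32-bit ints
   v, the frontend form v[2] (an ARRAY_REF of a VIEW_CONVERT_EXPR)
   becomes BIT_FIELD_REF <v, 32, 64>, the middle-end canonical way to
   select element 2 (bit size 32, bit offset 2 * 32).  */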
4230
4231 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4232 distinguishes both cases. */
4233
4234 static bool
4235 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4236 {
4237 bool changed = false;
4238 gimple *stmt = gsi_stmt (*gsi);
4239 bool nowarning = gimple_no_warning_p (stmt);
4240 unsigned i;
4241 fold_defer_overflow_warnings ();
4242
4243 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4244 after propagation.
4245 ??? This shouldn't be done in generic folding but in the
4246 propagation helpers which also know whether an address was
4247 propagated.
4248 Also canonicalize operand order. */
4249 switch (gimple_code (stmt))
4250 {
4251 case GIMPLE_ASSIGN:
4252 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4253 {
4254 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4255 if ((REFERENCE_CLASS_P (*rhs)
4256 || TREE_CODE (*rhs) == ADDR_EXPR)
4257 && maybe_canonicalize_mem_ref_addr (rhs))
4258 changed = true;
4259 tree *lhs = gimple_assign_lhs_ptr (stmt);
4260 if (REFERENCE_CLASS_P (*lhs)
4261 && maybe_canonicalize_mem_ref_addr (lhs))
4262 changed = true;
4263 }
4264 else
4265 {
4266 /* Canonicalize operand order. */
4267 enum tree_code code = gimple_assign_rhs_code (stmt);
4268 if (TREE_CODE_CLASS (code) == tcc_comparison
4269 || commutative_tree_code (code)
4270 || commutative_ternary_tree_code (code))
4271 {
4272 tree rhs1 = gimple_assign_rhs1 (stmt);
4273 tree rhs2 = gimple_assign_rhs2 (stmt);
4274 if (tree_swap_operands_p (rhs1, rhs2))
4275 {
4276 gimple_assign_set_rhs1 (stmt, rhs2);
4277 gimple_assign_set_rhs2 (stmt, rhs1);
4278 if (TREE_CODE_CLASS (code) == tcc_comparison)
4279 gimple_assign_set_rhs_code (stmt,
4280 swap_tree_comparison (code));
4281 changed = true;
4282 }
4283 }
4284 }
4285 break;
4286 case GIMPLE_CALL:
4287 {
4288 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4289 {
4290 tree *arg = gimple_call_arg_ptr (stmt, i);
4291 if (REFERENCE_CLASS_P (*arg)
4292 && maybe_canonicalize_mem_ref_addr (arg))
4293 changed = true;
4294 }
4295 tree *lhs = gimple_call_lhs_ptr (stmt);
4296 if (*lhs
4297 && REFERENCE_CLASS_P (*lhs)
4298 && maybe_canonicalize_mem_ref_addr (lhs))
4299 changed = true;
4300 break;
4301 }
4302 case GIMPLE_ASM:
4303 {
4304 gasm *asm_stmt = as_a <gasm *> (stmt);
4305 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4306 {
4307 tree link = gimple_asm_output_op (asm_stmt, i);
4308 tree op = TREE_VALUE (link);
4309 if (REFERENCE_CLASS_P (op)
4310 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4311 changed = true;
4312 }
4313 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4314 {
4315 tree link = gimple_asm_input_op (asm_stmt, i);
4316 tree op = TREE_VALUE (link);
4317 if ((REFERENCE_CLASS_P (op)
4318 || TREE_CODE (op) == ADDR_EXPR)
4319 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4320 changed = true;
4321 }
4322 }
4323 break;
4324 case GIMPLE_DEBUG:
4325 if (gimple_debug_bind_p (stmt))
4326 {
4327 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4328 if (*val
4329 && (REFERENCE_CLASS_P (*val)
4330 || TREE_CODE (*val) == ADDR_EXPR)
4331 && maybe_canonicalize_mem_ref_addr (val))
4332 changed = true;
4333 }
4334 break;
4335 case GIMPLE_COND:
4336 {
4337 /* Canonicalize operand order. */
4338 tree lhs = gimple_cond_lhs (stmt);
4339 tree rhs = gimple_cond_rhs (stmt);
4340 if (tree_swap_operands_p (lhs, rhs))
4341 {
4342 gcond *gc = as_a <gcond *> (stmt);
4343 gimple_cond_set_lhs (gc, rhs);
4344 gimple_cond_set_rhs (gc, lhs);
4345 gimple_cond_set_code (gc,
4346 swap_tree_comparison (gimple_cond_code (gc)));
4347 changed = true;
4348 }
4349 }
4350 default:;
4351 }
4352
4353 /* Dispatch to pattern-based folding. */
4354 if (!inplace
4355 || is_gimple_assign (stmt)
4356 || gimple_code (stmt) == GIMPLE_COND)
4357 {
4358 gimple_seq seq = NULL;
4359 code_helper rcode;
4360 tree ops[3] = {};
4361 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4362 valueize, valueize))
4363 {
4364 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4365 changed = true;
4366 else
4367 gimple_seq_discard (seq);
4368 }
4369 }
4370
4371 stmt = gsi_stmt (*gsi);
4372
4373 /* Fold the main computation performed by the statement. */
4374 switch (gimple_code (stmt))
4375 {
4376 case GIMPLE_ASSIGN:
4377 {
4378 /* Try to canonicalize for boolean-typed X the comparisons
4379 X == 0, X == 1, X != 0, and X != 1. */
4380 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4381 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4382 {
4383 tree lhs = gimple_assign_lhs (stmt);
4384 tree op1 = gimple_assign_rhs1 (stmt);
4385 tree op2 = gimple_assign_rhs2 (stmt);
4386 tree type = TREE_TYPE (op1);
4387
4388 /* Check whether the comparison operands are of the same boolean
4389 type as the result type.
4390 Check that the second operand is an integer constant with value
4391 one or zero. */
4392 if (TREE_CODE (op2) == INTEGER_CST
4393 && (integer_zerop (op2) || integer_onep (op2))
4394 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4395 {
4396 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4397 bool is_logical_not = false;
4398
4399 /* X == 0 and X != 1 is a logical-not of X;
4400 X == 1 and X != 0 is X. */
4401 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4402 || (cmp_code == NE_EXPR && integer_onep (op2)))
4403 is_logical_not = true;
4404
4405 if (is_logical_not == false)
4406 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4407 /* Only for one-bit precision typed X is the transformation
4408 !X -> ~X valid. */
4409 else if (TYPE_PRECISION (type) == 1)
4410 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4411 /* Otherwise we use !X -> X ^ 1. */
4412 else
4413 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4414 build_int_cst (type, 1));
4415 changed = true;
4416 break;
4417 }
4418 }
4419
4420 unsigned old_num_ops = gimple_num_ops (stmt);
4421 tree lhs = gimple_assign_lhs (stmt);
4422 tree new_rhs = fold_gimple_assign (gsi);
4423 if (new_rhs
4424 && !useless_type_conversion_p (TREE_TYPE (lhs),
4425 TREE_TYPE (new_rhs)))
4426 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4427 if (new_rhs
4428 && (!inplace
4429 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4430 {
4431 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4432 changed = true;
4433 }
4434 break;
4435 }
4436
4437 case GIMPLE_CALL:
4438 changed |= gimple_fold_call (gsi, inplace);
4439 break;
4440
4441 case GIMPLE_ASM:
4442 /* Fold *& in asm operands. */
4443 {
4444 gasm *asm_stmt = as_a <gasm *> (stmt);
4445 size_t noutputs;
4446 const char **oconstraints;
4447 const char *constraint;
4448 bool allows_mem, allows_reg;
4449
4450 noutputs = gimple_asm_noutputs (asm_stmt);
4451 oconstraints = XALLOCAVEC (const char *, noutputs);
4452
4453 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4454 {
4455 tree link = gimple_asm_output_op (asm_stmt, i);
4456 tree op = TREE_VALUE (link);
4457 oconstraints[i]
4458 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4459 if (REFERENCE_CLASS_P (op)
4460 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4461 {
4462 TREE_VALUE (link) = op;
4463 changed = true;
4464 }
4465 }
4466 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4467 {
4468 tree link = gimple_asm_input_op (asm_stmt, i);
4469 tree op = TREE_VALUE (link);
4470 constraint
4471 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4472 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4473 oconstraints, &allows_mem, &allows_reg);
4474 if (REFERENCE_CLASS_P (op)
4475 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4476 != NULL_TREE)
4477 {
4478 TREE_VALUE (link) = op;
4479 changed = true;
4480 }
4481 }
4482 }
4483 break;
4484
4485 case GIMPLE_DEBUG:
4486 if (gimple_debug_bind_p (stmt))
4487 {
4488 tree val = gimple_debug_bind_get_value (stmt);
4489 if (val
4490 && REFERENCE_CLASS_P (val))
4491 {
4492 tree tem = maybe_fold_reference (val, false);
4493 if (tem)
4494 {
4495 gimple_debug_bind_set_value (stmt, tem);
4496 changed = true;
4497 }
4498 }
4499 else if (val
4500 && TREE_CODE (val) == ADDR_EXPR)
4501 {
4502 tree ref = TREE_OPERAND (val, 0);
4503 tree tem = maybe_fold_reference (ref, false);
4504 if (tem)
4505 {
4506 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4507 gimple_debug_bind_set_value (stmt, tem);
4508 changed = true;
4509 }
4510 }
4511 }
4512 break;
4513
4514 case GIMPLE_RETURN:
4515 {
4516 greturn *ret_stmt = as_a<greturn *> (stmt);
4517 tree ret = gimple_return_retval(ret_stmt);
4518
4519 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4520 {
4521 tree val = valueize (ret);
4522 if (val && val != ret
4523 && may_propagate_copy (ret, val))
4524 {
4525 gimple_return_set_retval (ret_stmt, val);
4526 changed = true;
4527 }
4528 }
4529 }
4530 break;
4531
4532 default:;
4533 }
4534
4535 stmt = gsi_stmt (*gsi);
4536
4537 /* Fold *& on the lhs. */
4538 if (gimple_has_lhs (stmt))
4539 {
4540 tree lhs = gimple_get_lhs (stmt);
4541 if (lhs && REFERENCE_CLASS_P (lhs))
4542 {
4543 tree new_lhs = maybe_fold_reference (lhs, true);
4544 if (new_lhs)
4545 {
4546 gimple_set_lhs (stmt, new_lhs);
4547 changed = true;
4548 }
4549 }
4550 }
4551
4552 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4553 return changed;
4554 }
4555
4556 /* Valueization callback that ends up not following SSA edges. */
4557
4558 tree
4559 no_follow_ssa_edges (tree)
4560 {
4561 return NULL_TREE;
4562 }
4563
4564 /* Valueization callback that ends up following single-use SSA edges only. */
4565
4566 tree
4567 follow_single_use_edges (tree val)
4568 {
4569 if (TREE_CODE (val) == SSA_NAME
4570 && !has_single_use (val))
4571 return NULL_TREE;
4572 return val;
4573 }
4574
4575 /* Fold the statement pointed to by GSI. In some cases, this function may
4576 replace the whole statement with a new one. Returns true iff folding
4577 makes any changes.
4578 The statement pointed to by GSI should be in valid gimple form but may
4579 be in unfolded state as resulting from for example constant propagation
4580 which can produce *&x = 0. */
4581
4582 bool
4583 fold_stmt (gimple_stmt_iterator *gsi)
4584 {
4585 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4586 }
4587
4588 bool
4589 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4590 {
4591 return fold_stmt_1 (gsi, false, valueize);
4592 }
4593
4594 /* Perform the minimal folding on statement *GSI. Only operations like
4595 *&x created by constant propagation are handled. The statement cannot
4596 be replaced with a new one. Return true if the statement was
4597 changed, false otherwise.
4598 The statement *GSI should be in valid gimple form but may
4599 be in unfolded state as resulting from for example constant propagation
4600 which can produce *&x = 0. */
4601
4602 bool
4603 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4604 {
4605 gimple *stmt = gsi_stmt (*gsi);
4606 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4607 gcc_assert (gsi_stmt (*gsi) == stmt);
4608 return changed;
4609 }
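
/* Editorial usage note: when constant propagation rewrites, e.g.,
   p_1 = &x followed by *p_1 = 0 into *&x = 0, a pass holding a
   statement iterator GSI on that statement can call

     fold_stmt_inplace (&gsi);

   to fold it back to x = 0 without replacing the statement object,
   whereas fold_stmt (&gsi) is also allowed to substitute a whole new
   statement.  */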
4610
4611 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4612 if EXPR is null or we don't know how.
4613 If non-null, the result always has boolean type. */
4614
4615 static tree
4616 canonicalize_bool (tree expr, bool invert)
4617 {
4618 if (!expr)
4619 return NULL_TREE;
4620 else if (invert)
4621 {
4622 if (integer_nonzerop (expr))
4623 return boolean_false_node;
4624 else if (integer_zerop (expr))
4625 return boolean_true_node;
4626 else if (TREE_CODE (expr) == SSA_NAME)
4627 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4628 build_int_cst (TREE_TYPE (expr), 0));
4629 else if (COMPARISON_CLASS_P (expr))
4630 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4631 boolean_type_node,
4632 TREE_OPERAND (expr, 0),
4633 TREE_OPERAND (expr, 1));
4634 else
4635 return NULL_TREE;
4636 }
4637 else
4638 {
4639 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4640 return expr;
4641 if (integer_nonzerop (expr))
4642 return boolean_true_node;
4643 else if (integer_zerop (expr))
4644 return boolean_false_node;
4645 else if (TREE_CODE (expr) == SSA_NAME)
4646 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4647 build_int_cst (TREE_TYPE (expr), 0));
4648 else if (COMPARISON_CLASS_P (expr))
4649 return fold_build2 (TREE_CODE (expr),
4650 boolean_type_node,
4651 TREE_OPERAND (expr, 0),
4652 TREE_OPERAND (expr, 1));
4653 else
4654 return NULL_TREE;
4655 }
4656 }
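
/* Editorial example for canonicalize_bool above: with INVERT set, a
   comparison a < b is rewritten via invert_tree_comparison (which
   does not honor NaNs here) to a >= b, an SSA_NAME x becomes the
   comparison x == 0, and nonzero/zero constants map to
   boolean_false_node/boolean_true_node; a non-null result always has
   boolean type.  */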
4657
4658 /* Check to see if a boolean expression EXPR is logically equivalent to the
4659 comparison (OP1 CODE OP2). Check for various identities involving
4660 SSA_NAMEs. */
4661
4662 static bool
4663 same_bool_comparison_p (const_tree expr, enum tree_code code,
4664 const_tree op1, const_tree op2)
4665 {
4666 gimple *s;
4667
4668 /* The obvious case. */
4669 if (TREE_CODE (expr) == code
4670 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4671 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4672 return true;
4673
4674 /* Check for comparing (name, name != 0) and the case where expr
4675 is an SSA_NAME with a definition matching the comparison. */
4676 if (TREE_CODE (expr) == SSA_NAME
4677 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4678 {
4679 if (operand_equal_p (expr, op1, 0))
4680 return ((code == NE_EXPR && integer_zerop (op2))
4681 || (code == EQ_EXPR && integer_nonzerop (op2)));
4682 s = SSA_NAME_DEF_STMT (expr);
4683 if (is_gimple_assign (s)
4684 && gimple_assign_rhs_code (s) == code
4685 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4686 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4687 return true;
4688 }
4689
4690 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4691 of name is a comparison, recurse. */
4692 if (TREE_CODE (op1) == SSA_NAME
4693 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4694 {
4695 s = SSA_NAME_DEF_STMT (op1);
4696 if (is_gimple_assign (s)
4697 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4698 {
4699 enum tree_code c = gimple_assign_rhs_code (s);
4700 if ((c == NE_EXPR && integer_zerop (op2))
4701 || (c == EQ_EXPR && integer_nonzerop (op2)))
4702 return same_bool_comparison_p (expr, c,
4703 gimple_assign_rhs1 (s),
4704 gimple_assign_rhs2 (s));
4705 if ((c == EQ_EXPR && integer_zerop (op2))
4706 || (c == NE_EXPR && integer_nonzerop (op2)))
4707 return same_bool_comparison_p (expr,
4708 invert_tree_comparison (c, false),
4709 gimple_assign_rhs1 (s),
4710 gimple_assign_rhs2 (s));
4711 }
4712 }
4713 return false;
4714 }
4715
4716 /* Check to see if two boolean expressions OP1 and OP2 are logically
4717 equivalent. */
4718
4719 static bool
4720 same_bool_result_p (const_tree op1, const_tree op2)
4721 {
4722 /* Simple cases first. */
4723 if (operand_equal_p (op1, op2, 0))
4724 return true;
4725
4726 /* Check the cases where at least one of the operands is a comparison.
4727 These are a bit smarter than operand_equal_p in that they apply some
4728 identities on SSA_NAMEs. */
4729 if (COMPARISON_CLASS_P (op2)
4730 && same_bool_comparison_p (op1, TREE_CODE (op2),
4731 TREE_OPERAND (op2, 0),
4732 TREE_OPERAND (op2, 1)))
4733 return true;
4734 if (COMPARISON_CLASS_P (op1)
4735 && same_bool_comparison_p (op2, TREE_CODE (op1),
4736 TREE_OPERAND (op1, 0),
4737 TREE_OPERAND (op1, 1)))
4738 return true;
4739
4740 /* Default case. */
4741 return false;
4742 }
4743
4744 /* Forward declarations for some mutually recursive functions. */
4745
4746 static tree
4747 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4748 enum tree_code code2, tree op2a, tree op2b);
4749 static tree
4750 and_var_with_comparison (tree var, bool invert,
4751 enum tree_code code2, tree op2a, tree op2b);
4752 static tree
4753 and_var_with_comparison_1 (gimple *stmt,
4754 enum tree_code code2, tree op2a, tree op2b);
4755 static tree
4756 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4757 enum tree_code code2, tree op2a, tree op2b);
4758 static tree
4759 or_var_with_comparison (tree var, bool invert,
4760 enum tree_code code2, tree op2a, tree op2b);
4761 static tree
4762 or_var_with_comparison_1 (gimple *stmt,
4763 enum tree_code code2, tree op2a, tree op2b);
4764
4765 /* Helper function for and_comparisons_1: try to simplify the AND of the
4766 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4767 If INVERT is true, invert the value of VAR before doing the AND.
4768 Return NULL_TREE if we can't simplify this to a single expression. */
4769
4770 static tree
4771 and_var_with_comparison (tree var, bool invert,
4772 enum tree_code code2, tree op2a, tree op2b)
4773 {
4774 tree t;
4775 gimple *stmt = SSA_NAME_DEF_STMT (var);
4776
4777 /* We can only deal with variables whose definitions are assignments. */
4778 if (!is_gimple_assign (stmt))
4779 return NULL_TREE;
4780
4781 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4782 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4783 Then we only have to consider the simpler non-inverted cases. */
4784 if (invert)
4785 t = or_var_with_comparison_1 (stmt,
4786 invert_tree_comparison (code2, false),
4787 op2a, op2b);
4788 else
4789 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4790 return canonicalize_bool (t, invert);
4791 }
4792
4793 /* Try to simplify the AND of the ssa variable defined by the assignment
4794 STMT with the comparison specified by (OP2A CODE2 OP2B).
4795 Return NULL_TREE if we can't simplify this to a single expression. */
4796
4797 static tree
4798 and_var_with_comparison_1 (gimple *stmt,
4799 enum tree_code code2, tree op2a, tree op2b)
4800 {
4801 tree var = gimple_assign_lhs (stmt);
4802 tree true_test_var = NULL_TREE;
4803 tree false_test_var = NULL_TREE;
4804 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4805
4806 /* Check for identities like (var AND (var == 0)) => false. */
4807 if (TREE_CODE (op2a) == SSA_NAME
4808 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4809 {
4810 if ((code2 == NE_EXPR && integer_zerop (op2b))
4811 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4812 {
4813 true_test_var = op2a;
4814 if (var == true_test_var)
4815 return var;
4816 }
4817 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4818 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4819 {
4820 false_test_var = op2a;
4821 if (var == false_test_var)
4822 return boolean_false_node;
4823 }
4824 }
4825
4826 /* If the definition is a comparison, recurse on it. */
4827 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4828 {
4829 tree t = and_comparisons_1 (innercode,
4830 gimple_assign_rhs1 (stmt),
4831 gimple_assign_rhs2 (stmt),
4832 code2,
4833 op2a,
4834 op2b);
4835 if (t)
4836 return t;
4837 }
4838
4839 /* If the definition is an AND or OR expression, we may be able to
4840 simplify by reassociating. */
4841 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4842 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4843 {
4844 tree inner1 = gimple_assign_rhs1 (stmt);
4845 tree inner2 = gimple_assign_rhs2 (stmt);
4846 gimple *s;
4847 tree t;
4848 tree partial = NULL_TREE;
4849 bool is_and = (innercode == BIT_AND_EXPR);
4850
4851 /* Check for boolean identities that don't require recursive examination
4852 of inner1/inner2:
4853 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4854 inner1 AND (inner1 OR inner2) => inner1
4855 !inner1 AND (inner1 AND inner2) => false
4856 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4857 Likewise for similar cases involving inner2. */
4858 if (inner1 == true_test_var)
4859 return (is_and ? var : inner1);
4860 else if (inner2 == true_test_var)
4861 return (is_and ? var : inner2);
4862 else if (inner1 == false_test_var)
4863 return (is_and
4864 ? boolean_false_node
4865 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4866 else if (inner2 == false_test_var)
4867 return (is_and
4868 ? boolean_false_node
4869 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4870
4871 /* Next, redistribute/reassociate the AND across the inner tests.
4872 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4873 if (TREE_CODE (inner1) == SSA_NAME
4874 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4875 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4876 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4877 gimple_assign_rhs1 (s),
4878 gimple_assign_rhs2 (s),
4879 code2, op2a, op2b)))
4880 {
4881 /* Handle the AND case, where we are reassociating:
4882 (inner1 AND inner2) AND (op2a code2 op2b)
4883 => (t AND inner2)
4884 If the partial result t is a constant, we win. Otherwise
4885 continue on to try reassociating with the other inner test. */
4886 if (is_and)
4887 {
4888 if (integer_onep (t))
4889 return inner2;
4890 else if (integer_zerop (t))
4891 return boolean_false_node;
4892 }
4893
4894 /* Handle the OR case, where we are redistributing:
4895 (inner1 OR inner2) AND (op2a code2 op2b)
4896 => (t OR (inner2 AND (op2a code2 op2b))) */
4897 else if (integer_onep (t))
4898 return boolean_true_node;
4899
4900 /* Save partial result for later. */
4901 partial = t;
4902 }
4903
4904 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4905 if (TREE_CODE (inner2) == SSA_NAME
4906 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4907 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4908 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4909 gimple_assign_rhs1 (s),
4910 gimple_assign_rhs2 (s),
4911 code2, op2a, op2b)))
4912 {
4913 /* Handle the AND case, where we are reassociating:
4914 (inner1 AND inner2) AND (op2a code2 op2b)
4915 => (inner1 AND t) */
4916 if (is_and)
4917 {
4918 if (integer_onep (t))
4919 return inner1;
4920 else if (integer_zerop (t))
4921 return boolean_false_node;
4922 /* If both are the same, we can apply the identity
4923 (x AND x) == x. */
4924 else if (partial && same_bool_result_p (t, partial))
4925 return t;
4926 }
4927
4928 /* Handle the OR case, where we are redistributing:
4929 (inner1 OR inner2) AND (op2a code2 op2b)
4930 => (t OR (inner1 AND (op2a code2 op2b)))
4931 => (t OR partial) */
4932 else
4933 {
4934 if (integer_onep (t))
4935 return boolean_true_node;
4936 else if (partial)
4937 {
4938 /* We already got a simplification for the other
4939 operand to the redistributed OR expression. The
4940 interesting case is when at least one is false.
4941 Or, if both are the same, we can apply the identity
4942 (x OR x) == x. */
4943 if (integer_zerop (partial))
4944 return t;
4945 else if (integer_zerop (t))
4946 return partial;
4947 else if (same_bool_result_p (t, partial))
4948 return t;
4949 }
4950 }
4951 }
4952 }
4953 return NULL_TREE;
4954 }
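
/* Editorial worked example for the reassociation above: for
   _Bool v_1 = (x_2 < 5) & (y_3 != 0), ANDing v_1 with x_2 < 3 first
   forms the partial result t = (x_2 < 5) AND (x_2 < 3) = x_2 < 3;
   since t is neither constant nor equal to the other partial result,
   nothing collapses and NULL_TREE is returned, while a constant t
   (or two identical partials) would have produced a single
   expression.  */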
4955
4956 /* Try to simplify the AND of two comparisons defined by
4957 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4958 If this can be done without constructing an intermediate value,
4959 return the resulting tree; otherwise NULL_TREE is returned.
4960 This function is deliberately asymmetric as it recurses on SSA_DEFs
4961 in the first comparison but not the second. */
4962
4963 static tree
4964 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4965 enum tree_code code2, tree op2a, tree op2b)
4966 {
4967 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4968
4969 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4970 if (operand_equal_p (op1a, op2a, 0)
4971 && operand_equal_p (op1b, op2b, 0))
4972 {
4973 /* Result will be either NULL_TREE, or a combined comparison. */
4974 tree t = combine_comparisons (UNKNOWN_LOCATION,
4975 TRUTH_ANDIF_EXPR, code1, code2,
4976 truth_type, op1a, op1b);
4977 if (t)
4978 return t;
4979 }
4980
4981 /* Likewise the swapped case of the above. */
4982 if (operand_equal_p (op1a, op2b, 0)
4983 && operand_equal_p (op1b, op2a, 0))
4984 {
4985 /* Result will be either NULL_TREE, or a combined comparison. */
4986 tree t = combine_comparisons (UNKNOWN_LOCATION,
4987 TRUTH_ANDIF_EXPR, code1,
4988 swap_tree_comparison (code2),
4989 truth_type, op1a, op1b);
4990 if (t)
4991 return t;
4992 }
4993
4994 /* If both comparisons are of the same value against constants, we might
4995 be able to merge them. */
4996 if (operand_equal_p (op1a, op2a, 0)
4997 && TREE_CODE (op1b) == INTEGER_CST
4998 && TREE_CODE (op2b) == INTEGER_CST)
4999 {
5000 int cmp = tree_int_cst_compare (op1b, op2b);
5001
5002 /* If we have (op1a == op1b), we should either be able to
5003 return that or FALSE, depending on whether the constant op1b
5004 also satisfies the other comparison against op2b. */
5005 if (code1 == EQ_EXPR)
5006 {
5007 bool done = true;
5008 bool val;
5009 switch (code2)
5010 {
5011 case EQ_EXPR: val = (cmp == 0); break;
5012 case NE_EXPR: val = (cmp != 0); break;
5013 case LT_EXPR: val = (cmp < 0); break;
5014 case GT_EXPR: val = (cmp > 0); break;
5015 case LE_EXPR: val = (cmp <= 0); break;
5016 case GE_EXPR: val = (cmp >= 0); break;
5017 default: done = false;
5018 }
5019 if (done)
5020 {
5021 if (val)
5022 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5023 else
5024 return boolean_false_node;
5025 }
5026 }
5027 /* Likewise if the second comparison is an == comparison. */
5028 else if (code2 == EQ_EXPR)
5029 {
5030 bool done = true;
5031 bool val;
5032 switch (code1)
5033 {
5034 case EQ_EXPR: val = (cmp == 0); break;
5035 case NE_EXPR: val = (cmp != 0); break;
5036 case LT_EXPR: val = (cmp > 0); break;
5037 case GT_EXPR: val = (cmp < 0); break;
5038 case LE_EXPR: val = (cmp >= 0); break;
5039 case GE_EXPR: val = (cmp <= 0); break;
5040 default: done = false;
5041 }
5042 if (done)
5043 {
5044 if (val)
5045 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5046 else
5047 return boolean_false_node;
5048 }
5049 }
5050
5051 /* Same business with inequality tests. */
5052 else if (code1 == NE_EXPR)
5053 {
5054 bool val;
5055 switch (code2)
5056 {
5057 case EQ_EXPR: val = (cmp != 0); break;
5058 case NE_EXPR: val = (cmp == 0); break;
5059 case LT_EXPR: val = (cmp >= 0); break;
5060 case GT_EXPR: val = (cmp <= 0); break;
5061 case LE_EXPR: val = (cmp > 0); break;
5062 case GE_EXPR: val = (cmp < 0); break;
5063 default:
5064 val = false;
5065 }
5066 if (val)
5067 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5068 }
5069 else if (code2 == NE_EXPR)
5070 {
5071 bool val;
5072 switch (code1)
5073 {
5074 case EQ_EXPR: val = (cmp == 0); break;
5075 case NE_EXPR: val = (cmp != 0); break;
5076 case LT_EXPR: val = (cmp <= 0); break;
5077 case GT_EXPR: val = (cmp >= 0); break;
5078 case LE_EXPR: val = (cmp < 0); break;
5079 case GE_EXPR: val = (cmp > 0); break;
5080 default:
5081 val = false;
5082 }
5083 if (val)
5084 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5085 }
5086
5087 /* Choose the more restrictive of two < or <= comparisons. */
5088 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5089 && (code2 == LT_EXPR || code2 == LE_EXPR))
5090 {
5091 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5092 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5093 else
5094 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5095 }
5096
5097 /* Likewise choose the more restrictive of two > or >= comparisons. */
5098 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5099 && (code2 == GT_EXPR || code2 == GE_EXPR))
5100 {
5101 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5102 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5103 else
5104 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5105 }
5106
5107 /* Check for singleton ranges. */
5108 else if (cmp == 0
5109 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5110 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5111 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5112
5113 /* Check for disjoint ranges. */
5114 else if (cmp <= 0
5115 && (code1 == LT_EXPR || code1 == LE_EXPR)
5116 && (code2 == GT_EXPR || code2 == GE_EXPR))
5117 return boolean_false_node;
5118 else if (cmp >= 0
5119 && (code1 == GT_EXPR || code1 == GE_EXPR)
5120 && (code2 == LT_EXPR || code2 == LE_EXPR))
5121 return boolean_false_node;
5122 }
5123
5124 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5125 NAME's definition is a truth value. See if there are any simplifications
5126 that can be done against NAME's definition. */
5127 if (TREE_CODE (op1a) == SSA_NAME
5128 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5129 && (integer_zerop (op1b) || integer_onep (op1b)))
5130 {
5131 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5132 || (code1 == NE_EXPR && integer_onep (op1b)));
5133 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5134 switch (gimple_code (stmt))
5135 {
5136 case GIMPLE_ASSIGN:
5137 /* Try to simplify by copy-propagating the definition. */
5138 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5139
5140 case GIMPLE_PHI:
5141 /* If every argument to the PHI produces the same result when
5142 ANDed with the second comparison, we win.
5143 Do not do this unless the type is bool since we need a bool
5144 result here anyway. */
5145 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5146 {
5147 tree result = NULL_TREE;
5148 unsigned i;
5149 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5150 {
5151 tree arg = gimple_phi_arg_def (stmt, i);
5152
5153 /* If this PHI has itself as an argument, ignore it.
5154 If all the other args produce the same result,
5155 we're still OK. */
5156 if (arg == gimple_phi_result (stmt))
5157 continue;
5158 else if (TREE_CODE (arg) == INTEGER_CST)
5159 {
5160 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5161 {
5162 if (!result)
5163 result = boolean_false_node;
5164 else if (!integer_zerop (result))
5165 return NULL_TREE;
5166 }
5167 else if (!result)
5168 result = fold_build2 (code2, boolean_type_node,
5169 op2a, op2b);
5170 else if (!same_bool_comparison_p (result,
5171 code2, op2a, op2b))
5172 return NULL_TREE;
5173 }
5174 else if (TREE_CODE (arg) == SSA_NAME
5175 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5176 {
5177 tree temp;
5178 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5179 /* In simple cases we can look through PHI nodes,
5180 but we have to be careful with loops.
5181 See PR49073. */
5182 if (! dom_info_available_p (CDI_DOMINATORS)
5183 || gimple_bb (def_stmt) == gimple_bb (stmt)
5184 || dominated_by_p (CDI_DOMINATORS,
5185 gimple_bb (def_stmt),
5186 gimple_bb (stmt)))
5187 return NULL_TREE;
5188 temp = and_var_with_comparison (arg, invert, code2,
5189 op2a, op2b);
5190 if (!temp)
5191 return NULL_TREE;
5192 else if (!result)
5193 result = temp;
5194 else if (!same_bool_result_p (result, temp))
5195 return NULL_TREE;
5196 }
5197 else
5198 return NULL_TREE;
5199 }
5200 return result;
5201 }
5202
5203 default:
5204 break;
5205 }
5206 }
5207 return NULL_TREE;
5208 }
5209
5210 /* Try to simplify the AND of two comparisons, specified by
5211 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5212 If this can be simplified to a single expression (without requiring
5213 introducing more SSA variables to hold intermediate values),
5214 return the resulting tree. Otherwise return NULL_TREE.
5215 If the result expression is non-null, it has boolean type. */
5216
5217 tree
5218 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5219 enum tree_code code2, tree op2a, tree op2b)
5220 {
5221 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5222 if (t)
5223 return t;
5224 else
5225 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5226 }
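
/* Editorial examples for maybe_fold_and_comparisons (illustrative):

     (x < 5) AND (x < 3)	-> x < 3   (more restrictive bound)
     (x <= 2) AND (x >= 2)	-> x == 2  (singleton range)
     (x > 4) AND (x < 2)	-> false   (disjoint ranges)

   all produced by the constant-merging cases of and_comparisons_1
   above.  */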
5227
5228 /* Helper function for or_comparisons_1: try to simplify the OR of the
5229 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5230 If INVERT is true, invert the value of VAR before doing the OR.
5231 Return NULL_TREE if we can't simplify this to a single expression. */
5232
5233 static tree
5234 or_var_with_comparison (tree var, bool invert,
5235 enum tree_code code2, tree op2a, tree op2b)
5236 {
5237 tree t;
5238 gimple *stmt = SSA_NAME_DEF_STMT (var);
5239
5240 /* We can only deal with variables whose definitions are assignments. */
5241 if (!is_gimple_assign (stmt))
5242 return NULL_TREE;
5243
5244 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5245 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5246 Then we only have to consider the simpler non-inverted cases. */
5247 if (invert)
5248 t = and_var_with_comparison_1 (stmt,
5249 invert_tree_comparison (code2, false),
5250 op2a, op2b);
5251 else
5252 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5253 return canonicalize_bool (t, invert);
5254 }
5255
5256 /* Try to simplify the OR of the ssa variable defined by the assignment
5257 STMT with the comparison specified by (OP2A CODE2 OP2B).
5258 Return NULL_TREE if we can't simplify this to a single expression. */
5259
5260 static tree
5261 or_var_with_comparison_1 (gimple *stmt,
5262 enum tree_code code2, tree op2a, tree op2b)
5263 {
5264 tree var = gimple_assign_lhs (stmt);
5265 tree true_test_var = NULL_TREE;
5266 tree false_test_var = NULL_TREE;
5267 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5268
5269 /* Check for identities like (var OR (var != 0)) => true. */
5270 if (TREE_CODE (op2a) == SSA_NAME
5271 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5272 {
5273 if ((code2 == NE_EXPR && integer_zerop (op2b))
5274 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5275 {
5276 true_test_var = op2a;
5277 if (var == true_test_var)
5278 return var;
5279 }
5280 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5281 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5282 {
5283 false_test_var = op2a;
5284 if (var == false_test_var)
5285 return boolean_true_node;
5286 }
5287 }
5288
5289 /* If the definition is a comparison, recurse on it. */
5290 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5291 {
5292 tree t = or_comparisons_1 (innercode,
5293 gimple_assign_rhs1 (stmt),
5294 gimple_assign_rhs2 (stmt),
5295 code2,
5296 op2a,
5297 op2b);
5298 if (t)
5299 return t;
5300 }
5301
5302 /* If the definition is an AND or OR expression, we may be able to
5303 simplify by reassociating. */
5304 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5305 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5306 {
5307 tree inner1 = gimple_assign_rhs1 (stmt);
5308 tree inner2 = gimple_assign_rhs2 (stmt);
5309 gimple *s;
5310 tree t;
5311 tree partial = NULL_TREE;
5312 bool is_or = (innercode == BIT_IOR_EXPR);
5313
5314 /* Check for boolean identities that don't require recursive examination
5315 of inner1/inner2:
5316 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5317 inner1 OR (inner1 AND inner2) => inner1
5318 !inner1 OR (inner1 OR inner2) => true
5319 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5320 */
5321 if (inner1 == true_test_var)
5322 return (is_or ? var : inner1);
5323 else if (inner2 == true_test_var)
5324 return (is_or ? var : inner2);
5325 else if (inner1 == false_test_var)
5326 return (is_or
5327 ? boolean_true_node
5328 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5329 else if (inner2 == false_test_var)
5330 return (is_or
5331 ? boolean_true_node
5332 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5333
5334 /* Next, redistribute/reassociate the OR across the inner tests.
5335 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5336 if (TREE_CODE (inner1) == SSA_NAME
5337 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5338 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5339 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5340 gimple_assign_rhs1 (s),
5341 gimple_assign_rhs2 (s),
5342 code2, op2a, op2b)))
5343 {
5344 /* Handle the OR case, where we are reassociating:
5345 (inner1 OR inner2) OR (op2a code2 op2b)
5346 => (t OR inner2)
5347 If the partial result t is a constant, we win. Otherwise
5348 continue on to try reassociating with the other inner test. */
5349 if (is_or)
5350 {
5351 if (integer_onep (t))
5352 return boolean_true_node;
5353 else if (integer_zerop (t))
5354 return inner2;
5355 }
5356
5357 /* Handle the AND case, where we are redistributing:
5358 (inner1 AND inner2) OR (op2a code2 op2b)
5359 => (t AND (inner2 OR (op2a code op2b))) */
5360 else if (integer_zerop (t))
5361 return boolean_false_node;
5362
5363 /* Save partial result for later. */
5364 partial = t;
5365 }
5366
5367 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5368 if (TREE_CODE (inner2) == SSA_NAME
5369 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5370 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5371 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5372 gimple_assign_rhs1 (s),
5373 gimple_assign_rhs2 (s),
5374 code2, op2a, op2b)))
5375 {
5376 /* Handle the OR case, where we are reassociating:
5377 (inner1 OR inner2) OR (op2a code2 op2b)
5378 => (inner1 OR t)
5379 => (t OR partial) */
5380 if (is_or)
5381 {
5382 if (integer_zerop (t))
5383 return inner1;
5384 else if (integer_onep (t))
5385 return boolean_true_node;
5386 /* If both are the same, we can apply the identity
5387 (x OR x) == x. */
5388 else if (partial && same_bool_result_p (t, partial))
5389 return t;
5390 }
5391
5392 /* Handle the AND case, where we are redistributing:
5393 (inner1 AND inner2) OR (op2a code2 op2b)
5394 => (t AND (inner1 OR (op2a code2 op2b)))
5395 => (t AND partial) */
5396 else
5397 {
5398 if (integer_zerop (t))
5399 return boolean_false_node;
5400 else if (partial)
5401 {
5402 /* We already got a simplification for the other
5403 operand to the redistributed AND expression. The
5404 interesting case is when at least one is true.
5405 Or, if both are the same, we can apply the identity
5406 (x AND x) == x. */
5407 if (integer_onep (partial))
5408 return t;
5409 else if (integer_onep (t))
5410 return partial;
5411 else if (same_bool_result_p (t, partial))
5412 return t;
5413 }
5414 }
5415 }
5416 }
5417 return NULL_TREE;
5418 }
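
/* A small worked example of the identities handled above (with
   hypothetical SSA names): given var_4 = a_2 | b_3, the test
   var_4 | (a_2 != 0) simplifies to var_4, since a_2 being true
   already makes var_4 true; and var_4 | (a_2 == 0) simplifies to
   boolean_true_node, because it is !a_2 | a_2 | b_3.  */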
5419
5420 /* Try to simplify the OR of two comparisons defined by
5421 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5422 If this can be done without constructing an intermediate value,
5423 return the resulting tree; otherwise NULL_TREE is returned.
5424 This function is deliberately asymmetric as it recurses on SSA_DEFs
5425 in the first comparison but not the second. */
5426
5427 static tree
5428 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5429 enum tree_code code2, tree op2a, tree op2b)
5430 {
5431 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5432
5433 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5434 if (operand_equal_p (op1a, op2a, 0)
5435 && operand_equal_p (op1b, op2b, 0))
5436 {
5437 /* Result will be either NULL_TREE, or a combined comparison. */
5438 tree t = combine_comparisons (UNKNOWN_LOCATION,
5439 TRUTH_ORIF_EXPR, code1, code2,
5440 truth_type, op1a, op1b);
5441 if (t)
5442 return t;
5443 }
5444
5445 /* Likewise the swapped case of the above. */
5446 if (operand_equal_p (op1a, op2b, 0)
5447 && operand_equal_p (op1b, op2a, 0))
5448 {
5449 /* Result will be either NULL_TREE, or a combined comparison. */
5450 tree t = combine_comparisons (UNKNOWN_LOCATION,
5451 TRUTH_ORIF_EXPR, code1,
5452 swap_tree_comparison (code2),
5453 truth_type, op1a, op1b);
5454 if (t)
5455 return t;
5456 }
5457
5458 /* If both comparisons are of the same value against constants, we might
5459 be able to merge them. */
5460 if (operand_equal_p (op1a, op2a, 0)
5461 && TREE_CODE (op1b) == INTEGER_CST
5462 && TREE_CODE (op2b) == INTEGER_CST)
5463 {
5464 int cmp = tree_int_cst_compare (op1b, op2b);
5465
5466 /* If we have (op1a != op1b), we should either be able to
5467 return that or TRUE, depending on whether the constant op1b
5468 also satisfies the other comparison against op2b. */
5469 if (code1 == NE_EXPR)
5470 {
5471 bool done = true;
5472 bool val;
5473 switch (code2)
5474 {
5475 case EQ_EXPR: val = (cmp == 0); break;
5476 case NE_EXPR: val = (cmp != 0); break;
5477 case LT_EXPR: val = (cmp < 0); break;
5478 case GT_EXPR: val = (cmp > 0); break;
5479 case LE_EXPR: val = (cmp <= 0); break;
5480 case GE_EXPR: val = (cmp >= 0); break;
5481 default: done = false;
5482 }
5483 if (done)
5484 {
5485 if (val)
5486 return boolean_true_node;
5487 else
5488 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5489 }
5490 }
5491 /* Likewise if the second comparison is a != comparison. */
5492 else if (code2 == NE_EXPR)
5493 {
5494 bool done = true;
5495 bool val;
5496 switch (code1)
5497 {
5498 case EQ_EXPR: val = (cmp == 0); break;
5499 case NE_EXPR: val = (cmp != 0); break;
5500 case LT_EXPR: val = (cmp > 0); break;
5501 case GT_EXPR: val = (cmp < 0); break;
5502 case LE_EXPR: val = (cmp >= 0); break;
5503 case GE_EXPR: val = (cmp <= 0); break;
5504 default: done = false;
5505 }
5506 if (done)
5507 {
5508 if (val)
5509 return boolean_true_node;
5510 else
5511 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5512 }
5513 }
5514
5515 /* See if an equality test is redundant with the other comparison. */
5516 else if (code1 == EQ_EXPR)
5517 {
5518 bool val;
5519 switch (code2)
5520 {
5521 case EQ_EXPR: val = (cmp == 0); break;
5522 case NE_EXPR: val = (cmp != 0); break;
5523 case LT_EXPR: val = (cmp < 0); break;
5524 case GT_EXPR: val = (cmp > 0); break;
5525 case LE_EXPR: val = (cmp <= 0); break;
5526 case GE_EXPR: val = (cmp >= 0); break;
5527 default:
5528 val = false;
5529 }
5530 if (val)
5531 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5532 }
5533 else if (code2 == EQ_EXPR)
5534 {
5535 bool val;
5536 switch (code1)
5537 {
5538 case EQ_EXPR: val = (cmp == 0); break;
5539 case NE_EXPR: val = (cmp != 0); break;
5540 case LT_EXPR: val = (cmp > 0); break;
5541 case GT_EXPR: val = (cmp < 0); break;
5542 case LE_EXPR: val = (cmp >= 0); break;
5543 case GE_EXPR: val = (cmp <= 0); break;
5544 default:
5545 val = false;
5546 }
5547 if (val)
5548 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5549 }
5550
5551 /* Choose the less restrictive of two < or <= comparisons. */
5552 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5553 && (code2 == LT_EXPR || code2 == LE_EXPR))
5554 {
5555 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5556 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5557 else
5558 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5559 }
5560
5561 /* Likewise choose the less restrictive of two > or >= comparisons. */
5562 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5563 && (code2 == GT_EXPR || code2 == GE_EXPR))
5564 {
5565 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5566 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5567 else
5568 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5569 }
5570
5571 /* Check for singleton ranges. */
5572 else if (cmp == 0
5573 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5574 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5575 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5576
5577 /* Check for less/greater pairs that don't restrict the range at all. */
5578 else if (cmp >= 0
5579 && (code1 == LT_EXPR || code1 == LE_EXPR)
5580 && (code2 == GT_EXPR || code2 == GE_EXPR))
5581 return boolean_true_node;
5582 else if (cmp <= 0
5583 && (code1 == GT_EXPR || code1 == GE_EXPR)
5584 && (code2 == LT_EXPR || code2 == LE_EXPR))
5585 return boolean_true_node;
5586 }
5587
5588 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5589 NAME's definition is a truth value. See if there are any simplifications
5590 that can be done against NAME's definition. */
5591 if (TREE_CODE (op1a) == SSA_NAME
5592 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5593 && (integer_zerop (op1b) || integer_onep (op1b)))
5594 {
5595 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5596 || (code1 == NE_EXPR && integer_onep (op1b)));
5597 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5598 switch (gimple_code (stmt))
5599 {
5600 case GIMPLE_ASSIGN:
5601 /* Try to simplify by copy-propagating the definition. */
5602 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5603
5604 case GIMPLE_PHI:
5605 /* If every argument to the PHI produces the same result when
5606 ORed with the second comparison, we win.
5607 Do not do this unless the type is bool since we need a bool
5608 result here anyway. */
5609 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5610 {
5611 tree result = NULL_TREE;
5612 unsigned i;
5613 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5614 {
5615 tree arg = gimple_phi_arg_def (stmt, i);
5616
5617 /* If this PHI has itself as an argument, ignore it.
5618 If all the other args produce the same result,
5619 we're still OK. */
5620 if (arg == gimple_phi_result (stmt))
5621 continue;
5622 else if (TREE_CODE (arg) == INTEGER_CST)
5623 {
5624 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5625 {
5626 if (!result)
5627 result = boolean_true_node;
5628 else if (!integer_onep (result))
5629 return NULL_TREE;
5630 }
5631 else if (!result)
5632 result = fold_build2 (code2, boolean_type_node,
5633 op2a, op2b);
5634 else if (!same_bool_comparison_p (result,
5635 code2, op2a, op2b))
5636 return NULL_TREE;
5637 }
5638 else if (TREE_CODE (arg) == SSA_NAME
5639 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5640 {
5641 tree temp;
5642 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5643 /* In simple cases we can look through PHI nodes,
5644 but we have to be careful with loops.
5645 See PR49073. */
5646 if (! dom_info_available_p (CDI_DOMINATORS)
5647 || gimple_bb (def_stmt) == gimple_bb (stmt)
5648 || dominated_by_p (CDI_DOMINATORS,
5649 gimple_bb (def_stmt),
5650 gimple_bb (stmt)))
5651 return NULL_TREE;
5652 temp = or_var_with_comparison (arg, invert, code2,
5653 op2a, op2b);
5654 if (!temp)
5655 return NULL_TREE;
5656 else if (!result)
5657 result = temp;
5658 else if (!same_bool_result_p (result, temp))
5659 return NULL_TREE;
5660 }
5661 else
5662 return NULL_TREE;
5663 }
5664 return result;
5665 }
5666
5667 default:
5668 break;
5669 }
5670 }
5671 return NULL_TREE;
5672 }
5673
5674 /* Try to simplify the OR of two comparisons, specified by
5675 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5676 If this can be simplified to a single expression (without requiring
5677 introducing more SSA variables to hold intermediate values),
5678 return the resulting tree. Otherwise return NULL_TREE.
5679 If the result expression is non-null, it has boolean type. */
5680
5681 tree
5682 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5683 enum tree_code code2, tree op2a, tree op2b)
5684 {
5685 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5686 if (t)
5687 return t;
5688 else
5689 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5690 }
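
/* Some examples of what this folds (a sketch; x is an SSA name and
   the constants are INTEGER_CSTs):

     (x < 5) || (x < 10)   =>  x < 10   (less restrictive bound wins)
     (x < 5) || (x > 5)    =>  x != 5   (singleton range)
     (x <= 5) || (x >= 3)  =>  true     (the ranges cover everything)

   When nothing applies, NULL_TREE is returned and the caller keeps
   both comparisons.  */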
5691
5692
5693 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5694
5695 Either NULL_TREE, a simplified but non-constant value, or a constant
5696 is returned.
5697
5698 ??? This should go into a gimple-fold-inline.h file to be eventually
5699 privatized with the single valueize function used in the various TUs
5700 to avoid the indirect function call overhead. */
5701
5702 tree
5703 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5704 tree (*gvalueize) (tree))
5705 {
5706 code_helper rcode;
5707 tree ops[3] = {};
5708 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5709 edges if there are intermediate VARYING defs. For this reason
5710 do not follow SSA edges here even though SCCVN can technically
5711 deal with that just fine. */
5712 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5713 {
5714 tree res = NULL_TREE;
5715 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5716 res = ops[0];
5717 else if (mprts_hook)
5718 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5719 if (res)
5720 {
5721 if (dump_file && dump_flags & TDF_DETAILS)
5722 {
5723 fprintf (dump_file, "Match-and-simplified ");
5724 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5725 fprintf (dump_file, " to ");
5726 print_generic_expr (dump_file, res, 0);
5727 fprintf (dump_file, "\n");
5728 }
5729 return res;
5730 }
5731 }
5732
5733 location_t loc = gimple_location (stmt);
5734 switch (gimple_code (stmt))
5735 {
5736 case GIMPLE_ASSIGN:
5737 {
5738 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5739
5740 switch (get_gimple_rhs_class (subcode))
5741 {
5742 case GIMPLE_SINGLE_RHS:
5743 {
5744 tree rhs = gimple_assign_rhs1 (stmt);
5745 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5746
5747 if (TREE_CODE (rhs) == SSA_NAME)
5748 {
5749 /* If the RHS is an SSA_NAME, return its known constant value,
5750 if any. */
5751 return (*valueize) (rhs);
5752 }
5753 /* Handle propagating invariant addresses into address
5754 operations. */
5755 else if (TREE_CODE (rhs) == ADDR_EXPR
5756 && !is_gimple_min_invariant (rhs))
5757 {
5758 HOST_WIDE_INT offset = 0;
5759 tree base;
5760 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5761 &offset,
5762 valueize);
5763 if (base
5764 && (CONSTANT_CLASS_P (base)
5765 || decl_address_invariant_p (base)))
5766 return build_invariant_address (TREE_TYPE (rhs),
5767 base, offset);
5768 }
5769 else if (TREE_CODE (rhs) == CONSTRUCTOR
5770 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5771 && (CONSTRUCTOR_NELTS (rhs)
5772 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5773 {
5774 unsigned i;
5775 tree val, *vec;
5776
5777 vec = XALLOCAVEC (tree,
5778 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5779 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5780 {
5781 val = (*valueize) (val);
5782 if (TREE_CODE (val) == INTEGER_CST
5783 || TREE_CODE (val) == REAL_CST
5784 || TREE_CODE (val) == FIXED_CST)
5785 vec[i] = val;
5786 else
5787 return NULL_TREE;
5788 }
5789
5790 return build_vector (TREE_TYPE (rhs), vec);
5791 }
5792 if (subcode == OBJ_TYPE_REF)
5793 {
5794 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5795 /* If callee is constant, we can fold away the wrapper. */
5796 if (is_gimple_min_invariant (val))
5797 return val;
5798 }
5799
5800 if (kind == tcc_reference)
5801 {
5802 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5803 || TREE_CODE (rhs) == REALPART_EXPR
5804 || TREE_CODE (rhs) == IMAGPART_EXPR)
5805 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5806 {
5807 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5808 return fold_unary_loc (EXPR_LOCATION (rhs),
5809 TREE_CODE (rhs),
5810 TREE_TYPE (rhs), val);
5811 }
5812 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5813 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5814 {
5815 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5816 return fold_ternary_loc (EXPR_LOCATION (rhs),
5817 TREE_CODE (rhs),
5818 TREE_TYPE (rhs), val,
5819 TREE_OPERAND (rhs, 1),
5820 TREE_OPERAND (rhs, 2));
5821 }
5822 else if (TREE_CODE (rhs) == MEM_REF
5823 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5824 {
5825 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5826 if (TREE_CODE (val) == ADDR_EXPR
5827 && is_gimple_min_invariant (val))
5828 {
5829 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5830 unshare_expr (val),
5831 TREE_OPERAND (rhs, 1));
5832 if (tem)
5833 rhs = tem;
5834 }
5835 }
5836 return fold_const_aggregate_ref_1 (rhs, valueize);
5837 }
5838 else if (kind == tcc_declaration)
5839 return get_symbol_constant_value (rhs);
5840 return rhs;
5841 }
5842
5843 case GIMPLE_UNARY_RHS:
5844 return NULL_TREE;
5845
5846 case GIMPLE_BINARY_RHS:
5847 /* Translate &x + CST into an invariant form suitable for
5848 further propagation. */
5849 if (subcode == POINTER_PLUS_EXPR)
5850 {
5851 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5852 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5853 if (TREE_CODE (op0) == ADDR_EXPR
5854 && TREE_CODE (op1) == INTEGER_CST)
5855 {
5856 tree off = fold_convert (ptr_type_node, op1);
5857 return build_fold_addr_expr_loc
5858 (loc,
5859 fold_build2 (MEM_REF,
5860 TREE_TYPE (TREE_TYPE (op0)),
5861 unshare_expr (op0), off));
5862 }
5863 }
5864 /* Canonicalize bool != 0 and bool == 0 appearing after
5865 valueization. While gimple_simplify handles this
5866 it can get confused by the ~X == 1 -> X == 0 transform
5867 which we can't reduce to an SSA name or a constant
5868 (and we have no way to tell gimple_simplify to not
5869 consider those transforms in the first place). */
5870 else if (subcode == EQ_EXPR
5871 || subcode == NE_EXPR)
5872 {
5873 tree lhs = gimple_assign_lhs (stmt);
5874 tree op0 = gimple_assign_rhs1 (stmt);
5875 if (useless_type_conversion_p (TREE_TYPE (lhs),
5876 TREE_TYPE (op0)))
5877 {
5878 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5879 op0 = (*valueize) (op0);
5880 if (TREE_CODE (op0) == INTEGER_CST)
5881 std::swap (op0, op1);
5882 if (TREE_CODE (op1) == INTEGER_CST
5883 && ((subcode == NE_EXPR && integer_zerop (op1))
5884 || (subcode == EQ_EXPR && integer_onep (op1))))
5885 return op0;
5886 }
5887 }
5888 return NULL_TREE;
5889
5890 case GIMPLE_TERNARY_RHS:
5891 {
5892 /* Handle ternary operators that can appear in GIMPLE form. */
5893 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5894 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5895 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5896 return fold_ternary_loc (loc, subcode,
5897 gimple_expr_type (stmt), op0, op1, op2);
5898 }
5899
5900 default:
5901 gcc_unreachable ();
5902 }
5903 }
5904
5905 case GIMPLE_CALL:
5906 {
5907 tree fn;
5908 gcall *call_stmt = as_a <gcall *> (stmt);
5909
5910 if (gimple_call_internal_p (stmt))
5911 {
5912 enum tree_code subcode = ERROR_MARK;
5913 switch (gimple_call_internal_fn (stmt))
5914 {
5915 case IFN_UBSAN_CHECK_ADD:
5916 subcode = PLUS_EXPR;
5917 break;
5918 case IFN_UBSAN_CHECK_SUB:
5919 subcode = MINUS_EXPR;
5920 break;
5921 case IFN_UBSAN_CHECK_MUL:
5922 subcode = MULT_EXPR;
5923 break;
5924 case IFN_BUILTIN_EXPECT:
5925 {
5926 tree arg0 = gimple_call_arg (stmt, 0);
5927 tree op0 = (*valueize) (arg0);
5928 if (TREE_CODE (op0) == INTEGER_CST)
5929 return op0;
5930 return NULL_TREE;
5931 }
5932 default:
5933 return NULL_TREE;
5934 }
5935 tree arg0 = gimple_call_arg (stmt, 0);
5936 tree arg1 = gimple_call_arg (stmt, 1);
5937 tree op0 = (*valueize) (arg0);
5938 tree op1 = (*valueize) (arg1);
5939
5940 if (TREE_CODE (op0) != INTEGER_CST
5941 || TREE_CODE (op1) != INTEGER_CST)
5942 {
5943 switch (subcode)
5944 {
5945 case MULT_EXPR:
5946 /* x * 0 = 0 * x = 0 without overflow. */
5947 if (integer_zerop (op0) || integer_zerop (op1))
5948 return build_zero_cst (TREE_TYPE (arg0));
5949 break;
5950 case MINUS_EXPR:
5951 /* y - y = 0 without overflow. */
5952 if (operand_equal_p (op0, op1, 0))
5953 return build_zero_cst (TREE_TYPE (arg0));
5954 break;
5955 default:
5956 break;
5957 }
5958 }
5959 tree res
5960 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5961 if (res
5962 && TREE_CODE (res) == INTEGER_CST
5963 && !TREE_OVERFLOW (res))
5964 return res;
5965 return NULL_TREE;
5966 }
5967
5968 fn = (*valueize) (gimple_call_fn (stmt));
5969 if (TREE_CODE (fn) == ADDR_EXPR
5970 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5971 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5972 && gimple_builtin_call_types_compatible_p (stmt,
5973 TREE_OPERAND (fn, 0)))
5974 {
5975 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5976 tree retval;
5977 unsigned i;
5978 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5979 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5980 retval = fold_builtin_call_array (loc,
5981 gimple_call_return_type (call_stmt),
5982 fn, gimple_call_num_args (stmt), args);
5983 if (retval)
5984 {
5985 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5986 STRIP_NOPS (retval);
5987 retval = fold_convert (gimple_call_return_type (call_stmt),
5988 retval);
5989 }
5990 return retval;
5991 }
5992 return NULL_TREE;
5993 }
5994
5995 default:
5996 return NULL_TREE;
5997 }
5998 }
5999
6000 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6001 Returns NULL_TREE if folding to a constant is not possible, otherwise
6002 returns a constant according to is_gimple_min_invariant. */
6003
6004 tree
6005 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6006 {
6007 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6008 if (res && is_gimple_min_invariant (res))
6009 return res;
6010 return NULL_TREE;
6011 }
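
/* A minimal sketch of a VALUEIZE callback as a propagator might
   supply it (lookup_lattice_value is a hypothetical lattice query):
   return the constant recorded for NAME if there is one, otherwise
   NAME itself, so the folders substitute known values transparently:

     static tree
     example_valueize (tree name)
     {
       tree val = lookup_lattice_value (name);
       if (val && is_gimple_min_invariant (val))
         return val;
       return name;
     }

   With such a callback, a_1 = b_2 + 3 folds to a constant as soon as
   the lattice knows b_2.  */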
6012
6013
6014 /* The following set of functions folds references using
6015 their constant initializers. */
6016
6017 /* See if we can find the constructor defining the value of BASE.
6018 When we know the constructor with a constant offset (such as when
6019 BASE is array[40] and we do know the constructor of the array),
6020 then BIT_OFFSET is adjusted accordingly.
6021
6022 As a special case, return error_mark_node when constructor
6023 is not explicitly available, but it is known to be zero
6024 such as 'static const int a;'. */
6025 static tree
6026 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6027 tree (*valueize)(tree))
6028 {
6029 HOST_WIDE_INT bit_offset2, size, max_size;
6030 bool reverse;
6031
6032 if (TREE_CODE (base) == MEM_REF)
6033 {
6034 if (!integer_zerop (TREE_OPERAND (base, 1)))
6035 {
6036 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6037 return NULL_TREE;
6038 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6039 * BITS_PER_UNIT);
6040 }
6041
6042 if (valueize
6043 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6044 base = valueize (TREE_OPERAND (base, 0));
6045 if (!base || TREE_CODE (base) != ADDR_EXPR)
6046 return NULL_TREE;
6047 base = TREE_OPERAND (base, 0);
6048 }
6049 else if (valueize
6050 && TREE_CODE (base) == SSA_NAME)
6051 base = valueize (base);
6052
6053 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6054 DECL_INITIAL. If BASE is a nested reference into another
6055 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6056 the inner reference. */
6057 switch (TREE_CODE (base))
6058 {
6059 case VAR_DECL:
6060 case CONST_DECL:
6061 {
6062 tree init = ctor_for_folding (base);
6063
6064 /* Our semantics are the exact opposite of ctor_for_folding's:
6065 NULL means unknown, while error_mark_node means a known zero value. */
6066 if (init == error_mark_node)
6067 return NULL_TREE;
6068 if (!init)
6069 return error_mark_node;
6070 return init;
6071 }
6072
6073 case VIEW_CONVERT_EXPR:
6074 return get_base_constructor (TREE_OPERAND (base, 0),
6075 bit_offset, valueize);
6076
6077 case ARRAY_REF:
6078 case COMPONENT_REF:
6079 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6080 &reverse);
6081 if (max_size == -1 || size != max_size)
6082 return NULL_TREE;
6083 *bit_offset += bit_offset2;
6084 return get_base_constructor (base, bit_offset, valueize);
6085
6086 case CONSTRUCTOR:
6087 return base;
6088
6089 default:
6090 if (CONSTANT_CLASS_P (base))
6091 return base;
6092
6093 return NULL_TREE;
6094 }
6095 }
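
/* For example (hypothetical declarations): for

     static const int array[40] = { ... };

   a MEM_REF of &array with byte offset 8 takes the MEM_REF case
   above, adds 8 * BITS_PER_UNIT to *BIT_OFFSET and returns the
   DECL_INITIAL of 'array'; for 'static const int b;' there is no
   explicit constructor, so error_mark_node is returned to signal a
   known zero value.  */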
6096
6097 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
6098 and size SIZE to the memory at bit OFFSET. */
6099
6100 static tree
6101 fold_array_ctor_reference (tree type, tree ctor,
6102 unsigned HOST_WIDE_INT offset,
6103 unsigned HOST_WIDE_INT size,
6104 tree from_decl)
6105 {
6106 offset_int low_bound;
6107 offset_int elt_size;
6108 offset_int access_index;
6109 tree domain_type = NULL_TREE;
6110 HOST_WIDE_INT inner_offset;
6111
6112 /* Compute low bound and elt size. */
6113 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6114 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6115 if (domain_type && TYPE_MIN_VALUE (domain_type))
6116 {
6117 /* Static constructors for variably sized objects make no sense. */
6118 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6119 return NULL_TREE;
6120 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6121 }
6122 else
6123 low_bound = 0;
6124 /* Static constructors for variably sized objects make no sense. */
6125 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6126 return NULL_TREE;
6127 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6128
6129 /* We can handle only constant-size accesses that are known not to be
6130 larger than the size of an array element. */
6131 if (!TYPE_SIZE_UNIT (type)
6132 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6133 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6134 || elt_size == 0)
6135 return NULL_TREE;
6136
6137 /* Compute the array index we look for. */
6138 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6139 elt_size);
6140 access_index += low_bound;
6141
6142 /* And offset within the access. */
6143 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6144
6145 /* See if the array element is large enough to span the whole access.
6146 We do not care to fold accesses spanning multiple array elements. */
6147 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6148 return NULL_TREE;
6149 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6150 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6151
6152 /* When the memory is not explicitly mentioned in the constructor,
6153 it is 0 (or out of range). */
6154 return build_zero_cst (type);
6155 }
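
/* A worked example (assuming a 32-bit int): for

     static const int a[4] = { 1, 2, 3, 4 };

   a read of TYPE int with SIZE 32 at bit OFFSET 64 computes elt_size
   4, access_index (64/8)/4 = 2 and inner_offset 0, so the element '3'
   is returned.  A 32-bit read at OFFSET 16 would get inner_offset 16
   and fail the inner_offset + size check, returning NULL_TREE because
   it straddles two elements.  */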
6156
6157 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
6158 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6159
6160 static tree
6161 fold_nonarray_ctor_reference (tree type, tree ctor,
6162 unsigned HOST_WIDE_INT offset,
6163 unsigned HOST_WIDE_INT size,
6164 tree from_decl)
6165 {
6166 unsigned HOST_WIDE_INT cnt;
6167 tree cfield, cval;
6168
6169 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6170 cval)
6171 {
6172 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6173 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6174 tree field_size = DECL_SIZE (cfield);
6175 offset_int bitoffset;
6176 offset_int bitoffset_end, access_end;
6177
6178 /* Variable-sized objects in static constructors make no sense,
6179 but field_size can be NULL for flexible array members. */
6180 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6181 && TREE_CODE (byte_offset) == INTEGER_CST
6182 && (field_size != NULL_TREE
6183 ? TREE_CODE (field_size) == INTEGER_CST
6184 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6185
6186 /* Compute bit offset of the field. */
6187 bitoffset = (wi::to_offset (field_offset)
6188 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6189 /* Compute bit offset where the field ends. */
6190 if (field_size != NULL_TREE)
6191 bitoffset_end = bitoffset + wi::to_offset (field_size);
6192 else
6193 bitoffset_end = 0;
6194
6195 access_end = offset_int (offset) + size;
6196
6197 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6198 [BITOFFSET, BITOFFSET_END)? */
6199 if (wi::cmps (access_end, bitoffset) > 0
6200 && (field_size == NULL_TREE
6201 || wi::lts_p (offset, bitoffset_end)))
6202 {
6203 offset_int inner_offset = offset_int (offset) - bitoffset;
6204 /* We do have overlap. Now see if the field is large enough to
6205 cover the access. Give up for accesses spanning multiple
6206 fields. */
6207 if (wi::cmps (access_end, bitoffset_end) > 0)
6208 return NULL_TREE;
6209 if (offset < bitoffset)
6210 return NULL_TREE;
6211 return fold_ctor_reference (type, cval,
6212 inner_offset.to_uhwi (), size,
6213 from_decl);
6214 }
6215 }
6216 /* When the memory is not explicitly mentioned in the constructor, it is 0. */
6217 return build_zero_cst (type);
6218 }
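
/* For example (a hypothetical aggregate): given

     static const struct { short s; int i; } c = { 1, 2 };

   a read of TYPE int at the bit offset of field 'i' falls inside that
   field's [bitoffset, bitoffset_end) range with inner_offset 0 and
   recurses into fold_ctor_reference on the value 2, while a read that
   starts inside 's' but extends past it spans two fields and yields
   NULL_TREE.  */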
6219
6220 /* CTOR is a value initializing memory; fold a reference of type TYPE and size SIZE
6221 to the memory at bit OFFSET. */
6222
6223 tree
6224 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6225 unsigned HOST_WIDE_INT size, tree from_decl)
6226 {
6227 tree ret;
6228
6229 /* We found the field with exact match. */
6230 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6231 && !offset)
6232 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6233
6234 /* We are at the end of the walk; see if we can view-convert the
6235 result. */
6236 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6237 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6238 && !compare_tree_int (TYPE_SIZE (type), size)
6239 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6240 {
6241 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6242 if (ret)
6243 {
6244 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6245 if (ret)
6246 STRIP_USELESS_TYPE_CONVERSION (ret);
6247 }
6248 return ret;
6249 }
6250 /* For constants and byte-aligned/sized reads try to go through
6251 native_encode/interpret. */
6252 if (CONSTANT_CLASS_P (ctor)
6253 && BITS_PER_UNIT == 8
6254 && offset % BITS_PER_UNIT == 0
6255 && size % BITS_PER_UNIT == 0
6256 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6257 {
6258 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6259 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6260 offset / BITS_PER_UNIT);
6261 if (len > 0)
6262 return native_interpret_expr (type, buf, len);
6263 }
6264 if (TREE_CODE (ctor) == CONSTRUCTOR)
6265 {
6266
6267 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6268 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6269 return fold_array_ctor_reference (type, ctor, offset, size,
6270 from_decl);
6271 else
6272 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6273 from_decl);
6274 }
6275
6276 return NULL_TREE;
6277 }
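
/* As an illustration of the native_encode path above (assuming
   BITS_PER_UNIT == 8 and a 32-bit float): reading TYPE int with
   OFFSET 0 and SIZE 32 from the REAL_CST 1.0f encodes the four bytes
   of the float in target order and reinterprets them as an integer,
   yielding the bit pattern 0x3f800000.  */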
6278
6279 /* Return the tree representing the element referenced by T if T is an
6280 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6281 names using VALUEIZE. Return NULL_TREE otherwise. */
6282
6283 tree
6284 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6285 {
6286 tree ctor, idx, base;
6287 HOST_WIDE_INT offset, size, max_size;
6288 tree tem;
6289 bool reverse;
6290
6291 if (TREE_THIS_VOLATILE (t))
6292 return NULL_TREE;
6293
6294 if (DECL_P (t))
6295 return get_symbol_constant_value (t);
6296
6297 tem = fold_read_from_constant_string (t);
6298 if (tem)
6299 return tem;
6300
6301 switch (TREE_CODE (t))
6302 {
6303 case ARRAY_REF:
6304 case ARRAY_RANGE_REF:
6305 /* Constant indexes are handled well by get_base_constructor.
6306 Only special-case variable offsets.
6307 FIXME: This code can't handle nested references with variable indexes
6308 (they will be handled only by iteration of ccp). Perhaps we can bring
6309 get_ref_base_and_extent here and make it use a valueize callback. */
6310 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6311 && valueize
6312 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6313 && TREE_CODE (idx) == INTEGER_CST)
6314 {
6315 tree low_bound, unit_size;
6316
6317 /* If the resulting bit-offset is constant, track it. */
6318 if ((low_bound = array_ref_low_bound (t),
6319 TREE_CODE (low_bound) == INTEGER_CST)
6320 && (unit_size = array_ref_element_size (t),
6321 tree_fits_uhwi_p (unit_size)))
6322 {
6323 offset_int woffset
6324 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6325 TYPE_PRECISION (TREE_TYPE (idx)));
6326
6327 if (wi::fits_shwi_p (woffset))
6328 {
6329 offset = woffset.to_shwi ();
6330 /* TODO: This code seems wrong; it should multiply first and
6331 then check whether the result fits. */
6332 offset *= tree_to_uhwi (unit_size);
6333 offset *= BITS_PER_UNIT;
6334
6335 base = TREE_OPERAND (t, 0);
6336 ctor = get_base_constructor (base, &offset, valueize);
6337 /* Empty constructor. Always fold to 0. */
6338 if (ctor == error_mark_node)
6339 return build_zero_cst (TREE_TYPE (t));
6340 /* Out-of-bounds array access. Value is undefined,
6341 but don't fold. */
6342 if (offset < 0)
6343 return NULL_TREE;
6344 /* We cannot determine the ctor. */
6345 if (!ctor)
6346 return NULL_TREE;
6347 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6348 tree_to_uhwi (unit_size)
6349 * BITS_PER_UNIT,
6350 base);
6351 }
6352 }
6353 }
6354 /* Fallthru. */
6355
6356 case COMPONENT_REF:
6357 case BIT_FIELD_REF:
6358 case TARGET_MEM_REF:
6359 case MEM_REF:
6360 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6361 ctor = get_base_constructor (base, &offset, valueize);
6362
6363 /* Empty constructor. Always fold to 0. */
6364 if (ctor == error_mark_node)
6365 return build_zero_cst (TREE_TYPE (t));
6366 /* We do not know precise address. */
6367 if (max_size == -1 || max_size != size)
6368 return NULL_TREE;
6369 /* We cannot determine the ctor. */
6370 if (!ctor)
6371 return NULL_TREE;
6372
6373 /* Out-of-bounds array access. Value is undefined, but don't fold. */
6374 if (offset < 0)
6375 return NULL_TREE;
6376
6377 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6378 base);
6379
6380 case REALPART_EXPR:
6381 case IMAGPART_EXPR:
6382 {
6383 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6384 if (c && TREE_CODE (c) == COMPLEX_CST)
6385 return fold_build1_loc (EXPR_LOCATION (t),
6386 TREE_CODE (t), TREE_TYPE (t), c);
6387 break;
6388 }
6389
6390 default:
6391 break;
6392 }
6393
6394 return NULL_TREE;
6395 }
6396
6397 tree
6398 fold_const_aggregate_ref (tree t)
6399 {
6400 return fold_const_aggregate_ref_1 (t, NULL);
6401 }
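
/* For example (hypothetical source): with

     static const int a[2][2] = { { 1, 2 }, { 3, 4 } };

   folding the reference a[1][i_5], where VALUEIZE maps i_5 to 1,
   computes a constant bit offset into the initializer of 'a' and
   returns the constant 4.  */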
6402
6403 /* Look up the virtual method with index TOKEN in a virtual table V
6404 at OFFSET.
6405 Set CAN_REFER, if non-NULL, to false if the method is not referable
6406 or if the virtual table is ill-formed (such as one rewritten by a
6407 non-C++-produced symbol). Otherwise just return NULL in that case. */
6408
6409 tree
6410 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6411 tree v,
6412 unsigned HOST_WIDE_INT offset,
6413 bool *can_refer)
6414 {
6415 tree vtable = v, init, fn;
6416 unsigned HOST_WIDE_INT size;
6417 unsigned HOST_WIDE_INT elt_size, access_index;
6418 tree domain_type;
6419
6420 if (can_refer)
6421 *can_refer = true;
6422
6423 /* First of all, double-check that we have a virtual table. */
6424 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6425 {
6426 /* Pass down that we lost track of the target. */
6427 if (can_refer)
6428 *can_refer = false;
6429 return NULL_TREE;
6430 }
6431
6432 init = ctor_for_folding (v);
6433
6434 /* The virtual tables should always be born with constructors,
6435 and we should always assume that they are available for
6436 folding. At the moment we do not stream them in all cases,
6437 but it should never happen that the ctor seems unreachable. */
6438 gcc_assert (init);
6439 if (init == error_mark_node)
6440 {
6441 gcc_assert (in_lto_p);
6442 /* Pass down that we lost track of the target. */
6443 if (can_refer)
6444 *can_refer = false;
6445 return NULL_TREE;
6446 }
6447 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6448 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6449 offset *= BITS_PER_UNIT;
6450 offset += token * size;
6451
6452 /* Look up the value in the constructor, which is assumed to be an array.
6453 This is equivalent to
6454 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6455 offset, size, NULL);
6456 but in constant time. We expect that the frontend produced a simple
6457 array without indexed initializers. */
6458
6459 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6460 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6461 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6462 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6463
6464 access_index = offset / BITS_PER_UNIT / elt_size;
6465 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6466
6467 /* This code makes an assumption that there are no
6468 indexed fields produced by the C++ FE, so we can directly index the array. */
6469 if (access_index < CONSTRUCTOR_NELTS (init))
6470 {
6471 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6472 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6473 STRIP_NOPS (fn);
6474 }
6475 else
6476 fn = NULL;
6477
6478 /* For a type-inconsistent program we may end up looking up a virtual
6479 method in a virtual table that does not contain TOKEN entries. We may overrun
6480 the virtual table and pick up a constant or RTTI info pointer.
6481 In any case the call is undefined. */
6482 if (!fn
6483 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6484 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6485 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6486 else
6487 {
6488 fn = TREE_OPERAND (fn, 0);
6489
6490 /* When the cgraph node is missing and the function is not public, we
6491 cannot devirtualize. This can happen in WHOPR when the actual method
6492 ends up in another partition, because we found the devirtualization
6493 possibility too late. */
6494 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6495 {
6496 if (can_refer)
6497 {
6498 *can_refer = false;
6499 return fn;
6500 }
6501 return NULL_TREE;
6502 }
6503 }
6504
6505 /* Make sure we create a cgraph node for functions we'll reference.
6506 They can be non-existent if the reference comes from an entry
6507 of an external vtable for example. */
6508 cgraph_node::get_create (fn);
6509
6510 return fn;
6511 }
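
/* A sketch of the index arithmetic above (assuming a 64-bit target
   with 8-byte vtable slots): for TOKEN 2 and byte OFFSET 16 into the
   vtable, offset becomes 16*8 + 2*64 = 256 bits, so access_index is
   256 / 8 / 8 = 4 and CONSTRUCTOR_ELT (init, 4)->value is the
   candidate method pointer.  */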
6512
6513 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6514 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6515 KNOWN_BINFO carries the binfo describing the true type of
6516 OBJ_TYPE_REF_OBJECT(REF).
6517 Set CAN_REFER, if non-NULL, to false if the method is not referable
6518 or if the virtual table is ill-formed (such as one rewritten by a
6519 non-C++-produced symbol). Otherwise just return NULL in that case. */
6520
6521 tree
6522 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6523 bool *can_refer)
6524 {
6525 unsigned HOST_WIDE_INT offset;
6526 tree v;
6527
6528 v = BINFO_VTABLE (known_binfo);
6529 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6530 if (!v)
6531 return NULL_TREE;
6532
6533 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6534 {
6535 if (can_refer)
6536 *can_refer = false;
6537 return NULL_TREE;
6538 }
6539 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6540 }
6541
6542 /* Given a pointer value OP0, return a simplified version of an
6543 indirection through OP0, or NULL_TREE if no simplification is
6544 possible. Note that the resulting type may be different from
6545 the type pointed to in the sense that it is still compatible
6546 from the langhooks point of view. */
6547
6548 tree
6549 gimple_fold_indirect_ref (tree t)
6550 {
6551 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6552 tree sub = t;
6553 tree subtype;
6554
6555 STRIP_NOPS (sub);
6556 subtype = TREE_TYPE (sub);
6557 if (!POINTER_TYPE_P (subtype))
6558 return NULL_TREE;
6559
6560 if (TREE_CODE (sub) == ADDR_EXPR)
6561 {
6562 tree op = TREE_OPERAND (sub, 0);
6563 tree optype = TREE_TYPE (op);
6564 /* *&p => p */
6565 if (useless_type_conversion_p (type, optype))
6566 return op;
6567
6568 /* *(foo *)&fooarray => fooarray[0] */
6569 if (TREE_CODE (optype) == ARRAY_TYPE
6570 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6571 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6572 {
6573 tree type_domain = TYPE_DOMAIN (optype);
6574 tree min_val = size_zero_node;
6575 if (type_domain && TYPE_MIN_VALUE (type_domain))
6576 min_val = TYPE_MIN_VALUE (type_domain);
6577 if (TREE_CODE (min_val) == INTEGER_CST)
6578 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6579 }
6580 /* *(foo *)&complexfoo => __real__ complexfoo */
6581 else if (TREE_CODE (optype) == COMPLEX_TYPE
6582 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6583 return fold_build1 (REALPART_EXPR, type, op);
6584 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6585 else if (TREE_CODE (optype) == VECTOR_TYPE
6586 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6587 {
6588 tree part_width = TYPE_SIZE (type);
6589 tree index = bitsize_int (0);
6590 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6591 }
6592 }
6593
6594 /* *(p + CST) -> ... */
6595 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6596 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6597 {
6598 tree addr = TREE_OPERAND (sub, 0);
6599 tree off = TREE_OPERAND (sub, 1);
6600 tree addrtype;
6601
6602 STRIP_NOPS (addr);
6603 addrtype = TREE_TYPE (addr);
6604
6605 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6606 if (TREE_CODE (addr) == ADDR_EXPR
6607 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6608 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6609 && tree_fits_uhwi_p (off))
6610 {
6611 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6612 tree part_width = TYPE_SIZE (type);
6613 unsigned HOST_WIDE_INT part_widthi
6614 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6615 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6616 tree index = bitsize_int (indexi);
6617 if (offset / part_widthi
6618 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6619 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6620 part_width, index);
6621 }
6622
6623 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6624 if (TREE_CODE (addr) == ADDR_EXPR
6625 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6626 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6627 {
6628 tree size = TYPE_SIZE_UNIT (type);
6629 if (tree_int_cst_equal (size, off))
6630 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6631 }
6632
6633 /* *(p + CST) -> MEM_REF <p, CST>. */
6634 if (TREE_CODE (addr) != ADDR_EXPR
6635 || DECL_P (TREE_OPERAND (addr, 0)))
6636 return fold_build2 (MEM_REF, type,
6637 addr,
6638 wide_int_to_tree (ptype, off));
6639 }
6640
6641 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6642 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6643 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6644 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6645 {
6646 tree type_domain;
6647 tree min_val = size_zero_node;
6648 tree osub = sub;
6649 sub = gimple_fold_indirect_ref (sub);
6650 if (! sub)
6651 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6652 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6653 if (type_domain && TYPE_MIN_VALUE (type_domain))
6654 min_val = TYPE_MIN_VALUE (type_domain);
6655 if (TREE_CODE (min_val) == INTEGER_CST)
6656 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6657 }
6658
6659 return NULL_TREE;
6660 }
6661
6662 /* Return true if CODE is an operation that when operating on signed
6663 integer types involves undefined behavior on overflow and the
6664 operation can be expressed with unsigned arithmetic. */
6665
6666 bool
6667 arith_code_with_undefined_signed_overflow (tree_code code)
6668 {
6669 switch (code)
6670 {
6671 case PLUS_EXPR:
6672 case MINUS_EXPR:
6673 case MULT_EXPR:
6674 case NEGATE_EXPR:
6675 case POINTER_PLUS_EXPR:
6676 return true;
6677 default:
6678 return false;
6679 }
6680 }
6681
6682 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6683 operation that can be transformed to unsigned arithmetic by converting
6684 its operands, carrying out the operation in the corresponding unsigned
6685 type and converting the result back to the original type.
6686
6687 Returns a sequence of statements that replace STMT and also contain
6688 a modified form of STMT itself. */
6689
6690 gimple_seq
6691 rewrite_to_defined_overflow (gimple *stmt)
6692 {
6693 if (dump_file && (dump_flags & TDF_DETAILS))
6694 {
6695 fprintf (dump_file, "rewriting stmt with undefined signed "
6696 "overflow ");
6697 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6698 }
6699
6700 tree lhs = gimple_assign_lhs (stmt);
6701 tree type = unsigned_type_for (TREE_TYPE (lhs));
6702 gimple_seq stmts = NULL;
6703 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6704 {
6705 tree op = gimple_op (stmt, i);
6706 op = gimple_convert (&stmts, type, op);
6707 gimple_set_op (stmt, i, op);
6708 }
6709 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6710 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6711 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6712 gimple_seq_add_stmt (&stmts, stmt);
6713 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6714 gimple_seq_add_stmt (&stmts, cvt);
6715
6716 return stmts;
6717 }
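
/* For example (a sketch with hypothetical SSA names), the signed
   addition

     a_1 = b_2 + c_3;          // int, overflow undefined

   is rewritten into the returned sequence

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;             // unsigned, overflow wraps
     a_1 = (int) _6;

   where the third statement is STMT itself with its operands and LHS
   retyped.  */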
6718
6719
6720 /* The valueization hook we use for the gimple_build API simplification.
6721 This makes us match fold_buildN behavior by only combining with
6722 statements in the sequence(s) we are currently building. */
6723
6724 static tree
6725 gimple_build_valueize (tree op)
6726 {
6727 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6728 return op;
6729 return NULL_TREE;
6730 }
6731
6732 /* Build the expression CODE OP0 of type TYPE with location LOC,
6733 simplifying it first if possible. Returns the built
6734 expression value and appends statements possibly defining it
6735 to SEQ. */
6736
6737 tree
6738 gimple_build (gimple_seq *seq, location_t loc,
6739 enum tree_code code, tree type, tree op0)
6740 {
6741 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6742 if (!res)
6743 {
6744 res = create_tmp_reg_or_ssa_name (type);
6745 gimple *stmt;
6746 if (code == REALPART_EXPR
6747 || code == IMAGPART_EXPR
6748 || code == VIEW_CONVERT_EXPR)
6749 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6750 else
6751 stmt = gimple_build_assign (res, code, op0);
6752 gimple_set_location (stmt, loc);
6753 gimple_seq_add_stmt_without_update (seq, stmt);
6754 }
6755 return res;
6756 }
6757
6758 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6759 simplifying it first if possible. Returns the built
6760 expression value and appends statements possibly defining it
6761 to SEQ. */
6762
6763 tree
6764 gimple_build (gimple_seq *seq, location_t loc,
6765 enum tree_code code, tree type, tree op0, tree op1)
6766 {
6767 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6768 if (!res)
6769 {
6770 res = create_tmp_reg_or_ssa_name (type);
6771 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6772 gimple_set_location (stmt, loc);
6773 gimple_seq_add_stmt_without_update (seq, stmt);
6774 }
6775 return res;
6776 }
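
/* A usage sketch (the caller context here is hypothetical): building
   (a + 1) * 2 while folding on the fly,

     gimple_seq seq = NULL;
     tree t = gimple_build (&seq, loc, PLUS_EXPR, type, a,
                            build_one_cst (type));
     t = gimple_build (&seq, loc, MULT_EXPR, type, t,
                       build_int_cst (type, 2));
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   If 'a' is itself a constant, both steps fold via gimple_simplify
   and SEQ stays empty.  */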
6777
6778 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6779 simplifying it first if possible. Returns the built
6780 expression value and appends statements possibly defining it
6781 to SEQ. */
6782
6783 tree
6784 gimple_build (gimple_seq *seq, location_t loc,
6785 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6786 {
6787 tree res = gimple_simplify (code, type, op0, op1, op2,
6788 seq, gimple_build_valueize);
6789 if (!res)
6790 {
6791 res = create_tmp_reg_or_ssa_name (type);
6792 gimple *stmt;
6793 if (code == BIT_FIELD_REF)
6794 stmt = gimple_build_assign (res, code,
6795 build3 (code, type, op0, op1, op2));
6796 else
6797 stmt = gimple_build_assign (res, code, op0, op1, op2);
6798 gimple_set_location (stmt, loc);
6799 gimple_seq_add_stmt_without_update (seq, stmt);
6800 }
6801 return res;
6802 }
6803
6804 /* Build the call FN (ARG0) with a result of type TYPE
6805 (or no result if TYPE is void) with location LOC,
6806 simplifying it first if possible. Returns the built
6807 expression value (or NULL_TREE if TYPE is void) and appends
6808 statements possibly defining it to SEQ. */
6809
6810 tree
6811 gimple_build (gimple_seq *seq, location_t loc,
6812 enum built_in_function fn, tree type, tree arg0)
6813 {
6814 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6815 if (!res)
6816 {
6817 tree decl = builtin_decl_implicit (fn);
6818 gimple *stmt = gimple_build_call (decl, 1, arg0);
6819 if (!VOID_TYPE_P (type))
6820 {
6821 res = create_tmp_reg_or_ssa_name (type);
6822 gimple_call_set_lhs (stmt, res);
6823 }
6824 gimple_set_location (stmt, loc);
6825 gimple_seq_add_stmt_without_update (seq, stmt);
6826 }
6827 return res;
6828 }
6829
6830 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6831 (or no result if TYPE is void) with location LOC,
6832 simplifying it first if possible. Returns the built
6833 expression value (or NULL_TREE if TYPE is void) and appends
6834 statements possibly defining it to SEQ. */
6835
6836 tree
6837 gimple_build (gimple_seq *seq, location_t loc,
6838 enum built_in_function fn, tree type, tree arg0, tree arg1)
6839 {
6840 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6841 if (!res)
6842 {
6843 tree decl = builtin_decl_implicit (fn);
6844 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6845 if (!VOID_TYPE_P (type))
6846 {
6847 res = create_tmp_reg_or_ssa_name (type);
6848 gimple_call_set_lhs (stmt, res);
6849 }
6850 gimple_set_location (stmt, loc);
6851 gimple_seq_add_stmt_without_update (seq, stmt);
6852 }
6853 return res;
6854 }
6855
6856 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6857 (or no result if TYPE is void) with location LOC,
6858 simplifying it first if possible. Returns the built
6859 expression value (or NULL_TREE if TYPE is void) and appends
6860 statements possibly defining it to SEQ. */
6861
6862 tree
6863 gimple_build (gimple_seq *seq, location_t loc,
6864 enum built_in_function fn, tree type,
6865 tree arg0, tree arg1, tree arg2)
6866 {
6867 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6868 seq, gimple_build_valueize);
6869 if (!res)
6870 {
6871 tree decl = builtin_decl_implicit (fn);
6872 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6873 if (!VOID_TYPE_P (type))
6874 {
6875 res = create_tmp_reg_or_ssa_name (type);
6876 gimple_call_set_lhs (stmt, res);
6877 }
6878 gimple_set_location (stmt, loc);
6879 gimple_seq_add_stmt_without_update (seq, stmt);
6880 }
6881 return res;
6882 }
6883
6884 /* Build the conversion (TYPE) OP with a result of type TYPE
6885 with location LOC if such a conversion is necessary in GIMPLE,
6886 simplifying it first.
6887 Returns the built expression value and appends
6888 statements possibly defining it to SEQ. */
6889
6890 tree
6891 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6892 {
6893 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6894 return op;
6895 return gimple_build (seq, loc, NOP_EXPR, type, op);
6896 }
6897
6898 /* Build the conversion (ptrofftype) OP with a result of a type
6899 compatible with ptrofftype with location LOC if such a conversion
6900 is necessary in GIMPLE, simplifying it first.
6901 Returns the built expression value and appends
6902 statements possibly defining it to SEQ. */
6903
6904 tree
6905 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6906 {
6907 if (ptrofftype_p (TREE_TYPE (op)))
6908 return op;
6909 return gimple_convert (seq, loc, sizetype, op);
6910 }
6911
6912 /* Return true if the result of assignment STMT is known to be non-negative.
6913 If the return value is based on the assumption that signed overflow is
6914 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6915 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6916
6917 static bool
6918 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6919 int depth)
6920 {
6921 enum tree_code code = gimple_assign_rhs_code (stmt);
6922 switch (get_gimple_rhs_class (code))
6923 {
6924 case GIMPLE_UNARY_RHS:
6925 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6926 gimple_expr_type (stmt),
6927 gimple_assign_rhs1 (stmt),
6928 strict_overflow_p, depth);
6929 case GIMPLE_BINARY_RHS:
6930 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6931 gimple_expr_type (stmt),
6932 gimple_assign_rhs1 (stmt),
6933 gimple_assign_rhs2 (stmt),
6934 strict_overflow_p, depth);
6935 case GIMPLE_TERNARY_RHS:
6936 return false;
6937 case GIMPLE_SINGLE_RHS:
6938 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6939 strict_overflow_p, depth);
6940 case GIMPLE_INVALID_RHS:
6941 break;
6942 }
6943 gcc_unreachable ();
6944 }
6945
6946 /* Return true if the return value of call STMT is known to be non-negative.
6947 If the return value is based on the assumption that signed overflow is
6948 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6949 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6950
6951 static bool
6952 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6953 int depth)
6954 {
6955 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6956 gimple_call_arg (stmt, 0) : NULL_TREE;
6957 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6958 gimple_call_arg (stmt, 1) : NULL_TREE;
6959
6960 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6961 gimple_call_combined_fn (stmt),
6962 arg0,
6963 arg1,
6964 strict_overflow_p, depth);
6965 }
6966
6967 /* Return true if the result of PHI STMT is known to be non-negative.
6968 If the return value is based on the assumption that signed overflow is
6969 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6970 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6971
6972 static bool
6973 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6974 int depth)
6975 {
6976 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6977 {
6978 tree arg = gimple_phi_arg_def (stmt, i);
6979 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6980 return false;
6981 }
6982 return true;
6983 }
6984
6985 /* Return true if STMT is known to compute a non-negative value.
6986 If the return value is based on the assumption that signed overflow is
6987 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6988 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6989
6990 bool
6991 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6992 int depth)
6993 {
6994 switch (gimple_code (stmt))
6995 {
6996 case GIMPLE_ASSIGN:
6997 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6998 depth);
6999 case GIMPLE_CALL:
7000 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7001 depth);
7002 case GIMPLE_PHI:
7003 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7004 depth);
7005 default:
7006 return false;
7007 }
7008 }
7009
7010 /* Return true if the floating-point value computed by assignment STMT
7011 is known to have an integer value. We also allow +Inf, -Inf and NaN
7012 to be considered integer values. Return false for signaling NaN.
7013
7014 DEPTH is the current nesting depth of the query. */
7015
7016 static bool
7017 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7018 {
7019 enum tree_code code = gimple_assign_rhs_code (stmt);
7020 switch (get_gimple_rhs_class (code))
7021 {
7022 case GIMPLE_UNARY_RHS:
7023 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7024 gimple_assign_rhs1 (stmt), depth);
7025 case GIMPLE_BINARY_RHS:
7026 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7027 gimple_assign_rhs1 (stmt),
7028 gimple_assign_rhs2 (stmt), depth);
7029 case GIMPLE_TERNARY_RHS:
7030 return false;
7031 case GIMPLE_SINGLE_RHS:
7032 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7033 case GIMPLE_INVALID_RHS:
7034 break;
7035 }
7036 gcc_unreachable ();
7037 }
7038
7039 /* Return true if the floating-point value computed by call STMT is known
7040 to have an integer value. We also allow +Inf, -Inf and NaN to be
7041 considered integer values. Return false for signaling NaN.
7042
7043 DEPTH is the current nesting depth of the query. */
7044
7045 static bool
7046 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7047 {
7048 tree arg0 = (gimple_call_num_args (stmt) > 0
7049 ? gimple_call_arg (stmt, 0)
7050 : NULL_TREE);
7051 tree arg1 = (gimple_call_num_args (stmt) > 1
7052 ? gimple_call_arg (stmt, 1)
7053 : NULL_TREE);
7054 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7055 arg0, arg1, depth);
7056 }
7057
7058 /* Return true if the floating-point result of phi STMT is known to have
7059 an integer value. We also allow +Inf, -Inf and NaN to be considered
7060 integer values. Return false for signaling NaN.
7061
7062 DEPTH is the current nesting depth of the query. */
7063
7064 static bool
7065 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7066 {
7067 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7068 {
7069 tree arg = gimple_phi_arg_def (stmt, i);
7070 if (!integer_valued_real_single_p (arg, depth + 1))
7071 return false;
7072 }
7073 return true;
7074 }
7075
7076 /* Return true if the floating-point value computed by STMT is known
7077 to have an integer value. We also allow +Inf, -Inf and NaN to be
7078 considered integer values. Return false for signaling NaN.
7079
7080 DEPTH is the current nesting depth of the query. */
7081
7082 bool
7083 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7084 {
7085 switch (gimple_code (stmt))
7086 {
7087 case GIMPLE_ASSIGN:
7088 return gimple_assign_integer_valued_real_p (stmt, depth);
7089 case GIMPLE_CALL:
7090 return gimple_call_integer_valued_real_p (stmt, depth);
7091 case GIMPLE_PHI:
7092 return gimple_phi_integer_valued_real_p (stmt, depth);
7093 default:
7094 return false;
7095 }
7096 }