/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58
59
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-NULL) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
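/* A minimal illustrative sketch (not part of the original sources) of
   case 1 above: a vtable constructor known in this unit can point at a
   method whose body lives in another unit.

     // unit1.C defines the key method, so the vtable is emitted there.
     struct S { virtual void f (); };
     // unit2.C still knows the vtable layout and may reference &S::f,
     // a decl that is both TREE_STATIC and DECL_EXTERNAL here.  */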

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a
     definition or if the definition is going to be output in another
     partition.  Bypass this when gimplifying; all needed functions will
     be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions it may be tempting to not give up, because the copy will
     be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
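/* Illustrative example (an assumed input, not from the original
   sources): an initializer value such as  &a p+ 4  (a POINTER_PLUS_EXPR
   with constant offset) is rewritten below to  &MEM[&a + 4]  so that the
   result satisfies is_gimple_min_invariant.  */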

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs, the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
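/* Illustrative sketch of the VDEF chain described above (the SSA names
   are assumptions for the example, not from the original sources):
   replacing  lhs = foo ()  carrying  VUSE .MEM_1 / VDEF .MEM_2  with two
   stores yields
     store1   VUSE .MEM_1 / VDEF .MEM_3   <- new intermediate SSA name
     store2   VUSE .MEM_3 / VDEF .MEM_2   <- reuses the original VDEF
   so downstream statements keep seeing .MEM_2 as their reaching
   definition.  */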

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
   (like memmove).  */
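/* For example (illustrative of the ENDP encoding above):
     memcpy (d, s, n)   is folded with ENDP 0 and evaluates to d,
     mempcpy (d, s, n)  with ENDP 1 evaluates to d + n,
     an ENDP 2 caller evaluates to d + n - 1 (stpcpy-style),
     memmove (d, s, n)  with ENDP 3 evaluates to d, and the source and
     destination ranges may overlap.  */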

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we
         copy pointers as a wide integer) and may also result in huge
         function size because of the inlined bounds copy.  Thus don't
         inline for functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if we copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
         and then all stores, inline it that way.  Currently "efficiently"
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
                      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align)
                           && (optab_handler (movmisalign_optab,
                                              TYPE_MODE (type))
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem));
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what the old code did.  Is the testing for pointer
             types really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;
              bool reverse;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize, &reverse);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize, &reverse);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of the up-to-VOID return value from
         malloc.  Perhaps we ought to inherit the type from a non-VOID
         argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         will no longer have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              if (gimple_in_ssa_p (cfun))
                srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
              else
                srcvar = create_tmp_reg (TREE_TYPE (srcvar));
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI, setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }
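  /* For instance (illustrative): with c = 0xab and length = 4 the
     replication above produces cval = 0xabababab, so the single store
     below writes the same bytes memset would have written.  The
     (cval << 31) << 1 split avoids an undefined shift by 32 when
     HOST_WIDE_INT is only 32 bits wide.  */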

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Return the string length, maximum string length, or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be returned, 1 for the maximum
   string length, and 2 for the maximum value ARG can have.  */
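/* Illustrative examples (assumed inputs, not from the original sources):
   for ARG pointing to the literal "abc", TYPE 0 and TYPE 1 both yield 3;
   TYPE 2 is used when ARG is an integer (e.g. a length argument) and
   yields the maximum value it can take, which below requires a
   non-negative INTEGER_CST.  */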

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_maxval_strlen (op2, length, visited, type)
                 && get_maxval_strlen (op3, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}


/* Fold a function call to the builtin strcpy with arguments DEST and
   SRC.  Return false if no simplification can be made.  */
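/* Illustrative transformation (assuming the source length is known at
   compile time):
     strcpy (d, "hello")   becomes   memcpy (d, "hello", 6)
   i.e. the known string length plus one for the terminating NUL.  */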

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a function call to the builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into the builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise the call is
   simplified in place (either dropped entirely when SRC is known to be
   the empty string, or split into strlen and memcpy calls) and true is
   returned.  */
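/* Illustrative shape of the split (assuming strlen (src) is known to be
   LEN at compile time):
     strcat (d, s)
   becomes, in effect,
     tmp = strlen (d);
     memcpy (d + tmp, s, LEN + 1);   // the +1 copies the terminating NUL
   as built statement by statement below.  */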

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable, don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */
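/* Illustrative transformations (assuming a FILE *f and an unused return
   value, which the code below requires):
     fputs ("", f)       is removed entirely,
     fputs ("h", f)      becomes  fputc ('h', f),
     fputs ("hello", f)  becomes  fwrite ("hello", 1, 5, f)
   with the fwrite form used only when optimizing for speed.  */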

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl = gimple_build_call (fn_fputc, 2,
                                              build_int_cst
                                                (integer_type_node, p[0]),
                                              arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
                                          size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin; whether the return
   value is ignored and the maximum length of the LEN argument are
   determined from the call itself.  */
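/* Illustrative folding (with __bos standing for the object size the
   compiler computed for the destination, an assumption for the
   example):
     __memcpy_chk (d, s, n, __bos)   becomes   memcpy (d, s, n)
   once SIZE is known to be at least as large as LEN, or when SIZE is
   all ones, meaning the object size is unknown.  */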

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          gimple_seq stmts = NULL;
          len = gimple_convert_to_ptrofftype (&stmts, loc, len);
          tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dest), dest, len);
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin; whether the return
   value is ignored and the maximum length of the string passed as
   the second argument are determined from the call itself.  */
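/* Illustrative folding (with __bos again standing for the computed
   object size):
     __strcpy_chk (d, s, __bos)   becomes   strcpy (d, s)
   when __bos is all ones (unknown) or strictly greater than the known
   length of S (leaving room for the terminating NUL); a non-constant
   but side-effect-free length can instead be routed through
   __memcpy_chk below.  */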
1799
1800 static bool
1801 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1802 tree dest,
1803 tree src, tree size,
1804 enum built_in_function fcode)
1805 {
1806 gimple *stmt = gsi_stmt (*gsi);
1807 location_t loc = gimple_location (stmt);
1808 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1809 tree len, fn;
1810
1811 /* If SRC and DEST are the same (and not volatile), return DEST. */
1812 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1813 {
1814 replace_call_with_value (gsi, dest);
1815 return true;
1816 }
1817
1818 if (! tree_fits_uhwi_p (size))
1819 return false;
1820
1821 tree maxlen = get_maxval_strlen (src, 1);
1822 if (! integer_all_onesp (size))
1823 {
1824 len = c_strlen (src, 1);
1825 if (! len || ! tree_fits_uhwi_p (len))
1826 {
1827 /* If LEN is not constant, try MAXLEN too.
1828 	     For MAXLEN only allow optimizing into the non-_chk function
1829 	     if SIZE is >= MAXLEN; never convert to __chk_fail ().  */
1830 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1831 {
1832 if (fcode == BUILT_IN_STPCPY_CHK)
1833 {
1834 if (! ignore)
1835 return false;
1836
1837 /* If return value of __stpcpy_chk is ignored,
1838 optimize into __strcpy_chk. */
1839 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1840 if (!fn)
1841 return false;
1842
1843 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1844 replace_call_with_call_and_fold (gsi, repl);
1845 return true;
1846 }
1847
1848 if (! len || TREE_SIDE_EFFECTS (len))
1849 return false;
1850
1851 /* If c_strlen returned something, but not a constant,
1852 transform __strcpy_chk into __memcpy_chk. */
1853 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1854 if (!fn)
1855 return false;
1856
1857 gimple_seq stmts = NULL;
1858 len = gimple_convert (&stmts, loc, size_type_node, len);
1859 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1860 build_int_cst (size_type_node, 1));
1861 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1862 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1863 replace_call_with_call_and_fold (gsi, repl);
1864 return true;
1865 }
1866 }
1867 else
1868 maxlen = len;
1869
1870 if (! tree_int_cst_lt (maxlen, size))
1871 return false;
1872 }
1873
1874 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1875 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1876 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1877 if (!fn)
1878 return false;
1879
1880 gimple *repl = gimple_build_call (fn, 2, dest, src);
1881 replace_call_with_call_and_fold (gsi, repl);
1882 return true;
1883 }
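
/* For example (a sketch; the source is a literal of length 5):

     __builtin___strcpy_chk (dest, "hello", 16)

   is folded by the function above into

     strcpy (dest, "hello")

   because 5 < 16, so the runtime check can never fail.  */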
1884
1885 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
1886    are the arguments to the call.  FCODE is the BUILT_IN_* code of the
1887    builtin.  Whether the return value can be ignored is determined from the
1888    presence of an LHS; the maximum value of LEN is computed where possible.  */
1889
1890 static bool
1891 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1892 tree dest, tree src,
1893 tree len, tree size,
1894 enum built_in_function fcode)
1895 {
1896 gimple *stmt = gsi_stmt (*gsi);
1897 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1898 tree fn;
1899
1900 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1901 {
1902 /* If return value of __stpncpy_chk is ignored,
1903 optimize into __strncpy_chk. */
1904 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1905 if (fn)
1906 {
1907 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1908 replace_call_with_call_and_fold (gsi, repl);
1909 return true;
1910 }
1911 }
1912
1913 if (! tree_fits_uhwi_p (size))
1914 return false;
1915
1916 tree maxlen = get_maxval_strlen (len, 2);
1917 if (! integer_all_onesp (size))
1918 {
1919 if (! tree_fits_uhwi_p (len))
1920 {
1921 /* If LEN is not constant, try MAXLEN too.
1922 	     For MAXLEN only allow optimizing into the non-_chk function
1923 	     if SIZE is >= MAXLEN; never convert to __chk_fail ().  */
1924 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1925 return false;
1926 }
1927 else
1928 maxlen = len;
1929
1930 if (tree_int_cst_lt (size, maxlen))
1931 return false;
1932 }
1933
1934 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1935 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1936 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1937 if (!fn)
1938 return false;
1939
1940 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1941 replace_call_with_call_and_fold (gsi, repl);
1942 return true;
1943 }
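
/* For example (a sketch; LEN and SIZE compile-time constants):

     __builtin___strncpy_chk (dest, src, 8, 32)

   is folded by the function above into

     strncpy (dest, src, 8)

   because at most 8 bytes are written into an object of size 32.  */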
1944
1945 /* Fold a call to the stpcpy builtin at *GSI, with arguments DEST and SRC
1946    taken from the call statement.  Return true if any simplification was made.  */
1947
1948 static bool
1949 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1950 {
1951 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1952 location_t loc = gimple_location (stmt);
1953 tree dest = gimple_call_arg (stmt, 0);
1954 tree src = gimple_call_arg (stmt, 1);
1955 tree fn, len, lenp1;
1956
1957 /* If the result is unused, replace stpcpy with strcpy. */
1958 if (gimple_call_lhs (stmt) == NULL_TREE)
1959 {
1960 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1961 if (!fn)
1962 return false;
1963 gimple_call_set_fndecl (stmt, fn);
1964 fold_stmt (gsi);
1965 return true;
1966 }
1967
1968 len = c_strlen (src, 1);
1969 if (!len
1970 || TREE_CODE (len) != INTEGER_CST)
1971 return false;
1972
1973 if (optimize_function_for_size_p (cfun)
1974 /* If length is zero it's small enough. */
1975 && !integer_zerop (len))
1976 return false;
1977
1978 /* If the source has a known length replace stpcpy with memcpy. */
1979 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1980 if (!fn)
1981 return false;
1982
1983 gimple_seq stmts = NULL;
1984 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1985 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1986 tem, build_int_cst (size_type_node, 1));
1987 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1988 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1989 gimple_set_vuse (repl, gimple_vuse (stmt));
1990 gimple_set_vdef (repl, gimple_vdef (stmt));
1991 if (gimple_vdef (repl)
1992 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1993 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1994 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1995 /* Replace the result with dest + len. */
1996 stmts = NULL;
1997 tem = gimple_convert (&stmts, loc, sizetype, len);
1998 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1999 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2000 POINTER_PLUS_EXPR, dest, tem);
2001 gsi_replace (gsi, ret, false);
2002 /* Finally fold the memcpy call. */
2003 gimple_stmt_iterator gsi2 = *gsi;
2004 gsi_prev (&gsi2);
2005 fold_stmt (&gsi2);
2006 return true;
2007 }
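
/* For example (a sketch; the source length is the constant 3):

     p = stpcpy (dest, "abc");

   is folded by the function above into the equivalent of

     memcpy (dest, "abc", 4);
     p = dest + 3;

   where 4 is the string length plus one for the terminating NUL.
   If the result P is unused, stpcpy is simply replaced by strcpy.  */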
2008
2009 /* Fold a call to a __{,v}snprintf_chk builtin at *GSI.  Return false
2010    if a normal call should be emitted rather than transforming the
2011    call inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
2012    BUILT_IN_VSNPRINTF_CHK.  The maximum value of the length argument
2013    is computed when it is not constant.  */
2014
2015 static bool
2016 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2017 enum built_in_function fcode)
2018 {
2019 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2020 tree dest, size, len, fn, fmt, flag;
2021 const char *fmt_str;
2022
2023 /* Verify the required arguments in the original call. */
2024 if (gimple_call_num_args (stmt) < 5)
2025 return false;
2026
2027 dest = gimple_call_arg (stmt, 0);
2028 len = gimple_call_arg (stmt, 1);
2029 flag = gimple_call_arg (stmt, 2);
2030 size = gimple_call_arg (stmt, 3);
2031 fmt = gimple_call_arg (stmt, 4);
2032
2033 if (! tree_fits_uhwi_p (size))
2034 return false;
2035
2036 if (! integer_all_onesp (size))
2037 {
2038 tree maxlen = get_maxval_strlen (len, 2);
2039 if (! tree_fits_uhwi_p (len))
2040 {
2041 /* If LEN is not constant, try MAXLEN too.
2042 	     For MAXLEN only allow optimizing into the non-_chk function
2043 	     if SIZE is >= MAXLEN; never convert to __chk_fail ().  */
2044 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2045 return false;
2046 }
2047 else
2048 maxlen = len;
2049
2050 if (tree_int_cst_lt (size, maxlen))
2051 return false;
2052 }
2053
2054 if (!init_target_chars ())
2055 return false;
2056
2057 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2058 or if format doesn't contain % chars or is "%s". */
2059 if (! integer_zerop (flag))
2060 {
2061 fmt_str = c_getstr (fmt);
2062 if (fmt_str == NULL)
2063 return false;
2064 if (strchr (fmt_str, target_percent) != NULL
2065 && strcmp (fmt_str, target_percent_s))
2066 return false;
2067 }
2068
2069 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2070 available. */
2071 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2072 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2073 if (!fn)
2074 return false;
2075
2076   /* Replace the called function and the first 5 arguments by 3, retaining
2077      the trailing varargs.  */
2078 gimple_call_set_fndecl (stmt, fn);
2079 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2080 gimple_call_set_arg (stmt, 0, dest);
2081 gimple_call_set_arg (stmt, 1, len);
2082 gimple_call_set_arg (stmt, 2, fmt);
2083 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2084 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2085 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2086 fold_stmt (gsi);
2087 return true;
2088 }
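
/* For example (a sketch; the object size 64 covers LEN == 10):

     __builtin___snprintf_chk (buf, 10, 0, 64, "%d-%d", a, b)

   is folded by the function above into

     snprintf (buf, 10, "%d-%d", a, b)

   dropping the flag and object-size arguments once the check is
   known to succeed.  */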
2089
2090 /* Fold a call to a __{,v}sprintf_chk builtin at *GSI.
2091    Return false if a normal call should be emitted rather than
2092    transforming the call inline.  FCODE is either BUILT_IN_SPRINTF_CHK
2093    or BUILT_IN_VSPRINTF_CHK.  */
2094
2095 static bool
2096 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2097 enum built_in_function fcode)
2098 {
2099 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2100 tree dest, size, len, fn, fmt, flag;
2101 const char *fmt_str;
2102 unsigned nargs = gimple_call_num_args (stmt);
2103
2104 /* Verify the required arguments in the original call. */
2105 if (nargs < 4)
2106 return false;
2107 dest = gimple_call_arg (stmt, 0);
2108 flag = gimple_call_arg (stmt, 1);
2109 size = gimple_call_arg (stmt, 2);
2110 fmt = gimple_call_arg (stmt, 3);
2111
2112 if (! tree_fits_uhwi_p (size))
2113 return false;
2114
2115 len = NULL_TREE;
2116
2117 if (!init_target_chars ())
2118 return false;
2119
2120 /* Check whether the format is a literal string constant. */
2121 fmt_str = c_getstr (fmt);
2122 if (fmt_str != NULL)
2123 {
2124 /* If the format doesn't contain % args or %%, we know the size. */
2125 if (strchr (fmt_str, target_percent) == 0)
2126 {
2127 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2128 len = build_int_cstu (size_type_node, strlen (fmt_str));
2129 }
2130 /* If the format is "%s" and first ... argument is a string literal,
2131 we know the size too. */
2132 else if (fcode == BUILT_IN_SPRINTF_CHK
2133 && strcmp (fmt_str, target_percent_s) == 0)
2134 {
2135 tree arg;
2136
2137 if (nargs == 5)
2138 {
2139 arg = gimple_call_arg (stmt, 4);
2140 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2141 {
2142 len = c_strlen (arg, 1);
2143 if (! len || ! tree_fits_uhwi_p (len))
2144 len = NULL_TREE;
2145 }
2146 }
2147 }
2148 }
2149
2150 if (! integer_all_onesp (size))
2151 {
2152 if (! len || ! tree_int_cst_lt (len, size))
2153 return false;
2154 }
2155
2156 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2157 or if format doesn't contain % chars or is "%s". */
2158 if (! integer_zerop (flag))
2159 {
2160 if (fmt_str == NULL)
2161 return false;
2162 if (strchr (fmt_str, target_percent) != NULL
2163 && strcmp (fmt_str, target_percent_s))
2164 return false;
2165 }
2166
2167 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2168 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2169 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2170 if (!fn)
2171 return false;
2172
2173   /* Replace the called function and the first 4 arguments by 2, retaining
2174      the trailing varargs.  */
2175 gimple_call_set_fndecl (stmt, fn);
2176 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2177 gimple_call_set_arg (stmt, 0, dest);
2178 gimple_call_set_arg (stmt, 1, fmt);
2179 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2180 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2181 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2182 fold_stmt (gsi);
2183 return true;
2184 }
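
/* For example (a sketch; the format contains no conversions):

     __builtin___sprintf_chk (buf, 0, 16, "hello")

   is folded by the function above into

     sprintf (buf, "hello")

   because the output length 5 is known to be smaller than the
   object size 16.  */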
2185
2186 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2187 ORIG may be null if this is a 2-argument call. We don't attempt to
2188 simplify calls with more than 3 arguments.
2189
2190    Return false if no simplification was possible, otherwise return true.
2191    When the call has an LHS, the LHS is replaced by the known length of
2192    the written string.  */
2193
2194 static bool
2195 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2196 {
2197 gimple *stmt = gsi_stmt (*gsi);
2198 tree dest = gimple_call_arg (stmt, 0);
2199 tree fmt = gimple_call_arg (stmt, 1);
2200 tree orig = NULL_TREE;
2201 const char *fmt_str = NULL;
2202
2203 /* Verify the required arguments in the original call. We deal with two
2204      types of sprintf() calls: 'sprintf (dest, fmt)' and
2205      'sprintf (dest, "%s", orig)'.  */
2206 if (gimple_call_num_args (stmt) > 3)
2207 return false;
2208
2209 if (gimple_call_num_args (stmt) == 3)
2210 orig = gimple_call_arg (stmt, 2);
2211
2212 /* Check whether the format is a literal string constant. */
2213 fmt_str = c_getstr (fmt);
2214 if (fmt_str == NULL)
2215 return false;
2216
2217 if (!init_target_chars ())
2218 return false;
2219
2220 /* If the format doesn't contain % args or %%, use strcpy. */
2221 if (strchr (fmt_str, target_percent) == NULL)
2222 {
2223 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2224
2225 if (!fn)
2226 return false;
2227
2228 /* Don't optimize sprintf (buf, "abc", ptr++). */
2229 if (orig)
2230 return false;
2231
2232 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2233 'format' is known to contain no % formats. */
2234 gimple_seq stmts = NULL;
2235 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2236 gimple_seq_add_stmt_without_update (&stmts, repl);
2237 if (gimple_call_lhs (stmt))
2238 {
2239 repl = gimple_build_assign (gimple_call_lhs (stmt),
2240 build_int_cst (integer_type_node,
2241 strlen (fmt_str)));
2242 gimple_seq_add_stmt_without_update (&stmts, repl);
2243 gsi_replace_with_seq_vops (gsi, stmts);
2244 /* gsi now points at the assignment to the lhs, get a
2245 stmt iterator to the memcpy call.
2246 ??? We can't use gsi_for_stmt as that doesn't work when the
2247 CFG isn't built yet. */
2248 gimple_stmt_iterator gsi2 = *gsi;
2249 gsi_prev (&gsi2);
2250 fold_stmt (&gsi2);
2251 }
2252 else
2253 {
2254 gsi_replace_with_seq_vops (gsi, stmts);
2255 fold_stmt (gsi);
2256 }
2257 return true;
2258 }
2259
2260   /* If the format is "%s", convert the call to strcpy; a used result is replaced by the known length of ORIG.  */
2261 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2262 {
2263 tree fn;
2264 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2265
2266 if (!fn)
2267 return false;
2268
2269 /* Don't crash on sprintf (str1, "%s"). */
2270 if (!orig)
2271 return false;
2272
2273 tree orig_len = NULL_TREE;
2274 if (gimple_call_lhs (stmt))
2275 {
2276 orig_len = get_maxval_strlen (orig, 0);
2277 if (!orig_len)
2278 return false;
2279 }
2280
2281 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2282 gimple_seq stmts = NULL;
2283 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 if (gimple_call_lhs (stmt))
2286 {
2287 if (!useless_type_conversion_p (integer_type_node,
2288 TREE_TYPE (orig_len)))
2289 orig_len = fold_convert (integer_type_node, orig_len);
2290 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2291 gimple_seq_add_stmt_without_update (&stmts, repl);
2292 gsi_replace_with_seq_vops (gsi, stmts);
2293 /* gsi now points at the assignment to the lhs, get a
2294 stmt iterator to the memcpy call.
2295 ??? We can't use gsi_for_stmt as that doesn't work when the
2296 CFG isn't built yet. */
2297 gimple_stmt_iterator gsi2 = *gsi;
2298 gsi_prev (&gsi2);
2299 fold_stmt (&gsi2);
2300 }
2301 else
2302 {
2303 gsi_replace_with_seq_vops (gsi, stmts);
2304 fold_stmt (gsi);
2305 }
2306 return true;
2307 }
2308 return false;
2309 }
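
/* For example (a sketch):

     sprintf (buf, "hello");

   becomes strcpy (buf, "hello"), and

     n = sprintf (buf, "%s", str);

   becomes strcpy (buf, str) followed by an assignment of the
   compile-time known length of STR to N; if that length cannot be
   determined, the call with an LHS is left alone.  */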
2310
2311 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2312 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2313 attempt to simplify calls with more than 4 arguments.
2314
2315    Return false if no simplification was possible, otherwise return true.
2316    When the call has an LHS, the LHS is replaced by the known length of
2317    the written string.  */
2318
2319 static bool
2320 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2321 {
2322 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2323 tree dest = gimple_call_arg (stmt, 0);
2324 tree destsize = gimple_call_arg (stmt, 1);
2325 tree fmt = gimple_call_arg (stmt, 2);
2326 tree orig = NULL_TREE;
2327 const char *fmt_str = NULL;
2328
2329 if (gimple_call_num_args (stmt) > 4)
2330 return false;
2331
2332 if (gimple_call_num_args (stmt) == 4)
2333 orig = gimple_call_arg (stmt, 3);
2334
2335 if (!tree_fits_uhwi_p (destsize))
2336 return false;
2337 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2338
2339 /* Check whether the format is a literal string constant. */
2340 fmt_str = c_getstr (fmt);
2341 if (fmt_str == NULL)
2342 return false;
2343
2344 if (!init_target_chars ())
2345 return false;
2346
2347 /* If the format doesn't contain % args or %%, use strcpy. */
2348 if (strchr (fmt_str, target_percent) == NULL)
2349 {
2350 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2351 if (!fn)
2352 return false;
2353
2354 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2355 if (orig)
2356 return false;
2357
2358 /* We could expand this as
2359 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2360 or to
2361 memcpy (str, fmt_with_nul_at_cstm1, cst);
2362 but in the former case that might increase code size
2363 and in the latter case grow .rodata section too much.
2364 So punt for now. */
2365 size_t len = strlen (fmt_str);
2366 if (len >= destlen)
2367 return false;
2368
2369 gimple_seq stmts = NULL;
2370 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2371 gimple_seq_add_stmt_without_update (&stmts, repl);
2372 if (gimple_call_lhs (stmt))
2373 {
2374 repl = gimple_build_assign (gimple_call_lhs (stmt),
2375 build_int_cst (integer_type_node, len));
2376 gimple_seq_add_stmt_without_update (&stmts, repl);
2377 gsi_replace_with_seq_vops (gsi, stmts);
2378 /* gsi now points at the assignment to the lhs, get a
2379 stmt iterator to the memcpy call.
2380 ??? We can't use gsi_for_stmt as that doesn't work when the
2381 CFG isn't built yet. */
2382 gimple_stmt_iterator gsi2 = *gsi;
2383 gsi_prev (&gsi2);
2384 fold_stmt (&gsi2);
2385 }
2386 else
2387 {
2388 gsi_replace_with_seq_vops (gsi, stmts);
2389 fold_stmt (gsi);
2390 }
2391 return true;
2392 }
2393
2394   /* If the format is "%s", convert the call to strcpy when the known source length fits; a used result is replaced by that length.  */
2395 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2396 {
2397 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2398 if (!fn)
2399 return false;
2400
2401 /* Don't crash on snprintf (str1, cst, "%s"). */
2402 if (!orig)
2403 return false;
2404
2405 tree orig_len = get_maxval_strlen (orig, 0);
2406 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2407 return false;
2408
2409 /* We could expand this as
2410 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2411 or to
2412 memcpy (str1, str2_with_nul_at_cstm1, cst);
2413 but in the former case that might increase code size
2414 and in the latter case grow .rodata section too much.
2415 So punt for now. */
2416 if (compare_tree_int (orig_len, destlen) >= 0)
2417 return false;
2418
2419 /* Convert snprintf (str1, cst, "%s", str2) into
2420 strcpy (str1, str2) if strlen (str2) < cst. */
2421 gimple_seq stmts = NULL;
2422 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 if (gimple_call_lhs (stmt))
2425 {
2426 if (!useless_type_conversion_p (integer_type_node,
2427 TREE_TYPE (orig_len)))
2428 orig_len = fold_convert (integer_type_node, orig_len);
2429 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2430 gimple_seq_add_stmt_without_update (&stmts, repl);
2431 gsi_replace_with_seq_vops (gsi, stmts);
2432 /* gsi now points at the assignment to the lhs, get a
2433 stmt iterator to the memcpy call.
2434 ??? We can't use gsi_for_stmt as that doesn't work when the
2435 CFG isn't built yet. */
2436 gimple_stmt_iterator gsi2 = *gsi;
2437 gsi_prev (&gsi2);
2438 fold_stmt (&gsi2);
2439 }
2440 else
2441 {
2442 gsi_replace_with_seq_vops (gsi, stmts);
2443 fold_stmt (gsi);
2444 }
2445 return true;
2446 }
2447 return false;
2448 }
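
/* For example (a sketch; all written lengths are known to be smaller
   than the destination size 8):

     snprintf (buf, 8, "hi");

   becomes strcpy (buf, "hi"), and

     snprintf (buf, 8, "%s", "hello");

   becomes strcpy (buf, "hello"), because in both cases the number of
   bytes written is known to fit into the destination.  */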
2449
2450 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2451 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2452 more than 3 arguments, and ARG may be null in the 2-argument case.
2453
2454    Return false if no simplification was possible, otherwise return true.
2455    FCODE is the BUILT_IN_* code of the function to be
2456    simplified.  */
2457
2458 static bool
2459 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2460 tree fp, tree fmt, tree arg,
2461 enum built_in_function fcode)
2462 {
2463 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2464 tree fn_fputc, fn_fputs;
2465 const char *fmt_str = NULL;
2466
2467 /* If the return value is used, don't do the transformation. */
2468 if (gimple_call_lhs (stmt) != NULL_TREE)
2469 return false;
2470
2471 /* Check whether the format is a literal string constant. */
2472 fmt_str = c_getstr (fmt);
2473 if (fmt_str == NULL)
2474 return false;
2475
2476 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2477 {
2478 /* If we're using an unlocked function, assume the other
2479 unlocked functions exist explicitly. */
2480 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2481 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2482 }
2483 else
2484 {
2485 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2486 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2487 }
2488
2489 if (!init_target_chars ())
2490 return false;
2491
2492   /* If the format doesn't contain % args or %%, use fputs.  */
2493 if (strchr (fmt_str, target_percent) == NULL)
2494 {
2495 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2496 && arg)
2497 return false;
2498
2499 /* If the format specifier was "", fprintf does nothing. */
2500 if (fmt_str[0] == '\0')
2501 {
2502 replace_call_with_value (gsi, NULL_TREE);
2503 return true;
2504 }
2505
2506 /* When "string" doesn't contain %, replace all cases of
2507 fprintf (fp, string) with fputs (string, fp). The fputs
2508 builtin will take care of special cases like length == 1. */
2509 if (fn_fputs)
2510 {
2511 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2512 replace_call_with_call_and_fold (gsi, repl);
2513 return true;
2514 }
2515 }
2516
2517 /* The other optimizations can be done only on the non-va_list variants. */
2518 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2519 return false;
2520
2521 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2522 else if (strcmp (fmt_str, target_percent_s) == 0)
2523 {
2524 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2525 return false;
2526 if (fn_fputs)
2527 {
2528 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2529 replace_call_with_call_and_fold (gsi, repl);
2530 return true;
2531 }
2532 }
2533
2534 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2535 else if (strcmp (fmt_str, target_percent_c) == 0)
2536 {
2537 if (!arg
2538 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2539 return false;
2540 if (fn_fputc)
2541 {
2542 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2543 replace_call_with_call_and_fold (gsi, repl);
2544 return true;
2545 }
2546 }
2547
2548 return false;
2549 }
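
/* For example (a sketch; the return values are unused):

     fprintf (fp, "hello");     becomes   fputs ("hello", fp);
     fprintf (fp, "%s", str);   becomes   fputs (str, fp);
     fprintf (fp, "%c", c);     becomes   fputc (c, fp);

   and fprintf (fp, "") is removed entirely.  */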
2550
2551 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2552 FMT and ARG are the arguments to the call; we don't fold cases with
2553 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2554
2555    Return false if no simplification was possible, otherwise return true.
2556    FCODE is the BUILT_IN_* code of the function to be
2557    simplified.  */
2558
2559 static bool
2560 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2561 tree arg, enum built_in_function fcode)
2562 {
2563 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2564 tree fn_putchar, fn_puts, newarg;
2565 const char *fmt_str = NULL;
2566
2567 /* If the return value is used, don't do the transformation. */
2568 if (gimple_call_lhs (stmt) != NULL_TREE)
2569 return false;
2570
2571 /* Check whether the format is a literal string constant. */
2572 fmt_str = c_getstr (fmt);
2573 if (fmt_str == NULL)
2574 return false;
2575
2576 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2577 {
2578 /* If we're using an unlocked function, assume the other
2579 unlocked functions exist explicitly. */
2580 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2581 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2582 }
2583 else
2584 {
2585 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2586 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2587 }
2588
2589 if (!init_target_chars ())
2590 return false;
2591
2592 if (strcmp (fmt_str, target_percent_s) == 0
2593 || strchr (fmt_str, target_percent) == NULL)
2594 {
2595 const char *str;
2596
2597 if (strcmp (fmt_str, target_percent_s) == 0)
2598 {
2599 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2600 return false;
2601
2602 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2603 return false;
2604
2605 str = c_getstr (arg);
2606 if (str == NULL)
2607 return false;
2608 }
2609 else
2610 {
2611 /* The format specifier doesn't contain any '%' characters. */
2612 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2613 && arg)
2614 return false;
2615 str = fmt_str;
2616 }
2617
2618 /* If the string was "", printf does nothing. */
2619 if (str[0] == '\0')
2620 {
2621 replace_call_with_value (gsi, NULL_TREE);
2622 return true;
2623 }
2624
2625 /* If the string has length of 1, call putchar. */
2626 if (str[1] == '\0')
2627 {
2628 	  /* Given printf ("c"), where c is any single character,
2629 	     convert "c"[0] to an int and pass that to the replacement
2630 	     function.  */
2631 newarg = build_int_cst (integer_type_node, str[0]);
2632 if (fn_putchar)
2633 {
2634 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2635 replace_call_with_call_and_fold (gsi, repl);
2636 return true;
2637 }
2638 }
2639 else
2640 {
2641 /* If the string was "string\n", call puts("string"). */
2642 size_t len = strlen (str);
2643 if ((unsigned char)str[len - 1] == target_newline
2644 && (size_t) (int) len == len
2645 && (int) len > 0)
2646 {
2647 char *newstr;
2648 tree offset_node, string_cst;
2649
2650 /* Create a NUL-terminated string that's one char shorter
2651 than the original, stripping off the trailing '\n'. */
2652 newarg = build_string_literal (len, str);
2653 string_cst = string_constant (newarg, &offset_node);
2654 gcc_checking_assert (string_cst
2655 && (TREE_STRING_LENGTH (string_cst)
2656 == (int) len)
2657 && integer_zerop (offset_node)
2658 && (unsigned char)
2659 TREE_STRING_POINTER (string_cst)[len - 1]
2660 == target_newline);
2661 /* build_string_literal creates a new STRING_CST,
2662 modify it in place to avoid double copying. */
2663 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2664 newstr[len - 1] = '\0';
2665 if (fn_puts)
2666 {
2667 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2668 replace_call_with_call_and_fold (gsi, repl);
2669 return true;
2670 }
2671 }
2672 else
2673 /* We'd like to arrange to call fputs(string,stdout) here,
2674 but we need stdout and don't have a way to get it yet. */
2675 return false;
2676 }
2677 }
2678
2679 /* The other optimizations can be done only on the non-va_list variants. */
2680 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2681 return false;
2682
2683 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2684 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2685 {
2686 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2687 return false;
2688 if (fn_puts)
2689 {
2690 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2691 replace_call_with_call_and_fold (gsi, repl);
2692 return true;
2693 }
2694 }
2695
2696 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2697 else if (strcmp (fmt_str, target_percent_c) == 0)
2698 {
2699 if (!arg || ! useless_type_conversion_p (integer_type_node,
2700 TREE_TYPE (arg)))
2701 return false;
2702 if (fn_putchar)
2703 {
2704 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2705 replace_call_with_call_and_fold (gsi, repl);
2706 return true;
2707 }
2708 }
2709
2710 return false;
2711 }
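
/* For example (a sketch; the return values are unused):

     printf ("x");           becomes   putchar ('x');
     printf ("hello\n");     becomes   puts ("hello");
     printf ("%s\n", str);   becomes   puts (str);
     printf ("%c", c);       becomes   putchar (c);

   and printf ("") is removed entirely.  */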
2712
2713
2714
2715 /* Fold a call to __builtin_strlen at *GSI when the length of its argument is known.  */
2716
2717 static bool
2718 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2719 {
2720 gimple *stmt = gsi_stmt (*gsi);
2721 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2722 if (!len)
2723 return false;
2724 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2725 replace_call_with_value (gsi, len);
2726 return true;
2727 }
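
/* For example (a sketch): if S is known to point to the literal
   "abc", then

     n = __builtin_strlen (s);

   is replaced by the constant assignment n = 3.  */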
2728
2729 /* Fold a call to __builtin_acc_on_device. */
2730
2731 static bool
2732 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2733 {
2734   /* Defer folding until we know whether this is the host or the accelerator compiler.  */
2735 if (symtab->state != EXPANSION)
2736 return false;
2737
2738 unsigned val_host = GOMP_DEVICE_HOST;
2739 unsigned val_dev = GOMP_DEVICE_NONE;
2740
2741 #ifdef ACCEL_COMPILER
2742 val_host = GOMP_DEVICE_NOT_HOST;
2743 val_dev = ACCEL_COMPILER_acc_device;
2744 #endif
2745
2746 location_t loc = gimple_location (gsi_stmt (*gsi));
2747
2748 tree host_eq = make_ssa_name (boolean_type_node);
2749 gimple *host_ass = gimple_build_assign
2750 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2751 gimple_set_location (host_ass, loc);
2752 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2753
2754 tree dev_eq = make_ssa_name (boolean_type_node);
2755 gimple *dev_ass = gimple_build_assign
2756 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2757 gimple_set_location (dev_ass, loc);
2758 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2759
2760 tree result = make_ssa_name (boolean_type_node);
2761 gimple *result_ass = gimple_build_assign
2762 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2763 gimple_set_location (result_ass, loc);
2764 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2765
2766 replace_call_with_value (gsi, result);
2767
2768 return true;
2769 }
2770
2771 /* Fold the non-target builtin at *GSI and return whether any simplification
2772 was made. */
2773
2774 static bool
2775 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2776 {
2777 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2778 tree callee = gimple_call_fndecl (stmt);
2779
2780 /* Give up for always_inline inline builtins until they are
2781 inlined. */
2782 if (avoid_folding_inline_builtin (callee))
2783 return false;
2784
2785 unsigned n = gimple_call_num_args (stmt);
2786 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2787 switch (fcode)
2788 {
2789 case BUILT_IN_BZERO:
2790 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2791 gimple_call_arg (stmt, 1));
2792 case BUILT_IN_MEMSET:
2793 return gimple_fold_builtin_memset (gsi,
2794 gimple_call_arg (stmt, 1),
2795 gimple_call_arg (stmt, 2));
2796 case BUILT_IN_BCOPY:
2797 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2798 gimple_call_arg (stmt, 0), 3);
2799 case BUILT_IN_MEMCPY:
2800 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2801 gimple_call_arg (stmt, 1), 0);
2802 case BUILT_IN_MEMPCPY:
2803 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2804 gimple_call_arg (stmt, 1), 1);
2805 case BUILT_IN_MEMMOVE:
2806 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2807 gimple_call_arg (stmt, 1), 3);
2808 case BUILT_IN_SPRINTF_CHK:
2809 case BUILT_IN_VSPRINTF_CHK:
2810 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2811 case BUILT_IN_STRCAT_CHK:
2812 return gimple_fold_builtin_strcat_chk (gsi);
2813 case BUILT_IN_STRNCAT_CHK:
2814 return gimple_fold_builtin_strncat_chk (gsi);
2815 case BUILT_IN_STRLEN:
2816 return gimple_fold_builtin_strlen (gsi);
2817 case BUILT_IN_STRCPY:
2818 return gimple_fold_builtin_strcpy (gsi,
2819 gimple_call_arg (stmt, 0),
2820 gimple_call_arg (stmt, 1));
2821 case BUILT_IN_STRNCPY:
2822 return gimple_fold_builtin_strncpy (gsi,
2823 gimple_call_arg (stmt, 0),
2824 gimple_call_arg (stmt, 1),
2825 gimple_call_arg (stmt, 2));
2826 case BUILT_IN_STRCAT:
2827 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2828 gimple_call_arg (stmt, 1));
2829 case BUILT_IN_STRNCAT:
2830 return gimple_fold_builtin_strncat (gsi);
2831 case BUILT_IN_FPUTS:
2832 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2833 gimple_call_arg (stmt, 1), false);
2834 case BUILT_IN_FPUTS_UNLOCKED:
2835 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1), true);
2837 case BUILT_IN_MEMCPY_CHK:
2838 case BUILT_IN_MEMPCPY_CHK:
2839 case BUILT_IN_MEMMOVE_CHK:
2840 case BUILT_IN_MEMSET_CHK:
2841 return gimple_fold_builtin_memory_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 gimple_call_arg (stmt, 3),
2846 fcode);
2847 case BUILT_IN_STPCPY:
2848 return gimple_fold_builtin_stpcpy (gsi);
2849 case BUILT_IN_STRCPY_CHK:
2850 case BUILT_IN_STPCPY_CHK:
2851 return gimple_fold_builtin_stxcpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 fcode);
2856 case BUILT_IN_STRNCPY_CHK:
2857 case BUILT_IN_STPNCPY_CHK:
2858 return gimple_fold_builtin_stxncpy_chk (gsi,
2859 gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1),
2861 gimple_call_arg (stmt, 2),
2862 gimple_call_arg (stmt, 3),
2863 fcode);
2864 case BUILT_IN_SNPRINTF_CHK:
2865 case BUILT_IN_VSNPRINTF_CHK:
2866 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2867 case BUILT_IN_SNPRINTF:
2868 return gimple_fold_builtin_snprintf (gsi);
2869 case BUILT_IN_SPRINTF:
2870 return gimple_fold_builtin_sprintf (gsi);
2871 case BUILT_IN_FPRINTF:
2872 case BUILT_IN_FPRINTF_UNLOCKED:
2873 case BUILT_IN_VFPRINTF:
2874 if (n == 2 || n == 3)
2875 return gimple_fold_builtin_fprintf (gsi,
2876 gimple_call_arg (stmt, 0),
2877 gimple_call_arg (stmt, 1),
2878 n == 3
2879 ? gimple_call_arg (stmt, 2)
2880 : NULL_TREE,
2881 fcode);
2882 break;
2883 case BUILT_IN_FPRINTF_CHK:
2884 case BUILT_IN_VFPRINTF_CHK:
2885 if (n == 3 || n == 4)
2886 return gimple_fold_builtin_fprintf (gsi,
2887 gimple_call_arg (stmt, 0),
2888 gimple_call_arg (stmt, 2),
2889 n == 4
2890 ? gimple_call_arg (stmt, 3)
2891 : NULL_TREE,
2892 fcode);
2893 break;
2894 case BUILT_IN_PRINTF:
2895 case BUILT_IN_PRINTF_UNLOCKED:
2896 case BUILT_IN_VPRINTF:
2897 if (n == 1 || n == 2)
2898 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2899 n == 2
2900 ? gimple_call_arg (stmt, 1)
2901 : NULL_TREE, fcode);
2902 break;
2903 case BUILT_IN_PRINTF_CHK:
2904 case BUILT_IN_VPRINTF_CHK:
2905 if (n == 2 || n == 3)
2906 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2907 n == 3
2908 ? gimple_call_arg (stmt, 2)
2909 : NULL_TREE, fcode);
2910 break;
2911 case BUILT_IN_ACC_ON_DEVICE:
2912 return gimple_fold_builtin_acc_on_device (gsi,
2913 gimple_call_arg (stmt, 0));
2914 default:;
2915 }
2916
2917 /* Try the generic builtin folder. */
2918 bool ignore = (gimple_call_lhs (stmt) == NULL);
2919 tree result = fold_call_stmt (stmt, ignore);
2920 if (result)
2921 {
2922 if (ignore)
2923 STRIP_NOPS (result);
2924 else
2925 result = fold_convert (gimple_call_return_type (stmt), result);
2926 if (!update_call_from_tree (gsi, result))
2927 gimplify_and_update_call_from_tree (gsi, result);
2928 return true;
2929 }
2930
2931 return false;
2932 }
2933
2934 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2935 function calls to constants, where possible. */
2936
2937 static tree
2938 fold_internal_goacc_dim (const gimple *call)
2939 {
2940 int axis = get_oacc_ifn_dim_arg (call);
2941 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2942 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2943 tree result = NULL_TREE;
2944
2945 /* If the size is 1, or we only want the size and it is not dynamic,
2946 we know the answer. */
2947 if (size == 1 || (!is_pos && size))
2948 {
2949 tree type = TREE_TYPE (gimple_call_lhs (call));
2950 result = build_int_cst (type, size - is_pos);
2951 }
2952
2953 return result;
2954 }
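
/* For example (a sketch; hypothetical dimensions): if the current
   offloaded function is known to run the folded axis with size 32,
   GOACC_DIM_SIZE for that axis folds to the constant 32; and for an
   axis of size 1, GOACC_DIM_POS folds to 0, the only possible
   position.  */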
2955
2956 /* Return true if STMT is an __atomic_compare_exchange_N call that is
2957    suitable for conversion into ATOMIC_COMPARE_EXCHANGE, i.e. when its
2958    second argument is &VAR and VAR is addressable only because of such calls.  */
2959
2960 bool
2961 optimize_atomic_compare_exchange_p (gimple *stmt)
2962 {
2963 if (gimple_call_num_args (stmt) != 6
2964 || !flag_inline_atomics
2965 || !optimize
2966 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
2967 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
2968 || !gimple_vdef (stmt)
2969 || !gimple_vuse (stmt))
2970 return false;
2971
2972 tree fndecl = gimple_call_fndecl (stmt);
2973 switch (DECL_FUNCTION_CODE (fndecl))
2974 {
2975 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2976 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2977 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2978 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2979 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2980 break;
2981 default:
2982 return false;
2983 }
2984
2985 tree expected = gimple_call_arg (stmt, 1);
2986 if (TREE_CODE (expected) != ADDR_EXPR
2987 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
2988 return false;
2989
2990 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
2991 if (!is_gimple_reg_type (etype)
2992 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
2993 || TREE_THIS_VOLATILE (etype)
2994 || VECTOR_TYPE_P (etype)
2995 || TREE_CODE (etype) == COMPLEX_TYPE
2996 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
2997 might not preserve all the bits. See PR71716. */
2998 || SCALAR_FLOAT_TYPE_P (etype)
2999 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3000 return false;
3001
3002 tree weak = gimple_call_arg (stmt, 3);
3003 if (!integer_zerop (weak) && !integer_onep (weak))
3004 return false;
3005
3006 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3007 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3008 machine_mode mode = TYPE_MODE (itype);
3009
3010 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3011 == CODE_FOR_nothing
3012 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3013 return false;
3014
3015 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3016 return false;
3017
3018 return true;
3019 }
3020
3021 /* Fold
3022 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3023 into
3024 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3025 i = IMAGPART_EXPR <t>;
3026 r = (_Bool) i;
3027 e = REALPART_EXPR <t>; */
3028
3029 void
3030 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3031 {
3032 gimple *stmt = gsi_stmt (*gsi);
3033 tree fndecl = gimple_call_fndecl (stmt);
3034 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3035 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3036 tree ctype = build_complex_type (itype);
3037 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3038 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3039 expected);
3040 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3041 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3042 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3043 {
3044 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3045 build1 (VIEW_CONVERT_EXPR, itype,
3046 gimple_assign_lhs (g)));
3047 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3048 }
3049 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3050 + int_size_in_bytes (itype);
3051 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3052 gimple_call_arg (stmt, 0),
3053 gimple_assign_lhs (g),
3054 gimple_call_arg (stmt, 2),
3055 build_int_cst (integer_type_node, flag),
3056 gimple_call_arg (stmt, 4),
3057 gimple_call_arg (stmt, 5));
3058 tree lhs = make_ssa_name (ctype);
3059 gimple_call_set_lhs (g, lhs);
3060 gimple_set_vdef (g, gimple_vdef (stmt));
3061 gimple_set_vuse (g, gimple_vuse (stmt));
3062 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3063 if (gimple_call_lhs (stmt))
3064 {
3065 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3066 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3067 build1 (IMAGPART_EXPR, itype, lhs));
3068 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3069 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3070 gimple_assign_lhs (g));
3071 }
3072 gsi_replace (gsi, g, true);
3073 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3074 build1 (REALPART_EXPR, itype, lhs));
3075 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3076 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3077 {
3078 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3079 VIEW_CONVERT_EXPR,
3080 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3081 gimple_assign_lhs (g)));
3082 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3083 }
3084 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3085 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3086 *gsi = gsiret;
3087 }
3088
3089 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
3090    signed precision, does not fit into TYPE.  The overflow test is
3091    performed regardless of -fwrapv, and also for unsigned types.  */
3092
3093 bool
3094 arith_overflowed_p (enum tree_code code, const_tree type,
3095 const_tree arg0, const_tree arg1)
3096 {
3097 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3098 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3099 widest2_int_cst;
3100 widest2_int warg0 = widest2_int_cst (arg0);
3101 widest2_int warg1 = widest2_int_cst (arg1);
3102 widest2_int wres;
3103 switch (code)
3104 {
3105 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3106 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3107 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3108 default: gcc_unreachable ();
3109 }
3110 signop sign = TYPE_SIGN (type);
3111 if (sign == UNSIGNED && wi::neg_p (wres))
3112 return true;
3113 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3114 }
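
/* For example (a sketch): with TYPE an 8-bit unsigned type,
   arith_overflowed_p (PLUS_EXPR, type, 200, 100) is true because the
   result 300 needs nine bits, and arith_overflowed_p (MINUS_EXPR,
   type, 1, 2) is true because the infinitely precise result -1 is
   negative and therefore not representable in an unsigned type.  */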
3115
3116 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3117 The statement may be replaced by another statement, e.g., if the call
3118 simplifies to a constant value. Return true if any changes were made.
3119 It is assumed that the operands have been previously folded. */
3120
3121 static bool
3122 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3123 {
3124 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3125 tree callee;
3126 bool changed = false;
3127 unsigned i;
3128
3129 /* Fold *& in call arguments. */
3130 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3131 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3132 {
3133 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3134 if (tmp)
3135 {
3136 gimple_call_set_arg (stmt, i, tmp);
3137 changed = true;
3138 }
3139 }
3140
3141 /* Check for virtual calls that became direct calls. */
3142 callee = gimple_call_fn (stmt);
3143 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3144 {
3145 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3146 {
3147 if (dump_file && virtual_method_call_p (callee)
3148 && !possible_polymorphic_call_target_p
3149 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3150 (OBJ_TYPE_REF_EXPR (callee)))))
3151 {
3152 fprintf (dump_file,
3153 "Type inheritance inconsistent devirtualization of ");
3154 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3155 fprintf (dump_file, " to ");
3156 print_generic_expr (dump_file, callee, TDF_SLIM);
3157 fprintf (dump_file, "\n");
3158 }
3159
3160 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3161 changed = true;
3162 }
3163 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3164 {
3165 bool final;
3166 vec <cgraph_node *>targets
3167 = possible_polymorphic_call_targets (callee, stmt, &final);
3168 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3169 {
3170 tree lhs = gimple_call_lhs (stmt);
3171 if (dump_enabled_p ())
3172 {
3173 location_t loc = gimple_location_safe (stmt);
3174 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3175 "folding virtual function call to %s\n",
3176 targets.length () == 1
3177 ? targets[0]->name ()
3178 : "__builtin_unreachable");
3179 }
3180 if (targets.length () == 1)
3181 {
3182 tree fndecl = targets[0]->decl;
3183 gimple_call_set_fndecl (stmt, fndecl);
3184 changed = true;
3185 /* If changing the call to __cxa_pure_virtual
3186 or similar noreturn function, adjust gimple_call_fntype
3187 too. */
3188 if ((gimple_call_flags (stmt) & ECF_NORETURN)
3189 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3190 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3191 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3192 == void_type_node))
3193 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3194 /* If the call becomes noreturn, remove the lhs. */
3195 if (lhs
3196 && gimple_call_noreturn_p (stmt)
3197 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3198 || should_remove_lhs_p (lhs)))
3199 {
3200 if (TREE_CODE (lhs) == SSA_NAME)
3201 {
3202 tree var = create_tmp_var (TREE_TYPE (lhs));
3203 tree def = get_or_create_ssa_default_def (cfun, var);
3204 gimple *new_stmt = gimple_build_assign (lhs, def);
3205 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3206 }
3207 gimple_call_set_lhs (stmt, NULL_TREE);
3208 }
3209 maybe_remove_unused_call_args (cfun, stmt);
3210 }
3211 else
3212 {
3213 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3214 gimple *new_stmt = gimple_build_call (fndecl, 0);
3215 gimple_set_location (new_stmt, gimple_location (stmt));
3216 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3217 {
3218 tree var = create_tmp_var (TREE_TYPE (lhs));
3219 tree def = get_or_create_ssa_default_def (cfun, var);
3220
3221 /* To satisfy condition for
3222 cgraph_update_edges_for_call_stmt_node,
3223 we need to preserve GIMPLE_CALL statement
3224 at position of GSI iterator. */
3225 update_call_from_tree (gsi, def);
3226 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3227 }
3228 else
3229 {
3230 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3231 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3232 gsi_replace (gsi, new_stmt, false);
3233 }
3234 return true;
3235 }
3236 }
3237 }
3238 }
3239
3240 /* Check for indirect calls that became direct calls, and then
3241 no longer require a static chain. */
3242 if (gimple_call_chain (stmt))
3243 {
3244 tree fn = gimple_call_fndecl (stmt);
3245 if (fn && !DECL_STATIC_CHAIN (fn))
3246 {
3247 gimple_call_set_chain (stmt, NULL);
3248 changed = true;
3249 }
3250 else
3251 {
3252 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3253 if (tmp)
3254 {
3255 gimple_call_set_chain (stmt, tmp);
3256 changed = true;
3257 }
3258 }
3259 }
3260
3261 if (inplace)
3262 return changed;
3263
3264 /* Check for builtins that CCP can handle using information not
3265 available in the generic fold routines. */
3266 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3267 {
3268 if (gimple_fold_builtin (gsi))
3269 changed = true;
3270 }
3271 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3272 {
3273 changed |= targetm.gimple_fold_builtin (gsi);
3274 }
3275 else if (gimple_call_internal_p (stmt))
3276 {
3277 enum tree_code subcode = ERROR_MARK;
3278 tree result = NULL_TREE;
3279 bool cplx_result = false;
3280 tree overflow = NULL_TREE;
3281 switch (gimple_call_internal_fn (stmt))
3282 {
3283 case IFN_BUILTIN_EXPECT:
3284 result = fold_builtin_expect (gimple_location (stmt),
3285 gimple_call_arg (stmt, 0),
3286 gimple_call_arg (stmt, 1),
3287 gimple_call_arg (stmt, 2));
3288 break;
3289 case IFN_UBSAN_OBJECT_SIZE:
3290 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3291 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3292 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3293 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3294 gimple_call_arg (stmt, 2))))
3295 {
3296 gsi_replace (gsi, gimple_build_nop (), false);
3297 unlink_stmt_vdef (stmt);
3298 release_defs (stmt);
3299 return true;
3300 }
3301 break;
3302 case IFN_GOACC_DIM_SIZE:
3303 case IFN_GOACC_DIM_POS:
3304 result = fold_internal_goacc_dim (stmt);
3305 break;
3306 case IFN_UBSAN_CHECK_ADD:
3307 subcode = PLUS_EXPR;
3308 break;
3309 case IFN_UBSAN_CHECK_SUB:
3310 subcode = MINUS_EXPR;
3311 break;
3312 case IFN_UBSAN_CHECK_MUL:
3313 subcode = MULT_EXPR;
3314 break;
3315 case IFN_ADD_OVERFLOW:
3316 subcode = PLUS_EXPR;
3317 cplx_result = true;
3318 break;
3319 case IFN_SUB_OVERFLOW:
3320 subcode = MINUS_EXPR;
3321 cplx_result = true;
3322 break;
3323 case IFN_MUL_OVERFLOW:
3324 subcode = MULT_EXPR;
3325 cplx_result = true;
3326 break;
3327 default:
3328 break;
3329 }
3330 if (subcode != ERROR_MARK)
3331 {
3332 tree arg0 = gimple_call_arg (stmt, 0);
3333 tree arg1 = gimple_call_arg (stmt, 1);
3334 tree type = TREE_TYPE (arg0);
3335 if (cplx_result)
3336 {
3337 tree lhs = gimple_call_lhs (stmt);
3338 if (lhs == NULL_TREE)
3339 type = NULL_TREE;
3340 else
3341 type = TREE_TYPE (TREE_TYPE (lhs));
3342 }
3343 if (type == NULL_TREE)
3344 ;
3345 /* x = y + 0; x = y - 0; x = y * 0; */
3346 else if (integer_zerop (arg1))
3347 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3348 /* x = 0 + y; x = 0 * y; */
3349 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3350 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3351 /* x = y - y; */
3352 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3353 result = integer_zero_node;
3354 /* x = y * 1; x = 1 * y; */
3355 else if (subcode == MULT_EXPR && integer_onep (arg1))
3356 result = arg0;
3357 else if (subcode == MULT_EXPR && integer_onep (arg0))
3358 result = arg1;
3359 else if (TREE_CODE (arg0) == INTEGER_CST
3360 && TREE_CODE (arg1) == INTEGER_CST)
3361 {
3362 if (cplx_result)
3363 result = int_const_binop (subcode, fold_convert (type, arg0),
3364 fold_convert (type, arg1));
3365 else
3366 result = int_const_binop (subcode, arg0, arg1);
3367 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3368 {
3369 if (cplx_result)
3370 overflow = build_one_cst (type);
3371 else
3372 result = NULL_TREE;
3373 }
3374 }
3375 if (result)
3376 {
3377 if (result == integer_zero_node)
3378 result = build_zero_cst (type);
3379 else if (cplx_result && TREE_TYPE (result) != type)
3380 {
3381 if (TREE_CODE (result) == INTEGER_CST)
3382 {
3383 if (arith_overflowed_p (PLUS_EXPR, type, result,
3384 integer_zero_node))
3385 overflow = build_one_cst (type);
3386 }
3387 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3388 && TYPE_UNSIGNED (type))
3389 || (TYPE_PRECISION (type)
3390 < (TYPE_PRECISION (TREE_TYPE (result))
3391 + (TYPE_UNSIGNED (TREE_TYPE (result))
3392 && !TYPE_UNSIGNED (type)))))
3393 result = NULL_TREE;
3394 if (result)
3395 result = fold_convert (type, result);
3396 }
3397 }
3398 }
3399
3400 if (result)
3401 {
3402 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3403 result = drop_tree_overflow (result);
3404 if (cplx_result)
3405 {
3406 if (overflow == NULL_TREE)
3407 overflow = build_zero_cst (TREE_TYPE (result));
3408 tree ctype = build_complex_type (TREE_TYPE (result));
3409 if (TREE_CODE (result) == INTEGER_CST
3410 && TREE_CODE (overflow) == INTEGER_CST)
3411 result = build_complex (ctype, result, overflow);
3412 else
3413 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3414 ctype, result, overflow);
3415 }
3416 if (!update_call_from_tree (gsi, result))
3417 gimplify_and_update_call_from_tree (gsi, result);
3418 changed = true;
3419 }
3420 }
3421
3422 return changed;
3423 }
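
/* For example (a sketch): in the internal-function handling above, a
   call with an LHS

     r = IFN_ADD_OVERFLOW (2, 3);

   folds to the constant complex pair {5, 0} (value and overflow
   flag), and IFN_MUL_OVERFLOW with one operand equal to zero folds
   to {0, 0} even when the other operand is not constant.  */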
3424
3425
3426 /* Return true if NAME has a use on statement STMT.  */
3427
3428 static bool
3429 has_use_on_stmt (tree name, gimple *stmt)
3430 {
3431 imm_use_iterator iter;
3432 use_operand_p use_p;
3433 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3434 if (USE_STMT (use_p) == stmt)
3435 return true;
3436 return false;
3437 }
3438
3439 /* Worker for fold_stmt_1's dispatch to pattern-based folding via
3440    gimple_simplify.
3441
3442 Replaces *GSI with the simplification result in RCODE and OPS
3443 and the associated statements in *SEQ. Does the replacement
3444 according to INPLACE and returns true if the operation succeeded. */
3445
3446 static bool
3447 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3448 code_helper rcode, tree *ops,
3449 gimple_seq *seq, bool inplace)
3450 {
3451 gimple *stmt = gsi_stmt (*gsi);
3452
3453 /* Play safe and do not allow abnormals to be mentioned in
3454 newly created statements. See also maybe_push_res_to_seq.
3455 As an exception allow such uses if there was a use of the
3456 same SSA name on the old stmt. */
3457 if ((TREE_CODE (ops[0]) == SSA_NAME
3458 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3459 && !has_use_on_stmt (ops[0], stmt))
3460 || (ops[1]
3461 && TREE_CODE (ops[1]) == SSA_NAME
3462 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3463 && !has_use_on_stmt (ops[1], stmt))
3464 || (ops[2]
3465 && TREE_CODE (ops[2]) == SSA_NAME
3466 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3467 && !has_use_on_stmt (ops[2], stmt))
3468 || (COMPARISON_CLASS_P (ops[0])
3469 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3470 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3471 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3472 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3473 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3474 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3475 return false;
3476
3477 /* Don't insert new statements when INPLACE is true, even if we could
3478 reuse STMT for the final statement. */
3479 if (inplace && !gimple_seq_empty_p (*seq))
3480 return false;
3481
3482 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3483 {
3484 gcc_assert (rcode.is_tree_code ());
3485 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3486 /* GIMPLE_CONDs condition may not throw. */
3487 && (!flag_exceptions
3488 || !cfun->can_throw_non_call_exceptions
3489 || !operation_could_trap_p (rcode,
3490 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3491 false, NULL_TREE)))
3492 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3493 else if (rcode == SSA_NAME)
3494 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3495 build_zero_cst (TREE_TYPE (ops[0])));
3496 else if (rcode == INTEGER_CST)
3497 {
3498 if (integer_zerop (ops[0]))
3499 gimple_cond_make_false (cond_stmt);
3500 else
3501 gimple_cond_make_true (cond_stmt);
3502 }
3503 else if (!inplace)
3504 {
3505 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3506 ops, seq);
3507 if (!res)
3508 return false;
3509 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3510 build_zero_cst (TREE_TYPE (res)));
3511 }
3512 else
3513 return false;
3514 if (dump_file && (dump_flags & TDF_DETAILS))
3515 {
3516 fprintf (dump_file, "gimple_simplified to ");
3517 if (!gimple_seq_empty_p (*seq))
3518 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3519 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3520 0, TDF_SLIM);
3521 }
3522 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3523 return true;
3524 }
3525 else if (is_gimple_assign (stmt)
3526 && rcode.is_tree_code ())
3527 {
3528 if (!inplace
3529 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3530 {
3531 maybe_build_generic_op (rcode,
3532 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3533 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3534 if (dump_file && (dump_flags & TDF_DETAILS))
3535 {
3536 fprintf (dump_file, "gimple_simplified to ");
3537 if (!gimple_seq_empty_p (*seq))
3538 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3539 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3540 0, TDF_SLIM);
3541 }
3542 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3543 return true;
3544 }
3545 }
3546 else if (rcode.is_fn_code ()
3547 && gimple_call_combined_fn (stmt) == rcode)
3548 {
3549 unsigned i;
3550 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3551 {
3552 gcc_assert (ops[i] != NULL_TREE);
3553 gimple_call_set_arg (stmt, i, ops[i]);
3554 }
3555 if (i < 3)
3556 gcc_assert (ops[i] == NULL_TREE);
3557 if (dump_file && (dump_flags & TDF_DETAILS))
3558 {
3559 fprintf (dump_file, "gimple_simplified to ");
3560 if (!gimple_seq_empty_p (*seq))
3561 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3562 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3563 }
3564 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3565 return true;
3566 }
3567 else if (!inplace)
3568 {
3569 if (gimple_has_lhs (stmt))
3570 {
3571 tree lhs = gimple_get_lhs (stmt);
3572 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3573 ops, seq, lhs))
3574 return false;
3575 if (dump_file && (dump_flags & TDF_DETAILS))
3576 {
3577 fprintf (dump_file, "gimple_simplified to ");
3578 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3579 }
3580 gsi_replace_with_seq_vops (gsi, *seq);
3581 return true;
3582 }
3583 else
3584 gcc_unreachable ();
3585 }
3586
3587 return false;
3588 }
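/* For illustration (editor's sketch, not part of the original source):
   when gimple_simplify reduces the condition of

     if (_1 != 0)

   to the INTEGER_CST 0, the GIMPLE_COND path above rewrites the
   statement in place via gimple_cond_make_false, yielding

     if (0)

   with SEQ typically empty in this case.  */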
3589
3590 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3591
3592 static bool
3593 maybe_canonicalize_mem_ref_addr (tree *t)
3594 {
3595 bool res = false;
3596
3597 if (TREE_CODE (*t) == ADDR_EXPR)
3598 t = &TREE_OPERAND (*t, 0);
3599
3600 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3601 generic vector extension. The actual vector referenced is
3602 view-converted to an array type for this purpose. If the index
3603 is constant, the canonical representation in the middle-end is a
3604 BIT_FIELD_REF, so rewrite the former to the latter here. */
3605 if (TREE_CODE (*t) == ARRAY_REF
3606 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3607 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3608 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3609 {
3610 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3611 if (VECTOR_TYPE_P (vtype))
3612 {
3613 tree low = array_ref_low_bound (*t);
3614 if (TREE_CODE (low) == INTEGER_CST)
3615 {
3616 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3617 {
3618 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3619 wi::to_widest (low));
3620 idx = wi::mul (idx, wi::to_widest
3621 (TYPE_SIZE (TREE_TYPE (*t))));
3622 widest_int ext
3623 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3624 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3625 {
3626 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3627 TREE_TYPE (*t),
3628 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3629 TYPE_SIZE (TREE_TYPE (*t)),
3630 wide_int_to_tree (sizetype, idx));
3631 res = true;
3632 }
3633 }
3634 }
3635 }
3636 }
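/* For illustration (editor's sketch, not part of the original source):
   with V of vector type v4si, the frontend form

     VIEW_CONVERT_EXPR<int[4]>(V)[2]

   is rewritten above into the canonical

     BIT_FIELD_REF <V, 32, 64>

   i.e. a 32-bit access at bit offset 2 * 32.  */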
3637
3638 while (handled_component_p (*t))
3639 t = &TREE_OPERAND (*t, 0);
3640
3641 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
3642 of invariant addresses into an SSA name MEM_REF address. */
3643 if (TREE_CODE (*t) == MEM_REF
3644 || TREE_CODE (*t) == TARGET_MEM_REF)
3645 {
3646 tree addr = TREE_OPERAND (*t, 0);
3647 if (TREE_CODE (addr) == ADDR_EXPR
3648 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3649 || handled_component_p (TREE_OPERAND (addr, 0))))
3650 {
3651 tree base;
3652 HOST_WIDE_INT coffset;
3653 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3654 &coffset);
3655 if (!base)
3656 gcc_unreachable ();
3657
3658 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3659 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3660 TREE_OPERAND (*t, 1),
3661 size_int (coffset));
3662 res = true;
3663 }
3664 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3665 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3666 }
3667
3668 /* Canonicalize back MEM_REFs to plain reference trees if the object
3669 accessed is a decl that has the same access semantics as the MEM_REF. */
3670 if (TREE_CODE (*t) == MEM_REF
3671 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3672 && integer_zerop (TREE_OPERAND (*t, 1))
3673 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3674 {
3675 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3676 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3677 if (/* Same volatile qualification. */
3678 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3679 /* Same TBAA behavior with -fstrict-aliasing. */
3680 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3681 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3682 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3683 /* Same alignment. */
3684 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3685 /* We have to look out here to not drop a required conversion
3686 from the rhs to the lhs if *t appears on the lhs or vice-versa
3687 if it appears on the rhs. Thus require strict type
3688 compatibility. */
3689 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3690 {
3691 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3692 res = true;
3693 }
3694 }
3695
3696 /* Canonicalize TARGET_MEM_REF in particular with respect to
3697 the indexes becoming constant. */
3698 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3699 {
3700 tree tem = maybe_fold_tmr (*t);
3701 if (tem)
3702 {
3703 *t = tem;
3704 res = true;
3705 }
3706 }
3707
3708 return res;
3709 }
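/* For illustration (editor's sketch, not part of the original source):
   after &a has been propagated into an SSA pointer, the code above
   turns the dereference

     MEM[(int *)&a, 0]

   back into the plain reference

     a

   provided the volatility, TBAA type, and alignment of the access
   match the decl.  */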
3710
3711 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3712 distinguishes the two cases. */
3713
3714 static bool
3715 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3716 {
3717 bool changed = false;
3718 gimple *stmt = gsi_stmt (*gsi);
3719 bool nowarning = gimple_no_warning_p (stmt);
3720 unsigned i;
3721 fold_defer_overflow_warnings ();
3722
3723 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3724 after propagation.
3725 ??? This shouldn't be done in generic folding but in the
3726 propagation helpers which also know whether an address was
3727 propagated.
3728 Also canonicalize operand order. */
3729 switch (gimple_code (stmt))
3730 {
3731 case GIMPLE_ASSIGN:
3732 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3733 {
3734 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3735 if ((REFERENCE_CLASS_P (*rhs)
3736 || TREE_CODE (*rhs) == ADDR_EXPR)
3737 && maybe_canonicalize_mem_ref_addr (rhs))
3738 changed = true;
3739 tree *lhs = gimple_assign_lhs_ptr (stmt);
3740 if (REFERENCE_CLASS_P (*lhs)
3741 && maybe_canonicalize_mem_ref_addr (lhs))
3742 changed = true;
3743 }
3744 else
3745 {
3746 /* Canonicalize operand order. */
3747 enum tree_code code = gimple_assign_rhs_code (stmt);
3748 if (TREE_CODE_CLASS (code) == tcc_comparison
3749 || commutative_tree_code (code)
3750 || commutative_ternary_tree_code (code))
3751 {
3752 tree rhs1 = gimple_assign_rhs1 (stmt);
3753 tree rhs2 = gimple_assign_rhs2 (stmt);
3754 if (tree_swap_operands_p (rhs1, rhs2, false))
3755 {
3756 gimple_assign_set_rhs1 (stmt, rhs2);
3757 gimple_assign_set_rhs2 (stmt, rhs1);
3758 if (TREE_CODE_CLASS (code) == tcc_comparison)
3759 gimple_assign_set_rhs_code (stmt,
3760 swap_tree_comparison (code));
3761 changed = true;
3762 }
3763 }
3764 }
3765 break;
3766 case GIMPLE_CALL:
3767 {
3768 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3769 {
3770 tree *arg = gimple_call_arg_ptr (stmt, i);
3771 if (REFERENCE_CLASS_P (*arg)
3772 && maybe_canonicalize_mem_ref_addr (arg))
3773 changed = true;
3774 }
3775 tree *lhs = gimple_call_lhs_ptr (stmt);
3776 if (*lhs
3777 && REFERENCE_CLASS_P (*lhs)
3778 && maybe_canonicalize_mem_ref_addr (lhs))
3779 changed = true;
3780 break;
3781 }
3782 case GIMPLE_ASM:
3783 {
3784 gasm *asm_stmt = as_a <gasm *> (stmt);
3785 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3786 {
3787 tree link = gimple_asm_output_op (asm_stmt, i);
3788 tree op = TREE_VALUE (link);
3789 if (REFERENCE_CLASS_P (op)
3790 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3791 changed = true;
3792 }
3793 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3794 {
3795 tree link = gimple_asm_input_op (asm_stmt, i);
3796 tree op = TREE_VALUE (link);
3797 if ((REFERENCE_CLASS_P (op)
3798 || TREE_CODE (op) == ADDR_EXPR)
3799 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3800 changed = true;
3801 }
3802 }
3803 break;
3804 case GIMPLE_DEBUG:
3805 if (gimple_debug_bind_p (stmt))
3806 {
3807 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3808 if (*val
3809 && (REFERENCE_CLASS_P (*val)
3810 || TREE_CODE (*val) == ADDR_EXPR)
3811 && maybe_canonicalize_mem_ref_addr (val))
3812 changed = true;
3813 }
3814 break;
3815 case GIMPLE_COND:
3816 {
3817 /* Canonicalize operand order. */
3818 tree lhs = gimple_cond_lhs (stmt);
3819 tree rhs = gimple_cond_rhs (stmt);
3820 if (tree_swap_operands_p (lhs, rhs, false))
3821 {
3822 gcond *gc = as_a <gcond *> (stmt);
3823 gimple_cond_set_lhs (gc, rhs);
3824 gimple_cond_set_rhs (gc, lhs);
3825 gimple_cond_set_code (gc,
3826 swap_tree_comparison (gimple_cond_code (gc)));
3827 changed = true;
3828 }
3829 }
3830 default:;
3831 }
3832
3833 /* Dispatch to pattern-based folding. */
3834 if (!inplace
3835 || is_gimple_assign (stmt)
3836 || gimple_code (stmt) == GIMPLE_COND)
3837 {
3838 gimple_seq seq = NULL;
3839 code_helper rcode;
3840 tree ops[3] = {};
3841 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3842 valueize, valueize))
3843 {
3844 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3845 changed = true;
3846 else
3847 gimple_seq_discard (seq);
3848 }
3849 }
3850
3851 stmt = gsi_stmt (*gsi);
3852
3853 /* Fold the main computation performed by the statement. */
3854 switch (gimple_code (stmt))
3855 {
3856 case GIMPLE_ASSIGN:
3857 {
3858 /* For boolean-typed X, try to canonicalize the comparisons
3859 X == 0, X == 1, X != 0, and X != 1. */
3860 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3861 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3862 {
3863 tree lhs = gimple_assign_lhs (stmt);
3864 tree op1 = gimple_assign_rhs1 (stmt);
3865 tree op2 = gimple_assign_rhs2 (stmt);
3866 tree type = TREE_TYPE (op1);
3867
3868 /* Check whether the comparison operands are of the same boolean
3869 type as the result type.
3870 Check that the second operand is an integer constant with value
3871 one or zero. */
3872 if (TREE_CODE (op2) == INTEGER_CST
3873 && (integer_zerop (op2) || integer_onep (op2))
3874 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3875 {
3876 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3877 bool is_logical_not = false;
3878
3879 /* X == 0 and X != 1 is a logical-not of X;
3880 X == 1 and X != 0 is X itself. */
3881 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3882 || (cmp_code == NE_EXPR && integer_onep (op2)))
3883 is_logical_not = true;
3884
3885 if (is_logical_not == false)
3886 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3887 /* Only for one-bit precision typed X is the transformation
3888 !X -> ~X valid. */
3889 else if (TYPE_PRECISION (type) == 1)
3890 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3891 /* Otherwise we use !X -> X ^ 1. */
3892 else
3893 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3894 build_int_cst (type, 1));
3895 changed = true;
3896 break;
3897 }
3898 }
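/* For illustration (editor's sketch, not part of the original source):
   for boolean-typed _1 the canonicalization above rewrites

     _2 = _1 == 1;   =>   _2 = _1;
     _2 = _1 != 0;   =>   _2 = _1;
     _2 = _1 == 0;   =>   _2 = ~_1;       (one-bit precision)
     _2 = _1 == 0;   =>   _2 = _1 ^ 1;    (wider boolean precision)  */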
3899
3900 unsigned old_num_ops = gimple_num_ops (stmt);
3901 tree lhs = gimple_assign_lhs (stmt);
3902 tree new_rhs = fold_gimple_assign (gsi);
3903 if (new_rhs
3904 && !useless_type_conversion_p (TREE_TYPE (lhs),
3905 TREE_TYPE (new_rhs)))
3906 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3907 if (new_rhs
3908 && (!inplace
3909 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3910 {
3911 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3912 changed = true;
3913 }
3914 break;
3915 }
3916
3917 case GIMPLE_CALL:
3918 changed |= gimple_fold_call (gsi, inplace);
3919 break;
3920
3921 case GIMPLE_ASM:
3922 /* Fold *& in asm operands. */
3923 {
3924 gasm *asm_stmt = as_a <gasm *> (stmt);
3925 size_t noutputs;
3926 const char **oconstraints;
3927 const char *constraint;
3928 bool allows_mem, allows_reg;
3929
3930 noutputs = gimple_asm_noutputs (asm_stmt);
3931 oconstraints = XALLOCAVEC (const char *, noutputs);
3932
3933 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3934 {
3935 tree link = gimple_asm_output_op (asm_stmt, i);
3936 tree op = TREE_VALUE (link);
3937 oconstraints[i]
3938 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3939 if (REFERENCE_CLASS_P (op)
3940 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3941 {
3942 TREE_VALUE (link) = op;
3943 changed = true;
3944 }
3945 }
3946 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3947 {
3948 tree link = gimple_asm_input_op (asm_stmt, i);
3949 tree op = TREE_VALUE (link);
3950 constraint
3951 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3952 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3953 oconstraints, &allows_mem, &allows_reg);
3954 if (REFERENCE_CLASS_P (op)
3955 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3956 != NULL_TREE)
3957 {
3958 TREE_VALUE (link) = op;
3959 changed = true;
3960 }
3961 }
3962 }
3963 break;
3964
3965 case GIMPLE_DEBUG:
3966 if (gimple_debug_bind_p (stmt))
3967 {
3968 tree val = gimple_debug_bind_get_value (stmt);
3969 if (val
3970 && REFERENCE_CLASS_P (val))
3971 {
3972 tree tem = maybe_fold_reference (val, false);
3973 if (tem)
3974 {
3975 gimple_debug_bind_set_value (stmt, tem);
3976 changed = true;
3977 }
3978 }
3979 else if (val
3980 && TREE_CODE (val) == ADDR_EXPR)
3981 {
3982 tree ref = TREE_OPERAND (val, 0);
3983 tree tem = maybe_fold_reference (ref, false);
3984 if (tem)
3985 {
3986 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3987 gimple_debug_bind_set_value (stmt, tem);
3988 changed = true;
3989 }
3990 }
3991 }
3992 break;
3993
3994 default:;
3995 }
3996
3997 stmt = gsi_stmt (*gsi);
3998
3999 /* Fold *& on the lhs. */
4000 if (gimple_has_lhs (stmt))
4001 {
4002 tree lhs = gimple_get_lhs (stmt);
4003 if (lhs && REFERENCE_CLASS_P (lhs))
4004 {
4005 tree new_lhs = maybe_fold_reference (lhs, true);
4006 if (new_lhs)
4007 {
4008 gimple_set_lhs (stmt, new_lhs);
4009 changed = true;
4010 }
4011 }
4012 }
4013
4014 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4015 return changed;
4016 }
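/* For illustration (editor's sketch, not part of the original source):
   among the canonicalizations performed by fold_stmt_1, commutative
   operands are ordered so that constants come last, e.g.

     _2 = 1 + _1;   =>   _2 = _1 + 1;
     if (3 < _1)    =>   if (_1 > 3)

   via tree_swap_operands_p and swap_tree_comparison.  */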
4017
4018 /* Valueization callback that ends up not following SSA edges. */
4019
4020 tree
4021 no_follow_ssa_edges (tree)
4022 {
4023 return NULL_TREE;
4024 }
4025
4026 /* Valueization callback that ends up following single-use SSA edges only. */
4027
4028 tree
4029 follow_single_use_edges (tree val)
4030 {
4031 if (TREE_CODE (val) == SSA_NAME
4032 && !has_single_use (val))
4033 return NULL_TREE;
4034 return val;
4035 }
4036
4037 /* Fold the statement pointed to by GSI. In some cases, this function may
4038 replace the whole statement with a new one. Returns true iff folding
4039 makes any changes.
4040 The statement pointed to by GSI should be in valid gimple form but may
4041 be in an unfolded state resulting from, for example, constant propagation,
4042 which can produce *&x = 0. */
4043
4044 bool
4045 fold_stmt (gimple_stmt_iterator *gsi)
4046 {
4047 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4048 }
4049
4050 bool
4051 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4052 {
4053 return fold_stmt_1 (gsi, false, valueize);
4054 }
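/* Usage sketch (editor's illustration, hypothetical caller): a pass
   that wants folding to look through single-use SSA definitions only
   can call

     fold_stmt (&gsi, follow_single_use_edges);

   while fold_stmt (&gsi) uses no_follow_ssa_edges and keeps folding
   local to the statement itself.  */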
4055
4056 /* Perform the minimal folding on statement *GSI. Only operations like
4057 *&x created by constant propagation are handled. The statement cannot
4058 be replaced with a new one. Return true if the statement was
4059 changed, false otherwise.
4060 The statement *GSI should be in valid gimple form but may
4061 be in an unfolded state resulting from, for example, constant propagation,
4062 which can produce *&x = 0. */
4063
4064 bool
4065 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4066 {
4067 gimple *stmt = gsi_stmt (*gsi);
4068 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4069 gcc_assert (gsi_stmt (*gsi) == stmt);
4070 return changed;
4071 }
4072
4073 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4074 if EXPR is null or we don't know how.
4075 If non-null, the result always has boolean type. */
4076
4077 static tree
4078 canonicalize_bool (tree expr, bool invert)
4079 {
4080 if (!expr)
4081 return NULL_TREE;
4082 else if (invert)
4083 {
4084 if (integer_nonzerop (expr))
4085 return boolean_false_node;
4086 else if (integer_zerop (expr))
4087 return boolean_true_node;
4088 else if (TREE_CODE (expr) == SSA_NAME)
4089 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4090 build_int_cst (TREE_TYPE (expr), 0));
4091 else if (COMPARISON_CLASS_P (expr))
4092 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4093 boolean_type_node,
4094 TREE_OPERAND (expr, 0),
4095 TREE_OPERAND (expr, 1));
4096 else
4097 return NULL_TREE;
4098 }
4099 else
4100 {
4101 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4102 return expr;
4103 if (integer_nonzerop (expr))
4104 return boolean_true_node;
4105 else if (integer_zerop (expr))
4106 return boolean_false_node;
4107 else if (TREE_CODE (expr) == SSA_NAME)
4108 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4109 build_int_cst (TREE_TYPE (expr), 0));
4110 else if (COMPARISON_CLASS_P (expr))
4111 return fold_build2 (TREE_CODE (expr),
4112 boolean_type_node,
4113 TREE_OPERAND (expr, 0),
4114 TREE_OPERAND (expr, 1));
4115 else
4116 return NULL_TREE;
4117 }
4118 }
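/* For illustration (editor's sketch, not part of the original source):
   canonicalize_bool maps, e.g.,

     (n, invert = false)      =>   n != 0    (n an integer-typed,
                                              non-boolean SSA name)
     (a < b, invert = true)   =>   a >= b    (via invert_tree_comparison)
     (1, invert = true)       =>   boolean_false_node

   so a non-null result always has boolean type.  */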
4119
4120 /* Check to see if a boolean expression EXPR is logically equivalent to the
4121 comparison (OP1 CODE OP2). Check for various identities involving
4122 SSA_NAMEs. */
4123
4124 static bool
4125 same_bool_comparison_p (const_tree expr, enum tree_code code,
4126 const_tree op1, const_tree op2)
4127 {
4128 gimple *s;
4129
4130 /* The obvious case. */
4131 if (TREE_CODE (expr) == code
4132 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4133 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4134 return true;
4135
4136 /* Check for comparing (name, name != 0) and the case where expr
4137 is an SSA_NAME with a definition matching the comparison. */
4138 if (TREE_CODE (expr) == SSA_NAME
4139 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4140 {
4141 if (operand_equal_p (expr, op1, 0))
4142 return ((code == NE_EXPR && integer_zerop (op2))
4143 || (code == EQ_EXPR && integer_nonzerop (op2)));
4144 s = SSA_NAME_DEF_STMT (expr);
4145 if (is_gimple_assign (s)
4146 && gimple_assign_rhs_code (s) == code
4147 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4148 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4149 return true;
4150 }
4151
4152 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4153 of name is a comparison, recurse. */
4154 if (TREE_CODE (op1) == SSA_NAME
4155 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4156 {
4157 s = SSA_NAME_DEF_STMT (op1);
4158 if (is_gimple_assign (s)
4159 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4160 {
4161 enum tree_code c = gimple_assign_rhs_code (s);
4162 if ((c == NE_EXPR && integer_zerop (op2))
4163 || (c == EQ_EXPR && integer_nonzerop (op2)))
4164 return same_bool_comparison_p (expr, c,
4165 gimple_assign_rhs1 (s),
4166 gimple_assign_rhs2 (s));
4167 if ((c == EQ_EXPR && integer_zerop (op2))
4168 || (c == NE_EXPR && integer_nonzerop (op2)))
4169 return same_bool_comparison_p (expr,
4170 invert_tree_comparison (c, false),
4171 gimple_assign_rhs1 (s),
4172 gimple_assign_rhs2 (s));
4173 }
4174 }
4175 return false;
4176 }
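/* For illustration (editor's sketch, not part of the original source):
   with boolean _1 defined by _1 = x < y, the expression _1 and the
   comparison (x < y) are recognized above as logically equivalent, as
   are (x < y) and (_1 != 0), by looking through the SSA definition
   of _1.  */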
4177
4178 /* Check to see if two boolean expressions OP1 and OP2 are logically
4179 equivalent. */
4180
4181 static bool
4182 same_bool_result_p (const_tree op1, const_tree op2)
4183 {
4184 /* Simple cases first. */
4185 if (operand_equal_p (op1, op2, 0))
4186 return true;
4187
4188 /* Check the cases where at least one of the operands is a comparison.
4189 These are a bit smarter than operand_equal_p in that they apply some
4190 identities on SSA_NAMEs. */
4191 if (COMPARISON_CLASS_P (op2)
4192 && same_bool_comparison_p (op1, TREE_CODE (op2),
4193 TREE_OPERAND (op2, 0),
4194 TREE_OPERAND (op2, 1)))
4195 return true;
4196 if (COMPARISON_CLASS_P (op1)
4197 && same_bool_comparison_p (op2, TREE_CODE (op1),
4198 TREE_OPERAND (op1, 0),
4199 TREE_OPERAND (op1, 1)))
4200 return true;
4201
4202 /* Default case. */
4203 return false;
4204 }
4205
4206 /* Forward declarations for some mutually recursive functions. */
4207
4208 static tree
4209 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4210 enum tree_code code2, tree op2a, tree op2b);
4211 static tree
4212 and_var_with_comparison (tree var, bool invert,
4213 enum tree_code code2, tree op2a, tree op2b);
4214 static tree
4215 and_var_with_comparison_1 (gimple *stmt,
4216 enum tree_code code2, tree op2a, tree op2b);
4217 static tree
4218 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4219 enum tree_code code2, tree op2a, tree op2b);
4220 static tree
4221 or_var_with_comparison (tree var, bool invert,
4222 enum tree_code code2, tree op2a, tree op2b);
4223 static tree
4224 or_var_with_comparison_1 (gimple *stmt,
4225 enum tree_code code2, tree op2a, tree op2b);
4226
4227 /* Helper function for and_comparisons_1: try to simplify the AND of the
4228 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4229 If INVERT is true, invert the value of the VAR before doing the AND.
4230 Return NULL_TREE if we can't simplify this to a single expression. */
4231
4232 static tree
4233 and_var_with_comparison (tree var, bool invert,
4234 enum tree_code code2, tree op2a, tree op2b)
4235 {
4236 tree t;
4237 gimple *stmt = SSA_NAME_DEF_STMT (var);
4238
4239 /* We can only deal with variables whose definitions are assignments. */
4240 if (!is_gimple_assign (stmt))
4241 return NULL_TREE;
4242
4243 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4244 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4245 Then we only have to consider the simpler non-inverted cases. */
4246 if (invert)
4247 t = or_var_with_comparison_1 (stmt,
4248 invert_tree_comparison (code2, false),
4249 op2a, op2b);
4250 else
4251 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4252 return canonicalize_bool (t, invert);
4253 }
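/* For illustration (editor's sketch, not part of the original source):
   the INVERT path above relies on De Morgan's law,

     !var AND (op2a code2 op2b)  ==  !(var OR !(op2a code2 op2b))

   so only the non-inverted OR helper does the real work and
   canonicalize_bool re-inverts the result.  */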
4254
4255 /* Try to simplify the AND of the ssa variable defined by the assignment
4256 STMT with the comparison specified by (OP2A CODE2 OP2B).
4257 Return NULL_TREE if we can't simplify this to a single expression. */
4258
4259 static tree
4260 and_var_with_comparison_1 (gimple *stmt,
4261 enum tree_code code2, tree op2a, tree op2b)
4262 {
4263 tree var = gimple_assign_lhs (stmt);
4264 tree true_test_var = NULL_TREE;
4265 tree false_test_var = NULL_TREE;
4266 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4267
4268 /* Check for identities like (var AND (var == 0)) => false. */
4269 if (TREE_CODE (op2a) == SSA_NAME
4270 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4271 {
4272 if ((code2 == NE_EXPR && integer_zerop (op2b))
4273 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4274 {
4275 true_test_var = op2a;
4276 if (var == true_test_var)
4277 return var;
4278 }
4279 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4280 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4281 {
4282 false_test_var = op2a;
4283 if (var == false_test_var)
4284 return boolean_false_node;
4285 }
4286 }
4287
4288 /* If the definition is a comparison, recurse on it. */
4289 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4290 {
4291 tree t = and_comparisons_1 (innercode,
4292 gimple_assign_rhs1 (stmt),
4293 gimple_assign_rhs2 (stmt),
4294 code2,
4295 op2a,
4296 op2b);
4297 if (t)
4298 return t;
4299 }
4300
4301 /* If the definition is an AND or OR expression, we may be able to
4302 simplify by reassociating. */
4303 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4304 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4305 {
4306 tree inner1 = gimple_assign_rhs1 (stmt);
4307 tree inner2 = gimple_assign_rhs2 (stmt);
4308 gimple *s;
4309 tree t;
4310 tree partial = NULL_TREE;
4311 bool is_and = (innercode == BIT_AND_EXPR);
4312
4313 /* Check for boolean identities that don't require recursive examination
4314 of inner1/inner2:
4315 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4316 inner1 AND (inner1 OR inner2) => inner1
4317 !inner1 AND (inner1 AND inner2) => false
4318 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4319 Likewise for similar cases involving inner2. */
4320 if (inner1 == true_test_var)
4321 return (is_and ? var : inner1);
4322 else if (inner2 == true_test_var)
4323 return (is_and ? var : inner2);
4324 else if (inner1 == false_test_var)
4325 return (is_and
4326 ? boolean_false_node
4327 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4328 else if (inner2 == false_test_var)
4329 return (is_and
4330 ? boolean_false_node
4331 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4332
4333 /* Next, redistribute/reassociate the AND across the inner tests.
4334 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4335 if (TREE_CODE (inner1) == SSA_NAME
4336 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4337 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4338 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4339 gimple_assign_rhs1 (s),
4340 gimple_assign_rhs2 (s),
4341 code2, op2a, op2b)))
4342 {
4343 /* Handle the AND case, where we are reassociating:
4344 (inner1 AND inner2) AND (op2a code2 op2b)
4345 => (t AND inner2)
4346 If the partial result t is a constant, we win. Otherwise
4347 continue on to try reassociating with the other inner test. */
4348 if (is_and)
4349 {
4350 if (integer_onep (t))
4351 return inner2;
4352 else if (integer_zerop (t))
4353 return boolean_false_node;
4354 }
4355
4356 /* Handle the OR case, where we are redistributing:
4357 (inner1 OR inner2) AND (op2a code2 op2b)
4358 => (t OR (inner2 AND (op2a code2 op2b))) */
4359 else if (integer_onep (t))
4360 return boolean_true_node;
4361
4362 /* Save partial result for later. */
4363 partial = t;
4364 }
4365
4366 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4367 if (TREE_CODE (inner2) == SSA_NAME
4368 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4369 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4370 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4371 gimple_assign_rhs1 (s),
4372 gimple_assign_rhs2 (s),
4373 code2, op2a, op2b)))
4374 {
4375 /* Handle the AND case, where we are reassociating:
4376 (inner1 AND inner2) AND (op2a code2 op2b)
4377 => (inner1 AND t) */
4378 if (is_and)
4379 {
4380 if (integer_onep (t))
4381 return inner1;
4382 else if (integer_zerop (t))
4383 return boolean_false_node;
4384 /* If both are the same, we can apply the identity
4385 (x AND x) == x. */
4386 else if (partial && same_bool_result_p (t, partial))
4387 return t;
4388 }
4389
4390 /* Handle the OR case, where we are redistributing:
4391 (inner1 OR inner2) AND (op2a code2 op2b)
4392 => (t OR (inner1 AND (op2a code2 op2b)))
4393 => (t OR partial) */
4394 else
4395 {
4396 if (integer_onep (t))
4397 return boolean_true_node;
4398 else if (partial)
4399 {
4400 /* We already got a simplification for the other
4401 operand to the redistributed OR expression. The
4402 interesting case is when at least one is false.
4403 Or, if both are the same, we can apply the identity
4404 (x OR x) == x. */
4405 if (integer_zerop (partial))
4406 return t;
4407 else if (integer_zerop (t))
4408 return partial;
4409 else if (same_bool_result_p (t, partial))
4410 return t;
4411 }
4412 }
4413 }
4414 }
4415 return NULL_TREE;
4416 }
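/* For illustration (editor's sketch, not part of the original source):
   a reassociation handled above, with boolean v = (x > 0) AND (y > 0):

     v AND (x < 0)
       => ((x > 0) AND (x < 0)) AND (y > 0)
       => false AND (y > 0)
       => false

   where the inner partial result comes from
   maybe_fold_and_comparisons.  */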
4417
4418 /* Try to simplify the AND of two comparisons defined by
4419 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4420 If this can be done without constructing an intermediate value,
4421 return the resulting tree; otherwise NULL_TREE is returned.
4422 This function is deliberately asymmetric as it recurses on SSA_DEFs
4423 in the first comparison but not the second. */
4424
4425 static tree
4426 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4427 enum tree_code code2, tree op2a, tree op2b)
4428 {
4429 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4430
4431 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4432 if (operand_equal_p (op1a, op2a, 0)
4433 && operand_equal_p (op1b, op2b, 0))
4434 {
4435 /* Result will be either NULL_TREE, or a combined comparison. */
4436 tree t = combine_comparisons (UNKNOWN_LOCATION,
4437 TRUTH_ANDIF_EXPR, code1, code2,
4438 truth_type, op1a, op1b);
4439 if (t)
4440 return t;
4441 }
4442
4443 /* Likewise the swapped case of the above. */
4444 if (operand_equal_p (op1a, op2b, 0)
4445 && operand_equal_p (op1b, op2a, 0))
4446 {
4447 /* Result will be either NULL_TREE, or a combined comparison. */
4448 tree t = combine_comparisons (UNKNOWN_LOCATION,
4449 TRUTH_ANDIF_EXPR, code1,
4450 swap_tree_comparison (code2),
4451 truth_type, op1a, op1b);
4452 if (t)
4453 return t;
4454 }
4455
4456 /* If both comparisons are of the same value against constants, we might
4457 be able to merge them. */
4458 if (operand_equal_p (op1a, op2a, 0)
4459 && TREE_CODE (op1b) == INTEGER_CST
4460 && TREE_CODE (op2b) == INTEGER_CST)
4461 {
4462 int cmp = tree_int_cst_compare (op1b, op2b);
4463
4464 /* If we have (op1a == op1b), we should either be able to
4465 return that or FALSE, depending on whether the constant op1b
4466 also satisfies the other comparison against op2b. */
4467 if (code1 == EQ_EXPR)
4468 {
4469 bool done = true;
4470 bool val;
4471 switch (code2)
4472 {
4473 case EQ_EXPR: val = (cmp == 0); break;
4474 case NE_EXPR: val = (cmp != 0); break;
4475 case LT_EXPR: val = (cmp < 0); break;
4476 case GT_EXPR: val = (cmp > 0); break;
4477 case LE_EXPR: val = (cmp <= 0); break;
4478 case GE_EXPR: val = (cmp >= 0); break;
4479 default: done = false;
4480 }
4481 if (done)
4482 {
4483 if (val)
4484 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4485 else
4486 return boolean_false_node;
4487 }
4488 }
4489 /* Likewise if the second comparison is an == comparison. */
4490 else if (code2 == EQ_EXPR)
4491 {
4492 bool done = true;
4493 bool val;
4494 switch (code1)
4495 {
4496 case EQ_EXPR: val = (cmp == 0); break;
4497 case NE_EXPR: val = (cmp != 0); break;
4498 case LT_EXPR: val = (cmp > 0); break;
4499 case GT_EXPR: val = (cmp < 0); break;
4500 case LE_EXPR: val = (cmp >= 0); break;
4501 case GE_EXPR: val = (cmp <= 0); break;
4502 default: done = false;
4503 }
4504 if (done)
4505 {
4506 if (val)
4507 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4508 else
4509 return boolean_false_node;
4510 }
4511 }
4512
4513 /* Same business with inequality tests. */
4514 else if (code1 == NE_EXPR)
4515 {
4516 bool val;
4517 switch (code2)
4518 {
4519 case EQ_EXPR: val = (cmp != 0); break;
4520 case NE_EXPR: val = (cmp == 0); break;
4521 case LT_EXPR: val = (cmp >= 0); break;
4522 case GT_EXPR: val = (cmp <= 0); break;
4523 case LE_EXPR: val = (cmp > 0); break;
4524 case GE_EXPR: val = (cmp < 0); break;
4525 default:
4526 val = false;
4527 }
4528 if (val)
4529 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4530 }
4531 else if (code2 == NE_EXPR)
4532 {
4533 bool val;
4534 switch (code1)
4535 {
4536 case EQ_EXPR: val = (cmp == 0); break;
4537 case NE_EXPR: val = (cmp != 0); break;
4538 case LT_EXPR: val = (cmp <= 0); break;
4539 case GT_EXPR: val = (cmp >= 0); break;
4540 case LE_EXPR: val = (cmp < 0); break;
4541 case GE_EXPR: val = (cmp > 0); break;
4542 default:
4543 val = false;
4544 }
4545 if (val)
4546 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4547 }
4548
4549 /* Choose the more restrictive of two < or <= comparisons. */
4550 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4551 && (code2 == LT_EXPR || code2 == LE_EXPR))
4552 {
4553 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4554 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4555 else
4556 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4557 }
4558
4559 /* Likewise choose the more restrictive of two > or >= comparisons. */
4560 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4561 && (code2 == GT_EXPR || code2 == GE_EXPR))
4562 {
4563 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4564 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4565 else
4566 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4567 }
4568
4569 /* Check for singleton ranges. */
4570 else if (cmp == 0
4571 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4572 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4573 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4574
4575 /* Check for disjoint ranges. */
4576 else if (cmp <= 0
4577 && (code1 == LT_EXPR || code1 == LE_EXPR)
4578 && (code2 == GT_EXPR || code2 == GE_EXPR))
4579 return boolean_false_node;
4580 else if (cmp >= 0
4581 && (code1 == GT_EXPR || code1 == GE_EXPR)
4582 && (code2 == LT_EXPR || code2 == LE_EXPR))
4583 return boolean_false_node;
4584 }
4585
4586 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4587 NAME's definition is a truth value. See if there are any simplifications
4588 that can be done against the NAME's definition. */
4589 if (TREE_CODE (op1a) == SSA_NAME
4590 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4591 && (integer_zerop (op1b) || integer_onep (op1b)))
4592 {
4593 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4594 || (code1 == NE_EXPR && integer_onep (op1b)));
4595 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4596 switch (gimple_code (stmt))
4597 {
4598 case GIMPLE_ASSIGN:
4599 /* Try to simplify by copy-propagating the definition. */
4600 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4601
4602 case GIMPLE_PHI:
4603 /* If every argument to the PHI produces the same result when
4604 ANDed with the second comparison, we win.
4605 Do not do this unless the type is bool since we need a bool
4606 result here anyway. */
4607 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4608 {
4609 tree result = NULL_TREE;
4610 unsigned i;
4611 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4612 {
4613 tree arg = gimple_phi_arg_def (stmt, i);
4614
4615 /* If this PHI has itself as an argument, ignore it.
4616 If all the other args produce the same result,
4617 we're still OK. */
4618 if (arg == gimple_phi_result (stmt))
4619 continue;
4620 else if (TREE_CODE (arg) == INTEGER_CST)
4621 {
4622 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4623 {
4624 if (!result)
4625 result = boolean_false_node;
4626 else if (!integer_zerop (result))
4627 return NULL_TREE;
4628 }
4629 else if (!result)
4630 result = fold_build2 (code2, boolean_type_node,
4631 op2a, op2b);
4632 else if (!same_bool_comparison_p (result,
4633 code2, op2a, op2b))
4634 return NULL_TREE;
4635 }
4636 else if (TREE_CODE (arg) == SSA_NAME
4637 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4638 {
4639 tree temp;
4640 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4641 /* In simple cases we can look through PHI nodes,
4642 but we have to be careful with loops.
4643 See PR49073. */
4644 if (! dom_info_available_p (CDI_DOMINATORS)
4645 || gimple_bb (def_stmt) == gimple_bb (stmt)
4646 || dominated_by_p (CDI_DOMINATORS,
4647 gimple_bb (def_stmt),
4648 gimple_bb (stmt)))
4649 return NULL_TREE;
4650 temp = and_var_with_comparison (arg, invert, code2,
4651 op2a, op2b);
4652 if (!temp)
4653 return NULL_TREE;
4654 else if (!result)
4655 result = temp;
4656 else if (!same_bool_result_p (result, temp))
4657 return NULL_TREE;
4658 }
4659 else
4660 return NULL_TREE;
4661 }
4662 return result;
4663 }
4664
4665 default:
4666 break;
4667 }
4668 }
4669 return NULL_TREE;
4670 }
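/* For illustration (editor's sketch, not part of the original source):
   examples of the constant cases above, for integer x:

     (x == 5) AND (x < 10)   =>   x == 5    (constant satisfies test)
     (x < 5)  AND (x < 10)   =>   x < 5     (more restrictive bound)
     (x <= 3) AND (x >= 7)   =>   false     (disjoint ranges)
     (x <= 5) AND (x >= 5)   =>   x == 5    (singleton range)  */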
4671
4672 /* Try to simplify the AND of two comparisons, specified by
4673 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4674 If this can be simplified to a single expression (without requiring
4675 introducing more SSA variables to hold intermediate values),
4676 return the resulting tree. Otherwise return NULL_TREE.
4677 If the result expression is non-null, it has boolean type. */
4678
4679 tree
4680 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4681 enum tree_code code2, tree op2a, tree op2b)
4682 {
4683 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4684 if (t)
4685 return t;
4686 else
4687 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4688 }
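/* Editor's note (sketch): because and_comparisons_1 only follows SSA
   definitions in its first comparison, the wrapper above retries with
   the operands swapped, so e.g. (x < 10) AND (x == 5) simplifies the
   same way as (x == 5) AND (x < 10).  */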
4689
4690 /* Helper function for or_comparisons_1: try to simplify the OR of the
4691 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4692 If INVERT is true, invert the value of VAR before doing the OR.
4693 Return NULL_TREE if we can't simplify this to a single expression. */
4694
4695 static tree
4696 or_var_with_comparison (tree var, bool invert,
4697 enum tree_code code2, tree op2a, tree op2b)
4698 {
4699 tree t;
4700 gimple *stmt = SSA_NAME_DEF_STMT (var);
4701
4702 /* We can only deal with variables whose definitions are assignments. */
4703 if (!is_gimple_assign (stmt))
4704 return NULL_TREE;
4705
4706 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4707 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4708 Then we only have to consider the simpler non-inverted cases. */
4709 if (invert)
4710 t = and_var_with_comparison_1 (stmt,
4711 invert_tree_comparison (code2, false),
4712 op2a, op2b);
4713 else
4714 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4715 return canonicalize_bool (t, invert);
4716 }
4717
4718 /* Try to simplify the OR of the ssa variable defined by the assignment
4719 STMT with the comparison specified by (OP2A CODE2 OP2B).
4720 Return NULL_TREE if we can't simplify this to a single expression. */
4721
4722 static tree
4723 or_var_with_comparison_1 (gimple *stmt,
4724 enum tree_code code2, tree op2a, tree op2b)
4725 {
4726 tree var = gimple_assign_lhs (stmt);
4727 tree true_test_var = NULL_TREE;
4728 tree false_test_var = NULL_TREE;
4729 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4730
4731 /* Check for identities like (var OR (var != 0)) => true. */
4732 if (TREE_CODE (op2a) == SSA_NAME
4733 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4734 {
4735 if ((code2 == NE_EXPR && integer_zerop (op2b))
4736 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4737 {
4738 true_test_var = op2a;
4739 if (var == true_test_var)
4740 return var;
4741 }
4742 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4743 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4744 {
4745 false_test_var = op2a;
4746 if (var == false_test_var)
4747 return boolean_true_node;
4748 }
4749 }
4750
4751 /* If the definition is a comparison, recurse on it. */
4752 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4753 {
4754 tree t = or_comparisons_1 (innercode,
4755 gimple_assign_rhs1 (stmt),
4756 gimple_assign_rhs2 (stmt),
4757 code2,
4758 op2a,
4759 op2b);
4760 if (t)
4761 return t;
4762 }
4763
4764 /* If the definition is an AND or OR expression, we may be able to
4765 simplify by reassociating. */
4766 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4767 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4768 {
4769 tree inner1 = gimple_assign_rhs1 (stmt);
4770 tree inner2 = gimple_assign_rhs2 (stmt);
4771 gimple *s;
4772 tree t;
4773 tree partial = NULL_TREE;
4774 bool is_or = (innercode == BIT_IOR_EXPR);
4775
4776 /* Check for boolean identities that don't require recursive examination
4777 of inner1/inner2:
4778 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4779 inner1 OR (inner1 AND inner2) => inner1
4780 !inner1 OR (inner1 OR inner2) => true
4781 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4782 */
4783 if (inner1 == true_test_var)
4784 return (is_or ? var : inner1);
4785 else if (inner2 == true_test_var)
4786 return (is_or ? var : inner2);
4787 else if (inner1 == false_test_var)
4788 return (is_or
4789 ? boolean_true_node
4790 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4791 else if (inner2 == false_test_var)
4792 return (is_or
4793 ? boolean_true_node
4794 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4795
4796 /* Next, redistribute/reassociate the OR across the inner tests.
4797 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4798 if (TREE_CODE (inner1) == SSA_NAME
4799 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4800 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4801 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4802 gimple_assign_rhs1 (s),
4803 gimple_assign_rhs2 (s),
4804 code2, op2a, op2b)))
4805 {
4806 /* Handle the OR case, where we are reassociating:
4807 (inner1 OR inner2) OR (op2a code2 op2b)
4808 => (t OR inner2)
4809 If the partial result t is a constant, we win. Otherwise
4810 continue on to try reassociating with the other inner test. */
4811 if (is_or)
4812 {
4813 if (integer_onep (t))
4814 return boolean_true_node;
4815 else if (integer_zerop (t))
4816 return inner2;
4817 }
4818
4819 /* Handle the AND case, where we are redistributing:
4820 (inner1 AND inner2) OR (op2a code2 op2b)
4821 => (t AND (inner2 OR (op2a code op2b))) */
4822 else if (integer_zerop (t))
4823 return boolean_false_node;
4824
4825 /* Save partial result for later. */
4826 partial = t;
4827 }
4828
4829 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4830 if (TREE_CODE (inner2) == SSA_NAME
4831 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4832 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4833 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4834 gimple_assign_rhs1 (s),
4835 gimple_assign_rhs2 (s),
4836 code2, op2a, op2b)))
4837 {
4838 /* Handle the OR case, where we are reassociating:
4839 (inner1 OR inner2) OR (op2a code2 op2b)
4840 => (inner1 OR t)
4841 => (t OR partial) */
4842 if (is_or)
4843 {
4844 if (integer_zerop (t))
4845 return inner1;
4846 else if (integer_onep (t))
4847 return boolean_true_node;
4848 /* If both are the same, we can apply the identity
4849 (x OR x) == x. */
4850 else if (partial && same_bool_result_p (t, partial))
4851 return t;
4852 }
4853
4854 /* Handle the AND case, where we are redistributing:
4855 (inner1 AND inner2) OR (op2a code2 op2b)
4856 => (t AND (inner1 OR (op2a code2 op2b)))
4857 => (t AND partial) */
4858 else
4859 {
4860 if (integer_zerop (t))
4861 return boolean_false_node;
4862 else if (partial)
4863 {
4864 /* We already got a simplification for the other
4865 operand to the redistributed AND expression. The
4866 interesting case is when at least one is true.
4867 Or, if both are the same, we can apply the identity
4868 (x AND x) == x. */
4869 if (integer_onep (partial))
4870 return t;
4871 else if (integer_onep (t))
4872 return partial;
4873 else if (same_bool_result_p (t, partial))
4874 return t;
4875 }
4876 }
4877 }
4878 }
4879 return NULL_TREE;
4880 }
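/* For illustration (editor's sketch, not part of the original source):
   the OR analogue of the reassociation above, with boolean
   v = (x > 0) OR (y > 0):

     v OR (x <= 0)
       => ((x > 0) OR (x <= 0)) OR (y > 0)
       => true  */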
4881
4882 /* Try to simplify the OR of two comparisons defined by
4883 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4884 If this can be done without constructing an intermediate value,
4885 return the resulting tree; otherwise NULL_TREE is returned.
4886 This function is deliberately asymmetric as it recurses on SSA_DEFs
4887 in the first comparison but not the second. */
4888
4889 static tree
4890 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4891 enum tree_code code2, tree op2a, tree op2b)
4892 {
4893 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4894
4895 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4896 if (operand_equal_p (op1a, op2a, 0)
4897 && operand_equal_p (op1b, op2b, 0))
4898 {
4899 /* Result will be either NULL_TREE, or a combined comparison. */
4900 tree t = combine_comparisons (UNKNOWN_LOCATION,
4901 TRUTH_ORIF_EXPR, code1, code2,
4902 truth_type, op1a, op1b);
4903 if (t)
4904 return t;
4905 }
4906
4907 /* Likewise the swapped case of the above. */
4908 if (operand_equal_p (op1a, op2b, 0)
4909 && operand_equal_p (op1b, op2a, 0))
4910 {
4911 /* Result will be either NULL_TREE, or a combined comparison. */
4912 tree t = combine_comparisons (UNKNOWN_LOCATION,
4913 TRUTH_ORIF_EXPR, code1,
4914 swap_tree_comparison (code2),
4915 truth_type, op1a, op1b);
4916 if (t)
4917 return t;
4918 }
4919
4920 /* If both comparisons are of the same value against constants, we might
4921 be able to merge them. */
4922 if (operand_equal_p (op1a, op2a, 0)
4923 && TREE_CODE (op1b) == INTEGER_CST
4924 && TREE_CODE (op2b) == INTEGER_CST)
4925 {
4926 int cmp = tree_int_cst_compare (op1b, op2b);
4927
4928 /* If we have (op1a != op1b), we should either be able to
4929 return that or TRUE, depending on whether the constant op1b
4930 also satisfies the other comparison against op2b. */
4931 if (code1 == NE_EXPR)
4932 {
4933 bool done = true;
4934 bool val;
4935 switch (code2)
4936 {
4937 case EQ_EXPR: val = (cmp == 0); break;
4938 case NE_EXPR: val = (cmp != 0); break;
4939 case LT_EXPR: val = (cmp < 0); break;
4940 case GT_EXPR: val = (cmp > 0); break;
4941 case LE_EXPR: val = (cmp <= 0); break;
4942 case GE_EXPR: val = (cmp >= 0); break;
4943 default: done = false;
4944 }
4945 if (done)
4946 {
4947 if (val)
4948 return boolean_true_node;
4949 else
4950 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4951 }
4952 }
4953 /* Likewise if the second comparison is a != comparison. */
4954 else if (code2 == NE_EXPR)
4955 {
4956 bool done = true;
4957 bool val;
4958 switch (code1)
4959 {
4960 case EQ_EXPR: val = (cmp == 0); break;
4961 case NE_EXPR: val = (cmp != 0); break;
4962 case LT_EXPR: val = (cmp > 0); break;
4963 case GT_EXPR: val = (cmp < 0); break;
4964 case LE_EXPR: val = (cmp >= 0); break;
4965 case GE_EXPR: val = (cmp <= 0); break;
4966 default: done = false;
4967 }
4968 if (done)
4969 {
4970 if (val)
4971 return boolean_true_node;
4972 else
4973 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4974 }
4975 }
4976
4977 /* See if an equality test is redundant with the other comparison. */
4978 else if (code1 == EQ_EXPR)
4979 {
4980 bool val;
4981 switch (code2)
4982 {
4983 case EQ_EXPR: val = (cmp == 0); break;
4984 case NE_EXPR: val = (cmp != 0); break;
4985 case LT_EXPR: val = (cmp < 0); break;
4986 case GT_EXPR: val = (cmp > 0); break;
4987 case LE_EXPR: val = (cmp <= 0); break;
4988 case GE_EXPR: val = (cmp >= 0); break;
4989 default:
4990 val = false;
4991 }
4992 if (val)
4993 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4994 }
4995 else if (code2 == EQ_EXPR)
4996 {
4997 bool val;
4998 switch (code1)
4999 {
5000 case EQ_EXPR: val = (cmp == 0); break;
5001 case NE_EXPR: val = (cmp != 0); break;
5002 case LT_EXPR: val = (cmp > 0); break;
5003 case GT_EXPR: val = (cmp < 0); break;
5004 case LE_EXPR: val = (cmp >= 0); break;
5005 case GE_EXPR: val = (cmp <= 0); break;
5006 default:
5007 val = false;
5008 }
5009 if (val)
5010 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5011 }
5012
5013 /* Choose the less restrictive of two < or <= comparisons. */
5014 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5015 && (code2 == LT_EXPR || code2 == LE_EXPR))
5016 {
5017 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5018 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5019 else
5020 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5021 }
5022
5023 /* Likewise choose the less restrictive of two > or >= comparisons. */
5024 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5025 && (code2 == GT_EXPR || code2 == GE_EXPR))
5026 {
5027 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5028 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5029 else
5030 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5031 }
5032
5033 /* Check for singleton ranges. */
5034 else if (cmp == 0
5035 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5036 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5037 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5038
5039 /* Check for less/greater pairs that don't restrict the range at all. */
5040 else if (cmp >= 0
5041 && (code1 == LT_EXPR || code1 == LE_EXPR)
5042 && (code2 == GT_EXPR || code2 == GE_EXPR))
5043 return boolean_true_node;
5044 else if (cmp <= 0
5045 && (code1 == GT_EXPR || code1 == GE_EXPR)
5046 && (code2 == LT_EXPR || code2 == LE_EXPR))
5047 return boolean_true_node;
5048 }
5049
5050 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5051 NAME's definition is a truth value. See if there are any simplifications
5052 that can be done against the NAME's definition. */
5053 if (TREE_CODE (op1a) == SSA_NAME
5054 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5055 && (integer_zerop (op1b) || integer_onep (op1b)))
5056 {
5057 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5058 || (code1 == NE_EXPR && integer_onep (op1b)));
5059 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5060 switch (gimple_code (stmt))
5061 {
5062 case GIMPLE_ASSIGN:
5063 /* Try to simplify by copy-propagating the definition. */
5064 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5065
5066 case GIMPLE_PHI:
5067 /* If every argument to the PHI produces the same result when
5068 ORed with the second comparison, we win.
5069 Do not do this unless the type is bool since we need a bool
5070 result here anyway. */
5071 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5072 {
5073 tree result = NULL_TREE;
5074 unsigned i;
5075 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5076 {
5077 tree arg = gimple_phi_arg_def (stmt, i);
5078
5079 /* If this PHI has itself as an argument, ignore it.
5080 If all the other args produce the same result,
5081 we're still OK. */
5082 if (arg == gimple_phi_result (stmt))
5083 continue;
5084 else if (TREE_CODE (arg) == INTEGER_CST)
5085 {
5086 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5087 {
5088 if (!result)
5089 result = boolean_true_node;
5090 else if (!integer_onep (result))
5091 return NULL_TREE;
5092 }
5093 else if (!result)
5094 result = fold_build2 (code2, boolean_type_node,
5095 op2a, op2b);
5096 else if (!same_bool_comparison_p (result,
5097 code2, op2a, op2b))
5098 return NULL_TREE;
5099 }
5100 else if (TREE_CODE (arg) == SSA_NAME
5101 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5102 {
5103 tree temp;
5104 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5105 /* In simple cases we can look through PHI nodes,
5106 but we have to be careful with loops.
5107 See PR49073. */
5108 if (! dom_info_available_p (CDI_DOMINATORS)
5109 || gimple_bb (def_stmt) == gimple_bb (stmt)
5110 || dominated_by_p (CDI_DOMINATORS,
5111 gimple_bb (def_stmt),
5112 gimple_bb (stmt)))
5113 return NULL_TREE;
5114 temp = or_var_with_comparison (arg, invert, code2,
5115 op2a, op2b);
5116 if (!temp)
5117 return NULL_TREE;
5118 else if (!result)
5119 result = temp;
5120 else if (!same_bool_result_p (result, temp))
5121 return NULL_TREE;
5122 }
5123 else
5124 return NULL_TREE;
5125 }
5126 return result;
5127 }
5128
5129 default:
5130 break;
5131 }
5132 }
5133 return NULL_TREE;
5134 }
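/* For illustration (editor's sketch, not part of the original source):
   examples of the constant cases above, for integer x:

     (x != 5) OR (x < 10)   =>   true      (tests cover all values)
     (x < 5)  OR (x < 10)   =>   x < 10    (less restrictive bound)
     (x < 5)  OR (x > 5)    =>   x != 5    (singleton gap)
     (x <= 7) OR (x >= 3)   =>   true      (overlapping ranges)  */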
5135
5136 /* Try to simplify the OR of two comparisons, specified by
5137 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5138 If this can be simplified to a single expression (without requiring
5139 introducing more SSA variables to hold intermediate values),
5140 return the resulting tree. Otherwise return NULL_TREE.
5141 If the result expression is non-null, it has boolean type. */
5142
5143 tree
5144 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5145 enum tree_code code2, tree op2a, tree op2b)
5146 {
5147 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5148 if (t)
5149 return t;
5150 else
5151 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5152 }
5153
5154
5155 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5156
5157 Either NULL_TREE, a simplified but non-constant expression,
5158 or a constant is returned.
5159
5160 ??? This should go into a gimple-fold-inline.h file to be eventually
5161 privatized with the single valueize function used in the various TUs
5162 to avoid the indirect function call overhead. */
5163
5164 tree
5165 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5166 tree (*gvalueize) (tree))
5167 {
5168 code_helper rcode;
5169 tree ops[3] = {};
5170 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5171 edges if there are intermediate VARYING defs. For this reason
5172 do not follow SSA edges here even though SCCVN can technically
5173 deal with that just fine. */
5174 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5175 {
5176 tree res = NULL_TREE;
5177 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5178 res = ops[0];
5179 else if (mprts_hook)
5180 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5181 if (res)
5182 {
5183 if (dump_file && dump_flags & TDF_DETAILS)
5184 {
5185 fprintf (dump_file, "Match-and-simplified ");
5186 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5187 fprintf (dump_file, " to ");
5188 print_generic_expr (dump_file, res, 0);
5189 fprintf (dump_file, "\n");
5190 }
5191 return res;
5192 }
5193 }
5194
5195 location_t loc = gimple_location (stmt);
5196 switch (gimple_code (stmt))
5197 {
5198 case GIMPLE_ASSIGN:
5199 {
5200 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5201
5202 switch (get_gimple_rhs_class (subcode))
5203 {
5204 case GIMPLE_SINGLE_RHS:
5205 {
5206 tree rhs = gimple_assign_rhs1 (stmt);
5207 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5208
5209 if (TREE_CODE (rhs) == SSA_NAME)
5210 {
5211 /* If the RHS is an SSA_NAME, return its known constant value,
5212 if any. */
5213 return (*valueize) (rhs);
5214 }
5215 /* Handle propagating invariant addresses into address
5216 operations. */
5217 else if (TREE_CODE (rhs) == ADDR_EXPR
5218 && !is_gimple_min_invariant (rhs))
5219 {
5220 HOST_WIDE_INT offset = 0;
5221 tree base;
5222 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5223 &offset,
5224 valueize);
5225 if (base
5226 && (CONSTANT_CLASS_P (base)
5227 || decl_address_invariant_p (base)))
5228 return build_invariant_address (TREE_TYPE (rhs),
5229 base, offset);
5230 }
5231 else if (TREE_CODE (rhs) == CONSTRUCTOR
5232 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5233 && (CONSTRUCTOR_NELTS (rhs)
5234 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5235 {
5236 unsigned i;
5237 tree val, *vec;
5238
5239 vec = XALLOCAVEC (tree,
5240 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5241 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5242 {
5243 val = (*valueize) (val);
5244 if (TREE_CODE (val) == INTEGER_CST
5245 || TREE_CODE (val) == REAL_CST
5246 || TREE_CODE (val) == FIXED_CST)
5247 vec[i] = val;
5248 else
5249 return NULL_TREE;
5250 }
5251
5252 return build_vector (TREE_TYPE (rhs), vec);
5253 }
5254 if (subcode == OBJ_TYPE_REF)
5255 {
5256 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5257 /* If callee is constant, we can fold away the wrapper. */
5258 if (is_gimple_min_invariant (val))
5259 return val;
5260 }
5261
5262 if (kind == tcc_reference)
5263 {
5264 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5265 || TREE_CODE (rhs) == REALPART_EXPR
5266 || TREE_CODE (rhs) == IMAGPART_EXPR)
5267 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5268 {
5269 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5270 return fold_unary_loc (EXPR_LOCATION (rhs),
5271 TREE_CODE (rhs),
5272 TREE_TYPE (rhs), val);
5273 }
5274 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5275 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5276 {
5277 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5278 return fold_ternary_loc (EXPR_LOCATION (rhs),
5279 TREE_CODE (rhs),
5280 TREE_TYPE (rhs), val,
5281 TREE_OPERAND (rhs, 1),
5282 TREE_OPERAND (rhs, 2));
5283 }
5284 else if (TREE_CODE (rhs) == MEM_REF
5285 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5286 {
5287 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5288 if (TREE_CODE (val) == ADDR_EXPR
5289 && is_gimple_min_invariant (val))
5290 {
5291 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5292 unshare_expr (val),
5293 TREE_OPERAND (rhs, 1));
5294 if (tem)
5295 rhs = tem;
5296 }
5297 }
5298 return fold_const_aggregate_ref_1 (rhs, valueize);
5299 }
5300 else if (kind == tcc_declaration)
5301 return get_symbol_constant_value (rhs);
5302 return rhs;
5303 }
5304
5305 case GIMPLE_UNARY_RHS:
5306 return NULL_TREE;
5307
5308 case GIMPLE_BINARY_RHS:
5309 /* Translate &x + CST into an invariant form suitable for
5310 further propagation. */
5311 if (subcode == POINTER_PLUS_EXPR)
5312 {
5313 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5314 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5315 if (TREE_CODE (op0) == ADDR_EXPR
5316 && TREE_CODE (op1) == INTEGER_CST)
5317 {
5318 tree off = fold_convert (ptr_type_node, op1);
5319 return build_fold_addr_expr_loc
5320 (loc,
5321 fold_build2 (MEM_REF,
5322 TREE_TYPE (TREE_TYPE (op0)),
5323 unshare_expr (op0), off));
5324 }
5325 }
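/* For illustration (editor's sketch, not part of the original source):
   with valueized op0 = &a and op1 = 4, the POINTER_PLUS_EXPR case
   above produces the invariant address printed in GIMPLE as

     &MEM[(T *)&a + 4B]

   where T is the pointed-to type, a form later propagation can
   consume directly.  */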
5326 /* Canonicalize bool != 0 and bool == 0 appearing after
5327 valueization. While gimple_simplify handles this,
5328 it can get confused by the ~X == 1 -> X == 0 transform,
5329 which we can't reduce to an SSA name or a constant
5330 (and we have no way to tell gimple_simplify to not
5331 consider those transforms in the first place). */
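/* For instance (a sketch): with a boolean-typed b_2, a statement such as
   _1 = b_2 != 0 valueizes and folds directly to b_2, and likewise
   _1 = b_2 == 1 yields b_2. */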
5332 else if (subcode == EQ_EXPR
5333 || subcode == NE_EXPR)
5334 {
5335 tree lhs = gimple_assign_lhs (stmt);
5336 tree op0 = gimple_assign_rhs1 (stmt);
5337 if (useless_type_conversion_p (TREE_TYPE (lhs),
5338 TREE_TYPE (op0)))
5339 {
5340 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5341 op0 = (*valueize) (op0);
5342 if (TREE_CODE (op0) == INTEGER_CST)
5343 std::swap (op0, op1);
5344 if (TREE_CODE (op1) == INTEGER_CST
5345 && ((subcode == NE_EXPR && integer_zerop (op1))
5346 || (subcode == EQ_EXPR && integer_onep (op1))))
5347 return op0;
5348 }
5349 }
5350 return NULL_TREE;
5351
5352 case GIMPLE_TERNARY_RHS:
5353 {
5354 /* Handle ternary operators that can appear in GIMPLE form. */
5355 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5356 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5357 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5358 return fold_ternary_loc (loc, subcode,
5359 gimple_expr_type (stmt), op0, op1, op2);
5360 }
5361
5362 default:
5363 gcc_unreachable ();
5364 }
5365 }
5366
5367 case GIMPLE_CALL:
5368 {
5369 tree fn;
5370 gcall *call_stmt = as_a <gcall *> (stmt);
5371
5372 if (gimple_call_internal_p (stmt))
5373 {
5374 enum tree_code subcode = ERROR_MARK;
5375 switch (gimple_call_internal_fn (stmt))
5376 {
5377 case IFN_UBSAN_CHECK_ADD:
5378 subcode = PLUS_EXPR;
5379 break;
5380 case IFN_UBSAN_CHECK_SUB:
5381 subcode = MINUS_EXPR;
5382 break;
5383 case IFN_UBSAN_CHECK_MUL:
5384 subcode = MULT_EXPR;
5385 break;
5386 case IFN_BUILTIN_EXPECT:
5387 {
5388 tree arg0 = gimple_call_arg (stmt, 0);
5389 tree op0 = (*valueize) (arg0);
5390 if (TREE_CODE (op0) == INTEGER_CST)
5391 return op0;
5392 return NULL_TREE;
5393 }
5394 default:
5395 return NULL_TREE;
5396 }
5397 tree arg0 = gimple_call_arg (stmt, 0);
5398 tree arg1 = gimple_call_arg (stmt, 1);
5399 tree op0 = (*valueize) (arg0);
5400 tree op1 = (*valueize) (arg1);
5401
5402 if (TREE_CODE (op0) != INTEGER_CST
5403 || TREE_CODE (op1) != INTEGER_CST)
5404 {
5405 switch (subcode)
5406 {
5407 case MULT_EXPR:
5408 /* x * 0 = 0 * x = 0 without overflow. */
5409 if (integer_zerop (op0) || integer_zerop (op1))
5410 return build_zero_cst (TREE_TYPE (arg0));
5411 break;
5412 case MINUS_EXPR:
5413 /* y - y = 0 without overflow. */
5414 if (operand_equal_p (op0, op1, 0))
5415 return build_zero_cst (TREE_TYPE (arg0));
5416 break;
5417 default:
5418 break;
5419 }
5420 }
5421 tree res
5422 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5423 if (res
5424 && TREE_CODE (res) == INTEGER_CST
5425 && !TREE_OVERFLOW (res))
5426 return res;
5427 return NULL_TREE;
5428 }
5429
5430 fn = (*valueize) (gimple_call_fn (stmt));
5431 if (TREE_CODE (fn) == ADDR_EXPR
5432 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5433 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5434 && gimple_builtin_call_types_compatible_p (stmt,
5435 TREE_OPERAND (fn, 0)))
5436 {
5437 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5438 tree retval;
5439 unsigned i;
5440 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5441 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5442 retval = fold_builtin_call_array (loc,
5443 gimple_call_return_type (call_stmt),
5444 fn, gimple_call_num_args (stmt), args);
5445 if (retval)
5446 {
5447 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5448 STRIP_NOPS (retval);
5449 retval = fold_convert (gimple_call_return_type (call_stmt),
5450 retval);
5451 }
5452 return retval;
5453 }
5454 return NULL_TREE;
5455 }
5456
5457 default:
5458 return NULL_TREE;
5459 }
5460 }
5461
5462 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5463 Returns NULL_TREE if folding to a constant is not possible, otherwise
5464 returns a constant according to is_gimple_min_invariant. */
5465
5466 tree
5467 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5468 {
5469 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5470 if (res && is_gimple_min_invariant (res))
5471 return res;
5472 return NULL_TREE;
5473 }
5474
5475
5476 /* The following set of functions folds references using
5477 their constant initializers. */
5478
5479 /* See if we can find the constructor defining the value of BASE.
5480 When we know the constructor with a constant offset (such as when
5481 BASE is array[40] and we know the constructor of array), then
5482 BIT_OFFSET is adjusted accordingly.
5483
5484 As a special case, return error_mark_node when the constructor
5485 is not explicitly available but is known to be zero,
5486 such as 'static const int a;'. */
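/* A sketch of the special case: 'static const int a;' has no explicit
   DECL_INITIAL, but the language guarantees zero-initialization, so
   error_mark_node is returned and callers may fold reads from it to 0. */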
5487 static tree
5488 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5489 tree (*valueize)(tree))
5490 {
5491 HOST_WIDE_INT bit_offset2, size, max_size;
5492 bool reverse;
5493
5494 if (TREE_CODE (base) == MEM_REF)
5495 {
5496 if (!integer_zerop (TREE_OPERAND (base, 1)))
5497 {
5498 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5499 return NULL_TREE;
5500 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5501 * BITS_PER_UNIT);
5502 }
5503
5504 if (valueize
5505 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5506 base = valueize (TREE_OPERAND (base, 0));
5507 if (!base || TREE_CODE (base) != ADDR_EXPR)
5508 return NULL_TREE;
5509 base = TREE_OPERAND (base, 0);
5510 }
5511
5512 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5513 DECL_INITIAL. If BASE is a nested reference into another
5514 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5515 the inner reference. */
5516 switch (TREE_CODE (base))
5517 {
5518 case VAR_DECL:
5519 case CONST_DECL:
5520 {
5521 tree init = ctor_for_folding (base);
5522
5523 /* Our semantics are the exact opposite of ctor_for_folding's:
5524 NULL means unknown, while error_mark_node means 0. */
5525 if (init == error_mark_node)
5526 return NULL_TREE;
5527 if (!init)
5528 return error_mark_node;
5529 return init;
5530 }
5531
5532 case ARRAY_REF:
5533 case COMPONENT_REF:
5534 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5535 &reverse);
5536 if (max_size == -1 || size != max_size)
5537 return NULL_TREE;
5538 *bit_offset += bit_offset2;
5539 return get_base_constructor (base, bit_offset, valueize);
5540
5541 case STRING_CST:
5542 case CONSTRUCTOR:
5543 return base;
5544
5545 default:
5546 return NULL_TREE;
5547 }
5548 }
5549
5550 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and
5551 size SIZE to the memory at bit OFFSET. */
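/* For example (an illustrative sketch): for
   static const int a[4] = {1, 2, 3};
   a 32-bit read at bit OFFSET 32 yields access_index == 1 and folds to
   the INTEGER_CST 2, while a read at bit OFFSET 96 finds no explicit
   element and folds to zero per the comment at the end of the function. */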
5552
5553 static tree
5554 fold_array_ctor_reference (tree type, tree ctor,
5555 unsigned HOST_WIDE_INT offset,
5556 unsigned HOST_WIDE_INT size,
5557 tree from_decl)
5558 {
5559 offset_int low_bound;
5560 offset_int elt_size;
5561 offset_int access_index;
5562 tree domain_type = NULL_TREE;
5563 HOST_WIDE_INT inner_offset;
5564
5565 /* Compute low bound and elt size. */
5566 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5567 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5568 if (domain_type && TYPE_MIN_VALUE (domain_type))
5569 {
5570 /* Static constructors for variably sized objects make no sense. */
5571 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5572 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5573 }
5574 else
5575 low_bound = 0;
5576 /* Static constructors for variably sized objects make no sense. */
5577 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5578 == INTEGER_CST);
5579 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5580
5581 /* We can handle only constant-sized accesses that are known not to
5582 be larger than the size of the array element. */
5583 if (!TYPE_SIZE_UNIT (type)
5584 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5585 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5586 || elt_size == 0)
5587 return NULL_TREE;
5588
5589 /* Compute the array index we look for. */
5590 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5591 elt_size);
5592 access_index += low_bound;
5593
5594 /* And offset within the access. */
5595 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5596
5597 /* See if the array element is large enough to span the whole access. We
5598 do not care to fold accesses spanning multiple array elements. */
5599 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5600 return NULL_TREE;
5601 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5602 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5603
5604 /* When memory is not explicitly mentioned in the constructor,
5605 it is 0 (or out of range). */
5606 return build_zero_cst (type);
5607 }
5608
5609 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
5610 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
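/* For example (an illustrative sketch): with
   struct S { int a; int b; };
   static const struct S s = {1, 2};
   a 32-bit read at bit OFFSET 32 overlaps exactly the range [32, 64)
   occupied by field 'b' and folds to the INTEGER_CST 2. */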
5611
5612 static tree
5613 fold_nonarray_ctor_reference (tree type, tree ctor,
5614 unsigned HOST_WIDE_INT offset,
5615 unsigned HOST_WIDE_INT size,
5616 tree from_decl)
5617 {
5618 unsigned HOST_WIDE_INT cnt;
5619 tree cfield, cval;
5620
5621 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5622 cval)
5623 {
5624 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5625 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5626 tree field_size = DECL_SIZE (cfield);
5627 offset_int bitoffset;
5628 offset_int bitoffset_end, access_end;
5629
5630 /* Variable sized objects in static constructors make no sense,
5631 but field_size can be NULL for flexible array members. */
5632 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5633 && TREE_CODE (byte_offset) == INTEGER_CST
5634 && (field_size != NULL_TREE
5635 ? TREE_CODE (field_size) == INTEGER_CST
5636 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5637
5638 /* Compute bit offset of the field. */
5639 bitoffset = (wi::to_offset (field_offset)
5640 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5641 /* Compute bit offset where the field ends. */
5642 if (field_size != NULL_TREE)
5643 bitoffset_end = bitoffset + wi::to_offset (field_size);
5644 else
5645 bitoffset_end = 0;
5646
5647 access_end = offset_int (offset) + size;
5648
5649 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5650 [BITOFFSET, BITOFFSET_END)? */
5651 if (wi::cmps (access_end, bitoffset) > 0
5652 && (field_size == NULL_TREE
5653 || wi::lts_p (offset, bitoffset_end)))
5654 {
5655 offset_int inner_offset = offset_int (offset) - bitoffset;
5656 /* We do have overlap. Now see if the field is large enough to
5657 cover the access. Give up on accesses spanning multiple
5658 fields. */
5659 if (wi::cmps (access_end, bitoffset_end) > 0)
5660 return NULL_TREE;
5661 if (offset < bitoffset)
5662 return NULL_TREE;
5663 return fold_ctor_reference (type, cval,
5664 inner_offset.to_uhwi (), size,
5665 from_decl);
5666 }
5667 }
5668 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5669 return build_zero_cst (type);
5670 }
5671
5672 /* CTOR is a value initializing memory. Fold a reference of type TYPE and
5673 size SIZE to the memory at bit OFFSET. */
5674
5675 tree
5676 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5677 unsigned HOST_WIDE_INT size, tree from_decl)
5678 {
5679 tree ret;
5680
5681 /* We found the field with an exact match. */
5682 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5683 && !offset)
5684 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5685
5686 /* We are at the end of the walk; see if we can view-convert the
5687 result. */
5688 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5689 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5690 && !compare_tree_int (TYPE_SIZE (type), size)
5691 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5692 {
5693 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5694 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5695 if (ret)
5696 STRIP_USELESS_TYPE_CONVERSION (ret);
5697 return ret;
5698 }
5699 /* For constants and byte-aligned/sized reads try to go through
5700 native_encode/interpret. */
5701 if (CONSTANT_CLASS_P (ctor)
5702 && BITS_PER_UNIT == 8
5703 && offset % BITS_PER_UNIT == 0
5704 && size % BITS_PER_UNIT == 0
5705 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5706 {
5707 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5708 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5709 offset / BITS_PER_UNIT);
5710 if (len > 0)
5711 return native_interpret_expr (type, buf, len);
5712 }
5713 if (TREE_CODE (ctor) == CONSTRUCTOR)
5714 {
5715
5716 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5717 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5718 return fold_array_ctor_reference (type, ctor, offset, size,
5719 from_decl);
5720 else
5721 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5722 from_decl);
5723 }
5724
5725 return NULL_TREE;
5726 }
5727
5728 /* Return the tree representing the element referenced by T if T is an
5729 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5730 names using VALUEIZE. Return NULL_TREE otherwise. */
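/* For instance (a sketch): given 'static const int a[2] = {7, 9};' and
   the reference a[i_1], where VALUEIZE maps i_1 to the constant 1, this
   returns the INTEGER_CST 9. */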
5731
5732 tree
5733 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5734 {
5735 tree ctor, idx, base;
5736 HOST_WIDE_INT offset, size, max_size;
5737 tree tem;
5738 bool reverse;
5739
5740 if (TREE_THIS_VOLATILE (t))
5741 return NULL_TREE;
5742
5743 if (DECL_P (t))
5744 return get_symbol_constant_value (t);
5745
5746 tem = fold_read_from_constant_string (t);
5747 if (tem)
5748 return tem;
5749
5750 switch (TREE_CODE (t))
5751 {
5752 case ARRAY_REF:
5753 case ARRAY_RANGE_REF:
5754 /* Constant indexes are handled well by get_base_constructor.
5755 We only need to special-case variable offsets.
5756 FIXME: This code can't handle nested references with variable indexes
5757 (they will be handled only by iteration of ccp). Perhaps we can bring
5758 get_ref_base_and_extent here and make it use a valueize callback. */
5759 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5760 && valueize
5761 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5762 && TREE_CODE (idx) == INTEGER_CST)
5763 {
5764 tree low_bound, unit_size;
5765
5766 /* If the resulting bit-offset is constant, track it. */
5767 if ((low_bound = array_ref_low_bound (t),
5768 TREE_CODE (low_bound) == INTEGER_CST)
5769 && (unit_size = array_ref_element_size (t),
5770 tree_fits_uhwi_p (unit_size)))
5771 {
5772 offset_int woffset
5773 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5774 TYPE_PRECISION (TREE_TYPE (idx)));
5775
5776 if (wi::fits_shwi_p (woffset))
5777 {
5778 offset = woffset.to_shwi ();
5779 /* TODO: This code seems wrong; it should multiply first and
5780 then check whether the result fits. */
5781 offset *= tree_to_uhwi (unit_size);
5782 offset *= BITS_PER_UNIT;
5783
5784 base = TREE_OPERAND (t, 0);
5785 ctor = get_base_constructor (base, &offset, valueize);
5786 /* Empty constructor. Always fold to 0. */
5787 if (ctor == error_mark_node)
5788 return build_zero_cst (TREE_TYPE (t));
5789 /* Out-of-bounds array access. The value is undefined,
5790 but don't fold. */
5791 if (offset < 0)
5792 return NULL_TREE;
5793 /* We cannot determine the ctor. */
5794 if (!ctor)
5795 return NULL_TREE;
5796 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5797 tree_to_uhwi (unit_size)
5798 * BITS_PER_UNIT,
5799 base);
5800 }
5801 }
5802 }
5803 /* Fallthru. */
5804
5805 case COMPONENT_REF:
5806 case BIT_FIELD_REF:
5807 case TARGET_MEM_REF:
5808 case MEM_REF:
5809 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5810 ctor = get_base_constructor (base, &offset, valueize);
5811
5812 /* Empty constructor. Always fold to 0. */
5813 if (ctor == error_mark_node)
5814 return build_zero_cst (TREE_TYPE (t));
5815 /* We do not know the precise address. */
5816 if (max_size == -1 || max_size != size)
5817 return NULL_TREE;
5818 /* We cannot determine the ctor. */
5819 if (!ctor)
5820 return NULL_TREE;
5821
5822 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5823 if (offset < 0)
5824 return NULL_TREE;
5825
5826 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5827 base);
5828
5829 case REALPART_EXPR:
5830 case IMAGPART_EXPR:
5831 {
5832 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5833 if (c && TREE_CODE (c) == COMPLEX_CST)
5834 return fold_build1_loc (EXPR_LOCATION (t),
5835 TREE_CODE (t), TREE_TYPE (t), c);
5836 break;
5837 }
5838
5839 default:
5840 break;
5841 }
5842
5843 return NULL_TREE;
5844 }
5845
5846 tree
5847 fold_const_aggregate_ref (tree t)
5848 {
5849 return fold_const_aggregate_ref_1 (t, NULL);
5850 }
5851
5852 /* Look up the virtual method with index TOKEN in the virtual table V
5853 at OFFSET.
5854 Set CAN_REFER, if non-NULL, to false if the method is not referable
5855 or if the virtual table is ill-formed (such as rewritten by a symbol
5856 not produced by C++); if CAN_REFER is NULL, just return NULL in that case. */
5857
5858 tree
5859 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5860 tree v,
5861 unsigned HOST_WIDE_INT offset,
5862 bool *can_refer)
5863 {
5864 tree vtable = v, init, fn;
5865 unsigned HOST_WIDE_INT size;
5866 unsigned HOST_WIDE_INT elt_size, access_index;
5867 tree domain_type;
5868
5869 if (can_refer)
5870 *can_refer = true;
5871
5872 /* First of all double-check that we have a virtual table. */
5873 if (TREE_CODE (v) != VAR_DECL
5874 || !DECL_VIRTUAL_P (v))
5875 {
5876 /* Pass down that we lost track of the target. */
5877 if (can_refer)
5878 *can_refer = false;
5879 return NULL_TREE;
5880 }
5881
5882 init = ctor_for_folding (v);
5883
5884 /* The virtual tables should always be born with constructors
5885 and we should always assume that they are available for
5886 folding. At the moment we do not stream them in all cases,
5887 but it should never happen that the ctor seems unreachable. */
5888 gcc_assert (init);
5889 if (init == error_mark_node)
5890 {
5891 gcc_assert (in_lto_p);
5892 /* Pass down that we lost track of the target. */
5893 if (can_refer)
5894 *can_refer = false;
5895 return NULL_TREE;
5896 }
5897 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5898 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5899 offset *= BITS_PER_UNIT;
5900 offset += token * size;
5901
5902 /* Look up the value in the constructor, which is assumed to be an array.
5903 This is equivalent to
5904 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5905 offset, size, NULL);
5906 but in constant time. We expect that the frontend produced a simple
5907 array without indexed initializers. */
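/* For instance (a sketch): with 8-byte vtable slots (size == 64 bits),
   OFFSET == 16 and TOKEN == 3, we get offset == 16*8 + 3*64 bits and
   access_index == 5, i.e. the sixth element of the constructor. */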
5908
5909 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5910 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5911 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5912 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5913
5914 access_index = offset / BITS_PER_UNIT / elt_size;
5915 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5916
5917 /* This code assumes that there are no
5918 indexed fields produced by the C++ FE, so we can directly index the array. */
5919 if (access_index < CONSTRUCTOR_NELTS (init))
5920 {
5921 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5922 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5923 STRIP_NOPS (fn);
5924 }
5925 else
5926 fn = NULL;
5927
5928 /* For a type-inconsistent program we may end up looking up a virtual method
5929 in a virtual table that does not contain TOKEN entries. We may overrun
5930 the virtual table and pick up a constant or RTTI info pointer.
5931 In any case the call is undefined. */
5932 if (!fn
5933 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5934 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5935 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5936 else
5937 {
5938 fn = TREE_OPERAND (fn, 0);
5939
5940 /* When the cgraph node is missing and the function is not public, we cannot
5941 devirtualize. This can happen in WHOPR when the actual method
5942 ends up in another partition, because we found the devirtualization
5943 possibility too late. */
5944 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5945 {
5946 if (can_refer)
5947 {
5948 *can_refer = false;
5949 return fn;
5950 }
5951 return NULL_TREE;
5952 }
5953 }
5954
5955 /* Make sure we create a cgraph node for functions we'll reference.
5956 They can be non-existent if the reference comes from an entry
5957 of an external vtable, for example. */
5958 cgraph_node::get_create (fn);
5959
5960 return fn;
5961 }
5962
5963 /* Return a declaration of the function which an OBJ_TYPE_REF references. TOKEN
5964 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5965 KNOWN_BINFO carries the binfo describing the true type of
5966 OBJ_TYPE_REF_OBJECT(REF).
5967 Set CAN_REFER, if non-NULL, to false if the method is not referable
5968 or if the virtual table is ill-formed (such as rewritten by a symbol
5969 not produced by C++); if CAN_REFER is NULL, just return NULL in that case. */
5970
5971 tree
5972 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5973 bool *can_refer)
5974 {
5975 unsigned HOST_WIDE_INT offset;
5976 tree v;
5977
5978 v = BINFO_VTABLE (known_binfo);
5979 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5980 if (!v)
5981 return NULL_TREE;
5982
5983 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5984 {
5985 if (can_refer)
5986 *can_refer = false;
5987 return NULL_TREE;
5988 }
5989 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5990 }
5991
5992 /* Given a pointer value T, return a simplified version of an
5993 indirection through T, or NULL_TREE if no simplification is
5994 possible. The resulting type may differ from the pointed-to
5995 type, but it is still compatible with it from the langhooks
5996 point of view. */
5997
5998 tree
5999 gimple_fold_indirect_ref (tree t)
6000 {
6001 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6002 tree sub = t;
6003 tree subtype;
6004
6005 STRIP_NOPS (sub);
6006 subtype = TREE_TYPE (sub);
6007 if (!POINTER_TYPE_P (subtype))
6008 return NULL_TREE;
6009
6010 if (TREE_CODE (sub) == ADDR_EXPR)
6011 {
6012 tree op = TREE_OPERAND (sub, 0);
6013 tree optype = TREE_TYPE (op);
6014 /* *&p => p */
6015 if (useless_type_conversion_p (type, optype))
6016 return op;
6017
6018 /* *(foo *)&fooarray => fooarray[0] */
6019 if (TREE_CODE (optype) == ARRAY_TYPE
6020 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6021 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6022 {
6023 tree type_domain = TYPE_DOMAIN (optype);
6024 tree min_val = size_zero_node;
6025 if (type_domain && TYPE_MIN_VALUE (type_domain))
6026 min_val = TYPE_MIN_VALUE (type_domain);
6027 if (TREE_CODE (min_val) == INTEGER_CST)
6028 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6029 }
6030 /* *(foo *)&complexfoo => __real__ complexfoo */
6031 else if (TREE_CODE (optype) == COMPLEX_TYPE
6032 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6033 return fold_build1 (REALPART_EXPR, type, op);
6034 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6035 else if (TREE_CODE (optype) == VECTOR_TYPE
6036 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6037 {
6038 tree part_width = TYPE_SIZE (type);
6039 tree index = bitsize_int (0);
6040 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6041 }
6042 }
6043
6044 /* *(p + CST) -> ... */
6045 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6046 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6047 {
6048 tree addr = TREE_OPERAND (sub, 0);
6049 tree off = TREE_OPERAND (sub, 1);
6050 tree addrtype;
6051
6052 STRIP_NOPS (addr);
6053 addrtype = TREE_TYPE (addr);
6054
6055 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6056 if (TREE_CODE (addr) == ADDR_EXPR
6057 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6058 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6059 && tree_fits_uhwi_p (off))
6060 {
6061 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6062 tree part_width = TYPE_SIZE (type);
6063 unsigned HOST_WIDE_INT part_widthi
6064 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6065 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6066 tree index = bitsize_int (indexi);
6067 if (offset / part_widthi
6068 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6069 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6070 part_width, index);
6071 }
6072
6073 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6074 if (TREE_CODE (addr) == ADDR_EXPR
6075 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6076 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6077 {
6078 tree size = TYPE_SIZE_UNIT (type);
6079 if (tree_int_cst_equal (size, off))
6080 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6081 }
6082
6083 /* *(p + CST) -> MEM_REF <p, CST>. */
6084 if (TREE_CODE (addr) != ADDR_EXPR
6085 || DECL_P (TREE_OPERAND (addr, 0)))
6086 return fold_build2 (MEM_REF, type,
6087 addr,
6088 wide_int_to_tree (ptype, off));
6089 }
6090
6091 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6092 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6094 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6095 {
6096 tree type_domain;
6097 tree min_val = size_zero_node;
6098 tree osub = sub;
6099 sub = gimple_fold_indirect_ref (sub);
6100 if (! sub)
6101 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6102 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6103 if (type_domain && TYPE_MIN_VALUE (type_domain))
6104 min_val = TYPE_MIN_VALUE (type_domain);
6105 if (TREE_CODE (min_val) == INTEGER_CST)
6106 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6107 }
6108
6109 return NULL_TREE;
6110 }
6111
6112 /* Return true if CODE is an operation that, when operating on signed
6113 integer types, involves undefined behavior on overflow and can be
6114 expressed with unsigned arithmetic instead. */
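/* For example (a sketch): signed 'a + b' has undefined behavior on
   overflow, whereas '(int) ((unsigned) a + (unsigned) b)' computes the
   same wrapped result using fully defined unsigned arithmetic. */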
6115
6116 bool
6117 arith_code_with_undefined_signed_overflow (tree_code code)
6118 {
6119 switch (code)
6120 {
6121 case PLUS_EXPR:
6122 case MINUS_EXPR:
6123 case MULT_EXPR:
6124 case NEGATE_EXPR:
6125 case POINTER_PLUS_EXPR:
6126 return true;
6127 default:
6128 return false;
6129 }
6130 }
6131
6132 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6133 operation that can be transformed to unsigned arithmetic by converting
6134 its operands, carrying out the operation in the corresponding unsigned
6135 type and converting the result back to the original type.
6136
6137 Returns a sequence of statements that replace STMT and also contain
6138 a modified form of STMT itself. */
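/* For example (an illustrative sketch with made-up SSA names):

     x_1 = y_2 + z_3;   signed int PLUS_EXPR, overflow is undefined

   is turned into the returned sequence

     _4 = (unsigned int) y_2;
     _5 = (unsigned int) z_3;
     x_6 = _4 + _5;     the modified STMT, now in unsigned arithmetic
     x_1 = (int) x_6;   conversion back to the original LHS type  */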
6139
6140 gimple_seq
6141 rewrite_to_defined_overflow (gimple *stmt)
6142 {
6143 if (dump_file && (dump_flags & TDF_DETAILS))
6144 {
6145 fprintf (dump_file, "rewriting stmt with undefined signed "
6146 "overflow ");
6147 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6148 }
6149
6150 tree lhs = gimple_assign_lhs (stmt);
6151 tree type = unsigned_type_for (TREE_TYPE (lhs));
6152 gimple_seq stmts = NULL;
6153 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6154 {
6155 tree op = gimple_op (stmt, i);
6156 op = gimple_convert (&stmts, type, op);
6157 gimple_set_op (stmt, i, op);
6158 }
6159 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6160 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6161 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6162 gimple_seq_add_stmt (&stmts, stmt);
6163 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6164 gimple_seq_add_stmt (&stmts, cvt);
6165
6166 return stmts;
6167 }
6168
6169
6170 /* The valueization hook we use for the gimple_build API simplification.
6171 This makes us match fold_buildN behavior by only combining with
6172 statements in the sequence(s) we are currently building. */
6173
6174 static tree
6175 gimple_build_valueize (tree op)
6176 {
6177 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6178 return op;
6179 return NULL_TREE;
6180 }
6181
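/* A typical use of the gimple_build API below (a sketch; the iterator,
   operands and types are illustrative):

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
     tree neg = gimple_build (&stmts, loc, NEGATE_EXPR, type, sum);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   Each call first tries gimple_simplify against statements already in
   STMTS, so a fully simplified result may emit no new statement at all. */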
6182 /* Build the expression CODE OP0 of type TYPE with location LOC,
6183 simplifying it first if possible. Returns the built
6184 expression value and appends statements possibly defining it
6185 to SEQ. */
6186
6187 tree
6188 gimple_build (gimple_seq *seq, location_t loc,
6189 enum tree_code code, tree type, tree op0)
6190 {
6191 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6192 if (!res)
6193 {
6194 if (gimple_in_ssa_p (cfun))
6195 res = make_ssa_name (type);
6196 else
6197 res = create_tmp_reg (type);
6198 gimple *stmt;
6199 if (code == REALPART_EXPR
6200 || code == IMAGPART_EXPR
6201 || code == VIEW_CONVERT_EXPR)
6202 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6203 else
6204 stmt = gimple_build_assign (res, code, op0);
6205 gimple_set_location (stmt, loc);
6206 gimple_seq_add_stmt_without_update (seq, stmt);
6207 }
6208 return res;
6209 }
6210
6211 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6212 simplifying it first if possible. Returns the built
6213 expression value and appends statements possibly defining it
6214 to SEQ. */
6215
6216 tree
6217 gimple_build (gimple_seq *seq, location_t loc,
6218 enum tree_code code, tree type, tree op0, tree op1)
6219 {
6220 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6221 if (!res)
6222 {
6223 if (gimple_in_ssa_p (cfun))
6224 res = make_ssa_name (type);
6225 else
6226 res = create_tmp_reg (type);
6227 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6228 gimple_set_location (stmt, loc);
6229 gimple_seq_add_stmt_without_update (seq, stmt);
6230 }
6231 return res;
6232 }
6233
6234 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6235 simplifying it first if possible. Returns the built
6236 expression value and appends statements possibly defining it
6237 to SEQ. */
6238
6239 tree
6240 gimple_build (gimple_seq *seq, location_t loc,
6241 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6242 {
6243 tree res = gimple_simplify (code, type, op0, op1, op2,
6244 seq, gimple_build_valueize);
6245 if (!res)
6246 {
6247 if (gimple_in_ssa_p (cfun))
6248 res = make_ssa_name (type);
6249 else
6250 res = create_tmp_reg (type);
6251 gimple *stmt;
6252 if (code == BIT_FIELD_REF)
6253 stmt = gimple_build_assign (res, code,
6254 build3 (code, type, op0, op1, op2));
6255 else
6256 stmt = gimple_build_assign (res, code, op0, op1, op2);
6257 gimple_set_location (stmt, loc);
6258 gimple_seq_add_stmt_without_update (seq, stmt);
6259 }
6260 return res;
6261 }
6262
6263 /* Build the call FN (ARG0) with a result of type TYPE
6264 (or no result if TYPE is void) with location LOC,
6265 simplifying it first if possible. Returns the built
6266 expression value (or NULL_TREE if TYPE is void) and appends
6267 statements possibly defining it to SEQ. */
6268
6269 tree
6270 gimple_build (gimple_seq *seq, location_t loc,
6271 enum built_in_function fn, tree type, tree arg0)
6272 {
6273 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6274 if (!res)
6275 {
6276 tree decl = builtin_decl_implicit (fn);
6277 gimple *stmt = gimple_build_call (decl, 1, arg0);
6278 if (!VOID_TYPE_P (type))
6279 {
6280 if (gimple_in_ssa_p (cfun))
6281 res = make_ssa_name (type);
6282 else
6283 res = create_tmp_reg (type);
6284 gimple_call_set_lhs (stmt, res);
6285 }
6286 gimple_set_location (stmt, loc);
6287 gimple_seq_add_stmt_without_update (seq, stmt);
6288 }
6289 return res;
6290 }
6291
6292 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6293 (or no result if TYPE is void) with location LOC,
6294 simplifying it first if possible. Returns the built
6295 expression value (or NULL_TREE if TYPE is void) and appends
6296 statements possibly defining it to SEQ. */
6297
6298 tree
6299 gimple_build (gimple_seq *seq, location_t loc,
6300 enum built_in_function fn, tree type, tree arg0, tree arg1)
6301 {
6302 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6303 if (!res)
6304 {
6305 tree decl = builtin_decl_implicit (fn);
6306 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6307 if (!VOID_TYPE_P (type))
6308 {
6309 if (gimple_in_ssa_p (cfun))
6310 res = make_ssa_name (type);
6311 else
6312 res = create_tmp_reg (type);
6313 gimple_call_set_lhs (stmt, res);
6314 }
6315 gimple_set_location (stmt, loc);
6316 gimple_seq_add_stmt_without_update (seq, stmt);
6317 }
6318 return res;
6319 }
6320
6321 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6322 (or no result if TYPE is void) with location LOC,
6323 simplifying it first if possible. Returns the built
6324 expression value (or NULL_TREE if TYPE is void) and appends
6325 statements possibly defining it to SEQ. */
6326
6327 tree
6328 gimple_build (gimple_seq *seq, location_t loc,
6329 enum built_in_function fn, tree type,
6330 tree arg0, tree arg1, tree arg2)
6331 {
6332 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6333 seq, gimple_build_valueize);
6334 if (!res)
6335 {
6336 tree decl = builtin_decl_implicit (fn);
6337 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6338 if (!VOID_TYPE_P (type))
6339 {
6340 if (gimple_in_ssa_p (cfun))
6341 res = make_ssa_name (type);
6342 else
6343 res = create_tmp_reg (type);
6344 gimple_call_set_lhs (stmt, res);
6345 }
6346 gimple_set_location (stmt, loc);
6347 gimple_seq_add_stmt_without_update (seq, stmt);
6348 }
6349 return res;
6350 }
6351
6352 /* Build the conversion (TYPE) OP with a result of type TYPE
6353 with location LOC if such a conversion is necessary in GIMPLE,
6354 simplifying it first.
6355 Returns the built expression value and appends
6356 statements possibly defining it to SEQ. */
6357
6358 tree
6359 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6360 {
6361 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6362 return op;
6363 return gimple_build (seq, loc, NOP_EXPR, type, op);
6364 }
6365
6366 /* Build the conversion (ptrofftype) OP with a result of a type
6367 compatible with ptrofftype with location LOC if such conversion
6368 is necessary in GIMPLE, simplifying it first.
6369 Returns the built expression value and appends
6370 statements possibly defining it to SEQ. */
6371
6372 tree
6373 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6374 {
6375 if (ptrofftype_p (TREE_TYPE (op)))
6376 return op;
6377 return gimple_convert (seq, loc, sizetype, op);
6378 }
6379
6380 /* Return true if the result of assignment STMT is known to be non-negative.
6381 If the return value is based on the assumption that signed overflow is
6382 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6383 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6384
6385 static bool
6386 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6387 int depth)
6388 {
6389 enum tree_code code = gimple_assign_rhs_code (stmt);
6390 switch (get_gimple_rhs_class (code))
6391 {
6392 case GIMPLE_UNARY_RHS:
6393 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6394 gimple_expr_type (stmt),
6395 gimple_assign_rhs1 (stmt),
6396 strict_overflow_p, depth);
6397 case GIMPLE_BINARY_RHS:
6398 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6399 gimple_expr_type (stmt),
6400 gimple_assign_rhs1 (stmt),
6401 gimple_assign_rhs2 (stmt),
6402 strict_overflow_p, depth);
6403 case GIMPLE_TERNARY_RHS:
6404 return false;
6405 case GIMPLE_SINGLE_RHS:
6406 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6407 strict_overflow_p, depth);
6408 case GIMPLE_INVALID_RHS:
6409 break;
6410 }
6411 gcc_unreachable ();
6412 }
6413
6414 /* Return true if the return value of call STMT is known to be non-negative.
6415 If the return value is based on the assumption that signed overflow is
6416 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6417 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6418
6419 static bool
6420 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6421 int depth)
6422 {
6423 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6424 gimple_call_arg (stmt, 0) : NULL_TREE;
6425 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6426 gimple_call_arg (stmt, 1) : NULL_TREE;
6427
6428 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6429 gimple_call_combined_fn (stmt),
6430 arg0,
6431 arg1,
6432 strict_overflow_p, depth);
6433 }
6434
6435 /* Return true if the result of phi STMT is known to be non-negative.
6436 If the return value is based on the assumption that signed overflow is
6437 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6438 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6439
6440 static bool
6441 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6442 int depth)
6443 {
6444 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6445 {
6446 tree arg = gimple_phi_arg_def (stmt, i);
6447 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6448 return false;
6449 }
6450 return true;
6451 }
6452
6453 /* Return true if STMT is known to compute a non-negative value.
6454 If the return value is based on the assumption that signed overflow is
6455 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6456 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6457
6458 bool
6459 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6460 int depth)
6461 {
6462 switch (gimple_code (stmt))
6463 {
6464 case GIMPLE_ASSIGN:
6465 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6466 depth);
6467 case GIMPLE_CALL:
6468 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6469 depth);
6470 case GIMPLE_PHI:
6471 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6472 depth);
6473 default:
6474 return false;
6475 }
6476 }
6477
6478 /* Return true if the floating-point value computed by assignment STMT
6479 is known to have an integer value. We also allow +Inf, -Inf and NaN
6480 to be considered integer values. Return false for signaling NaN.
6481
6482 DEPTH is the current nesting depth of the query. */
6483
6484 static bool
6485 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6486 {
6487 enum tree_code code = gimple_assign_rhs_code (stmt);
6488 switch (get_gimple_rhs_class (code))
6489 {
6490 case GIMPLE_UNARY_RHS:
6491 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6492 gimple_assign_rhs1 (stmt), depth);
6493 case GIMPLE_BINARY_RHS:
6494 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6495 gimple_assign_rhs1 (stmt),
6496 gimple_assign_rhs2 (stmt), depth);
6497 case GIMPLE_TERNARY_RHS:
6498 return false;
6499 case GIMPLE_SINGLE_RHS:
6500 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6501 case GIMPLE_INVALID_RHS:
6502 break;
6503 }
6504 gcc_unreachable ();
6505 }
6506
6507 /* Return true if the floating-point value computed by call STMT is known
6508 to have an integer value. We also allow +Inf, -Inf and NaN to be
6509 considered integer values. Return false for signaling NaN.
6510
6511 DEPTH is the current nesting depth of the query. */
6512
6513 static bool
6514 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6515 {
6516 tree arg0 = (gimple_call_num_args (stmt) > 0
6517 ? gimple_call_arg (stmt, 0)
6518 : NULL_TREE);
6519 tree arg1 = (gimple_call_num_args (stmt) > 1
6520 ? gimple_call_arg (stmt, 1)
6521 : NULL_TREE);
6522 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6523 arg0, arg1, depth);
6524 }
6525
6526 /* Return true if the floating-point result of phi STMT is known to have
6527 an integer value. We also allow +Inf, -Inf and NaN to be considered
6528 integer values. Return false for signaling NaN.
6529
6530 DEPTH is the current nesting depth of the query. */
6531
6532 static bool
6533 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6534 {
6535 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6536 {
6537 tree arg = gimple_phi_arg_def (stmt, i);
6538 if (!integer_valued_real_single_p (arg, depth + 1))
6539 return false;
6540 }
6541 return true;
6542 }
6543
6544 /* Return true if the floating-point value computed by STMT is known
6545 to have an integer value. We also allow +Inf, -Inf and NaN to be
6546 considered integer values. Return false for signaling NaN.
6547
6548 DEPTH is the current nesting depth of the query. */
6549
6550 bool
6551 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6552 {
6553 switch (gimple_code (stmt))
6554 {
6555 case GIMPLE_ASSIGN:
6556 return gimple_assign_integer_valued_real_p (stmt, depth);
6557 case GIMPLE_CALL:
6558 return gimple_call_integer_valued_real_p (stmt, depth);
6559 case GIMPLE_PHI:
6560 return gimple_phi_integer_valued_real_p (stmt, depth);
6561 default:
6562 return false;
6563 }
6564 }