1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
34
35 /* Return true when DECL can be referenced from the current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
38
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to another compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to a reference
46 to a method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred to by external vtables that
51 we devirtualize only during the final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
55
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
58 {
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* The external flag is set, so we are dealing with a C++ reference
65 to a static object from another file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
68 {
69 /* Just be sure it is not a bug in the frontend setting the
70 flags incorrectly. Those variables should never be
71 finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
75 }
76 /* When a function is public, we can always introduce a new reference.
77 The exception is COMDAT functions, where introducing a direct
78 reference implies the need to include the function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at the ltrans stage, so don't worry about WHOPR.
82 Also, while we are still gimplifying, all referred comdat functions
83 will be produced.
84 ??? As observed in PR20991, for already optimized-out comdat virtual
85 functions we need not necessarily give up, because the copy will be
86 output elsewhere when the corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
93 {
94 node = cgraph_get_node (decl);
95 /* Check that we still have the function body and that we haven't yet
96 decided to eliminate the offline copy of the function.
97 The second point is important when devirtualization happens during the
98 final compilation stage, where making a new reference no longer causes
99 the callee to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
102 }
103 else if (TREE_CODE (decl) == VAR_DECL)
104 {
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
108 }
109 return true;
110 }
111
112 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to
113 transform it into a form acceptable to is_gimple_min_invariant. */
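/* For instance (an illustrative sketch, not an exhaustive list), an
   initializer such as

     (char *) &a + 4        ... POINTER_PLUS_EXPR <ADDR_EXPR <a>, 4>

   is rewritten below into the invariant form

     &MEM[(void *)&a + 4B]  ... ADDR_EXPR <MEM_REF <&a, 4>>

   which is_gimple_min_invariant accepts.  */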
114
115 tree
116 canonicalize_constructor_val (tree cval)
117 {
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
121 {
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
130 }
131 if (TREE_CODE (cval) == ADDR_EXPR)
132 {
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
134
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
145 }
146 return cval;
147 }
148
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
151
152 tree
153 get_symbol_constant_value (tree sym)
154 {
155 if (const_value_known_p (sym))
156 {
157 tree val = DECL_INITIAL (sym);
158 if (val)
159 {
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
162 return val;
163 else
164 return NULL_TREE;
165 }
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
169 if (!val
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
173 }
174
175 return NULL_TREE;
176 }
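/* Illustrative example (not from the sources): given

     static const int x = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for x.  A 'const'
   scalar that is provably never overridden and has no initializer yields
   build_zero_cst of its type; anything unprovable yields NULL_TREE.  */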
177
178
179
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
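/* For example (illustrative): after constant-address propagation

     MEM[(int *)&a + 0]

   can be folded back to the plain reference  a  when the types are
   compatible, and REALPART_EXPR of a complex constant folds to the
   constant itself.  */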
184
185 static tree
186 maybe_fold_reference (tree expr, bool is_lhs)
187 {
188 tree *t = &expr;
189 tree result;
190
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
196 TREE_CODE (expr),
197 TREE_TYPE (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
202 TREE_CODE (expr),
203 TREE_TYPE (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
207
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
210
211 /* Canonicalize a MEM_REF's invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
215 {
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
220 if (tem)
221 {
222 TREE_THIS_VOLATILE (tem) = volatile_p;
223 *t = tem;
224 tem = maybe_fold_reference (expr, is_lhs);
225 if (tem)
226 return tem;
227 return expr;
228 }
229 }
230
231 if (!is_lhs
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
234 return result;
235
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245 /* We have to look out here to not drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
248 compatibility. */
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
252 {
253 tree tem;
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
256 if (tem)
257 return tem;
258 return expr;
259 }
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
261 {
262 tree tem = maybe_fold_tmr (*t);
263 if (tem)
264 {
265 *t = tem;
266 tem = maybe_fold_reference (expr, is_lhs);
267 if (tem)
268 return tem;
269 return expr;
270 }
271 }
272
273 return NULL_TREE;
274 }
275
276
277 /* Attempt to fold an assignment statement pointed to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
280 folded. */
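/* Illustrative examples of simplified right-hand sides this may
   produce (names are made up):

     x = *&y;            ->  rhs folds to  y
     b = a == 0;         ->  rhs becomes  ~a  for one-bit precision a
     v = {1, 2, 3, 4};   ->  a constant vector CONSTRUCTOR becomes
                             a VECTOR_CST  */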
281
282 static tree
283 fold_gimple_assign (gimple_stmt_iterator *si)
284 {
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
288
289 tree result = NULL_TREE;
290
291 switch (get_gimple_rhs_class (subcode))
292 {
293 case GIMPLE_SINGLE_RHS:
294 {
295 tree rhs = gimple_assign_rhs1 (stmt);
296
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
299
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
301 {
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
304 if (tem
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
308 else if (tem)
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
314 }
315
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
320 {
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
322 unsigned i;
323 tree val;
324
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
329 return NULL_TREE;
330
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
333 }
334
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
337
338 /* If we couldn't fold the RHS, hand over to the generic
339 fold routines. */
340 if (result == NULL_TREE)
341 result = fold (rhs);
342
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
347
348 if (result != rhs && valid_gimple_rhs_p (result))
349 return result;
350
351 return NULL_TREE;
352 }
353 break;
354
355 case GIMPLE_UNARY_RHS:
356 {
357 tree rhs = gimple_assign_rhs1 (stmt);
358
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
360 if (result)
361 {
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
371
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
374 return result;
375 }
376 }
377 break;
378
379 case GIMPLE_BINARY_RHS:
380 /* For boolean-typed X, try to canonicalize the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
384 {
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
389
390 /* Check whether the comparison operands have the same boolean
391 type as the result type.
392 Check that the second operand is an integer constant with value
393 one or zero. */
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
397 {
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
400
401 /* X == 0 and X != 1 are a logical-not of X;
402 X == 1 and X != 0 are X itself. */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
406
407 if (is_logical_not == false)
408 result = op1;
409 /* The transformation !X -> ~X is valid only for X of
410 one-bit precision type. */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
413 type, op1);
414 /* Otherwise we use !X -> X ^ 1. */
415 else
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
418
419 }
420 }
421
422 if (!result)
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
427
428 if (result)
429 {
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
432 return result;
433 }
434 break;
435
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
439 {
440 tree op0 = gimple_assign_rhs1 (stmt);
441 tree tem;
442 bool set = false;
443 location_t cond_loc = gimple_location (stmt);
444
445 if (COMPARISON_CLASS_P (op0))
446 {
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
458 }
459 else if (is_gimple_min_invariant (op0))
460 {
461 tem = op0;
462 set = true;
463 }
464 else
465 return NULL_TREE;
466
467 if (set)
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
472 }
473
474 if (!result)
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
480
481 if (result)
482 {
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
485 return result;
486 }
487 break;
488
489 case GIMPLE_INVALID_RHS:
490 gcc_unreachable ();
491 }
492
493 return NULL_TREE;
494 }
495
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
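/* E.g. (illustrative): after constant propagation a condition such as
   if (5 > 7) folds to constant false; the condition is rewritten in
   place, and removal of the now-dead edge is left to CFG cleanup.  */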
500
501 static bool
502 fold_gimple_cond (gimple stmt)
503 {
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
506 boolean_type_node,
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
509
510 if (result)
511 {
512 STRIP_USELESS_TYPE_CONVERSION (result);
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
514 {
515 gimple_cond_set_condition_from_tree (stmt, result);
516 return true;
517 }
518 }
519
520 return false;
521 }
522
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
526 is replaced. If the call is expected to produce a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
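/* Sketch of the virtual operand bookkeeping (SSA names illustrative):

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (&a, &b, 8);        ... is replaced by ...

     # .MEM_3 = VDEF <.MEM_2>
     a = b;

   i.e. the replacement sequence reuses the call's VUSE and its final
   store reuses the call's VDEF.  */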
532
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 {
536 tree lhs;
537 tree tmp = NULL_TREE; /* Silence warning. */
538 gimple stmt, new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = gimple_seq_alloc ();
541 struct gimplify_ctx gctx;
542 gimple last = NULL;
543 gimple laststore = NULL;
544 tree reaching_vuse;
545
546 stmt = gsi_stmt (*si_p);
547
548 gcc_assert (is_gimple_call (stmt));
549
550 lhs = gimple_call_lhs (stmt);
551 reaching_vuse = gimple_vuse (stmt);
552
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
555
556 if (lhs == NULL_TREE)
557 {
558 gimplify_and_add (expr, &stmts);
559 /* We can end up folding a memcpy into an empty-class assignment,
560 which gets optimized away by C++ gimplification. */
561 if (gimple_seq_empty_p (stmts))
562 {
563 pop_gimplify_context (NULL);
564 if (gimple_in_ssa_p (cfun))
565 {
566 unlink_stmt_vdef (stmt);
567 release_defs (stmt);
568 }
569 gsi_remove (si_p, true);
570 return;
571 }
572 }
573 else
574 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
575
576 pop_gimplify_context (NULL);
577
578 if (gimple_has_location (stmt))
579 annotate_all_with_location (stmts, gimple_location (stmt));
580
581 /* The replacement can expose previously unreferenced variables. */
582 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
583 {
584 if (last)
585 {
586 gsi_insert_before (si_p, last, GSI_NEW_STMT);
587 gsi_next (si_p);
588 }
589 new_stmt = gsi_stmt (i);
590 if (gimple_in_ssa_p (cfun))
591 {
592 find_new_referenced_vars (new_stmt);
593 mark_symbols_for_renaming (new_stmt);
594 }
595 /* If the new statement has a VUSE, update it with the exact SSA
596 name we know will reach this one. */
597 if (gimple_vuse (new_stmt))
598 {
599 /* If we've also seen a previous store, create a new VDEF for
600 it, and make that the new reaching VUSE. */
601 if (laststore)
602 {
603 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
604 gimple_set_vdef (laststore, reaching_vuse);
605 update_stmt (laststore);
606 laststore = NULL;
607 }
608 gimple_set_vuse (new_stmt, reaching_vuse);
609 gimple_set_modified (new_stmt, true);
610 }
611 if (gimple_assign_single_p (new_stmt)
612 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
613 {
614 laststore = new_stmt;
615 }
616 last = new_stmt;
617 }
618
619 if (lhs == NULL_TREE)
620 {
621 /* If we replace a call without LHS that has a VDEF, and our new
622 sequence ends with a store, we must make that store have the same
623 vdef in order not to break the sequencing. This can happen
624 for instance when folding memcpy calls into assignments. */
625 if (gimple_vdef (stmt) && laststore)
626 {
627 gimple_set_vdef (laststore, gimple_vdef (stmt));
628 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
629 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
630 update_stmt (laststore);
631 }
632 else if (gimple_in_ssa_p (cfun))
633 {
634 unlink_stmt_vdef (stmt);
635 release_defs (stmt);
636 }
637 new_stmt = last;
638 }
639 else
640 {
641 if (last)
642 {
643 gsi_insert_before (si_p, last, GSI_NEW_STMT);
644 gsi_next (si_p);
645 }
646 if (laststore && is_gimple_reg (lhs))
647 {
648 gimple_set_vdef (laststore, gimple_vdef (stmt));
649 update_stmt (laststore);
650 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
651 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
652 laststore = NULL;
653 }
654 else if (laststore)
655 {
656 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
657 gimple_set_vdef (laststore, reaching_vuse);
658 update_stmt (laststore);
659 laststore = NULL;
660 }
661 new_stmt = gimple_build_assign (lhs, tmp);
662 if (!is_gimple_reg (tmp))
663 gimple_set_vuse (new_stmt, reaching_vuse);
664 if (!is_gimple_reg (lhs))
665 {
666 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
667 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
668 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
669 }
670 else if (reaching_vuse == gimple_vuse (stmt))
671 unlink_stmt_vdef (stmt);
672 }
673
674 gimple_set_location (new_stmt, gimple_location (stmt));
675 gsi_replace (si_p, new_stmt, false);
676 }
677
678 /* Return the string length, maximum string length, or maximum value of
679 ARG in LENGTH.
680 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
681 is not NULL and, for TYPE == 0, its value is not equal to the length
682 we determine or if we are unable to determine the length or value,
683 return false. VISITED is a bitmap of visited variables.
684 TYPE is 0 if string length should be returned, 1 for maximum string
685 length and 2 for maximum value ARG can have. */
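/* Example (illustrative): for  arg_1 = PHI <&"foo"[0], &"quux"[0]>,
   TYPE == 1 (maximum string length) sets *LENGTH to 4, while
   TYPE == 0 (exact string length) fails because the lengths 3 and 4
   disagree.  */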
686
687 static bool
688 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
689 {
690 tree var, val;
691 gimple def_stmt;
692
693 if (TREE_CODE (arg) != SSA_NAME)
694 {
695 if (TREE_CODE (arg) == COND_EXPR)
696 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
697 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
698 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
699 else if (TREE_CODE (arg) == ADDR_EXPR
700 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
701 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
702 {
703 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
704 if (TREE_CODE (aop0) == INDIRECT_REF
705 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
706 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
707 length, visited, type);
708 }
709
710 if (type == 2)
711 {
712 val = arg;
713 if (TREE_CODE (val) != INTEGER_CST
714 || tree_int_cst_sgn (val) < 0)
715 return false;
716 }
717 else
718 val = c_strlen (arg, 1);
719 if (!val)
720 return false;
721
722 if (*length)
723 {
724 if (type > 0)
725 {
726 if (TREE_CODE (*length) != INTEGER_CST
727 || TREE_CODE (val) != INTEGER_CST)
728 return false;
729
730 if (tree_int_cst_lt (*length, val))
731 *length = val;
732 return true;
733 }
734 else if (simple_cst_equal (val, *length) != 1)
735 return false;
736 }
737
738 *length = val;
739 return true;
740 }
741
742 /* If we were already here, break the infinite cycle. */
743 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
744 return true;
745
746 var = arg;
747 def_stmt = SSA_NAME_DEF_STMT (var);
748
749 switch (gimple_code (def_stmt))
750 {
751 case GIMPLE_ASSIGN:
752 /* The RHS of the statement defining VAR must either have a
753 constant length or come from another SSA_NAME with a constant
754 length. */
755 if (gimple_assign_single_p (def_stmt)
756 || gimple_assign_unary_nop_p (def_stmt))
757 {
758 tree rhs = gimple_assign_rhs1 (def_stmt);
759 return get_maxval_strlen (rhs, length, visited, type);
760 }
761 return false;
762
763 case GIMPLE_PHI:
764 {
765 /* All the arguments of the PHI node must have the same constant
766 length. */
767 unsigned i;
768
769 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
770 {
771 tree arg = gimple_phi_arg (def_stmt, i)->def;
772
773 /* If this PHI has itself as an argument, we cannot
774 determine the string length of this argument. However,
775 if we can find a constant string length for the other
776 PHI args then we can still be sure that this is a
777 constant string length. So be optimistic and just
778 continue with the next argument. */
779 if (arg == gimple_phi_result (def_stmt))
780 continue;
781
782 if (!get_maxval_strlen (arg, length, visited, type))
783 return false;
784 }
785 }
786 return true;
787
788 default:
789 return false;
790 }
791 }
792
793
794 /* Fold builtin call in statement STMT. Returns a simplified tree.
795 We may return a non-constant expression, including another call
796 to a different function and with different arguments, e.g.,
797 substituting memcpy for strcpy when the string length is known.
798 Note that some builtins expand into inline code that may not
799 be valid in GIMPLE. Callers must take care. */
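/* Example (illustrative): with the source length known,

     strcpy (dst, "abc")    may be simplified to    memcpy (dst, "abc", 4)

   so the result can be another call rather than a constant.  */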
800
801 tree
802 gimple_fold_builtin (gimple stmt)
803 {
804 tree result, val[3];
805 tree callee, a;
806 int arg_idx, type;
807 bitmap visited;
808 bool ignore;
809 int nargs;
810 location_t loc = gimple_location (stmt);
811
812 gcc_assert (is_gimple_call (stmt));
813
814 ignore = (gimple_call_lhs (stmt) == NULL);
815
816 /* First try the generic builtin folder. If that succeeds, return the
817 result directly. */
818 result = fold_call_stmt (stmt, ignore);
819 if (result)
820 {
821 if (ignore)
822 STRIP_NOPS (result);
823 return result;
824 }
825
826 /* Ignore MD builtins. */
827 callee = gimple_call_fndecl (stmt);
828 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
829 return NULL_TREE;
830
831 /* If the builtin could not be folded, and it has no argument list,
832 we're done. */
833 nargs = gimple_call_num_args (stmt);
834 if (nargs == 0)
835 return NULL_TREE;
836
837 /* Limit the work only for builtins we know how to simplify. */
838 switch (DECL_FUNCTION_CODE (callee))
839 {
840 case BUILT_IN_STRLEN:
841 case BUILT_IN_FPUTS:
842 case BUILT_IN_FPUTS_UNLOCKED:
843 arg_idx = 0;
844 type = 0;
845 break;
846 case BUILT_IN_STRCPY:
847 case BUILT_IN_STRNCPY:
848 arg_idx = 1;
849 type = 0;
850 break;
851 case BUILT_IN_MEMCPY_CHK:
852 case BUILT_IN_MEMPCPY_CHK:
853 case BUILT_IN_MEMMOVE_CHK:
854 case BUILT_IN_MEMSET_CHK:
855 case BUILT_IN_STRNCPY_CHK:
856 arg_idx = 2;
857 type = 2;
858 break;
859 case BUILT_IN_STRCPY_CHK:
860 case BUILT_IN_STPCPY_CHK:
861 arg_idx = 1;
862 type = 1;
863 break;
864 case BUILT_IN_SNPRINTF_CHK:
865 case BUILT_IN_VSNPRINTF_CHK:
866 arg_idx = 1;
867 type = 2;
868 break;
869 default:
870 return NULL_TREE;
871 }
872
873 if (arg_idx >= nargs)
874 return NULL_TREE;
875
876 /* Try to use the dataflow information gathered by the CCP process. */
877 visited = BITMAP_ALLOC (NULL);
878 bitmap_clear (visited);
879
880 memset (val, 0, sizeof (val));
881 a = gimple_call_arg (stmt, arg_idx);
882 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
883 val[arg_idx] = NULL_TREE;
884
885 BITMAP_FREE (visited);
886
887 result = NULL_TREE;
888 switch (DECL_FUNCTION_CODE (callee))
889 {
890 case BUILT_IN_STRLEN:
891 if (val[0] && nargs == 1)
892 {
893 tree new_val =
894 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
895
896 /* If the result is not a valid gimple value, or not a cast
897 of a valid gimple value, then we cannot use the result. */
898 if (is_gimple_val (new_val)
899 || (CONVERT_EXPR_P (new_val)
900 && is_gimple_val (TREE_OPERAND (new_val, 0))))
901 return new_val;
902 }
903 break;
904
905 case BUILT_IN_STRCPY:
906 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
907 result = fold_builtin_strcpy (loc, callee,
908 gimple_call_arg (stmt, 0),
909 gimple_call_arg (stmt, 1),
910 val[1]);
911 break;
912
913 case BUILT_IN_STRNCPY:
914 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
915 result = fold_builtin_strncpy (loc, callee,
916 gimple_call_arg (stmt, 0),
917 gimple_call_arg (stmt, 1),
918 gimple_call_arg (stmt, 2),
919 val[1]);
920 break;
921
922 case BUILT_IN_FPUTS:
923 if (nargs == 2)
924 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
925 gimple_call_arg (stmt, 1),
926 ignore, false, val[0]);
927 break;
928
929 case BUILT_IN_FPUTS_UNLOCKED:
930 if (nargs == 2)
931 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
932 gimple_call_arg (stmt, 1),
933 ignore, true, val[0]);
934 break;
935
936 case BUILT_IN_MEMCPY_CHK:
937 case BUILT_IN_MEMPCPY_CHK:
938 case BUILT_IN_MEMMOVE_CHK:
939 case BUILT_IN_MEMSET_CHK:
940 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
941 result = fold_builtin_memory_chk (loc, callee,
942 gimple_call_arg (stmt, 0),
943 gimple_call_arg (stmt, 1),
944 gimple_call_arg (stmt, 2),
945 gimple_call_arg (stmt, 3),
946 val[2], ignore,
947 DECL_FUNCTION_CODE (callee));
948 break;
949
950 case BUILT_IN_STRCPY_CHK:
951 case BUILT_IN_STPCPY_CHK:
952 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
953 result = fold_builtin_stxcpy_chk (loc, callee,
954 gimple_call_arg (stmt, 0),
955 gimple_call_arg (stmt, 1),
956 gimple_call_arg (stmt, 2),
957 val[1], ignore,
958 DECL_FUNCTION_CODE (callee));
959 break;
960
961 case BUILT_IN_STRNCPY_CHK:
962 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
963 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
964 gimple_call_arg (stmt, 1),
965 gimple_call_arg (stmt, 2),
966 gimple_call_arg (stmt, 3),
967 val[2]);
968 break;
969
970 case BUILT_IN_SNPRINTF_CHK:
971 case BUILT_IN_VSNPRINTF_CHK:
972 if (val[1] && is_gimple_val (val[1]))
973 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
974 DECL_FUNCTION_CODE (callee));
975 break;
976
977 default:
978 gcc_unreachable ();
979 }
980
981 if (result && ignore)
982 result = fold_ignored_result (result);
983 return result;
984 }
985
986 /* Generate code adjusting the first parameter of the call statement
987 at GSI by DELTA. */
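/* E.g. for a this-pointer adjustment (illustrative SSA names):

     tmp_7 = this_2 + delta;    ... newly inserted POINTER_PLUS_EXPR
     f (tmp_7, ...);            ... first argument replaced by tmp_7  */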
988
989 void
990 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
991 {
992 gimple call_stmt = gsi_stmt (*gsi);
993 tree parm, tmp;
994 gimple new_stmt;
995
996 delta = convert_to_ptrofftype (delta);
997 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
998 parm = gimple_call_arg (call_stmt, 0);
999 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1000 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1001 add_referenced_var (tmp);
1002
1003 tmp = make_ssa_name (tmp, NULL);
1004 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1005 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1006 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1007 gimple_call_set_arg (call_stmt, 0, tmp);
1008 }
1009
1010 /* Return a binfo to be used for devirtualization of calls based on an object
1011 represented by a declaration (i.e. a global or automatically allocated one)
1012 or NULL if it cannot be found or is not safe. CST is expected to be an
1013 ADDR_EXPR of such an object, or the function will return NULL. Currently it is
1014 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
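/* Example (illustrative): for

     struct B { virtual void f (); };   ... no base classes
     static struct B b;

   a CST of &b yields TYPE_BINFO of B; if B had any base binfos the
   function would return NULL_TREE instead.  */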
1015
1016 tree
1017 gimple_extract_devirt_binfo_from_cst (tree cst)
1018 {
1019 HOST_WIDE_INT offset, size, max_size;
1020 tree base, type, expected_type, binfo;
1021 bool last_artificial = false;
1022
1023 if (!flag_devirtualize
1024 || TREE_CODE (cst) != ADDR_EXPR
1025 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1026 return NULL_TREE;
1027
1028 cst = TREE_OPERAND (cst, 0);
1029 expected_type = TREE_TYPE (cst);
1030 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1031 type = TREE_TYPE (base);
1032 if (!DECL_P (base)
1033 || max_size == -1
1034 || max_size != size
1035 || TREE_CODE (type) != RECORD_TYPE)
1036 return NULL_TREE;
1037
1038 /* Find the sub-object the constant actually refers to and mark whether it is
1039 an artificial one (as opposed to a user-defined one). */
1040 while (true)
1041 {
1042 HOST_WIDE_INT pos, size;
1043 tree fld;
1044
1045 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1046 break;
1047 if (offset < 0)
1048 return NULL_TREE;
1049
1050 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1051 {
1052 if (TREE_CODE (fld) != FIELD_DECL)
1053 continue;
1054
1055 pos = int_bit_position (fld);
1056 size = tree_low_cst (DECL_SIZE (fld), 1);
1057 if (pos <= offset && (pos + size) > offset)
1058 break;
1059 }
1060 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1061 return NULL_TREE;
1062
1063 last_artificial = DECL_ARTIFICIAL (fld);
1064 type = TREE_TYPE (fld);
1065 offset -= pos;
1066 }
1067 /* Artificial sub-objects are ancestors; we do not want to use them for
1068 devirtualization, at least not here. */
1069 if (last_artificial)
1070 return NULL_TREE;
1071 binfo = TYPE_BINFO (type);
1072 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1073 return NULL_TREE;
1074 else
1075 return binfo;
1076 }
1077
1078 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1079 The statement may be replaced by another statement, e.g., if the call
1080 simplifies to a constant value. Return true if any changes were made.
1081 It is assumed that the operands have been previously folded. */
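/* E.g. (illustrative syntax) a virtual call on an object whose
   declaration is known,

     OBJ_TYPE_REF (*vptr_5; &b, 0) (&b);   ... may become ...   B::f (&b);

   once gimple_get_virt_method_for_binfo identifies the unique
   target.  */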
1082
1083 bool
1084 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1085 {
1086 gimple stmt = gsi_stmt (*gsi);
1087 tree callee;
1088
1089 /* Check for builtins that CCP can handle using information not
1090 available in the generic fold routines. */
1091 callee = gimple_call_fndecl (stmt);
1092 if (!inplace && callee && DECL_BUILT_IN (callee))
1093 {
1094 tree result = gimple_fold_builtin (stmt);
1095
1096 if (result)
1097 {
1098 if (!update_call_from_tree (gsi, result))
1099 gimplify_and_update_call_from_tree (gsi, result);
1100 return true;
1101 }
1102 }
1103
1104 /* Check for virtual calls that became direct calls. */
1105 callee = gimple_call_fn (stmt);
1106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1107 {
1108 tree binfo, fndecl, obj;
1109 HOST_WIDE_INT token;
1110
1111 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1112 {
1113 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1114 return true;
1115 }
1116
1117 obj = OBJ_TYPE_REF_OBJECT (callee);
1118 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1119 if (!binfo)
1120 return false;
1121 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1122 fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1123 if (!fndecl)
1124 return false;
1125 gimple_call_set_fndecl (stmt, fndecl);
1126 return true;
1127 }
1128
1129 return false;
1130 }
1131
1132 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1133 distinguishes the two cases. */
1134
1135 static bool
1136 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1137 {
1138 bool changed = false;
1139 gimple stmt = gsi_stmt (*gsi);
1140 unsigned i;
1141 gimple_stmt_iterator gsinext = *gsi;
1142 gimple next_stmt;
1143
1144 gsi_next (&gsinext);
1145 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1146
1147 /* Fold the main computation performed by the statement. */
1148 switch (gimple_code (stmt))
1149 {
1150 case GIMPLE_ASSIGN:
1151 {
1152 unsigned old_num_ops = gimple_num_ops (stmt);
1153 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1154 tree lhs = gimple_assign_lhs (stmt);
1155 tree new_rhs;
1156 /* First canonicalize operand order. This avoids building new
1157 trees if this is the only thing fold would later do. */
1158 if ((commutative_tree_code (subcode)
1159 || commutative_ternary_tree_code (subcode))
1160 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1161 gimple_assign_rhs2 (stmt), false))
1162 {
1163 tree tem = gimple_assign_rhs1 (stmt);
1164 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1165 gimple_assign_set_rhs2 (stmt, tem);
1166 changed = true;
1167 }
1168 new_rhs = fold_gimple_assign (gsi);
1169 if (new_rhs
1170 && !useless_type_conversion_p (TREE_TYPE (lhs),
1171 TREE_TYPE (new_rhs)))
1172 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1173 if (new_rhs
1174 && (!inplace
1175 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1176 {
1177 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1178 changed = true;
1179 }
1180 break;
1181 }
1182
1183 case GIMPLE_COND:
1184 changed |= fold_gimple_cond (stmt);
1185 break;
1186
1187 case GIMPLE_CALL:
1188 /* Fold *& in call arguments. */
1189 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1190 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1191 {
1192 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1193 if (tmp)
1194 {
1195 gimple_call_set_arg (stmt, i, tmp);
1196 changed = true;
1197 }
1198 }
1199 changed |= gimple_fold_call (gsi, inplace);
1200 break;
1201
1202 case GIMPLE_ASM:
1203 /* Fold *& in asm operands. */
1204 {
1205 size_t noutputs;
1206 const char **oconstraints;
1207 const char *constraint;
1208 bool allows_mem, allows_reg;
1209
1210 noutputs = gimple_asm_noutputs (stmt);
1211 oconstraints = XALLOCAVEC (const char *, noutputs);
1212
1213 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1214 {
1215 tree link = gimple_asm_output_op (stmt, i);
1216 tree op = TREE_VALUE (link);
1217 oconstraints[i]
1218 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1219 if (REFERENCE_CLASS_P (op)
1220 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1221 {
1222 TREE_VALUE (link) = op;
1223 changed = true;
1224 }
1225 }
1226 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1227 {
1228 tree link = gimple_asm_input_op (stmt, i);
1229 tree op = TREE_VALUE (link);
1230 constraint
1231 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1232 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1233 oconstraints, &allows_mem, &allows_reg);
1234 if (REFERENCE_CLASS_P (op)
1235 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1236 != NULL_TREE)
1237 {
1238 TREE_VALUE (link) = op;
1239 changed = true;
1240 }
1241 }
1242 }
1243 break;
1244
1245 case GIMPLE_DEBUG:
1246 if (gimple_debug_bind_p (stmt))
1247 {
1248 tree val = gimple_debug_bind_get_value (stmt);
1249 if (val
1250 && REFERENCE_CLASS_P (val))
1251 {
1252 tree tem = maybe_fold_reference (val, false);
1253 if (tem)
1254 {
1255 gimple_debug_bind_set_value (stmt, tem);
1256 changed = true;
1257 }
1258 }
1259 }
1260 break;
1261
1262 default:;
1263 }
1264
1265 /* If stmt folds into nothing and it was the last stmt in a bb,
1266 don't call gsi_stmt. */
1267 if (gsi_end_p (*gsi))
1268 {
1269 gcc_assert (next_stmt == NULL);
1270 return changed;
1271 }
1272
1273 stmt = gsi_stmt (*gsi);
1274
1275 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1276 as we'd be changing the next stmt. */
1277 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1278 {
1279 tree lhs = gimple_get_lhs (stmt);
1280 if (lhs && REFERENCE_CLASS_P (lhs))
1281 {
1282 tree new_lhs = maybe_fold_reference (lhs, true);
1283 if (new_lhs)
1284 {
1285 gimple_set_lhs (stmt, new_lhs);
1286 changed = true;
1287 }
1288 }
1289 }
1290
1291 return changed;
1292 }
1293
1294 /* Fold the statement pointed to by GSI. In some cases, this function may
1295 replace the whole statement with a new one. Returns true iff folding
1296 makes any changes.
1297 The statement pointed to by GSI should be in valid gimple form but may
1298 be in an unfolded state resulting from, for example, constant
1299 propagation, which can produce *&x = 0. */
1300
1301 bool
1302 fold_stmt (gimple_stmt_iterator *gsi)
1303 {
1304 return fold_stmt_1 (gsi, false);
1305 }
1306
1307 /* Perform the minimal folding on statement *GSI. Only operations like
1308 *&x created by constant propagation are handled. The statement cannot
1309 be replaced with a new one. Return true if the statement was
1310 changed, false otherwise.
1311 The statement *GSI should be in valid gimple form but may
1312 be in an unfolded state resulting from, for example, constant
1313 propagation, which can produce *&x = 0. */
1314
1315 bool
1316 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1317 {
1318 gimple stmt = gsi_stmt (*gsi);
1319 bool changed = fold_stmt_1 (gsi, true);
1320 gcc_assert (gsi_stmt (*gsi) == stmt);
1321 return changed;
1322 }
1323
1324 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1325 if EXPR is null or we don't know how.
1326 If non-null, the result always has boolean type. */
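/* Examples (illustrative): with INVERT set, an SSA name x_1 becomes
   x_1 == 0 and the comparison a < b becomes a >= b; without INVERT,
   x_1 becomes x_1 != 0 and an expression that already has boolean
   type is returned unchanged.  */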
1327
1328 static tree
1329 canonicalize_bool (tree expr, bool invert)
1330 {
1331 if (!expr)
1332 return NULL_TREE;
1333 else if (invert)
1334 {
1335 if (integer_nonzerop (expr))
1336 return boolean_false_node;
1337 else if (integer_zerop (expr))
1338 return boolean_true_node;
1339 else if (TREE_CODE (expr) == SSA_NAME)
1340 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1341 build_int_cst (TREE_TYPE (expr), 0));
1342 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1343 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1344 boolean_type_node,
1345 TREE_OPERAND (expr, 0),
1346 TREE_OPERAND (expr, 1));
1347 else
1348 return NULL_TREE;
1349 }
1350 else
1351 {
1352 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1353 return expr;
1354 if (integer_nonzerop (expr))
1355 return boolean_true_node;
1356 else if (integer_zerop (expr))
1357 return boolean_false_node;
1358 else if (TREE_CODE (expr) == SSA_NAME)
1359 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1360 build_int_cst (TREE_TYPE (expr), 0));
1361 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1362 return fold_build2 (TREE_CODE (expr),
1363 boolean_type_node,
1364 TREE_OPERAND (expr, 0),
1365 TREE_OPERAND (expr, 1));
1366 else
1367 return NULL_TREE;
1368 }
1369 }
1370
1371 /* Check to see if a boolean expression EXPR is logically equivalent to the
1372 comparison (OP1 CODE OP2). Check for various identities involving
1373 SSA_NAMEs. */
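/* E.g. (illustrative): for  t_1 = a < b,  the expression t_1 is
   recognized as equivalent to the comparison (a LT_EXPR b), and
   t_1 != 0 is traced back through t_1's defining statement.  */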
1374
1375 static bool
1376 same_bool_comparison_p (const_tree expr, enum tree_code code,
1377 const_tree op1, const_tree op2)
1378 {
1379 gimple s;
1380
1381 /* The obvious case. */
1382 if (TREE_CODE (expr) == code
1383 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1384 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1385 return true;
1386
1387 /* Check for comparing NAME with (NAME != 0), and the case where EXPR
1388 is an SSA_NAME with a definition matching the comparison. */
1389 if (TREE_CODE (expr) == SSA_NAME
1390 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1391 {
1392 if (operand_equal_p (expr, op1, 0))
1393 return ((code == NE_EXPR && integer_zerop (op2))
1394 || (code == EQ_EXPR && integer_nonzerop (op2)));
1395 s = SSA_NAME_DEF_STMT (expr);
1396 if (is_gimple_assign (s)
1397 && gimple_assign_rhs_code (s) == code
1398 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1399 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1400 return true;
1401 }
1402
1403 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1404 of name is a comparison, recurse. */
1405 if (TREE_CODE (op1) == SSA_NAME
1406 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1407 {
1408 s = SSA_NAME_DEF_STMT (op1);
1409 if (is_gimple_assign (s)
1410 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1411 {
1412 enum tree_code c = gimple_assign_rhs_code (s);
1413 if ((c == NE_EXPR && integer_zerop (op2))
1414 || (c == EQ_EXPR && integer_nonzerop (op2)))
1415 return same_bool_comparison_p (expr, c,
1416 gimple_assign_rhs1 (s),
1417 gimple_assign_rhs2 (s));
1418 if ((c == EQ_EXPR && integer_zerop (op2))
1419 || (c == NE_EXPR && integer_nonzerop (op2)))
1420 return same_bool_comparison_p (expr,
1421 invert_tree_comparison (c, false),
1422 gimple_assign_rhs1 (s),
1423 gimple_assign_rhs2 (s));
1424 }
1425 }
1426 return false;
1427 }
1428
1429 /* Check to see if two boolean expressions OP1 and OP2 are logically
1430 equivalent. */
1431
1432 static bool
1433 same_bool_result_p (const_tree op1, const_tree op2)
1434 {
1435 /* Simple cases first. */
1436 if (operand_equal_p (op1, op2, 0))
1437 return true;
1438
1439 /* Check the cases where at least one of the operands is a comparison.
1440 These are a bit smarter than operand_equal_p in that they apply some
1441 identities on SSA_NAMEs. */
1442 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1443 && same_bool_comparison_p (op1, TREE_CODE (op2),
1444 TREE_OPERAND (op2, 0),
1445 TREE_OPERAND (op2, 1)))
1446 return true;
1447 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1448 && same_bool_comparison_p (op2, TREE_CODE (op1),
1449 TREE_OPERAND (op1, 0),
1450 TREE_OPERAND (op1, 1)))
1451 return true;
1452
1453 /* Default case. */
1454 return false;
1455 }
1456
1457 /* Forward declarations for some mutually recursive functions. */
1458
1459 static tree
1460 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1461 enum tree_code code2, tree op2a, tree op2b);
1462 static tree
1463 and_var_with_comparison (tree var, bool invert,
1464 enum tree_code code2, tree op2a, tree op2b);
1465 static tree
1466 and_var_with_comparison_1 (gimple stmt,
1467 enum tree_code code2, tree op2a, tree op2b);
1468 static tree
1469 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1470 enum tree_code code2, tree op2a, tree op2b);
1471 static tree
1472 or_var_with_comparison (tree var, bool invert,
1473 enum tree_code code2, tree op2a, tree op2b);
1474 static tree
1475 or_var_with_comparison_1 (gimple stmt,
1476 enum tree_code code2, tree op2a, tree op2b);
1477
1478 /* Helper function for and_comparisons_1: try to simplify the AND of the
1479 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1480 If INVERT is true, invert the value of the VAR before doing the AND.
1481 Return NULL_TREE if we can't simplify this to a single expression. */
1482
1483 static tree
1484 and_var_with_comparison (tree var, bool invert,
1485 enum tree_code code2, tree op2a, tree op2b)
1486 {
1487 tree t;
1488 gimple stmt = SSA_NAME_DEF_STMT (var);
1489
1490 /* We can only deal with variables whose definitions are assignments. */
1491 if (!is_gimple_assign (stmt))
1492 return NULL_TREE;
1493
1494 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1495 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1496 Then we only have to consider the simpler non-inverted cases. */
1497 if (invert)
1498 t = or_var_with_comparison_1 (stmt,
1499 invert_tree_comparison (code2, false),
1500 op2a, op2b);
1501 else
1502 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1503 return canonicalize_bool (t, invert);
1504 }
1505
1506 /* Try to simplify the AND of the ssa variable defined by the assignment
1507 STMT with the comparison specified by (OP2A CODE2 OP2B).
1508 Return NULL_TREE if we can't simplify this to a single expression. */
1509
1510 static tree
1511 and_var_with_comparison_1 (gimple stmt,
1512 enum tree_code code2, tree op2a, tree op2b)
1513 {
1514 tree var = gimple_assign_lhs (stmt);
1515 tree true_test_var = NULL_TREE;
1516 tree false_test_var = NULL_TREE;
1517 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1518
1519 /* Check for identities like (var AND (var == 0)) => false. */
1520 if (TREE_CODE (op2a) == SSA_NAME
1521 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1522 {
1523 if ((code2 == NE_EXPR && integer_zerop (op2b))
1524 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1525 {
1526 true_test_var = op2a;
1527 if (var == true_test_var)
1528 return var;
1529 }
1530 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1531 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1532 {
1533 false_test_var = op2a;
1534 if (var == false_test_var)
1535 return boolean_false_node;
1536 }
1537 }
1538
1539 /* If the definition is a comparison, recurse on it. */
1540 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1541 {
1542 tree t = and_comparisons_1 (innercode,
1543 gimple_assign_rhs1 (stmt),
1544 gimple_assign_rhs2 (stmt),
1545 code2,
1546 op2a,
1547 op2b);
1548 if (t)
1549 return t;
1550 }
1551
1552 /* If the definition is an AND or OR expression, we may be able to
1553 simplify by reassociating. */
1554 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1555 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1556 {
1557 tree inner1 = gimple_assign_rhs1 (stmt);
1558 tree inner2 = gimple_assign_rhs2 (stmt);
1559 gimple s;
1560 tree t;
1561 tree partial = NULL_TREE;
1562 bool is_and = (innercode == BIT_AND_EXPR);
1563
1564 /* Check for boolean identities that don't require recursive examination
1565 of inner1/inner2:
1566 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1567 inner1 AND (inner1 OR inner2) => inner1
1568 !inner1 AND (inner1 AND inner2) => false
1569 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1570 Likewise for similar cases involving inner2. */
1571 if (inner1 == true_test_var)
1572 return (is_and ? var : inner1);
1573 else if (inner2 == true_test_var)
1574 return (is_and ? var : inner2);
1575 else if (inner1 == false_test_var)
1576 return (is_and
1577 ? boolean_false_node
1578 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1579 else if (inner2 == false_test_var)
1580 return (is_and
1581 ? boolean_false_node
1582 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1583
1584 /* Next, redistribute/reassociate the AND across the inner tests.
1585 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1586 if (TREE_CODE (inner1) == SSA_NAME
1587 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1588 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1589 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1590 gimple_assign_rhs1 (s),
1591 gimple_assign_rhs2 (s),
1592 code2, op2a, op2b)))
1593 {
1594 /* Handle the AND case, where we are reassociating:
1595 (inner1 AND inner2) AND (op2a code2 op2b)
1596 => (t AND inner2)
1597 If the partial result t is a constant, we win. Otherwise
1598 continue on to try reassociating with the other inner test. */
1599 if (is_and)
1600 {
1601 if (integer_onep (t))
1602 return inner2;
1603 else if (integer_zerop (t))
1604 return boolean_false_node;
1605 }
1606
1607 /* Handle the OR case, where we are redistributing:
1608 (inner1 OR inner2) AND (op2a code2 op2b)
1609 => (t OR (inner2 AND (op2a code2 op2b))) */
1610 else if (integer_onep (t))
1611 return boolean_true_node;
1612
1613 /* Save partial result for later. */
1614 partial = t;
1615 }
1616
1617 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1618 if (TREE_CODE (inner2) == SSA_NAME
1619 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1620 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1621 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1622 gimple_assign_rhs1 (s),
1623 gimple_assign_rhs2 (s),
1624 code2, op2a, op2b)))
1625 {
1626 /* Handle the AND case, where we are reassociating:
1627 (inner1 AND inner2) AND (op2a code2 op2b)
1628 => (inner1 AND t) */
1629 if (is_and)
1630 {
1631 if (integer_onep (t))
1632 return inner1;
1633 else if (integer_zerop (t))
1634 return boolean_false_node;
1635 /* If both are the same, we can apply the identity
1636 (x AND x) == x. */
1637 else if (partial && same_bool_result_p (t, partial))
1638 return t;
1639 }
1640
1641 /* Handle the OR case, where we are redistributing:
1642 (inner1 OR inner2) AND (op2a code2 op2b)
1643 => (t OR (inner1 AND (op2a code2 op2b)))
1644 => (t OR partial) */
1645 else
1646 {
1647 if (integer_onep (t))
1648 return boolean_true_node;
1649 else if (partial)
1650 {
1651 /* We already got a simplification for the other
1652 operand to the redistributed OR expression. The
1653 interesting case is when at least one is false.
1654 Or, if both are the same, we can apply the identity
1655 (x OR x) == x. */
1656 if (integer_zerop (partial))
1657 return t;
1658 else if (integer_zerop (t))
1659 return partial;
1660 else if (same_bool_result_p (t, partial))
1661 return t;
1662 }
1663 }
1664 }
1665 }
1666 return NULL_TREE;
1667 }
1668
1669 /* Try to simplify the AND of two comparisons defined by
1670 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1671 If this can be done without constructing an intermediate value,
1672 return the resulting tree; otherwise NULL_TREE is returned.
1673 This function is deliberately asymmetric as it recurses on SSA_DEFs
1674 in the first comparison but not the second. */
1675
1676 static tree
1677 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1678 enum tree_code code2, tree op2a, tree op2b)
1679 {
1680 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1681 if (operand_equal_p (op1a, op2a, 0)
1682 && operand_equal_p (op1b, op2b, 0))
1683 {
1684 /* Result will be either NULL_TREE, or a combined comparison. */
1685 tree t = combine_comparisons (UNKNOWN_LOCATION,
1686 TRUTH_ANDIF_EXPR, code1, code2,
1687 boolean_type_node, op1a, op1b);
1688 if (t)
1689 return t;
1690 }
1691
1692 /* Likewise the swapped case of the above. */
1693 if (operand_equal_p (op1a, op2b, 0)
1694 && operand_equal_p (op1b, op2a, 0))
1695 {
1696 /* Result will be either NULL_TREE, or a combined comparison. */
1697 tree t = combine_comparisons (UNKNOWN_LOCATION,
1698 TRUTH_ANDIF_EXPR, code1,
1699 swap_tree_comparison (code2),
1700 boolean_type_node, op1a, op1b);
1701 if (t)
1702 return t;
1703 }
1704
1705 /* If both comparisons are of the same value against constants, we might
1706 be able to merge them. */
1707 if (operand_equal_p (op1a, op2a, 0)
1708 && TREE_CODE (op1b) == INTEGER_CST
1709 && TREE_CODE (op2b) == INTEGER_CST)
1710 {
1711 int cmp = tree_int_cst_compare (op1b, op2b);
1712
1713 /* If we have (op1a == op1b), we should either be able to
1714 return that or FALSE, depending on whether the constant op1b
1715 also satisfies the other comparison against op2b. */
1716 if (code1 == EQ_EXPR)
1717 {
1718 bool done = true;
1719 bool val;
1720 switch (code2)
1721 {
1722 case EQ_EXPR: val = (cmp == 0); break;
1723 case NE_EXPR: val = (cmp != 0); break;
1724 case LT_EXPR: val = (cmp < 0); break;
1725 case GT_EXPR: val = (cmp > 0); break;
1726 case LE_EXPR: val = (cmp <= 0); break;
1727 case GE_EXPR: val = (cmp >= 0); break;
1728 default: done = false;
1729 }
1730 if (done)
1731 {
1732 if (val)
1733 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1734 else
1735 return boolean_false_node;
1736 }
1737 }
1738 /* Likewise if the second comparison is an == comparison. */
1739 else if (code2 == EQ_EXPR)
1740 {
1741 bool done = true;
1742 bool val;
1743 switch (code1)
1744 {
1745 case EQ_EXPR: val = (cmp == 0); break;
1746 case NE_EXPR: val = (cmp != 0); break;
1747 case LT_EXPR: val = (cmp > 0); break;
1748 case GT_EXPR: val = (cmp < 0); break;
1749 case LE_EXPR: val = (cmp >= 0); break;
1750 case GE_EXPR: val = (cmp <= 0); break;
1751 default: done = false;
1752 }
1753 if (done)
1754 {
1755 if (val)
1756 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1757 else
1758 return boolean_false_node;
1759 }
1760 }
1761
1762 /* Same business with inequality tests. */
1763 else if (code1 == NE_EXPR)
1764 {
1765 bool val;
1766 switch (code2)
1767 {
1768 case EQ_EXPR: val = (cmp != 0); break;
1769 case NE_EXPR: val = (cmp == 0); break;
1770 case LT_EXPR: val = (cmp >= 0); break;
1771 case GT_EXPR: val = (cmp <= 0); break;
1772 case LE_EXPR: val = (cmp > 0); break;
1773 case GE_EXPR: val = (cmp < 0); break;
1774 default:
1775 val = false;
1776 }
1777 if (val)
1778 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1779 }
1780 else if (code2 == NE_EXPR)
1781 {
1782 bool val;
1783 switch (code1)
1784 {
1785 case EQ_EXPR: val = (cmp == 0); break;
1786 case NE_EXPR: val = (cmp != 0); break;
1787 case LT_EXPR: val = (cmp <= 0); break;
1788 case GT_EXPR: val = (cmp >= 0); break;
1789 case LE_EXPR: val = (cmp < 0); break;
1790 case GE_EXPR: val = (cmp > 0); break;
1791 default:
1792 val = false;
1793 }
1794 if (val)
1795 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1796 }
1797
1798 /* Choose the more restrictive of two < or <= comparisons. */
1799 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1800 && (code2 == LT_EXPR || code2 == LE_EXPR))
1801 {
1802 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1803 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1804 else
1805 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1806 }
1807
1808 /* Likewise choose the more restrictive of two > or >= comparisons. */
1809 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1810 && (code2 == GT_EXPR || code2 == GE_EXPR))
1811 {
1812 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1813 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1814 else
1815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1816 }
1817
1818 /* Check for singleton ranges. */
1819 else if (cmp == 0
1820 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1821 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1822 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1823
1824 /* Check for disjoint ranges. */
1825 else if (cmp <= 0
1826 && (code1 == LT_EXPR || code1 == LE_EXPR)
1827 && (code2 == GT_EXPR || code2 == GE_EXPR))
1828 return boolean_false_node;
1829 else if (cmp >= 0
1830 && (code1 == GT_EXPR || code1 == GE_EXPR)
1831 && (code2 == LT_EXPR || code2 == LE_EXPR))
1832 return boolean_false_node;
1833 }
1834
1835 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1836 NAME's definition is a truth value. See if there are any simplifications
1837 that can be done against the NAME's definition. */
1838 if (TREE_CODE (op1a) == SSA_NAME
1839 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1840 && (integer_zerop (op1b) || integer_onep (op1b)))
1841 {
1842 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1843 || (code1 == NE_EXPR && integer_onep (op1b)));
1844 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1845 switch (gimple_code (stmt))
1846 {
1847 case GIMPLE_ASSIGN:
1848 /* Try to simplify by copy-propagating the definition. */
1849 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1850
1851 case GIMPLE_PHI:
1852 /* If every argument to the PHI produces the same result when
1853 ANDed with the second comparison, we win.
1854 Do not do this unless the type is bool since we need a bool
1855 result here anyway. */
1856 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1857 {
1858 tree result = NULL_TREE;
1859 unsigned i;
1860 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1861 {
1862 tree arg = gimple_phi_arg_def (stmt, i);
1863
1864 /* If this PHI has itself as an argument, ignore it.
1865 If all the other args produce the same result,
1866 we're still OK. */
1867 if (arg == gimple_phi_result (stmt))
1868 continue;
1869 else if (TREE_CODE (arg) == INTEGER_CST)
1870 {
1871 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1872 {
1873 if (!result)
1874 result = boolean_false_node;
1875 else if (!integer_zerop (result))
1876 return NULL_TREE;
1877 }
1878 else if (!result)
1879 result = fold_build2 (code2, boolean_type_node,
1880 op2a, op2b);
1881 else if (!same_bool_comparison_p (result,
1882 code2, op2a, op2b))
1883 return NULL_TREE;
1884 }
1885 else if (TREE_CODE (arg) == SSA_NAME
1886 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1887 {
1888 tree temp;
1889 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1890 /* In simple cases we can look through PHI nodes,
1891 but we have to be careful with loops.
1892 See PR49073. */
1893 if (! dom_info_available_p (CDI_DOMINATORS)
1894 || gimple_bb (def_stmt) == gimple_bb (stmt)
1895 || dominated_by_p (CDI_DOMINATORS,
1896 gimple_bb (def_stmt),
1897 gimple_bb (stmt)))
1898 return NULL_TREE;
1899 temp = and_var_with_comparison (arg, invert, code2,
1900 op2a, op2b);
1901 if (!temp)
1902 return NULL_TREE;
1903 else if (!result)
1904 result = temp;
1905 else if (!same_bool_result_p (result, temp))
1906 return NULL_TREE;
1907 }
1908 else
1909 return NULL_TREE;
1910 }
1911 return result;
1912 }
1913
1914 default:
1915 break;
1916 }
1917 }
1918 return NULL_TREE;
1919 }
1920
1921 /* Try to simplify the AND of two comparisons, specified by
1922 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1923 If this can be simplified to a single expression (without requiring
1924 introducing more SSA variables to hold intermediate values),
1925 return the resulting tree. Otherwise return NULL_TREE.
1926 If the result expression is non-null, it has boolean type. */
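/* For example, a sketch of the constant-merging cases in
   and_comparisons_1 above, assuming X is an integral SSA name and the
   constants stand for INTEGER_CSTs:

     maybe_fold_and_comparisons (GT_EXPR, x, 5, GT_EXPR, x, 3)
       => x > 5 (the more restrictive bound)
     maybe_fold_and_comparisons (LE_EXPR, x, 5, GE_EXPR, x, 5)
       => x == 5 (a singleton range)
     maybe_fold_and_comparisons (EQ_EXPR, x, 5, LT_EXPR, x, 3)
       => boolean_false_node (5 cannot satisfy x < 3). */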
1927
1928 tree
1929 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1930 enum tree_code code2, tree op2a, tree op2b)
1931 {
1932 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1933 if (t)
1934 return t;
1935 else
1936 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1937 }
1938
1939 /* Helper function for or_comparisons_1: try to simplify the OR of the
1940 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1941 If INVERT is true, invert the value of VAR before doing the OR.
1942 Return NULL_TREE if we can't simplify this to a single expression.  */
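/* For instance, with INVERT set and (op2a code2 op2b) being (x < y),
   the body below computes t = and_var_with_comparison_1 (stmt, GE_EXPR,
   x, y) and lets canonicalize_bool negate it, since by DeMorgan
     !var OR (x < y) == !(var AND (x >= y)).
   (A sketch; invert_tree_comparison maps LT_EXPR to GE_EXPR when NaNs
   need not be honored.) */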
1943
1944 static tree
1945 or_var_with_comparison (tree var, bool invert,
1946 enum tree_code code2, tree op2a, tree op2b)
1947 {
1948 tree t;
1949 gimple stmt = SSA_NAME_DEF_STMT (var);
1950
1951 /* We can only deal with variables whose definitions are assignments. */
1952 if (!is_gimple_assign (stmt))
1953 return NULL_TREE;
1954
1955 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1956 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1957 Then we only have to consider the simpler non-inverted cases. */
1958 if (invert)
1959 t = and_var_with_comparison_1 (stmt,
1960 invert_tree_comparison (code2, false),
1961 op2a, op2b);
1962 else
1963 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1964 return canonicalize_bool (t, invert);
1965 }
1966
1967 /* Try to simplify the OR of the ssa variable defined by the assignment
1968 STMT with the comparison specified by (OP2A CODE2 OP2B).
1969 Return NULL_TREE if we can't simplify this to a single expression.  */
1970
1971 static tree
1972 or_var_with_comparison_1 (gimple stmt,
1973 enum tree_code code2, tree op2a, tree op2b)
1974 {
1975 tree var = gimple_assign_lhs (stmt);
1976 tree true_test_var = NULL_TREE;
1977 tree false_test_var = NULL_TREE;
1978 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1979
1980 /* Check for identities like (var OR (var != 0)) => true. */
1981 if (TREE_CODE (op2a) == SSA_NAME
1982 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1983 {
1984 if ((code2 == NE_EXPR && integer_zerop (op2b))
1985 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1986 {
1987 true_test_var = op2a;
1988 if (var == true_test_var)
1989 return var;
1990 }
1991 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1992 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1993 {
1994 false_test_var = op2a;
1995 if (var == false_test_var)
1996 return boolean_true_node;
1997 }
1998 }
1999
2000 /* If the definition is a comparison, recurse on it. */
2001 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2002 {
2003 tree t = or_comparisons_1 (innercode,
2004 gimple_assign_rhs1 (stmt),
2005 gimple_assign_rhs2 (stmt),
2006 code2,
2007 op2a,
2008 op2b);
2009 if (t)
2010 return t;
2011 }
2012
2013 /* If the definition is an AND or OR expression, we may be able to
2014 simplify by reassociating. */
2015 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2016 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2017 {
2018 tree inner1 = gimple_assign_rhs1 (stmt);
2019 tree inner2 = gimple_assign_rhs2 (stmt);
2020 gimple s;
2021 tree t;
2022 tree partial = NULL_TREE;
2023 bool is_or = (innercode == BIT_IOR_EXPR);
2024
2025 /* Check for boolean identities that don't require recursive examination
2026 of inner1/inner2:
2027 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2028 inner1 OR (inner1 AND inner2) => inner1
2029 !inner1 OR (inner1 OR inner2) => true
2030 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2031 */
2032 if (inner1 == true_test_var)
2033 return (is_or ? var : inner1);
2034 else if (inner2 == true_test_var)
2035 return (is_or ? var : inner2);
2036 else if (inner1 == false_test_var)
2037 return (is_or
2038 ? boolean_true_node
2039 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2040 else if (inner2 == false_test_var)
2041 return (is_or
2042 ? boolean_true_node
2043 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2044
2045 /* Next, redistribute/reassociate the OR across the inner tests.
2046 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
2047 if (TREE_CODE (inner1) == SSA_NAME
2048 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2049 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2050 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2051 gimple_assign_rhs1 (s),
2052 gimple_assign_rhs2 (s),
2053 code2, op2a, op2b)))
2054 {
2055 /* Handle the OR case, where we are reassociating:
2056 (inner1 OR inner2) OR (op2a code2 op2b)
2057 => (t OR inner2)
2058 If the partial result t is a constant, we win. Otherwise
2059 continue on to try reassociating with the other inner test. */
2060 if (is_or)
2061 {
2062 if (integer_onep (t))
2063 return boolean_true_node;
2064 else if (integer_zerop (t))
2065 return inner2;
2066 }
2067
2068 /* Handle the AND case, where we are redistributing:
2069 (inner1 AND inner2) OR (op2a code2 op2b)
2070 => (t AND (inner2 OR (op2a code2 op2b))) */
2071 else if (integer_zerop (t))
2072 return boolean_false_node;
2073
2074 /* Save partial result for later. */
2075 partial = t;
2076 }
2077
2078 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
2079 if (TREE_CODE (inner2) == SSA_NAME
2080 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2081 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2082 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2083 gimple_assign_rhs1 (s),
2084 gimple_assign_rhs2 (s),
2085 code2, op2a, op2b)))
2086 {
2087 /* Handle the OR case, where we are reassociating:
2088 (inner1 OR inner2) OR (op2a code2 op2b)
2089 => (inner1 OR t)
2090 => (t OR partial) */
2091 if (is_or)
2092 {
2093 if (integer_zerop (t))
2094 return inner1;
2095 else if (integer_onep (t))
2096 return boolean_true_node;
2097 /* If both are the same, we can apply the identity
2098 (x OR x) == x. */
2099 else if (partial && same_bool_result_p (t, partial))
2100 return t;
2101 }
2102
2103 /* Handle the AND case, where we are redistributing:
2104 (inner1 AND inner2) OR (op2a code2 op2b)
2105 => (t AND (inner1 OR (op2a code2 op2b)))
2106 => (t AND partial) */
2107 else
2108 {
2109 if (integer_zerop (t))
2110 return boolean_false_node;
2111 else if (partial)
2112 {
2113 /* We already got a simplification for the other
2114 operand to the redistributed AND expression. The
2115 interesting case is when at least one is true.
2116 Or, if both are the same, we can apply the identity
2117 (x AND x) == x. */
2118 if (integer_onep (partial))
2119 return t;
2120 else if (integer_onep (t))
2121 return partial;
2122 else if (same_bool_result_p (t, partial))
2123 return t;
2124 }
2125 }
2126 }
2127 }
2128 return NULL_TREE;
2129 }
2130
2131 /* Try to simplify the OR of two comparisons defined by
2132 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2133 If this can be done without constructing an intermediate value,
2134 return the resulting tree; otherwise NULL_TREE is returned.
2135 This function is deliberately asymmetric as it recurses on SSA_DEFs
2136 in the first comparison but not the second. */
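/* For example, for integral x and y, (x < y) OR (y < x) matches the
   swapped-operand case below and combine_comparisons merges LT_EXPR
   with the swapped GT_EXPR into x != y. (A sketch; with NaNs to
   honor, the combination does not fold this way.) */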
2137
2138 static tree
2139 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2140 enum tree_code code2, tree op2a, tree op2b)
2141 {
2142 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2143 if (operand_equal_p (op1a, op2a, 0)
2144 && operand_equal_p (op1b, op2b, 0))
2145 {
2146 /* Result will be either NULL_TREE, or a combined comparison. */
2147 tree t = combine_comparisons (UNKNOWN_LOCATION,
2148 TRUTH_ORIF_EXPR, code1, code2,
2149 boolean_type_node, op1a, op1b);
2150 if (t)
2151 return t;
2152 }
2153
2154 /* Likewise the swapped case of the above. */
2155 if (operand_equal_p (op1a, op2b, 0)
2156 && operand_equal_p (op1b, op2a, 0))
2157 {
2158 /* Result will be either NULL_TREE, or a combined comparison. */
2159 tree t = combine_comparisons (UNKNOWN_LOCATION,
2160 TRUTH_ORIF_EXPR, code1,
2161 swap_tree_comparison (code2),
2162 boolean_type_node, op1a, op1b);
2163 if (t)
2164 return t;
2165 }
2166
2167 /* If both comparisons are of the same value against constants, we might
2168 be able to merge them. */
2169 if (operand_equal_p (op1a, op2a, 0)
2170 && TREE_CODE (op1b) == INTEGER_CST
2171 && TREE_CODE (op2b) == INTEGER_CST)
2172 {
2173 int cmp = tree_int_cst_compare (op1b, op2b);
2174
2175 /* If we have (op1a != op1b), we should either be able to
2176 return that or TRUE, depending on whether the constant op1b
2177 also satisfies the other comparison against op2b. */
2178 if (code1 == NE_EXPR)
2179 {
2180 bool done = true;
2181 bool val;
2182 switch (code2)
2183 {
2184 case EQ_EXPR: val = (cmp == 0); break;
2185 case NE_EXPR: val = (cmp != 0); break;
2186 case LT_EXPR: val = (cmp < 0); break;
2187 case GT_EXPR: val = (cmp > 0); break;
2188 case LE_EXPR: val = (cmp <= 0); break;
2189 case GE_EXPR: val = (cmp >= 0); break;
2190 default: done = false;
2191 }
2192 if (done)
2193 {
2194 if (val)
2195 return boolean_true_node;
2196 else
2197 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2198 }
2199 }
2200 /* Likewise if the second comparison is a != comparison. */
2201 else if (code2 == NE_EXPR)
2202 {
2203 bool done = true;
2204 bool val;
2205 switch (code1)
2206 {
2207 case EQ_EXPR: val = (cmp == 0); break;
2208 case NE_EXPR: val = (cmp != 0); break;
2209 case LT_EXPR: val = (cmp > 0); break;
2210 case GT_EXPR: val = (cmp < 0); break;
2211 case LE_EXPR: val = (cmp >= 0); break;
2212 case GE_EXPR: val = (cmp <= 0); break;
2213 default: done = false;
2214 }
2215 if (done)
2216 {
2217 if (val)
2218 return boolean_true_node;
2219 else
2220 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2221 }
2222 }
2223
2224 /* See if an equality test is redundant with the other comparison. */
2225 else if (code1 == EQ_EXPR)
2226 {
2227 bool val;
2228 switch (code2)
2229 {
2230 case EQ_EXPR: val = (cmp == 0); break;
2231 case NE_EXPR: val = (cmp != 0); break;
2232 case LT_EXPR: val = (cmp < 0); break;
2233 case GT_EXPR: val = (cmp > 0); break;
2234 case LE_EXPR: val = (cmp <= 0); break;
2235 case GE_EXPR: val = (cmp >= 0); break;
2236 default:
2237 val = false;
2238 }
2239 if (val)
2240 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2241 }
2242 else if (code2 == EQ_EXPR)
2243 {
2244 bool val;
2245 switch (code1)
2246 {
2247 case EQ_EXPR: val = (cmp == 0); break;
2248 case NE_EXPR: val = (cmp != 0); break;
2249 case LT_EXPR: val = (cmp > 0); break;
2250 case GT_EXPR: val = (cmp < 0); break;
2251 case LE_EXPR: val = (cmp >= 0); break;
2252 case GE_EXPR: val = (cmp <= 0); break;
2253 default:
2254 val = false;
2255 }
2256 if (val)
2257 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2258 }
2259
2260 /* Choose the less restrictive of two < or <= comparisons. */
2261 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2262 && (code2 == LT_EXPR || code2 == LE_EXPR))
2263 {
2264 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2265 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2266 else
2267 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2268 }
2269
2270 /* Likewise choose the less restrictive of two > or >= comparisons. */
2271 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2272 && (code2 == GT_EXPR || code2 == GE_EXPR))
2273 {
2274 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2275 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2276 else
2277 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2278 }
2279
2280 /* Check for singleton ranges. */
2281 else if (cmp == 0
2282 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2283 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2284 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2285
2286 /* Check for less/greater pairs that don't restrict the range at all. */
2287 else if (cmp >= 0
2288 && (code1 == LT_EXPR || code1 == LE_EXPR)
2289 && (code2 == GT_EXPR || code2 == GE_EXPR))
2290 return boolean_true_node;
2291 else if (cmp <= 0
2292 && (code1 == GT_EXPR || code1 == GE_EXPR)
2293 && (code2 == LT_EXPR || code2 == LE_EXPR))
2294 return boolean_true_node;
2295 }
2296
2297 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2298 NAME's definition is a truth value. See if there are any simplifications
2299 that can be done against the NAME's definition. */
2300 if (TREE_CODE (op1a) == SSA_NAME
2301 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2302 && (integer_zerop (op1b) || integer_onep (op1b)))
2303 {
2304 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2305 || (code1 == NE_EXPR && integer_onep (op1b)));
2306 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2307 switch (gimple_code (stmt))
2308 {
2309 case GIMPLE_ASSIGN:
2310 /* Try to simplify by copy-propagating the definition. */
2311 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2312
2313 case GIMPLE_PHI:
2314 /* If every argument to the PHI produces the same result when
2315 ORed with the second comparison, we win.
2316 Do not do this unless the type is bool since we need a bool
2317 result here anyway. */
2318 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2319 {
2320 tree result = NULL_TREE;
2321 unsigned i;
2322 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2323 {
2324 tree arg = gimple_phi_arg_def (stmt, i);
2325
2326 /* If this PHI has itself as an argument, ignore it.
2327 If all the other args produce the same result,
2328 we're still OK. */
2329 if (arg == gimple_phi_result (stmt))
2330 continue;
2331 else if (TREE_CODE (arg) == INTEGER_CST)
2332 {
2333 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2334 {
2335 if (!result)
2336 result = boolean_true_node;
2337 else if (!integer_onep (result))
2338 return NULL_TREE;
2339 }
2340 else if (!result)
2341 result = fold_build2 (code2, boolean_type_node,
2342 op2a, op2b);
2343 else if (!same_bool_comparison_p (result,
2344 code2, op2a, op2b))
2345 return NULL_TREE;
2346 }
2347 else if (TREE_CODE (arg) == SSA_NAME
2348 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2349 {
2350 tree temp;
2351 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2352 /* In simple cases we can look through PHI nodes,
2353 but we have to be careful with loops.
2354 See PR49073. */
2355 if (! dom_info_available_p (CDI_DOMINATORS)
2356 || gimple_bb (def_stmt) == gimple_bb (stmt)
2357 || dominated_by_p (CDI_DOMINATORS,
2358 gimple_bb (def_stmt),
2359 gimple_bb (stmt)))
2360 return NULL_TREE;
2361 temp = or_var_with_comparison (arg, invert, code2,
2362 op2a, op2b);
2363 if (!temp)
2364 return NULL_TREE;
2365 else if (!result)
2366 result = temp;
2367 else if (!same_bool_result_p (result, temp))
2368 return NULL_TREE;
2369 }
2370 else
2371 return NULL_TREE;
2372 }
2373 return result;
2374 }
2375
2376 default:
2377 break;
2378 }
2379 }
2380 return NULL_TREE;
2381 }
2382
2383 /* Try to simplify the OR of two comparisons, specified by
2384 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2385 If this can be simplified to a single expression (without requiring
2386 introducing more SSA variables to hold intermediate values),
2387 return the resulting tree. Otherwise return NULL_TREE.
2388 If the result expression is non-null, it has boolean type. */
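/* For example, a sketch assuming integral X and INTEGER_CST bounds:

     maybe_fold_or_comparisons (LT_EXPR, x, 3, LT_EXPR, x, 5)
       => x < 5 (the less restrictive bound)
     maybe_fold_or_comparisons (LT_EXPR, x, 5, GT_EXPR, x, 3)
       => boolean_true_node (the two ranges cover everything)

   Since or_comparisons_1 only looks through SSA definitions feeding
   its first comparison, the function below retries with the two
   comparisons swapped. */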
2389
2390 tree
2391 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2392 enum tree_code code2, tree op2a, tree op2b)
2393 {
2394 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2395 if (t)
2396 return t;
2397 else
2398 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2399 }
2400
2401
2402 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2403
2404 Either NULL_TREE, a simplified but non-constant expression, or a
2405 constant is returned.
2406
2407 ??? This should go into a gimple-fold-inline.h file to be eventually
2408 privatized with the single valueize function used in the various TUs
2409 to avoid the indirect function call overhead. */
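/* A minimal sketch of a VALUEIZE callback; lookup_lattice_value is a
   hypothetical pass-local lookup, and returning the argument unchanged
   when nothing better is known is always safe:

     static tree
     my_valueize (tree name)
     {
       tree val = lookup_lattice_value (name);
       return val ? val : name;
     }

   gimple_fold_stmt_to_constant_1 (stmt, my_valueize) then folds STMT
   as if each SSA use in it had the looked-up value. */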
2410
2411 tree
2412 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2413 {
2414 location_t loc = gimple_location (stmt);
2415 switch (gimple_code (stmt))
2416 {
2417 case GIMPLE_ASSIGN:
2418 {
2419 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2420
2421 switch (get_gimple_rhs_class (subcode))
2422 {
2423 case GIMPLE_SINGLE_RHS:
2424 {
2425 tree rhs = gimple_assign_rhs1 (stmt);
2426 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2427
2428 if (TREE_CODE (rhs) == SSA_NAME)
2429 {
2430 /* If the RHS is an SSA_NAME, return its known constant value,
2431 if any. */
2432 return (*valueize) (rhs);
2433 }
2434 /* Handle propagating invariant addresses into address
2435 operations. */
2436 else if (TREE_CODE (rhs) == ADDR_EXPR
2437 && !is_gimple_min_invariant (rhs))
2438 {
2439 HOST_WIDE_INT offset;
2440 tree base;
2441 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2442 &offset,
2443 valueize);
2444 if (base
2445 && (CONSTANT_CLASS_P (base)
2446 || decl_address_invariant_p (base)))
2447 return build_invariant_address (TREE_TYPE (rhs),
2448 base, offset);
2449 }
2450 else if (TREE_CODE (rhs) == CONSTRUCTOR
2451 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2452 && (CONSTRUCTOR_NELTS (rhs)
2453 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2454 {
2455 unsigned i;
2456 tree val, list;
2457
2458 list = NULL_TREE;
2459 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2460 {
2461 val = (*valueize) (val);
2462 if (TREE_CODE (val) == INTEGER_CST
2463 || TREE_CODE (val) == REAL_CST
2464 || TREE_CODE (val) == FIXED_CST)
2465 list = tree_cons (NULL_TREE, val, list);
2466 else
2467 return NULL_TREE;
2468 }
2469
2470 return build_vector (TREE_TYPE (rhs), nreverse (list));
2471 }
2472
2473 if (kind == tcc_reference)
2474 {
2475 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2476 || TREE_CODE (rhs) == REALPART_EXPR
2477 || TREE_CODE (rhs) == IMAGPART_EXPR)
2478 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2479 {
2480 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2481 return fold_unary_loc (EXPR_LOCATION (rhs),
2482 TREE_CODE (rhs),
2483 TREE_TYPE (rhs), val);
2484 }
2485 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2486 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2487 {
2488 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2489 return fold_ternary_loc (EXPR_LOCATION (rhs),
2490 TREE_CODE (rhs),
2491 TREE_TYPE (rhs), val,
2492 TREE_OPERAND (rhs, 1),
2493 TREE_OPERAND (rhs, 2));
2494 }
2495 else if (TREE_CODE (rhs) == MEM_REF
2496 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 {
2498 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2499 if (TREE_CODE (val) == ADDR_EXPR
2500 && is_gimple_min_invariant (val))
2501 {
2502 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2503 unshare_expr (val),
2504 TREE_OPERAND (rhs, 1));
2505 if (tem)
2506 rhs = tem;
2507 }
2508 }
2509 return fold_const_aggregate_ref_1 (rhs, valueize);
2510 }
2511 else if (kind == tcc_declaration)
2512 return get_symbol_constant_value (rhs);
2513 return rhs;
2514 }
2515
2516 case GIMPLE_UNARY_RHS:
2517 {
2518 /* Handle unary operators that can appear in GIMPLE form.
2519 Note that we know the single operand must be a constant,
2520 so this should almost always return a simplified RHS. */
2521 tree lhs = gimple_assign_lhs (stmt);
2522 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2523
2524 /* Conversions are useless for CCP purposes if they are
2525 value-preserving. Thus the restrictions that
2526 useless_type_conversion_p places for restrict qualification
2527 of pointer types should not apply here.
2528 Substitution later will only substitute to allowed places. */
2529 if (CONVERT_EXPR_CODE_P (subcode)
2530 && POINTER_TYPE_P (TREE_TYPE (lhs))
2531 && POINTER_TYPE_P (TREE_TYPE (op0))
2532 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2533 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2534 return op0;
2535
2536 return
2537 fold_unary_ignore_overflow_loc (loc, subcode,
2538 gimple_expr_type (stmt), op0);
2539 }
2540
2541 case GIMPLE_BINARY_RHS:
2542 {
2543 /* Handle binary operators that can appear in GIMPLE form. */
2544 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2545 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2546
2547 /* Translate &x + CST into an invariant form suitable for
2548 further propagation. */
2549 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2550 && TREE_CODE (op0) == ADDR_EXPR
2551 && TREE_CODE (op1) == INTEGER_CST)
2552 {
2553 tree off = fold_convert (ptr_type_node, op1);
2554 return build_fold_addr_expr_loc
2555 (loc,
2556 fold_build2 (MEM_REF,
2557 TREE_TYPE (TREE_TYPE (op0)),
2558 unshare_expr (op0), off));
2559 }
2560
2561 return fold_binary_loc (loc, subcode,
2562 gimple_expr_type (stmt), op0, op1);
2563 }
2564
2565 case GIMPLE_TERNARY_RHS:
2566 {
2567 /* Handle ternary operators that can appear in GIMPLE form. */
2568 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2569 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2570 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2571
2572 return fold_ternary_loc (loc, subcode,
2573 gimple_expr_type (stmt), op0, op1, op2);
2574 }
2575
2576 default:
2577 gcc_unreachable ();
2578 }
2579 }
2580
2581 case GIMPLE_CALL:
2582 {
2583 tree fn;
2584
2585 if (gimple_call_internal_p (stmt))
2586 /* No folding yet for these functions. */
2587 return NULL_TREE;
2588
2589 fn = (*valueize) (gimple_call_fn (stmt));
2590 if (TREE_CODE (fn) == ADDR_EXPR
2591 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2592 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2593 {
2594 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2595 tree call, retval;
2596 unsigned i;
2597 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2598 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2599 call = build_call_array_loc (loc,
2600 gimple_call_return_type (stmt),
2601 fn, gimple_call_num_args (stmt), args);
2602 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2603 if (retval)
2604 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2605 STRIP_NOPS (retval);
2606 return retval;
2607 }
2608 return NULL_TREE;
2609 }
2610
2611 default:
2612 return NULL_TREE;
2613 }
2614 }
2615
2616 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2617 Returns NULL_TREE if folding to a constant is not possible, otherwise
2618 returns a constant according to is_gimple_min_invariant. */
2619
2620 tree
2621 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2622 {
2623 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2624 if (res && is_gimple_min_invariant (res))
2625 return res;
2626 return NULL_TREE;
2627 }
2628
2629
2630 /* The following set of functions is supposed to fold references using
2631 their constant initializers. */
2632
2633 static tree fold_ctor_reference (tree type, tree ctor,
2634 unsigned HOST_WIDE_INT offset,
2635 unsigned HOST_WIDE_INT size);
2636
2637 /* See if we can find the constructor defining the value of BASE.
2638 When we know the constructor with a constant offset (such as when
2639 BASE is array[40] and we do know the constructor of the array), then
2640 BIT_OFFSET is adjusted accordingly.
2641
2642 As a special case, return error_mark_node when the constructor
2643 is not explicitly available, but it is known to be zero,
2644 such as 'static const int a;'. */
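/* For example, given the (hypothetical) declaration

     static const int a[4] = { 1, 2, 3, 4 };

   a MEM_REF of &a with byte offset 8 yields the CONSTRUCTOR of 'a'
   with *BIT_OFFSET advanced by 64 bits, so with 32-bit int a caller
   reading an int there folds it to the element with value 3. */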
2645 static tree
2646 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2647 tree (*valueize)(tree))
2648 {
2649 HOST_WIDE_INT bit_offset2, size, max_size;
2650 if (TREE_CODE (base) == MEM_REF)
2651 {
2652 if (!integer_zerop (TREE_OPERAND (base, 1)))
2653 {
2654 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2655 return NULL_TREE;
2656 *bit_offset += (mem_ref_offset (base).low
2657 * BITS_PER_UNIT);
2658 }
2659
2660 if (valueize
2661 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2662 base = valueize (TREE_OPERAND (base, 0));
2663 if (!base || TREE_CODE (base) != ADDR_EXPR)
2664 return NULL_TREE;
2665 base = TREE_OPERAND (base, 0);
2666 }
2667
2668 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2669 DECL_INITIAL. If BASE is a nested reference into another
2670 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2671 the inner reference. */
2672 switch (TREE_CODE (base))
2673 {
2674 case VAR_DECL:
2675 if (!const_value_known_p (base))
2676 return NULL_TREE;
2677
2678 /* Fallthru. */
2679 case CONST_DECL:
2680 if (!DECL_INITIAL (base)
2681 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2682 return error_mark_node;
2683 return DECL_INITIAL (base);
2684
2685 case ARRAY_REF:
2686 case COMPONENT_REF:
2687 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2688 if (max_size == -1 || size != max_size)
2689 return NULL_TREE;
2690 *bit_offset += bit_offset2;
2691 return get_base_constructor (base, bit_offset, valueize);
2692
2693 case STRING_CST:
2694 case CONSTRUCTOR:
2695 return base;
2696
2697 default:
2698 return NULL_TREE;
2699 }
2700 }
2701
2702 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2703 to the memory at bit OFFSET.
2704
2705 We only do the simple job of folding byte accesses. */
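/* E.g. for CTOR "hi", OFFSET 8 and SIZE 8, this returns
   build_int_cst_type (type, 'i'); in-bounds reads index
   TREE_STRING_POINTER directly, while reads past the string length
   fold to zero as explained below. (A sketch assuming 8-bit units.) */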
2706
2707 static tree
2708 fold_string_cst_ctor_reference (tree type, tree ctor,
2709 unsigned HOST_WIDE_INT offset,
2710 unsigned HOST_WIDE_INT size)
2711 {
2712 if (INTEGRAL_TYPE_P (type)
2713 && (TYPE_MODE (type)
2714 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2715 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2716 == MODE_INT)
2717 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2718 && size == BITS_PER_UNIT
2719 && !(offset % BITS_PER_UNIT))
2720 {
2721 offset /= BITS_PER_UNIT;
2722 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2723 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2724 [offset]));
2725 /* Folding
2726 const char a[20]="hello";
2727 return a[10];
2728
2729 might lead to an offset greater than the string length. In this case
2730 we know the value is either initialized to 0 or out of bounds. Return
2731 0 in both cases. */
2732 return build_zero_cst (type);
2733 }
2734 return NULL_TREE;
2735 }
2736
2737 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2738 SIZE to the memory at bit OFFSET. */
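/* E.g. for a 32-bit element type, a zero low bound and OFFSET 96,
   access_index below is (96 / BITS_PER_UNIT) / 4 = 3 and inner_offset
   is 96 % 32 = 0, so the access folds to the constructor element at
   index 3. (A sketch assuming BITS_PER_UNIT == 8.) */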
2739
2740 static tree
2741 fold_array_ctor_reference (tree type, tree ctor,
2742 unsigned HOST_WIDE_INT offset,
2743 unsigned HOST_WIDE_INT size)
2744 {
2745 unsigned HOST_WIDE_INT cnt;
2746 tree cfield, cval;
2747 double_int low_bound, elt_size;
2748 double_int index, max_index;
2749 double_int access_index;
2750 tree domain_type = NULL_TREE;
2751 HOST_WIDE_INT inner_offset;
2752
2753 /* Compute low bound and elt size. */
2754 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2755 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2756 if (domain_type && TYPE_MIN_VALUE (domain_type))
2757 {
2758 /* Static constructors for variably sized objects make no sense. */
2759 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2760 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2761 }
2762 else
2763 low_bound = double_int_zero;
2764 /* Static constructors for variably sized objects make no sense. */
2765 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2766 == INTEGER_CST);
2767 elt_size =
2768 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2769
2770
2771 /* We can only handle constant-sized accesses that are known not to
2772 be larger than the size of an array element. */
2773 if (!TYPE_SIZE_UNIT (type)
2774 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2775 || double_int_cmp (elt_size,
2776 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2777 return NULL_TREE;
2778
2779 /* Compute the array index we look for. */
2780 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2781 elt_size, TRUNC_DIV_EXPR);
2782 access_index = double_int_add (access_index, low_bound);
2783
2784 /* And offset within the access. */
2785 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2786
2787 /* See if the array field is large enough to span the whole access. We
2788 do not care to fold accesses spanning multiple array indices. */
2789 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2790 return NULL_TREE;
2791
2792 index = double_int_sub (low_bound, double_int_one);
2793 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2794 {
2795 /* An array constructor may explicitly set the index, specify a range,
2796 or leave the index NULL, meaning that it is the next index after the
2797 previous one. */
2798 if (cfield)
2799 {
2800 if (TREE_CODE (cfield) == INTEGER_CST)
2801 max_index = index = tree_to_double_int (cfield);
2802 else
2803 {
2804 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2805 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2806 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2807 }
2808 }
2809 else
2810 max_index = index = double_int_add (index, double_int_one);
2811
2812 /* Do we have a match? */
2813 if (double_int_cmp (access_index, index, 1) >= 0
2814 && double_int_cmp (access_index, max_index, 1) <= 0)
2815 return fold_ctor_reference (type, cval, inner_offset, size);
2816 }
2817 /* When memory is not explicitly mentioned in the constructor,
2818 it is 0 (or out of range). */
2819 return build_zero_cst (type);
2820 }
2821
2822 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2823 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
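/* The loop below places each field at
     bitoffset = DECL_FIELD_BIT_OFFSET + DECL_FIELD_OFFSET * BITS_PER_UNIT
   so e.g. the second int of a (hypothetical) struct { int a; int b; }
   sits at bit 32 with 32-bit int, however the front end split the
   position between the two trees; an access with OFFSET 32 and SIZE 32
   then overlaps exactly that field. */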
2824
2825 static tree
2826 fold_nonarray_ctor_reference (tree type, tree ctor,
2827 unsigned HOST_WIDE_INT offset,
2828 unsigned HOST_WIDE_INT size)
2829 {
2830 unsigned HOST_WIDE_INT cnt;
2831 tree cfield, cval;
2832
2833 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2834 cval)
2835 {
2836 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2837 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2838 tree field_size = DECL_SIZE (cfield);
2839 double_int bitoffset;
2840 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2841 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2842 double_int bitoffset_end, access_end;
2843
2844 /* Variable sized objects in static constructors make no sense,
2845 but field_size can be NULL for flexible array members. */
2846 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2847 && TREE_CODE (byte_offset) == INTEGER_CST
2848 && (field_size != NULL_TREE
2849 ? TREE_CODE (field_size) == INTEGER_CST
2850 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2851
2852 /* Compute bit offset of the field. */
2853 bitoffset = double_int_add (tree_to_double_int (field_offset),
2854 double_int_mul (byte_offset_cst,
2855 bits_per_unit_cst));
2856 /* Compute bit offset where the field ends. */
2857 if (field_size != NULL_TREE)
2858 bitoffset_end = double_int_add (bitoffset,
2859 tree_to_double_int (field_size));
2860 else
2861 bitoffset_end = double_int_zero;
2862
2863 access_end = double_int_add (uhwi_to_double_int (offset),
2864 uhwi_to_double_int (size));
2865
2866 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2867 [BITOFFSET, BITOFFSET_END)? */
2868 if (double_int_cmp (access_end, bitoffset, 0) > 0
2869 && (field_size == NULL_TREE
2870 || double_int_cmp (uhwi_to_double_int (offset),
2871 bitoffset_end, 0) < 0))
2872 {
2873 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2874 bitoffset);
2875 /* We do have overlap. Now see if the field is large enough to
2876 cover the access. Give up on accesses spanning multiple
2877 fields. */
2878 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2879 return NULL_TREE;
2880 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2881 return NULL_TREE;
2882 return fold_ctor_reference (type, cval,
2883 double_int_to_uhwi (inner_offset), size);
2884 }
2885 }
2886 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2887 return build_zero_cst (type);
2888 }
2889
2890 /* CTOR is a value initializing memory; fold a reference of type TYPE
2891 and size SIZE to the memory at bit OFFSET. */
2892
2893 static tree
2894 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2895 unsigned HOST_WIDE_INT size)
2896 {
2897 tree ret;
2898
2899 /* We found the field with an exact match. */
2900 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2901 && !offset)
2902 return canonicalize_constructor_val (ctor);
2903
2904 /* We are at the end of the walk; see if we can view-convert the
2905 result. */
2906 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2907 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2908 && operand_equal_p (TYPE_SIZE (type),
2909 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2910 {
2911 ret = canonicalize_constructor_val (ctor);
2912 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2913 if (ret)
2914 STRIP_NOPS (ret);
2915 return ret;
2916 }
2917 if (TREE_CODE (ctor) == STRING_CST)
2918 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2919 if (TREE_CODE (ctor) == CONSTRUCTOR)
2920 {
2922 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2923 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2924 return fold_array_ctor_reference (type, ctor, offset, size);
2925 else
2926 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2927 }
2928
2929 return NULL_TREE;
2930 }
2931
2932 /* Return the tree representing the element referenced by T if T is an
2933 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2934 names using VALUEIZE. Return NULL_TREE otherwise. */
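/* For example, given the (hypothetical) declaration

     static const int a[4] = { 1, 2, 3, 4 };

   folding the load a[2] finds the CONSTRUCTOR via get_base_constructor
   and returns the INTEGER_CST 3; an SSA index is first valueized by
   the ARRAY_REF case below. */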
2935
2936 tree
2937 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2938 {
2939 tree ctor, idx, base;
2940 HOST_WIDE_INT offset, size, max_size;
2941 tree tem;
2942
2943 if (TREE_THIS_VOLATILE (t))
2944 return NULL_TREE;
2945
2946 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2947 return get_symbol_constant_value (t);
2948
2949 tem = fold_read_from_constant_string (t);
2950 if (tem)
2951 return tem;
2952
2953 switch (TREE_CODE (t))
2954 {
2955 case ARRAY_REF:
2956 case ARRAY_RANGE_REF:
2957 /* Constant indices are handled well by get_base_constructor.
2958 Only special-case variable offsets here.
2959 FIXME: This code can't handle nested references with variable indices
2960 (they will be handled only by iterating CCP). Perhaps we can bring
2961 get_ref_base_and_extent here and make it use a valueize callback. */
2962 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2963 && valueize
2964 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2965 && host_integerp (idx, 0))
2966 {
2967 tree low_bound, unit_size;
2968
2969 /* If the resulting bit-offset is constant, track it. */
2970 if ((low_bound = array_ref_low_bound (t),
2971 host_integerp (low_bound, 0))
2972 && (unit_size = array_ref_element_size (t),
2973 host_integerp (unit_size, 1)))
2974 {
2975 offset = TREE_INT_CST_LOW (idx);
2976 offset -= TREE_INT_CST_LOW (low_bound);
2977 offset *= TREE_INT_CST_LOW (unit_size);
2978 offset *= BITS_PER_UNIT;
2979
2980 base = TREE_OPERAND (t, 0);
2981 ctor = get_base_constructor (base, &offset, valueize);
2982 /* Empty constructor. Always fold to 0. */
2983 if (ctor == error_mark_node)
2984 return build_zero_cst (TREE_TYPE (t));
2985 /* Out-of-bounds array access. The value is undefined,
2986 but don't fold. */
2987 if (offset < 0)
2988 return NULL_TREE;
2989 /* We cannot determine the ctor. */
2990 if (!ctor)
2991 return NULL_TREE;
2992 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
2993 TREE_INT_CST_LOW (unit_size)
2994 * BITS_PER_UNIT);
2995 }
2996 }
2997 /* Fallthru. */
2998
2999 case COMPONENT_REF:
3000 case BIT_FIELD_REF:
3001 case TARGET_MEM_REF:
3002 case MEM_REF:
3003 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3004 ctor = get_base_constructor (base, &offset, valueize);
3005
3006 /* Empty constructor. Always fold to 0. */
3007 if (ctor == error_mark_node)
3008 return build_zero_cst (TREE_TYPE (t));
3009 /* We do not know the precise address. */
3010 if (max_size == -1 || max_size != size)
3011 return NULL_TREE;
3012 /* We cannot determine the ctor. */
3013 if (!ctor)
3014 return NULL_TREE;
3015
3016 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3017 if (offset < 0)
3018 return NULL_TREE;
3019
3020 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3021
3022 case REALPART_EXPR:
3023 case IMAGPART_EXPR:
3024 {
3025 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3026 if (c && TREE_CODE (c) == COMPLEX_CST)
3027 return fold_build1_loc (EXPR_LOCATION (t),
3028 TREE_CODE (t), TREE_TYPE (t), c);
3029 break;
3030 }
3031
3032 default:
3033 break;
3034 }
3035
3036 return NULL_TREE;
3037 }
3038
3039 tree
3040 fold_const_aggregate_ref (tree t)
3041 {
3042 return fold_const_aggregate_ref_1 (t, NULL);
3043 }
3044
3045 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3046 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3047 KNOWN_BINFO carries the binfo describing the true type of
3048 OBJ_TYPE_REF_OBJECT(REF). */
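/* E.g. with 64-bit vtable slots, TOKEN 2 advances OFFSET by 128 bits
   into DECL_INITIAL of the vtable VAR_DECL, where an ADDR_EXPR of the
   target FUNCTION_DECL is found. (A sketch; the slot size is really
   taken from TYPE_SIZE of the vtable's element type below.) */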
3049
3050 tree
3051 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3052 {
3053 unsigned HOST_WIDE_INT offset, size;
3054 tree v, fn;
3055
3056 v = BINFO_VTABLE (known_binfo);
3057 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3058 if (!v)
3059 return NULL_TREE;
3060
3061 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3062 {
3063 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3064 v = TREE_OPERAND (v, 0);
3065 }
3066 else
3067 offset = 0;
3068
3069 if (TREE_CODE (v) != ADDR_EXPR)
3070 return NULL_TREE;
3071 v = TREE_OPERAND (v, 0);
3072
3073 if (TREE_CODE (v) != VAR_DECL
3074 || !DECL_VIRTUAL_P (v)
3075 || !DECL_INITIAL (v)
3076 || DECL_INITIAL (v) == error_mark_node)
3077 return NULL_TREE;
3078 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3079 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3080 offset += token * size;
3081 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3082 offset, size);
3083 if (!fn)
3084 return NULL_TREE;
3085 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3086 || TREE_CODE (fn) == FDESC_EXPR);
3087 fn = TREE_OPERAND (fn, 0);
3088 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3089
3090 /* When the cgraph node is missing and the function is not public, we
3091 cannot devirtualize. This can happen in WHOPR when the actual method
3092 ends up in another partition because we found the devirtualization
3093 possibility too late. */
3094 if (!can_refer_decl_in_current_unit_p (fn))
3095 return NULL_TREE;
3096
3097 return fn;
3098 }
3099
3100 /* Return true iff VAL is a gimple expression that is known to be
3101 non-negative. Restricted to floating-point inputs. */
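/* E.g. the cases below treat x * x, fabs (x) and pow (x, 2.0) as
   nonnegative, and sqrt (x) as nonnegative only when signed zeros
   need not be honored, since sqrt (-0.0) is -0.0. */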
3102
3103 bool
3104 gimple_val_nonnegative_real_p (tree val)
3105 {
3106 gimple def_stmt;
3107
3108 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3109
3110 /* Use existing logic for non-gimple trees. */
3111 if (tree_expr_nonnegative_p (val))
3112 return true;
3113
3114 if (TREE_CODE (val) != SSA_NAME)
3115 return false;
3116
3117 /* Currently we look only at the immediately defining statement
3118 to make this determination, since recursion on defining
3119 statements of operands can lead to quadratic behavior in the
3120 worst case. This is expected to catch almost all occurrences
3121 in practice. It would be possible to implement limited-depth
3122 recursion if important cases are lost. Alternatively, passes
3123 that need this information (such as the pow/powi lowering code
3124 in the cse_sincos pass) could be revised to provide it through
3125 dataflow propagation. */
3126
3127 def_stmt = SSA_NAME_DEF_STMT (val);
3128
3129 if (is_gimple_assign (def_stmt))
3130 {
3131 tree op0, op1;
3132
3133 /* See fold-const.c:tree_expr_nonnegative_p for additional
3134 cases that could be handled with recursion. */
3135
3136 switch (gimple_assign_rhs_code (def_stmt))
3137 {
3138 case ABS_EXPR:
3139 /* Always true for floating-point operands. */
3140 return true;
3141
3142 case MULT_EXPR:
3143 /* True if the two operands are identical (since we are
3144 restricted to floating-point inputs). */
3145 op0 = gimple_assign_rhs1 (def_stmt);
3146 op1 = gimple_assign_rhs2 (def_stmt);
3147
3148 if (op0 == op1
3149 || operand_equal_p (op0, op1, 0))
3150 return true;
3151
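/* Fall through to the default case when the operands differ. */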
3152 default:
3153 return false;
3154 }
3155 }
3156 else if (is_gimple_call (def_stmt))
3157 {
3158 tree fndecl = gimple_call_fndecl (def_stmt);
3159 if (fndecl
3160 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3161 {
3162 tree arg1;
3163
3164 switch (DECL_FUNCTION_CODE (fndecl))
3165 {
3166 CASE_FLT_FN (BUILT_IN_ACOS):
3167 CASE_FLT_FN (BUILT_IN_ACOSH):
3168 CASE_FLT_FN (BUILT_IN_CABS):
3169 CASE_FLT_FN (BUILT_IN_COSH):
3170 CASE_FLT_FN (BUILT_IN_ERFC):
3171 CASE_FLT_FN (BUILT_IN_EXP):
3172 CASE_FLT_FN (BUILT_IN_EXP10):
3173 CASE_FLT_FN (BUILT_IN_EXP2):
3174 CASE_FLT_FN (BUILT_IN_FABS):
3175 CASE_FLT_FN (BUILT_IN_FDIM):
3176 CASE_FLT_FN (BUILT_IN_HYPOT):
3177 CASE_FLT_FN (BUILT_IN_POW10):
3178 return true;
3179
3180 CASE_FLT_FN (BUILT_IN_SQRT):
3181 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3182 negative inputs. */
3183 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3184 return true;
3185
3186 break;
3187
3188 CASE_FLT_FN (BUILT_IN_POWI):
3189 /* True if the second argument is an even integer. */
3190 arg1 = gimple_call_arg (def_stmt, 1);
3191
3192 if (TREE_CODE (arg1) == INTEGER_CST
3193 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3194 return true;
3195
3196 break;
3197
3198 CASE_FLT_FN (BUILT_IN_POW):
3199 /* True if the second argument is an even integer-valued
3200 real. */
3201 arg1 = gimple_call_arg (def_stmt, 1);
3202
3203 if (TREE_CODE (arg1) == REAL_CST)
3204 {
3205 REAL_VALUE_TYPE c;
3206 HOST_WIDE_INT n;
3207
3208 c = TREE_REAL_CST (arg1);
3209 n = real_to_integer (&c);
3210
3211 if ((n & 1) == 0)
3212 {
3213 REAL_VALUE_TYPE cint;
3214 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3215 if (real_identical (&c, &cint))
3216 return true;
3217 }
3218 }
3219
3220 break;
3221
3222 default:
3223 return false;
3224 }
3225 }
3226 }
3227
3228 return false;
3229 }