gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
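     As an illustration (a sketch, not taken from any testcase), for

       x_1 = 4;
       y_2 = x_1 + 1;
       z_3 = p_2 * y_2;

     x_1 and y_2 end up CONSTANT (with values 4 and 5).  If p_2 is, say,
     an incoming parameter, it is VARYING, and so is z_3.  Every name
     starts out UNINITIALIZED and receives its proper initial value the
     first time the propagator looks at it.
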
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58        propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
66
67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72        values as often as possible, it uses two main shortcuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85          that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
101
102
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
105
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
112
113 For instance, consider the following code fragment:
114
115 struct A a;
116 const int B = 42;
117
118 void foo (int i)
119 {
120 if (i > 10)
121 a.a = 42;
122 else
123 {
124 a.b = 21;
125 a.a = a.b + 21;
126 }
127
128 if (a.a != B)
129 never_executed ();
130 }
131
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
138
139 # a_5 = VDEF <a_4>
140 a.a = 2;
141
142 # VUSE <a_5>
143 x_3 = a.b;
144
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
148
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
151
152 int A;
153
154 foo (int i)
155 {
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
159
160 # VUSE <A_5>
161 A.0_6 = A;
162
163 return A.0_6;
164 }
165
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
169
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
176
177 References:
178
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
181
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
184
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
187
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
211 #include "dbgcnt.h"
212
213
214 /* Possible lattice values. */
215 typedef enum
216 {
217 UNINITIALIZED,
218 UNDEFINED,
219 CONSTANT,
220 VARYING
221 } ccp_lattice_t;
222
223 /* Array of propagated constant values. After propagation,
224 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
225 the constant is held in an SSA name representing a memory store
226 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
227 memory reference used to store (i.e., the LHS of the assignment
228 doing the store). */
229 static prop_value_t *const_val;
230
231 static void canonicalize_float_value (prop_value_t *);
232
233 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
234
235 static void
236 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
237 {
238 switch (val.lattice_val)
239 {
240 case UNINITIALIZED:
241 fprintf (outf, "%sUNINITIALIZED", prefix);
242 break;
243 case UNDEFINED:
244 fprintf (outf, "%sUNDEFINED", prefix);
245 break;
246 case VARYING:
247 fprintf (outf, "%sVARYING", prefix);
248 break;
249 case CONSTANT:
250 fprintf (outf, "%sCONSTANT ", prefix);
251 print_generic_expr (outf, val.value, dump_flags);
252 break;
253 default:
254 gcc_unreachable ();
255 }
256 }
257
258
259 /* Print lattice value VAL to stderr. */
260
261 void debug_lattice_value (prop_value_t val);
262
263 void
264 debug_lattice_value (prop_value_t val)
265 {
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
268 }
269
270
271
272 /* If SYM is a constant variable with known value, return the value.
273 NULL_TREE is returned otherwise. */
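
/* For illustration (a hypothetical declaration, not from this file):

     static const int answer = 42;

   'answer' is TREE_STATIC and TREE_READONLY and has an initializer, so
   get_symbol_constant_value returns the INTEGER_CST 42.  A 'const'
   declared without an initializer that is not DECL_EXTERNAL and binds
   locally yields zero instead, as handled in the body below.  */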
274
275 tree
276 get_symbol_constant_value (tree sym)
277 {
278 if (TREE_STATIC (sym)
279 && TREE_READONLY (sym))
280 {
281 tree val = DECL_INITIAL (sym);
282 if (val)
283 {
284 STRIP_USELESS_TYPE_CONVERSION (val);
285 if (is_gimple_min_invariant (val))
286 {
287 if (TREE_CODE (val) == ADDR_EXPR)
288 {
289 tree base = get_base_address (TREE_OPERAND (val, 0));
290 if (base && TREE_CODE (base) == VAR_DECL)
291 add_referenced_var (base);
292 }
293 return val;
294 }
295 }
296 /* Variables declared 'const' without an initializer
297 have zero as the initializer if they may not be
298 overridden at link or run time. */
299 if (!val
300 && !DECL_EXTERNAL (sym)
301 && targetm.binds_local_p (sym)
302 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
303 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
304 return fold_convert (TREE_TYPE (sym), integer_zero_node);
305 }
306
307 return NULL_TREE;
308 }
309
310 /* Compute and return a default value for variable VAR (the caller,
311    get_value, stores it in the CONST_VAL array).  The following rules
312    are used to get default values:
313
314 1- Global and static variables that are declared constant are
315 considered CONSTANT.
316
317 2- Any other value is considered UNDEFINED. This is useful when
318 considering PHI nodes. PHI arguments that are undefined do not
319 change the constant value of the PHI node, which allows for more
320 constants to be propagated.
321
322 3- Variables defined by statements other than assignments and PHI
323 nodes are considered VARYING.
324
325 4- Initial values of variables that are not GIMPLE registers are
326 considered VARYING. */
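
/* Illustrative examples (hypothetical GIMPLE, not from a testcase):
   the default definition of an uninitialized local 'int t;' gets
   UNDEFINED, the default definition of a PARM_DECL gets VARYING, a name
   defined by 't_1 = n;' where 'n' is 'static const int n = 8;' gets
   CONSTANT 8 via get_symbol_constant_value, and a name defined by a
   GIMPLE_ASM gets VARYING.  */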
327
328 static prop_value_t
329 get_default_value (tree var)
330 {
331 tree sym = SSA_NAME_VAR (var);
332 prop_value_t val = { UNINITIALIZED, NULL_TREE };
333 gimple stmt;
334
335 stmt = SSA_NAME_DEF_STMT (var);
336
337 if (gimple_nop_p (stmt))
338 {
339 /* Variables defined by an empty statement are those used
340 before being initialized. If VAR is a local variable, we
341 can assume initially that it is UNDEFINED, otherwise we must
342 consider it VARYING. */
343 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
344 val.lattice_val = UNDEFINED;
345 else
346 val.lattice_val = VARYING;
347 }
348 else if (is_gimple_assign (stmt)
349 /* Value-returning GIMPLE_CALL statements assign to
350 a variable, and are treated similarly to GIMPLE_ASSIGN. */
351 || (is_gimple_call (stmt)
352 && gimple_call_lhs (stmt) != NULL_TREE)
353 || gimple_code (stmt) == GIMPLE_PHI)
354 {
355 tree cst;
356 if (gimple_assign_single_p (stmt)
357 && DECL_P (gimple_assign_rhs1 (stmt))
358 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
359 {
360 val.lattice_val = CONSTANT;
361 val.value = cst;
362 }
363 else
364 /* Any other variable defined by an assignment or a PHI node
365 is considered UNDEFINED. */
366 val.lattice_val = UNDEFINED;
367 }
368 else
369 {
370 /* Otherwise, VAR will never take on a constant value. */
371 val.lattice_val = VARYING;
372 }
373
374 return val;
375 }
376
377
378 /* Get the constant value associated with variable VAR. */
379
380 static inline prop_value_t *
381 get_value (tree var)
382 {
383 prop_value_t *val;
384
385 if (const_val == NULL)
386 return NULL;
387
388 val = &const_val[SSA_NAME_VERSION (var)];
389 if (val->lattice_val == UNINITIALIZED)
390 *val = get_default_value (var);
391
392 canonicalize_float_value (val);
393
394 return val;
395 }
396
397 /* Sets the value associated with VAR to VARYING. */
398
399 static inline void
400 set_value_varying (tree var)
401 {
402 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
403
404 val->lattice_val = VARYING;
405 val->value = NULL_TREE;
406 }
407
408 /* For float types, modify the value of VAL to make ccp work correctly
409 for non-standard values (-0, NaN):
410
411 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
412 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
413 This is to fix the following problem (see PR 29921): Suppose we have
414
415 x = 0.0 * y
416
417    and we set the value of y to NaN.  This causes the value of x to be set to NaN.
418 When we later determine that y is in fact VARYING, fold uses the fact
419 that HONOR_NANS is false, and we try to change the value of x to 0,
420 causing an ICE. With HONOR_NANS being false, the real appearance of
421 NaN would cause undefined behavior, though, so claiming that y (and x)
422 are UNDEFINED initially is correct. */
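
/* A small illustration (assuming a mode for which HONOR_SIGNED_ZEROS and
   HONOR_NANS are both false, e.g. under -fno-signed-zeros and
   -ffinite-math-only): a CONSTANT lattice value holding the REAL_CST -0.0
   is rewritten to hold 0.0, and a CONSTANT holding a NaN is demoted to
   UNDEFINED.  */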
423
424 static void
425 canonicalize_float_value (prop_value_t *val)
426 {
427 enum machine_mode mode;
428 tree type;
429 REAL_VALUE_TYPE d;
430
431 if (val->lattice_val != CONSTANT
432 || TREE_CODE (val->value) != REAL_CST)
433 return;
434
435 d = TREE_REAL_CST (val->value);
436 type = TREE_TYPE (val->value);
437 mode = TYPE_MODE (type);
438
439 if (!HONOR_SIGNED_ZEROS (mode)
440 && REAL_VALUE_MINUS_ZERO (d))
441 {
442 val->value = build_real (type, dconst0);
443 return;
444 }
445
446 if (!HONOR_NANS (mode)
447 && REAL_VALUE_ISNAN (d))
448 {
449 val->lattice_val = UNDEFINED;
450 val->value = NULL;
451 return;
452 }
453 }
454
455 /* Set the value for variable VAR to NEW_VAL. Return true if the new
456 value is different from VAR's previous value. */
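
/* Lattice transitions must be monotonic in the ordering
   UNINITIALIZED < UNDEFINED < CONSTANT < VARYING.  For example
   (illustrative), UNDEFINED -> CONSTANT 5 -> VARYING is a valid history
   for one SSA name, while CONSTANT 5 -> CONSTANT 7 or
   CONSTANT 5 -> UNDEFINED would trip the assertion below.  */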
457
458 static bool
459 set_lattice_value (tree var, prop_value_t new_val)
460 {
461 prop_value_t *old_val = get_value (var);
462
463 canonicalize_float_value (&new_val);
464
465 /* Lattice transitions must always be monotonically increasing in
466 value. If *OLD_VAL and NEW_VAL are the same, return false to
467 inform the caller that this was a non-transition. */
468
469 gcc_assert (old_val->lattice_val < new_val.lattice_val
470 || (old_val->lattice_val == new_val.lattice_val
471 && ((!old_val->value && !new_val.value)
472 || operand_equal_p (old_val->value, new_val.value, 0))));
473
474 if (old_val->lattice_val != new_val.lattice_val)
475 {
476 if (dump_file && (dump_flags & TDF_DETAILS))
477 {
478 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
479 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
480 }
481
482 *old_val = new_val;
483
484 gcc_assert (new_val.lattice_val != UNDEFINED);
485 return true;
486 }
487
488 return false;
489 }
490
491
492 /* Return the likely CCP lattice value for STMT.
493
494 If STMT has no operands, then return CONSTANT.
495
496 Else if undefinedness of operands of STMT cause its value to be
497 undefined, then return UNDEFINED.
498
499 Else if any operands of STMT are constants, then return CONSTANT.
500
501 Else return VARYING. */
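
/* For instance (a sketch): for 'x_2 = y_3 + 1' the result is UNDEFINED
   when y_3 is UNDEFINED and CONSTANT otherwise, since the literal 1
   counts as a constant operand; a load such as 'x_2 = *p_4' (with p_4
   not UNDEFINED) is also reported CONSTANT so that evaluate_stmt still
   tries to fold loads from read-only memory.  */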
502
503 static ccp_lattice_t
504 likely_value (gimple stmt)
505 {
506 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
507 tree use;
508 ssa_op_iter iter;
509 unsigned i;
510
511 enum gimple_code code = gimple_code (stmt);
512
513 /* This function appears to be called only for assignments, calls,
514 conditionals, and switches, due to the logic in visit_stmt. */
515 gcc_assert (code == GIMPLE_ASSIGN
516 || code == GIMPLE_CALL
517 || code == GIMPLE_COND
518 || code == GIMPLE_SWITCH);
519
520 /* If the statement has volatile operands, it won't fold to a
521 constant value. */
522 if (gimple_has_volatile_ops (stmt))
523 return VARYING;
524
525 /* Arrive here for more complex cases. */
526 has_constant_operand = false;
527 has_undefined_operand = false;
528 all_undefined_operands = true;
529 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
530 {
531 prop_value_t *val = get_value (use);
532
533 if (val->lattice_val == UNDEFINED)
534 has_undefined_operand = true;
535 else
536 all_undefined_operands = false;
537
538 if (val->lattice_val == CONSTANT)
539 has_constant_operand = true;
540 }
541
542   /* There may be constants in regular rhs operands.  For calls we
543      have to skip the lhs, fndecl and static chain; for all other
544      statements, only the lhs.  */
545 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
546 i < gimple_num_ops (stmt); ++i)
547 {
548 tree op = gimple_op (stmt, i);
549 if (!op || TREE_CODE (op) == SSA_NAME)
550 continue;
551 if (is_gimple_min_invariant (op))
552 has_constant_operand = true;
553 }
554
555 /* If the operation combines operands like COMPLEX_EXPR make sure to
556 not mark the result UNDEFINED if only one part of the result is
557 undefined. */
558 if (has_undefined_operand && all_undefined_operands)
559 return UNDEFINED;
560 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
561 {
562 switch (gimple_assign_rhs_code (stmt))
563 {
564 /* Unary operators are handled with all_undefined_operands. */
565 case PLUS_EXPR:
566 case MINUS_EXPR:
567 case POINTER_PLUS_EXPR:
568 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
569 Not bitwise operators, one VARYING operand may specify the
570 result completely. Not logical operators for the same reason.
571 Not COMPLEX_EXPR as one VARYING operand makes the result partly
572 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
573 the undefined operand may be promoted. */
574 return UNDEFINED;
575
576 default:
577 ;
578 }
579 }
580   /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
581      fall back to VARYING even if there were CONSTANT operands.  */
582 if (has_undefined_operand)
583 return VARYING;
584
585 /* We do not consider virtual operands here -- load from read-only
586 memory may have only VARYING virtual operands, but still be
587 constant. */
588 if (has_constant_operand
589 || gimple_references_memory_p (stmt))
590 return CONSTANT;
591
592 return VARYING;
593 }
594
595 /* Returns true if STMT cannot be constant. */
596
597 static bool
598 surely_varying_stmt_p (gimple stmt)
599 {
600 /* If the statement has operands that we cannot handle, it cannot be
601 constant. */
602 if (gimple_has_volatile_ops (stmt))
603 return true;
604
605   /* If it is a call that does not return a value, or a direct call
606      to a function that is not a builtin, it is varying.  */
607 if (is_gimple_call (stmt))
608 {
609 tree fndecl;
610 if (!gimple_call_lhs (stmt)
611 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
612 && !DECL_BUILT_IN (fndecl)))
613 return true;
614 }
615
616 /* Any other store operation is not interesting. */
617 else if (gimple_vdef (stmt))
618 return true;
619
620 /* Anything other than assignments and conditional jumps are not
621 interesting for CCP. */
622 if (gimple_code (stmt) != GIMPLE_ASSIGN
623 && gimple_code (stmt) != GIMPLE_COND
624 && gimple_code (stmt) != GIMPLE_SWITCH
625 && gimple_code (stmt) != GIMPLE_CALL)
626 return true;
627
628 return false;
629 }
630
631 /* Initialize local data structures for CCP. */
632
633 static void
634 ccp_initialize (void)
635 {
636 basic_block bb;
637
638 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
639
640 /* Initialize simulation flags for PHI nodes and statements. */
641 FOR_EACH_BB (bb)
642 {
643 gimple_stmt_iterator i;
644
645 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
646 {
647 gimple stmt = gsi_stmt (i);
648 bool is_varying = surely_varying_stmt_p (stmt);
649
650 if (is_varying)
651 {
652 tree def;
653 ssa_op_iter iter;
654
655 /* If the statement will not produce a constant, mark
656 all its outputs VARYING. */
657 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
658 set_value_varying (def);
659 }
660 prop_set_simulate_again (stmt, !is_varying);
661 }
662 }
663
664 /* Now process PHI nodes. We never clear the simulate_again flag on
665 phi nodes, since we do not know which edges are executable yet,
666 except for phi nodes for virtual operands when we do not do store ccp. */
667 FOR_EACH_BB (bb)
668 {
669 gimple_stmt_iterator i;
670
671 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
672 {
673 gimple phi = gsi_stmt (i);
674
675 if (!is_gimple_reg (gimple_phi_result (phi)))
676 prop_set_simulate_again (phi, false);
677 else
678 prop_set_simulate_again (phi, true);
679 }
680 }
681 }
682
683 /* Debug count support.  Reset the lattice value of an SSA name to
684    VARYING once the number of SSA names processed exceeds the
685    specified debug count.  */
686
687 static void
688 do_dbg_cnt (void)
689 {
690 unsigned i;
691 for (i = 0; i < num_ssa_names; i++)
692 {
693 if (!dbg_cnt (ccp))
694 {
695 const_val[i].lattice_val = VARYING;
696 const_val[i].value = NULL_TREE;
697 }
698 }
699 }
700
701
702 /* Do final substitution of propagated values, cleanup the flowgraph and
703 free allocated storage.
704
705 Return TRUE when something was optimized. */
706
707 static bool
708 ccp_finalize (void)
709 {
710 bool something_changed;
711
712 do_dbg_cnt ();
713 /* Perform substitutions based on the known constant values. */
714 something_changed = substitute_and_fold (const_val, false);
715
716 free (const_val);
717 const_val = NULL;
718   return something_changed;
719 }
720
721
722 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
723 in VAL1.
724
725 any M UNDEFINED = any
726 any M VARYING = VARYING
727 Ci M Cj = Ci if (i == j)
728 Ci M Cj = VARYING if (i != j)
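
   For example (illustrative):

     CONSTANT 4  M  UNDEFINED   = CONSTANT 4
     CONSTANT 4  M  CONSTANT 4  = CONSTANT 4
     CONSTANT 4  M  CONSTANT 5  = VARYING
     CONSTANT 4  M  VARYING     = VARYING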
729 */
730
731 static void
732 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
733 {
734 if (val1->lattice_val == UNDEFINED)
735 {
736 /* UNDEFINED M any = any */
737 *val1 = *val2;
738 }
739 else if (val2->lattice_val == UNDEFINED)
740 {
741 /* any M UNDEFINED = any
742 Nothing to do. VAL1 already contains the value we want. */
743 ;
744 }
745 else if (val1->lattice_val == VARYING
746 || val2->lattice_val == VARYING)
747 {
748 /* any M VARYING = VARYING. */
749 val1->lattice_val = VARYING;
750 val1->value = NULL_TREE;
751 }
752 else if (val1->lattice_val == CONSTANT
753 && val2->lattice_val == CONSTANT
754 && simple_cst_equal (val1->value, val2->value) == 1)
755 {
756 /* Ci M Cj = Ci if (i == j)
757 Ci M Cj = VARYING if (i != j)
758
759 If these two values come from memory stores, make sure that
760 they come from the same memory reference. */
761 val1->lattice_val = CONSTANT;
762 val1->value = val1->value;
763 }
764 else
765 {
766 /* Any other combination is VARYING. */
767 val1->lattice_val = VARYING;
768 val1->value = NULL_TREE;
769 }
770 }
771
772
773 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
774 lattice values to determine PHI_NODE's lattice value. The value of a
775    PHI node is determined by calling ccp_lattice_meet with all the arguments
776 of the PHI node that are incoming via executable edges. */
777
778 static enum ssa_prop_result
779 ccp_visit_phi_node (gimple phi)
780 {
781 unsigned i;
782 prop_value_t *old_val, new_val;
783
784 if (dump_file && (dump_flags & TDF_DETAILS))
785 {
786 fprintf (dump_file, "\nVisiting PHI node: ");
787 print_gimple_stmt (dump_file, phi, 0, dump_flags);
788 }
789
790 old_val = get_value (gimple_phi_result (phi));
791 switch (old_val->lattice_val)
792 {
793 case VARYING:
794 return SSA_PROP_VARYING;
795
796 case CONSTANT:
797 new_val = *old_val;
798 break;
799
800 case UNDEFINED:
801 new_val.lattice_val = UNDEFINED;
802 new_val.value = NULL_TREE;
803 break;
804
805 default:
806 gcc_unreachable ();
807 }
808
809 for (i = 0; i < gimple_phi_num_args (phi); i++)
810 {
811 /* Compute the meet operator over all the PHI arguments flowing
812 through executable edges. */
813 edge e = gimple_phi_arg_edge (phi, i);
814
815 if (dump_file && (dump_flags & TDF_DETAILS))
816 {
817 fprintf (dump_file,
818 "\n Argument #%d (%d -> %d %sexecutable)\n",
819 i, e->src->index, e->dest->index,
820 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
821 }
822
823       /* If the incoming edge is executable, compute the meet operator for
824 the existing value of the PHI node and the current PHI argument. */
825 if (e->flags & EDGE_EXECUTABLE)
826 {
827 tree arg = gimple_phi_arg (phi, i)->def;
828 prop_value_t arg_val;
829
830 if (is_gimple_min_invariant (arg))
831 {
832 arg_val.lattice_val = CONSTANT;
833 arg_val.value = arg;
834 }
835 else
836 arg_val = *(get_value (arg));
837
838 ccp_lattice_meet (&new_val, &arg_val);
839
840 if (dump_file && (dump_flags & TDF_DETAILS))
841 {
842 fprintf (dump_file, "\t");
843 print_generic_expr (dump_file, arg, dump_flags);
844 dump_lattice_value (dump_file, "\tValue: ", arg_val);
845 fprintf (dump_file, "\n");
846 }
847
848 if (new_val.lattice_val == VARYING)
849 break;
850 }
851 }
852
853 if (dump_file && (dump_flags & TDF_DETAILS))
854 {
855 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
856 fprintf (dump_file, "\n\n");
857 }
858
859 /* Make the transition to the new value. */
860 if (set_lattice_value (gimple_phi_result (phi), new_val))
861 {
862 if (new_val.lattice_val == VARYING)
863 return SSA_PROP_VARYING;
864 else
865 return SSA_PROP_INTERESTING;
866 }
867 else
868 return SSA_PROP_NOT_INTERESTING;
869 }
870
871 /* Return true if we may propagate the address expression ADDR into the
872 dereference DEREF and cancel them. */
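
/* For example (a sketch): given 'p_1 = &a.b; ... x_2 = *p_1;', propagating
   &a.b into the INDIRECT_REF and cancelling the two yields 'x_2 = a.b',
   provided the checks below succeed (the operand of the ADDR_EXPR has a
   complete type, the types are compatible, and no volatile qualification
   is lost).  */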
873
874 bool
875 may_propagate_address_into_dereference (tree addr, tree deref)
876 {
877 gcc_assert (INDIRECT_REF_P (deref)
878 && TREE_CODE (addr) == ADDR_EXPR);
879
880 /* Don't propagate if ADDR's operand has incomplete type. */
881 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
882 return false;
883
884 /* If the address is invariant then we do not need to preserve restrict
885 qualifications. But we do need to preserve volatile qualifiers until
886 we can annotate the folded dereference itself properly. */
887 if (is_gimple_min_invariant (addr)
888 && (!TREE_THIS_VOLATILE (deref)
889 || TYPE_VOLATILE (TREE_TYPE (addr))))
890 return useless_type_conversion_p (TREE_TYPE (deref),
891 TREE_TYPE (TREE_OPERAND (addr, 0)));
892
893 /* Else both the address substitution and the folding must result in
894 a valid useless type conversion sequence. */
895 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
896 TREE_TYPE (addr))
897 && useless_type_conversion_p (TREE_TYPE (deref),
898 TREE_TYPE (TREE_OPERAND (addr, 0))));
899 }
900
901 /* CCP specific front-end to the non-destructive constant folding
902 routines.
903
904 Attempt to simplify the RHS of STMT knowing that one or more
905 operands are constants.
906
907 If simplification is possible, return the simplified RHS,
908 otherwise return the original RHS or NULL_TREE. */
909
910 static tree
911 ccp_fold (gimple stmt)
912 {
913 switch (gimple_code (stmt))
914 {
915 case GIMPLE_ASSIGN:
916 {
917 enum tree_code subcode = gimple_assign_rhs_code (stmt);
918
919 switch (get_gimple_rhs_class (subcode))
920 {
921 case GIMPLE_SINGLE_RHS:
922 {
923 tree rhs = gimple_assign_rhs1 (stmt);
924 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
925
926 if (TREE_CODE (rhs) == SSA_NAME)
927 {
928 /* If the RHS is an SSA_NAME, return its known constant value,
929 if any. */
930 return get_value (rhs)->value;
931 }
932 /* Handle propagating invariant addresses into address operations.
933 The folding we do here matches that in tree-ssa-forwprop.c. */
934 else if (TREE_CODE (rhs) == ADDR_EXPR)
935 {
936 tree *base;
937 base = &TREE_OPERAND (rhs, 0);
938 while (handled_component_p (*base))
939 base = &TREE_OPERAND (*base, 0);
940 if (TREE_CODE (*base) == INDIRECT_REF
941 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
942 {
943 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
944 if (val->lattice_val == CONSTANT
945 && TREE_CODE (val->value) == ADDR_EXPR
946 && may_propagate_address_into_dereference
947 (val->value, *base))
948 {
949 /* We need to return a new tree, not modify the IL
950 or share parts of it. So play some tricks to
951 avoid manually building it. */
952 tree ret, save = *base;
953 *base = TREE_OPERAND (val->value, 0);
954 ret = unshare_expr (rhs);
955 recompute_tree_invariant_for_addr_expr (ret);
956 *base = save;
957 return ret;
958 }
959 }
960 }
961
962 if (kind == tcc_reference)
963 {
964 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
965 || TREE_CODE (rhs) == REALPART_EXPR
966 || TREE_CODE (rhs) == IMAGPART_EXPR)
967 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
968 {
969 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
970 if (val->lattice_val == CONSTANT)
971 return fold_unary (TREE_CODE (rhs),
972 TREE_TYPE (rhs), val->value);
973 }
974 else if (TREE_CODE (rhs) == INDIRECT_REF
975 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
976 {
977 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
978 if (val->lattice_val == CONSTANT
979 && TREE_CODE (val->value) == ADDR_EXPR
980 && useless_type_conversion_p (TREE_TYPE (rhs),
981 TREE_TYPE (TREE_TYPE (val->value))))
982 rhs = TREE_OPERAND (val->value, 0);
983 }
984 return fold_const_aggregate_ref (rhs);
985 }
986 else if (kind == tcc_declaration)
987 return get_symbol_constant_value (rhs);
988 return rhs;
989 }
990
991 case GIMPLE_UNARY_RHS:
992 {
993 /* Handle unary operators that can appear in GIMPLE form.
994 Note that we know the single operand must be a constant,
995 so this should almost always return a simplified RHS. */
996 tree lhs = gimple_assign_lhs (stmt);
997 tree op0 = gimple_assign_rhs1 (stmt);
998
999 /* Simplify the operand down to a constant. */
1000 if (TREE_CODE (op0) == SSA_NAME)
1001 {
1002 prop_value_t *val = get_value (op0);
1003 if (val->lattice_val == CONSTANT)
1004 op0 = get_value (op0)->value;
1005 }
1006
1007 /* Conversions are useless for CCP purposes if they are
1008 value-preserving. Thus the restrictions that
1009 useless_type_conversion_p places for pointer type conversions
1010 do not apply here. Substitution later will only substitute to
1011 allowed places. */
1012 if (CONVERT_EXPR_CODE_P (subcode)
1013 && POINTER_TYPE_P (TREE_TYPE (lhs))
1014 && POINTER_TYPE_P (TREE_TYPE (op0))
1015 /* Do not allow differences in volatile qualification
1016 as this might get us confused as to whether a
1017 propagation destination statement is volatile
1018 or not. See PR36988. */
1019 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1020 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1021 {
1022 tree tem;
1023 /* Still try to generate a constant of correct type. */
1024 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1025 TREE_TYPE (op0))
1026 && ((tem = maybe_fold_offset_to_address
1027 (op0, integer_zero_node, TREE_TYPE (lhs)))
1028 != NULL_TREE))
1029 return tem;
1030 return op0;
1031 }
1032
1033 return fold_unary_ignore_overflow (subcode,
1034 gimple_expr_type (stmt), op0);
1035 }
1036
1037 case GIMPLE_BINARY_RHS:
1038 {
1039 /* Handle binary operators that can appear in GIMPLE form. */
1040 tree op0 = gimple_assign_rhs1 (stmt);
1041 tree op1 = gimple_assign_rhs2 (stmt);
1042
1043 /* Simplify the operands down to constants when appropriate. */
1044 if (TREE_CODE (op0) == SSA_NAME)
1045 {
1046 prop_value_t *val = get_value (op0);
1047 if (val->lattice_val == CONSTANT)
1048 op0 = val->value;
1049 }
1050
1051 if (TREE_CODE (op1) == SSA_NAME)
1052 {
1053 prop_value_t *val = get_value (op1);
1054 if (val->lattice_val == CONSTANT)
1055 op1 = val->value;
1056 }
1057
1058 /* Fold &foo + CST into an invariant reference if possible. */
1059 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1060 && TREE_CODE (op0) == ADDR_EXPR
1061 && TREE_CODE (op1) == INTEGER_CST)
1062 {
1063 tree lhs = gimple_assign_lhs (stmt);
1064 tree tem = maybe_fold_offset_to_address (op0, op1,
1065 TREE_TYPE (lhs));
1066 if (tem != NULL_TREE)
1067 return tem;
1068 }
1069
1070 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1071 }
1072
1073 default:
1074 gcc_unreachable ();
1075 }
1076 }
1077 break;
1078
1079 case GIMPLE_CALL:
1080 {
1081 tree fn = gimple_call_fn (stmt);
1082 prop_value_t *val;
1083
1084 if (TREE_CODE (fn) == SSA_NAME)
1085 {
1086 val = get_value (fn);
1087 if (val->lattice_val == CONSTANT)
1088 fn = val->value;
1089 }
1090 if (TREE_CODE (fn) == ADDR_EXPR
1091 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1092 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1093 {
1094 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1095 tree call, retval;
1096 unsigned i;
1097 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1098 {
1099 args[i] = gimple_call_arg (stmt, i);
1100 if (TREE_CODE (args[i]) == SSA_NAME)
1101 {
1102 val = get_value (args[i]);
1103 if (val->lattice_val == CONSTANT)
1104 args[i] = val->value;
1105 }
1106 }
1107 call = build_call_array (gimple_call_return_type (stmt),
1108 fn, gimple_call_num_args (stmt), args);
1109 retval = fold_call_expr (call, false);
1110 if (retval)
1111 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1112 STRIP_NOPS (retval);
1113 return retval;
1114 }
1115 return NULL_TREE;
1116 }
1117
1118 case GIMPLE_COND:
1119 {
1120 /* Handle comparison operators that can appear in GIMPLE form. */
1121 tree op0 = gimple_cond_lhs (stmt);
1122 tree op1 = gimple_cond_rhs (stmt);
1123 enum tree_code code = gimple_cond_code (stmt);
1124
1125 /* Simplify the operands down to constants when appropriate. */
1126 if (TREE_CODE (op0) == SSA_NAME)
1127 {
1128 prop_value_t *val = get_value (op0);
1129 if (val->lattice_val == CONSTANT)
1130 op0 = val->value;
1131 }
1132
1133 if (TREE_CODE (op1) == SSA_NAME)
1134 {
1135 prop_value_t *val = get_value (op1);
1136 if (val->lattice_val == CONSTANT)
1137 op1 = val->value;
1138 }
1139
1140 return fold_binary (code, boolean_type_node, op0, op1);
1141 }
1142
1143 case GIMPLE_SWITCH:
1144 {
1145 tree rhs = gimple_switch_index (stmt);
1146
1147 if (TREE_CODE (rhs) == SSA_NAME)
1148 {
1149 /* If the RHS is an SSA_NAME, return its known constant value,
1150 if any. */
1151 return get_value (rhs)->value;
1152 }
1153
1154 return rhs;
1155 }
1156
1157 default:
1158 gcc_unreachable ();
1159 }
1160 }
1161
1162
1163 /* Return the tree representing the element referenced by T if T is an
1164 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1165 NULL_TREE otherwise. */
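
/* For illustration (hypothetical declarations):

     static const int digits[3] = { 4, 8, 15 };
     struct S { int x; int y; };
     static const struct S s = { 1, 2 };

   the ARRAY_REF digits[1] folds to 8 and the COMPONENT_REF s.y folds
   to 2, provided the bases are read-only, have static initializers and
   bind locally, as checked below.  */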
1166
1167 tree
1168 fold_const_aggregate_ref (tree t)
1169 {
1170 prop_value_t *value;
1171 tree base, ctor, idx, field;
1172 unsigned HOST_WIDE_INT cnt;
1173 tree cfield, cval;
1174
1175 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1176 return get_symbol_constant_value (t);
1177
1178 switch (TREE_CODE (t))
1179 {
1180 case ARRAY_REF:
1181 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1182 DECL_INITIAL. If BASE is a nested reference into another
1183 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1184 the inner reference. */
1185 base = TREE_OPERAND (t, 0);
1186 switch (TREE_CODE (base))
1187 {
1188 case VAR_DECL:
1189 if (!TREE_READONLY (base)
1190 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1191 || !targetm.binds_local_p (base))
1192 return NULL_TREE;
1193
1194 ctor = DECL_INITIAL (base);
1195 break;
1196
1197 case ARRAY_REF:
1198 case COMPONENT_REF:
1199 ctor = fold_const_aggregate_ref (base);
1200 break;
1201
1202 case STRING_CST:
1203 case CONSTRUCTOR:
1204 ctor = base;
1205 break;
1206
1207 default:
1208 return NULL_TREE;
1209 }
1210
1211 if (ctor == NULL_TREE
1212 || (TREE_CODE (ctor) != CONSTRUCTOR
1213 && TREE_CODE (ctor) != STRING_CST)
1214 || !TREE_STATIC (ctor))
1215 return NULL_TREE;
1216
1217 /* Get the index. If we have an SSA_NAME, try to resolve it
1218 with the current lattice value for the SSA_NAME. */
1219 idx = TREE_OPERAND (t, 1);
1220 switch (TREE_CODE (idx))
1221 {
1222 case SSA_NAME:
1223 if ((value = get_value (idx))
1224 && value->lattice_val == CONSTANT
1225 && TREE_CODE (value->value) == INTEGER_CST)
1226 idx = value->value;
1227 else
1228 return NULL_TREE;
1229 break;
1230
1231 case INTEGER_CST:
1232 break;
1233
1234 default:
1235 return NULL_TREE;
1236 }
1237
1238 /* Fold read from constant string. */
1239 if (TREE_CODE (ctor) == STRING_CST)
1240 {
1241 if ((TYPE_MODE (TREE_TYPE (t))
1242 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1243 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1244 == MODE_INT)
1245 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1246 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1247 return build_int_cst_type (TREE_TYPE (t),
1248 (TREE_STRING_POINTER (ctor)
1249 [TREE_INT_CST_LOW (idx)]));
1250 return NULL_TREE;
1251 }
1252
1253 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1254 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1255 if (tree_int_cst_equal (cfield, idx))
1256 {
1257 STRIP_USELESS_TYPE_CONVERSION (cval);
1258 if (TREE_CODE (cval) == ADDR_EXPR)
1259 {
1260 tree base = get_base_address (TREE_OPERAND (cval, 0));
1261 if (base && TREE_CODE (base) == VAR_DECL)
1262 add_referenced_var (base);
1263 }
1264 return cval;
1265 }
1266 break;
1267
1268 case COMPONENT_REF:
1269 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1270 DECL_INITIAL. If BASE is a nested reference into another
1271 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1272 the inner reference. */
1273 base = TREE_OPERAND (t, 0);
1274 switch (TREE_CODE (base))
1275 {
1276 case VAR_DECL:
1277 if (!TREE_READONLY (base)
1278 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1279 || !targetm.binds_local_p (base))
1280 return NULL_TREE;
1281
1282 ctor = DECL_INITIAL (base);
1283 break;
1284
1285 case ARRAY_REF:
1286 case COMPONENT_REF:
1287 ctor = fold_const_aggregate_ref (base);
1288 break;
1289
1290 default:
1291 return NULL_TREE;
1292 }
1293
1294 if (ctor == NULL_TREE
1295 || TREE_CODE (ctor) != CONSTRUCTOR
1296 || !TREE_STATIC (ctor))
1297 return NULL_TREE;
1298
1299 field = TREE_OPERAND (t, 1);
1300
1301 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1302 if (cfield == field
1303 /* FIXME: Handle bit-fields. */
1304 && ! DECL_BIT_FIELD (cfield))
1305 {
1306 STRIP_USELESS_TYPE_CONVERSION (cval);
1307 if (TREE_CODE (cval) == ADDR_EXPR)
1308 {
1309 tree base = get_base_address (TREE_OPERAND (cval, 0));
1310 if (base && TREE_CODE (base) == VAR_DECL)
1311 add_referenced_var (base);
1312 }
1313 return cval;
1314 }
1315 break;
1316
1317 case REALPART_EXPR:
1318 case IMAGPART_EXPR:
1319 {
1320 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1321 if (c && TREE_CODE (c) == COMPLEX_CST)
1322 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1323 break;
1324 }
1325
1326 case INDIRECT_REF:
1327 {
1328 tree base = TREE_OPERAND (t, 0);
1329 if (TREE_CODE (base) == SSA_NAME
1330 && (value = get_value (base))
1331 && value->lattice_val == CONSTANT
1332 && TREE_CODE (value->value) == ADDR_EXPR
1333 && useless_type_conversion_p (TREE_TYPE (t),
1334 TREE_TYPE (TREE_TYPE (value->value))))
1335 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1336 break;
1337 }
1338
1339 default:
1340 break;
1341 }
1342
1343 return NULL_TREE;
1344 }
1345
1346 /* Evaluate statement STMT.
1347 Valid only for assignments, calls, conditionals, and switches. */
1348
1349 static prop_value_t
1350 evaluate_stmt (gimple stmt)
1351 {
1352 prop_value_t val;
1353 tree simplified = NULL_TREE;
1354 ccp_lattice_t likelyvalue = likely_value (stmt);
1355 bool is_constant;
1356
1357 fold_defer_overflow_warnings ();
1358
1359 /* If the statement is likely to have a CONSTANT result, then try
1360 to fold the statement to determine the constant value. */
1361 /* FIXME. This is the only place that we call ccp_fold.
1362 Since likely_value never returns CONSTANT for calls, we will
1363 not attempt to fold them, including builtins that may profit. */
1364 if (likelyvalue == CONSTANT)
1365 simplified = ccp_fold (stmt);
1366 /* If the statement is likely to have a VARYING result, then do not
1367 bother folding the statement. */
1368 else if (likelyvalue == VARYING)
1369 {
1370 enum gimple_code code = gimple_code (stmt);
1371 if (code == GIMPLE_ASSIGN)
1372 {
1373 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1374
1375 /* Other cases cannot satisfy is_gimple_min_invariant
1376 without folding. */
1377 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1378 simplified = gimple_assign_rhs1 (stmt);
1379 }
1380 else if (code == GIMPLE_SWITCH)
1381 simplified = gimple_switch_index (stmt);
1382 else
1383 /* These cannot satisfy is_gimple_min_invariant without folding. */
1384 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1385 }
1386
1387 is_constant = simplified && is_gimple_min_invariant (simplified);
1388
1389 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1390
1391 if (dump_file && (dump_flags & TDF_DETAILS))
1392 {
1393 fprintf (dump_file, "which is likely ");
1394 switch (likelyvalue)
1395 {
1396 case CONSTANT:
1397 fprintf (dump_file, "CONSTANT");
1398 break;
1399 case UNDEFINED:
1400 fprintf (dump_file, "UNDEFINED");
1401 break;
1402 case VARYING:
1403 fprintf (dump_file, "VARYING");
1404 break;
1405 default:;
1406 }
1407 fprintf (dump_file, "\n");
1408 }
1409
1410 if (is_constant)
1411 {
1412 /* The statement produced a constant value. */
1413 val.lattice_val = CONSTANT;
1414 val.value = simplified;
1415 }
1416 else
1417 {
1418 /* The statement produced a nonconstant value. If the statement
1419 had UNDEFINED operands, then the result of the statement
1420 should be UNDEFINED. Otherwise, the statement is VARYING. */
1421 if (likelyvalue == UNDEFINED)
1422 val.lattice_val = likelyvalue;
1423 else
1424 val.lattice_val = VARYING;
1425
1426 val.value = NULL_TREE;
1427 }
1428
1429 return val;
1430 }
1431
1432 /* Visit the assignment statement STMT. Set the value of its LHS to the
1433 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1434 creates virtual definitions, set the value of each new name to that
1435 of the RHS (if we can derive a constant out of the RHS).
1436 Value-returning call statements also perform an assignment, and
1437 are handled here. */
1438
1439 static enum ssa_prop_result
1440 visit_assignment (gimple stmt, tree *output_p)
1441 {
1442 prop_value_t val;
1443 enum ssa_prop_result retval;
1444
1445 tree lhs = gimple_get_lhs (stmt);
1446
1447 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1448 || gimple_call_lhs (stmt) != NULL_TREE);
1449
1450 if (gimple_assign_copy_p (stmt))
1451 {
1452 tree rhs = gimple_assign_rhs1 (stmt);
1453
1454 if (TREE_CODE (rhs) == SSA_NAME)
1455 {
1456 /* For a simple copy operation, we copy the lattice values. */
1457 prop_value_t *nval = get_value (rhs);
1458 val = *nval;
1459 }
1460 else
1461 val = evaluate_stmt (stmt);
1462 }
1463 else
1464 /* Evaluate the statement, which could be
1465 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1466 val = evaluate_stmt (stmt);
1467
1468 retval = SSA_PROP_NOT_INTERESTING;
1469
1470 /* Set the lattice value of the statement's output. */
1471 if (TREE_CODE (lhs) == SSA_NAME)
1472 {
1473 /* If STMT is an assignment to an SSA_NAME, we only have one
1474 value to set. */
1475 if (set_lattice_value (lhs, val))
1476 {
1477 *output_p = lhs;
1478 if (val.lattice_val == VARYING)
1479 retval = SSA_PROP_VARYING;
1480 else
1481 retval = SSA_PROP_INTERESTING;
1482 }
1483 }
1484
1485 return retval;
1486 }
1487
1488
1489 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1490 if it can determine which edge will be taken. Otherwise, return
1491 SSA_PROP_VARYING. */
1492
1493 static enum ssa_prop_result
1494 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1495 {
1496 prop_value_t val;
1497 basic_block block;
1498
1499 block = gimple_bb (stmt);
1500 val = evaluate_stmt (stmt);
1501
1502 /* Find which edge out of the conditional block will be taken and add it
1503 to the worklist. If no single edge can be determined statically,
1504 return SSA_PROP_VARYING to feed all the outgoing edges to the
1505 propagation engine. */
1506 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1507 if (*taken_edge_p)
1508 return SSA_PROP_INTERESTING;
1509 else
1510 return SSA_PROP_VARYING;
1511 }
1512
1513
1514 /* Evaluate statement STMT. If the statement produces an output value and
1515 its evaluation changes the lattice value of its output, return
1516 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1517 output value.
1518
1519 If STMT is a conditional branch and we can determine its truth
1520 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1521 value, return SSA_PROP_VARYING. */
1522
1523 static enum ssa_prop_result
1524 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1525 {
1526 tree def;
1527 ssa_op_iter iter;
1528
1529 if (dump_file && (dump_flags & TDF_DETAILS))
1530 {
1531 fprintf (dump_file, "\nVisiting statement:\n");
1532 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1533 }
1534
1535 switch (gimple_code (stmt))
1536 {
1537 case GIMPLE_ASSIGN:
1538 /* If the statement is an assignment that produces a single
1539 output value, evaluate its RHS to see if the lattice value of
1540 its output has changed. */
1541 return visit_assignment (stmt, output_p);
1542
1543 case GIMPLE_CALL:
1544 /* A value-returning call also performs an assignment. */
1545 if (gimple_call_lhs (stmt) != NULL_TREE)
1546 return visit_assignment (stmt, output_p);
1547 break;
1548
1549 case GIMPLE_COND:
1550 case GIMPLE_SWITCH:
1551 /* If STMT is a conditional branch, see if we can determine
1552 which branch will be taken. */
1553 /* FIXME. It appears that we should be able to optimize
1554 computed GOTOs here as well. */
1555 return visit_cond_stmt (stmt, taken_edge_p);
1556
1557 default:
1558 break;
1559 }
1560
1561 /* Any other kind of statement is not interesting for constant
1562 propagation and, therefore, not worth simulating. */
1563 if (dump_file && (dump_flags & TDF_DETAILS))
1564 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1565
1566 /* Definitions made by statements other than assignments to
1567 SSA_NAMEs represent unknown modifications to their outputs.
1568 Mark them VARYING. */
1569 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1570 {
1571 prop_value_t v = { VARYING, NULL_TREE };
1572 set_lattice_value (def, v);
1573 }
1574
1575 return SSA_PROP_VARYING;
1576 }
1577
1578
1579 /* Main entry point for SSA Conditional Constant Propagation. */
1580
1581 static unsigned int
1582 do_ssa_ccp (void)
1583 {
1584 ccp_initialize ();
1585 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1586 if (ccp_finalize ())
1587 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1588 else
1589 return 0;
1590 }
1591
1592
1593 static bool
1594 gate_ccp (void)
1595 {
1596 return flag_tree_ccp != 0;
1597 }
1598
1599
1600 struct gimple_opt_pass pass_ccp =
1601 {
1602 {
1603 GIMPLE_PASS,
1604 "ccp", /* name */
1605 gate_ccp, /* gate */
1606 do_ssa_ccp, /* execute */
1607 NULL, /* sub */
1608 NULL, /* next */
1609 0, /* static_pass_number */
1610 TV_TREE_CCP, /* tv_id */
1611 PROP_cfg | PROP_ssa, /* properties_required */
1612 0, /* properties_provided */
1613 0, /* properties_destroyed */
1614 0, /* todo_flags_start */
1615 TODO_dump_func | TODO_verify_ssa
1616 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1617 }
1618 };
1619
1620
1621 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
1622 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1623 is the desired result type. */
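
/* For example (a sketch, assuming a 4-byte 'int'): given 'int a[10]',
   folding the dereference of '&a[0] + 8' produces a[2], because the byte
   offset 8 divides exactly by the element size 4.  If the division were
   inexact, or the resulting index fell outside the array's domain,
   NULL_TREE would be returned instead.  */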
1624
1625 static tree
1626 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1627 bool allow_negative_idx)
1628 {
1629 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1630 tree array_type, elt_type, elt_size;
1631 tree domain_type;
1632
1633 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1634      measured in units of the size of the element type) from that ARRAY_REF.
1635 We can't do anything if either is variable.
1636
1637 The case we handle here is *(&A[N]+O). */
1638 if (TREE_CODE (base) == ARRAY_REF)
1639 {
1640 tree low_bound = array_ref_low_bound (base);
1641
1642 elt_offset = TREE_OPERAND (base, 1);
1643 if (TREE_CODE (low_bound) != INTEGER_CST
1644 || TREE_CODE (elt_offset) != INTEGER_CST)
1645 return NULL_TREE;
1646
1647 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1648 base = TREE_OPERAND (base, 0);
1649 }
1650
1651 /* Ignore stupid user tricks of indexing non-array variables. */
1652 array_type = TREE_TYPE (base);
1653 if (TREE_CODE (array_type) != ARRAY_TYPE)
1654 return NULL_TREE;
1655 elt_type = TREE_TYPE (array_type);
1656 if (!useless_type_conversion_p (orig_type, elt_type))
1657 return NULL_TREE;
1658
1659 /* Use signed size type for intermediate computation on the index. */
1660 idx_type = signed_type_for (size_type_node);
1661
1662 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1663 element type (so we can use the alignment if it's not constant).
1664 Otherwise, compute the offset as an index by using a division. If the
1665 division isn't exact, then don't do anything. */
1666 elt_size = TYPE_SIZE_UNIT (elt_type);
1667 if (!elt_size)
1668 return NULL;
1669 if (integer_zerop (offset))
1670 {
1671 if (TREE_CODE (elt_size) != INTEGER_CST)
1672 elt_size = size_int (TYPE_ALIGN (elt_type));
1673
1674 idx = build_int_cst (idx_type, 0);
1675 }
1676 else
1677 {
1678 unsigned HOST_WIDE_INT lquo, lrem;
1679 HOST_WIDE_INT hquo, hrem;
1680 double_int soffset;
1681
1682 /* The final array offset should be signed, so we need
1683 to sign-extend the (possibly pointer) offset here
1684 and use signed division. */
1685 soffset = double_int_sext (tree_to_double_int (offset),
1686 TYPE_PRECISION (TREE_TYPE (offset)));
1687 if (TREE_CODE (elt_size) != INTEGER_CST
1688 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1689 soffset.low, soffset.high,
1690 TREE_INT_CST_LOW (elt_size),
1691 TREE_INT_CST_HIGH (elt_size),
1692 &lquo, &hquo, &lrem, &hrem)
1693 || lrem || hrem)
1694 return NULL_TREE;
1695
1696 idx = build_int_cst_wide (idx_type, lquo, hquo);
1697 }
1698
1699 /* Assume the low bound is zero. If there is a domain type, get the
1700 low bound, if any, convert the index into that type, and add the
1701 low bound. */
1702 min_idx = build_int_cst (idx_type, 0);
1703 domain_type = TYPE_DOMAIN (array_type);
1704 if (domain_type)
1705 {
1706 idx_type = domain_type;
1707 if (TYPE_MIN_VALUE (idx_type))
1708 min_idx = TYPE_MIN_VALUE (idx_type);
1709 else
1710 min_idx = fold_convert (idx_type, min_idx);
1711
1712 if (TREE_CODE (min_idx) != INTEGER_CST)
1713 return NULL_TREE;
1714
1715 elt_offset = fold_convert (idx_type, elt_offset);
1716 }
1717
1718 if (!integer_zerop (min_idx))
1719 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1720 if (!integer_zerop (elt_offset))
1721 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1722
1723 /* Make sure to possibly truncate late after offsetting. */
1724 idx = fold_convert (idx_type, idx);
1725
1726 /* We don't want to construct access past array bounds. For example
1727 char *(c[4]);
1728 c[3][2];
1729 should not be simplified into (*c)[14] or tree-vrp will
1730 give false warnings. The same is true for
1731 struct A { long x; char d[0]; } *a;
1732 (char *)a - 4;
1733      which should not be folded to &a->d[-8].  */
1734 if (domain_type
1735 && TYPE_MAX_VALUE (domain_type)
1736 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1737 {
1738 tree up_bound = TYPE_MAX_VALUE (domain_type);
1739
1740 if (tree_int_cst_lt (up_bound, idx)
1741 /* Accesses after the end of arrays of size 0 (gcc
1742 extension) and 1 are likely intentional ("struct
1743 hack"). */
1744 && compare_tree_int (up_bound, 1) > 0)
1745 return NULL_TREE;
1746 }
1747 if (domain_type
1748 && TYPE_MIN_VALUE (domain_type))
1749 {
1750 if (!allow_negative_idx
1751 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1752 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1753 return NULL_TREE;
1754 }
1755 else if (!allow_negative_idx
1756 && compare_tree_int (idx, 0) < 0)
1757 return NULL_TREE;
1758
1759 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1760 }
1761
1762
1763 /* Attempt to fold *(S+O) to S.X.
1764 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1765 is the desired result type. */
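
/* For example (a sketch, assuming a 4-byte 'int' and no padding): for
   'struct S { int x; int y; } s;', folding the dereference of
   '(char *)&s + 4' with ORIG_TYPE 'int' produces the COMPONENT_REF s.y,
   because the byte offset 4 matches y's byte_position and the types
   agree.  */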
1766
1767 static tree
1768 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1769 tree orig_type, bool base_is_ptr)
1770 {
1771 tree f, t, field_type, tail_array_field, field_offset;
1772 tree ret;
1773 tree new_base;
1774
1775 if (TREE_CODE (record_type) != RECORD_TYPE
1776 && TREE_CODE (record_type) != UNION_TYPE
1777 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1778 return NULL_TREE;
1779
1780 /* Short-circuit silly cases. */
1781 if (useless_type_conversion_p (record_type, orig_type))
1782 return NULL_TREE;
1783
1784 tail_array_field = NULL_TREE;
1785 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1786 {
1787 int cmp;
1788
1789 if (TREE_CODE (f) != FIELD_DECL)
1790 continue;
1791 if (DECL_BIT_FIELD (f))
1792 continue;
1793
1794 if (!DECL_FIELD_OFFSET (f))
1795 continue;
1796 field_offset = byte_position (f);
1797 if (TREE_CODE (field_offset) != INTEGER_CST)
1798 continue;
1799
1800 /* ??? Java creates "interesting" fields for representing base classes.
1801 They have no name, and have no context. With no context, we get into
1802 trouble with nonoverlapping_component_refs_p. Skip them. */
1803 if (!DECL_FIELD_CONTEXT (f))
1804 continue;
1805
1806 /* The previous array field isn't at the end. */
1807 tail_array_field = NULL_TREE;
1808
1809 /* Check to see if this offset overlaps with the field. */
1810 cmp = tree_int_cst_compare (field_offset, offset);
1811 if (cmp > 0)
1812 continue;
1813
1814 field_type = TREE_TYPE (f);
1815
1816 /* Here we exactly match the offset being checked. If the types match,
1817 then we can return that field. */
1818 if (cmp == 0
1819 && useless_type_conversion_p (orig_type, field_type))
1820 {
1821 if (base_is_ptr)
1822 base = build1 (INDIRECT_REF, record_type, base);
1823 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1824 return t;
1825 }
1826
1827 /* Don't care about offsets into the middle of scalars. */
1828 if (!AGGREGATE_TYPE_P (field_type))
1829 continue;
1830
1831 /* Check for array at the end of the struct. This is often
1832      used for flexible array members.  We should be able to
1833 turn this into an array access anyway. */
1834 if (TREE_CODE (field_type) == ARRAY_TYPE)
1835 tail_array_field = f;
1836
1837 /* Check the end of the field against the offset. */
1838 if (!DECL_SIZE_UNIT (f)
1839 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1840 continue;
1841 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1842 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1843 continue;
1844
1845 /* If we matched, then set offset to the displacement into
1846 this field. */
1847 if (base_is_ptr)
1848 new_base = build1 (INDIRECT_REF, record_type, base);
1849 else
1850 new_base = base;
1851 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1852
1853 /* Recurse to possibly find the match. */
1854 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1855 f == TYPE_FIELDS (record_type));
1856 if (ret)
1857 return ret;
1858 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1859 orig_type, false);
1860 if (ret)
1861 return ret;
1862 }
1863
1864 if (!tail_array_field)
1865 return NULL_TREE;
1866
1867 f = tail_array_field;
1868 field_type = TREE_TYPE (f);
1869 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1870
1871 /* If we get here, we've got an aggregate field, and a possibly
1872      nonzero offset into it.  Recurse and hope for a valid match.  */
1873 if (base_is_ptr)
1874 base = build1 (INDIRECT_REF, record_type, base);
1875 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1876
1877 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1878 f == TYPE_FIELDS (record_type));
1879 if (t)
1880 return t;
1881 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1882 orig_type, false);
1883 }
1884
1885 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1886 or BASE[index] or by a combination of those.
1887 
1888 Before attempting the conversion, strip off existing ADDR_EXPRs and
1889 handled component refs. */
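/* Illustrative sketch (not from the original sources): assuming a target
   with 4-byte int and no padding, for

     struct S { int a; int b; } *p;

   the reference *(int *)((char *) p + 4) can be expressed as p->b, and for

     int v[8];

   the reference *(int *)((char *) &v + 8) can be expressed as v[2]. */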
1890
1891 tree
1892 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1893 {
1894 tree ret;
1895 tree type;
1896 bool base_is_ptr = true;
1897
1898 STRIP_NOPS (base);
1899 if (TREE_CODE (base) == ADDR_EXPR)
1900 {
1901 base_is_ptr = false;
1902
1903 base = TREE_OPERAND (base, 0);
1904
1905 /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
1906 field of a union, so it needs to be removed and a new COMPONENT_REF
1907 constructed. Such wrong COMPONENT_REFs are often created by folding
1908 the (type *)&object part of the expression (type *)&object+offset. */
1909 if (handled_component_p (base))
1910 {
1911 HOST_WIDE_INT sub_offset, size, maxsize;
1912 tree newbase;
1913 newbase = get_ref_base_and_extent (base, &sub_offset,
1914 &size, &maxsize);
1915 gcc_assert (newbase);
1916 if (size == maxsize
1917 && size != -1
1918 && !(sub_offset & (BITS_PER_UNIT - 1)))
1919 {
1920 base = newbase;
1921 if (sub_offset)
1922 offset = int_const_binop (PLUS_EXPR, offset,
1923 build_int_cst (TREE_TYPE (offset),
1924 sub_offset / BITS_PER_UNIT), 1);
1925 }
1926 }
1927 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1928 && integer_zerop (offset))
1929 return base;
1930 type = TREE_TYPE (base);
1931 }
1932 else
1933 {
1934 base_is_ptr = true;
1935 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1936 return NULL_TREE;
1937 type = TREE_TYPE (TREE_TYPE (base));
1938 }
1939 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1940 orig_type, base_is_ptr);
1941 if (!ret)
1942 {
1943 if (base_is_ptr)
1944 base = build1 (INDIRECT_REF, type, base);
1945 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1946 }
1947 return ret;
1948 }
1949
1950 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1951 or &BASE[index] or by a combination of those.
1952 
1953 Before attempting the conversion, strip off existing component refs. */
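/* Illustrative sketch (not from the original sources): given

     char s[10];

   the address computation (char *) &s + 3 can be re-expressed as &s[3],
   which later passes may be able to propagate further. */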
1954
1955 tree
1956 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1957 {
1958 tree t;
1959
1960 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1961 && POINTER_TYPE_P (orig_type));
1962
1963 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1964 if (t != NULL_TREE)
1965 {
1966 tree orig = addr;
1967 tree ptr_type;
1968
1969 /* For __builtin_object_size to function correctly we need to
1970 make sure not to fold address arithmetic so that we change
1971 reference from one array to another. This would happen for
1972 example for
1973
1974 struct X { char s1[10]; char s2[10]; } s;
1975 char *foo (void) { return &s.s2[-4]; }
1976
1977 where we need to avoid generating &s.s1[6]. As the C and
1978 C++ frontends create different initial trees
1979 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1980 sophisticated comparisons here. Note that checking for the
1981 condition after the fact is easier than trying to avoid doing
1982 the folding. */
1983 STRIP_NOPS (orig);
1984 if (TREE_CODE (orig) == ADDR_EXPR)
1985 orig = TREE_OPERAND (orig, 0);
1986 if ((TREE_CODE (orig) == ARRAY_REF
1987 || (TREE_CODE (orig) == COMPONENT_REF
1988 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1989 && (TREE_CODE (t) == ARRAY_REF
1990 || TREE_CODE (t) == COMPONENT_REF)
1991 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1992 ? TREE_OPERAND (orig, 0) : orig,
1993 TREE_CODE (t) == ARRAY_REF
1994 ? TREE_OPERAND (t, 0) : t, 0))
1995 return NULL_TREE;
1996
1997 ptr_type = build_pointer_type (TREE_TYPE (t));
1998 if (!useless_type_conversion_p (orig_type, ptr_type))
1999 return NULL_TREE;
2000 return build_fold_addr_expr_with_type (t, ptr_type);
2001 }
2002
2003 return NULL_TREE;
2004 }
2005
2006 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
2007 Return the simplified expression, or NULL if nothing could be done. */
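/* Illustrative sketch (not from the original sources): a dereference such as
   *("abc" + 1) reduces to the character constant 'b' via
   fold_read_from_constant_string, and, assuming a 4-byte int target,

     int a[4];

   the load *(int *)((char *) &a + 8) can be folded to a[2]. */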
2008
2009 static tree
2010 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2011 {
2012 tree t;
2013 bool volatile_p = TREE_THIS_VOLATILE (expr);
2014
2015 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2016 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2017 are sometimes added. */
2018 base = fold (base);
2019 STRIP_TYPE_NOPS (base);
2020 TREE_OPERAND (expr, 0) = base;
2021
2022 /* One possibility is that the address reduces to a string constant. */
2023 t = fold_read_from_constant_string (expr);
2024 if (t)
2025 return t;
2026
2027 /* Add in any offset from a POINTER_PLUS_EXPR. */
2028 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2029 {
2030 tree offset2;
2031
2032 offset2 = TREE_OPERAND (base, 1);
2033 if (TREE_CODE (offset2) != INTEGER_CST)
2034 return NULL_TREE;
2035 base = TREE_OPERAND (base, 0);
2036
2037 offset = fold_convert (sizetype,
2038 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2039 }
2040
2041 if (TREE_CODE (base) == ADDR_EXPR)
2042 {
2043 tree base_addr = base;
2044
2045 /* Strip the ADDR_EXPR. */
2046 base = TREE_OPERAND (base, 0);
2047
2048 /* Fold away CONST_DECL to its value, if the type is scalar. */
2049 if (TREE_CODE (base) == CONST_DECL
2050 && is_gimple_min_invariant (DECL_INITIAL (base)))
2051 return DECL_INITIAL (base);
2052
2053 /* Try folding *(&B+O) to B.X. */
2054 t = maybe_fold_offset_to_reference (base_addr, offset,
2055 TREE_TYPE (expr));
2056 if (t)
2057 {
2058 /* Preserve the volatility of the original expression.
2059 We can end up with a plain decl here which is shared,
2060 and we shouldn't mess with its flags. */
2061 if (!SSA_VAR_P (t))
2062 TREE_THIS_VOLATILE (t) = volatile_p;
2063 return t;
2064 }
2065 }
2066 else
2067 {
2068 /* We can get here for out-of-range string constant accesses,
2069 such as "_"[3]. Bail out of the entire substitution search
2070 and arrange for the entire statement to be replaced by a
2071 call to __builtin_trap. In all likelihood this will all be
2072 constant-folded away, but in the meantime we can't leave the
2073 statement with something that get_expr_operands can't understand. */
2074
2075 t = base;
2076 STRIP_NOPS (t);
2077 if (TREE_CODE (t) == ADDR_EXPR
2078 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2079 {
2080 /* FIXME: Except that this causes problems elsewhere with dead
2081 code not being deleted, and we die in the rtl expanders
2082 because we failed to remove some ssa_name. In the meantime,
2083 just return zero. */
2084 /* FIXME2: This condition should be signaled by
2085 fold_read_from_constant_string directly, rather than
2086 re-checking for it here. */
2087 return integer_zero_node;
2088 }
2089
2090 /* Try folding *(B+O) to B->X. Still an improvement. */
2091 if (POINTER_TYPE_P (TREE_TYPE (base)))
2092 {
2093 t = maybe_fold_offset_to_reference (base, offset,
2094 TREE_TYPE (expr));
2095 if (t)
2096 return t;
2097 }
2098 }
2099
2100 /* Otherwise we had an offset that we could not simplify. */
2101 return NULL_TREE;
2102 }
2103
2104
2105 /* A quaint feature of our address arithmetic is that there
2106 can be hidden type changes here: the type of the result need
2107 not be the same as the type of the input pointer.
2108
2109 What we're after here is an expression of the form
2110 (T *)(&array + const)
2111 where array is OP0, const is OP1, RES_TYPE is T and
2112 the cast doesn't actually exist, but is implicit in the
2113 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2114 &array[x]
2115 which may be able to propagate further. */
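/* Illustrative sketch (not from the original sources): for

     char buf[16];

   the GIMPLE addition &buf p+ 4, with result type char *, can be rewritten
   as &buf[4]; likewise &a[0] p+ D.123, where D.123 = i * sizeof (a[0]) and
   D.123 and i are hypothetical names, can be rewritten as &a[i] via the
   MULT_EXPR case below. */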
2116
2117 tree
2118 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2119 {
2120 tree ptd_type;
2121 tree t;
2122
2123 /* The first operand should be an ADDR_EXPR. */
2124 if (TREE_CODE (op0) != ADDR_EXPR)
2125 return NULL_TREE;
2126 op0 = TREE_OPERAND (op0, 0);
2127
2128 /* It had better be a constant. */
2129 if (TREE_CODE (op1) != INTEGER_CST)
2130 {
2131 /* Or op0 should now be A[0] and the non-constant offset defined
2132 via a multiplication by the array element size. */
2133 if (TREE_CODE (op0) == ARRAY_REF
2134 && integer_zerop (TREE_OPERAND (op0, 1))
2135 && TREE_CODE (op1) == SSA_NAME
2136 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2137 {
2138 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2139 if (!is_gimple_assign (offset_def))
2140 return NULL_TREE;
2141
2142 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2143 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2144 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2145 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2146 return build1 (ADDR_EXPR, res_type,
2147 build4 (ARRAY_REF, TREE_TYPE (op0),
2148 TREE_OPERAND (op0, 0),
2149 gimple_assign_rhs1 (offset_def),
2150 TREE_OPERAND (op0, 2),
2151 TREE_OPERAND (op0, 3)));
2152 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2153 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2154 return build1 (ADDR_EXPR, res_type,
2155 build4 (ARRAY_REF, TREE_TYPE (op0),
2156 TREE_OPERAND (op0, 0),
2157 op1,
2158 TREE_OPERAND (op0, 2),
2159 TREE_OPERAND (op0, 3)));
2160 }
2161 return NULL_TREE;
2162 }
2163
2164 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2165 the offset into it. */
2166 while (TREE_CODE (op0) == ARRAY_REF)
2167 {
2168 tree array_obj = TREE_OPERAND (op0, 0);
2169 tree array_idx = TREE_OPERAND (op0, 1);
2170 tree elt_type = TREE_TYPE (op0);
2171 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2172 tree min_idx;
2173
2174 if (TREE_CODE (array_idx) != INTEGER_CST)
2175 break;
2176 if (TREE_CODE (elt_size) != INTEGER_CST)
2177 break;
2178
2179 /* Un-bias the index by the min index of the array type. */
2180 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2181 if (min_idx)
2182 {
2183 min_idx = TYPE_MIN_VALUE (min_idx);
2184 if (min_idx)
2185 {
2186 if (TREE_CODE (min_idx) != INTEGER_CST)
2187 break;
2188
2189 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2190 if (!integer_zerop (min_idx))
2191 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2192 min_idx, 0);
2193 }
2194 }
2195
2196 /* Convert the index to a byte offset. */
2197 array_idx = fold_convert (sizetype, array_idx);
2198 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2199
2200 /* Update the operands for the next round, or for folding. */
2201 op1 = int_const_binop (PLUS_EXPR,
2202 array_idx, op1, 0);
2203 op0 = array_obj;
2204 }
2205
2206 ptd_type = TREE_TYPE (res_type);
2207 /* If we want a pointer to void, reconstruct the reference from the
2208 array element type. A pointer to that can be trivially converted
2209 to void *. This happens as we fold (void *)(ptr p+ off). */
2210 if (VOID_TYPE_P (ptd_type)
2211 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2212 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2213
2214 /* At which point we can try some of the same things as for indirects. */
2215 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2216 if (!t)
2217 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2218 ptd_type, false);
2219 if (t)
2220 t = build1 (ADDR_EXPR, res_type, t);
2221
2222 return t;
2223 }
2224
2225 /* Subroutine of fold_stmt. We perform several simplifications of the
2226 memory reference tree EXPR and make sure the result is re-gimplified properly
2227 after propagation of constant addresses. IS_LHS is true if the
2228 reference is supposed to be an lvalue. */
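/* Illustrative sketch (not from the original sources): constant propagation
   can leave an rvalue like *&x, which folds back to plain x, and a read such
   as "abc"[1] folds to the constant 'b' when the reference is not an
   lvalue. */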
2229
2230 static tree
2231 maybe_fold_reference (tree expr, bool is_lhs)
2232 {
2233 tree *t = &expr;
2234
2235 if (TREE_CODE (expr) == ARRAY_REF
2236 && !is_lhs)
2237 {
2238 tree tem = fold_read_from_constant_string (expr);
2239 if (tem)
2240 return tem;
2241 }
2242
2243 /* ??? We might want to open-code the relevant remaining cases
2244 to avoid using the generic fold. */
2245 if (handled_component_p (*t)
2246 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2247 {
2248 tree tem = fold (*t);
2249 if (tem != *t)
2250 return tem;
2251 }
2252
2253 while (handled_component_p (*t))
2254 t = &TREE_OPERAND (*t, 0);
2255
2256 if (TREE_CODE (*t) == INDIRECT_REF)
2257 {
2258 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2259 integer_zero_node);
2260 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2261 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2262 tem = NULL_TREE;
2263 if (!tem
2264 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2265 /* If we had a good reason for propagating the address here,
2266 make sure we end up with valid gimple. See PR34989. */
2267 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2268
2269 if (tem)
2270 {
2271 *t = tem;
2272 tem = maybe_fold_reference (expr, is_lhs);
2273 if (tem)
2274 return tem;
2275 return expr;
2276 }
2277 }
2278
2279 return NULL_TREE;
2280 }
2281
2282
2283 /* Return the string length, maximum string length or maximum value of
2284 ARG in *LENGTH.
2285 If ARG is an SSA name variable, follow its use-def chains. Return
2286 false if we are unable to determine the length or value, or if,
2287 for TYPE == 0, *LENGTH is already set and differs from the length
2288 we determine. VISITED is a bitmap of visited variables.
2289 TYPE is 0 if the string length should be returned, 1 for the maximum
2290 string length and 2 for the maximum value ARG can have. */
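/* Illustrative sketch (not from the original sources): for a PHI such as
   p_3 = PHI <&"foo", &"quux">, a TYPE == 1 query yields the maximum string
   length 4, while a TYPE == 0 query fails because the two lengths differ. */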
2291
2292 static bool
2293 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2294 {
2295 tree var, val;
2296 gimple def_stmt;
2297
2298 if (TREE_CODE (arg) != SSA_NAME)
2299 {
2300 if (TREE_CODE (arg) == COND_EXPR)
2301 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2302 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2303 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2304 else if (TREE_CODE (arg) == ADDR_EXPR
2305 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2306 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2307 {
2308 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2309 if (TREE_CODE (aop0) == INDIRECT_REF
2310 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2311 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2312 length, visited, type);
2313 }
2314
2315 if (type == 2)
2316 {
2317 val = arg;
2318 if (TREE_CODE (val) != INTEGER_CST
2319 || tree_int_cst_sgn (val) < 0)
2320 return false;
2321 }
2322 else
2323 val = c_strlen (arg, 1);
2324 if (!val)
2325 return false;
2326
2327 if (*length)
2328 {
2329 if (type > 0)
2330 {
2331 if (TREE_CODE (*length) != INTEGER_CST
2332 || TREE_CODE (val) != INTEGER_CST)
2333 return false;
2334
2335 if (tree_int_cst_lt (*length, val))
2336 *length = val;
2337 return true;
2338 }
2339 else if (simple_cst_equal (val, *length) != 1)
2340 return false;
2341 }
2342
2343 *length = val;
2344 return true;
2345 }
2346
2347 /* If we were already here, break the infinite cycle. */
2348 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2349 return true;
2350 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2351
2352 var = arg;
2353 def_stmt = SSA_NAME_DEF_STMT (var);
2354
2355 switch (gimple_code (def_stmt))
2356 {
2357 case GIMPLE_ASSIGN:
2358 /* The RHS of the statement defining VAR must either have a
2359 constant length or come from another SSA_NAME with a constant
2360 length. */
2361 if (gimple_assign_single_p (def_stmt)
2362 || gimple_assign_unary_nop_p (def_stmt))
2363 {
2364 tree rhs = gimple_assign_rhs1 (def_stmt);
2365 return get_maxval_strlen (rhs, length, visited, type);
2366 }
2367 return false;
2368
2369 case GIMPLE_PHI:
2370 {
2371 /* All the arguments of the PHI node must have the same constant
2372 length. */
2373 unsigned i;
2374
2375 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2376 {
2377 tree arg = gimple_phi_arg (def_stmt, i)->def;
2378
2379 /* If this PHI has itself as an argument, we cannot
2380 determine the string length of this argument. However,
2381 if we can find a constant string length for the other
2382 PHI args then we can still be sure that this is a
2383 constant string length. So be optimistic and just
2384 continue with the next argument. */
2385 if (arg == gimple_phi_result (def_stmt))
2386 continue;
2387
2388 if (!get_maxval_strlen (arg, length, visited, type))
2389 return false;
2390 }
2391 }
2392 return true;
2393
2394 default:
2395 return false;
2396 }
2397 }
2398
2399
2400 /* Fold builtin call in statement STMT. Returns a simplified tree.
2401 We may return a non-constant expression, including another call
2402 to a different function and with different arguments, e.g.,
2403 substituting memcpy for strcpy when the string length is known.
2404 Note that some builtins expand into inline code that may not
2405 be valid in GIMPLE. Callers must take care. */
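/* Illustrative sketch (not from the original sources): if get_maxval_strlen
   determines that the argument of strlen (p_1) points to "hello", the call
   folds to the constant 5; similarly, strcpy (dst_2, src_3) with a known
   source length may be rewritten as a call to memcpy copying length + 1
   bytes. The SSA names here are hypothetical. */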
2406
2407 static tree
2408 ccp_fold_builtin (gimple stmt)
2409 {
2410 tree result, val[3];
2411 tree callee, a;
2412 int arg_idx, type;
2413 bitmap visited;
2414 bool ignore;
2415 int nargs;
2416
2417 gcc_assert (is_gimple_call (stmt));
2418
2419 ignore = (gimple_call_lhs (stmt) == NULL);
2420
2421 /* First try the generic builtin folder. If that succeeds, return the
2422 result directly. */
2423 result = fold_call_stmt (stmt, ignore);
2424 if (result)
2425 {
2426 if (ignore)
2427 STRIP_NOPS (result);
2428 return result;
2429 }
2430
2431 /* Ignore MD builtins. */
2432 callee = gimple_call_fndecl (stmt);
2433 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2434 return NULL_TREE;
2435
2436 /* If the builtin could not be folded, and it has no argument list,
2437 we're done. */
2438 nargs = gimple_call_num_args (stmt);
2439 if (nargs == 0)
2440 return NULL_TREE;
2441
2442 /* Only do the work for builtins we know how to simplify. */
2443 switch (DECL_FUNCTION_CODE (callee))
2444 {
2445 case BUILT_IN_STRLEN:
2446 case BUILT_IN_FPUTS:
2447 case BUILT_IN_FPUTS_UNLOCKED:
2448 arg_idx = 0;
2449 type = 0;
2450 break;
2451 case BUILT_IN_STRCPY:
2452 case BUILT_IN_STRNCPY:
2453 arg_idx = 1;
2454 type = 0;
2455 break;
2456 case BUILT_IN_MEMCPY_CHK:
2457 case BUILT_IN_MEMPCPY_CHK:
2458 case BUILT_IN_MEMMOVE_CHK:
2459 case BUILT_IN_MEMSET_CHK:
2460 case BUILT_IN_STRNCPY_CHK:
2461 arg_idx = 2;
2462 type = 2;
2463 break;
2464 case BUILT_IN_STRCPY_CHK:
2465 case BUILT_IN_STPCPY_CHK:
2466 arg_idx = 1;
2467 type = 1;
2468 break;
2469 case BUILT_IN_SNPRINTF_CHK:
2470 case BUILT_IN_VSNPRINTF_CHK:
2471 arg_idx = 1;
2472 type = 2;
2473 break;
2474 default:
2475 return NULL_TREE;
2476 }
2477
2478 if (arg_idx >= nargs)
2479 return NULL_TREE;
2480
2481 /* Try to use the dataflow information gathered by the CCP process. */
2482 visited = BITMAP_ALLOC (NULL);
2483 bitmap_clear (visited);
2484
2485 memset (val, 0, sizeof (val));
2486 a = gimple_call_arg (stmt, arg_idx);
2487 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2488 val[arg_idx] = NULL_TREE;
2489
2490 BITMAP_FREE (visited);
2491
2492 result = NULL_TREE;
2493 switch (DECL_FUNCTION_CODE (callee))
2494 {
2495 case BUILT_IN_STRLEN:
2496 if (val[0] && nargs == 1)
2497 {
2498 tree new_val =
2499 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2500
2501 /* If the result is not a valid gimple value, or not a cast
2502 of a valid gimple value, then we cannot use the result. */
2503 if (is_gimple_val (new_val)
2504 || (is_gimple_cast (new_val)
2505 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2506 return new_val;
2507 }
2508 break;
2509
2510 case BUILT_IN_STRCPY:
2511 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2512 result = fold_builtin_strcpy (callee,
2513 gimple_call_arg (stmt, 0),
2514 gimple_call_arg (stmt, 1),
2515 val[1]);
2516 break;
2517
2518 case BUILT_IN_STRNCPY:
2519 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2520 result = fold_builtin_strncpy (callee,
2521 gimple_call_arg (stmt, 0),
2522 gimple_call_arg (stmt, 1),
2523 gimple_call_arg (stmt, 2),
2524 val[1]);
2525 break;
2526
2527 case BUILT_IN_FPUTS:
2528 if (nargs == 2)
2529 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2530 gimple_call_arg (stmt, 1),
2531 ignore, false, val[0]);
2532 break;
2533
2534 case BUILT_IN_FPUTS_UNLOCKED:
2535 if (nargs == 2)
2536 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2537 gimple_call_arg (stmt, 1),
2538 ignore, true, val[0]);
2539 break;
2540
2541 case BUILT_IN_MEMCPY_CHK:
2542 case BUILT_IN_MEMPCPY_CHK:
2543 case BUILT_IN_MEMMOVE_CHK:
2544 case BUILT_IN_MEMSET_CHK:
2545 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2546 result = fold_builtin_memory_chk (callee,
2547 gimple_call_arg (stmt, 0),
2548 gimple_call_arg (stmt, 1),
2549 gimple_call_arg (stmt, 2),
2550 gimple_call_arg (stmt, 3),
2551 val[2], ignore,
2552 DECL_FUNCTION_CODE (callee));
2553 break;
2554
2555 case BUILT_IN_STRCPY_CHK:
2556 case BUILT_IN_STPCPY_CHK:
2557 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2558 result = fold_builtin_stxcpy_chk (callee,
2559 gimple_call_arg (stmt, 0),
2560 gimple_call_arg (stmt, 1),
2561 gimple_call_arg (stmt, 2),
2562 val[1], ignore,
2563 DECL_FUNCTION_CODE (callee));
2564 break;
2565
2566 case BUILT_IN_STRNCPY_CHK:
2567 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2568 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2569 gimple_call_arg (stmt, 1),
2570 gimple_call_arg (stmt, 2),
2571 gimple_call_arg (stmt, 3),
2572 val[2]);
2573 break;
2574
2575 case BUILT_IN_SNPRINTF_CHK:
2576 case BUILT_IN_VSNPRINTF_CHK:
2577 if (val[1] && is_gimple_val (val[1]))
2578 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2579 DECL_FUNCTION_CODE (callee));
2580 break;
2581
2582 default:
2583 gcc_unreachable ();
2584 }
2585
2586 if (result && ignore)
2587 result = fold_ignored_result (result);
2588 return result;
2589 }
2590
2591 /* Attempt to fold an assignment statement pointed to by SI. Returns a
2592 replacement rhs for the statement or NULL_TREE if no simplification
2593 could be made. It is assumed that the operands have been previously
2594 folded. */
2595
2596 static tree
2597 fold_gimple_assign (gimple_stmt_iterator *si)
2598 {
2599 gimple stmt = gsi_stmt (*si);
2600 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2601
2602 tree result = NULL_TREE;
2603
2604 switch (get_gimple_rhs_class (subcode))
2605 {
2606 case GIMPLE_SINGLE_RHS:
2607 {
2608 tree rhs = gimple_assign_rhs1 (stmt);
2609
2610 /* Try to fold a conditional expression. */
2611 if (TREE_CODE (rhs) == COND_EXPR)
2612 {
2613 tree op0 = COND_EXPR_COND (rhs);
2614 tree tem;
2615 bool set = false;
2616
2617 if (COMPARISON_CLASS_P (op0))
2618 {
2619 fold_defer_overflow_warnings ();
2620 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2621 TREE_OPERAND (op0, 0),
2622 TREE_OPERAND (op0, 1));
2623 /* This is actually a conditional expression, not a GIMPLE
2624 conditional statement; however, the valid_gimple_rhs_p
2625 test still applies. */
2626 set = (tem && is_gimple_condexpr (tem)
2627 && valid_gimple_rhs_p (tem));
2628 fold_undefer_overflow_warnings (set, stmt, 0);
2629 }
2630 else if (is_gimple_min_invariant (op0))
2631 {
2632 tem = op0;
2633 set = true;
2634 }
2635 else
2636 return NULL_TREE;
2637
2638 if (set)
2639 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
2640 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2641 }
2642
2643 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2644 return maybe_fold_tmr (rhs);
2645
2646 else if (REFERENCE_CLASS_P (rhs))
2647 return maybe_fold_reference (rhs, false);
2648
2649 else if (TREE_CODE (rhs) == ADDR_EXPR)
2650 {
2651 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2652 if (tem)
2653 result = fold_convert (TREE_TYPE (rhs),
2654 build_fold_addr_expr (tem));
2655 }
2656
2657 /* If we couldn't fold the RHS, hand over to the generic
2658 fold routines. */
2659 if (result == NULL_TREE)
2660 result = fold (rhs);
2661
2662 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2663 that may have been added by fold, and "useless" type
2664 conversions that might now be apparent due to propagation. */
2665 STRIP_USELESS_TYPE_CONVERSION (result);
2666
2667 if (result != rhs && valid_gimple_rhs_p (result))
2668 return result;
2669
2670 return NULL_TREE;
2671 }
2672 break;
2673
2674 case GIMPLE_UNARY_RHS:
2675 {
2676 tree rhs = gimple_assign_rhs1 (stmt);
2677
2678 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2679 if (result)
2680 {
2681 /* If the operation was a conversion, do _not_ mark a
2682 resulting constant with TREE_OVERFLOW if the original
2683 constant was not. These conversions have
2684 implementation-defined behavior and retaining the TREE_OVERFLOW
2685 flag here would confuse later passes such as VRP. */
2686 if (CONVERT_EXPR_CODE_P (subcode)
2687 && TREE_CODE (result) == INTEGER_CST
2688 && TREE_CODE (rhs) == INTEGER_CST)
2689 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2690
2691 STRIP_USELESS_TYPE_CONVERSION (result);
2692 if (valid_gimple_rhs_p (result))
2693 return result;
2694 }
2695 else if (CONVERT_EXPR_CODE_P (subcode)
2696 && POINTER_TYPE_P (gimple_expr_type (stmt))
2697 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2698 {
2699 tree type = gimple_expr_type (stmt);
2700 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2701 integer_zero_node, type);
2702 if (t)
2703 return t;
2704 }
2705 }
2706 break;
2707
2708 case GIMPLE_BINARY_RHS:
2709 /* Try to fold pointer addition. */
2710 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2711 {
2712 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2713 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2714 {
2715 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2716 if (!useless_type_conversion_p
2717 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2718 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2719 }
2720 result = maybe_fold_stmt_addition (type,
2721 gimple_assign_rhs1 (stmt),
2722 gimple_assign_rhs2 (stmt));
2723 }
2724
2725 if (!result)
2726 result = fold_binary (subcode,
2727 TREE_TYPE (gimple_assign_lhs (stmt)),
2728 gimple_assign_rhs1 (stmt),
2729 gimple_assign_rhs2 (stmt));
2730
2731 if (result)
2732 {
2733 STRIP_USELESS_TYPE_CONVERSION (result);
2734 if (valid_gimple_rhs_p (result))
2735 return result;
2736
2737 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2738 we lose canonicalization opportunities. Do not go through
2739 fold again here, though, or the same non-GIMPLE will be
2740 produced. */
2741 if (commutative_tree_code (subcode)
2742 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2743 gimple_assign_rhs2 (stmt), false))
2744 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2745 gimple_assign_rhs2 (stmt),
2746 gimple_assign_rhs1 (stmt));
2747 }
2748 break;
2749
2750 case GIMPLE_INVALID_RHS:
2751 gcc_unreachable ();
2752 }
2753
2754 return NULL_TREE;
2755 }
2756
2757 /* Attempt to fold a conditional statement. Return true if any changes were
2758 made. We only attempt to fold the condition expression, and do not perform
2759 any transformation that would require alteration of the cfg. It is
2760 assumed that the operands have been previously folded. */
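/* Illustrative sketch (not from the original sources): once propagation has
   substituted constants, a condition such as 3 > 2 folds to true and the
   GIMPLE_COND is rewritten accordingly; later CFG cleanup can then remove
   the dead arm. */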
2761
2762 static bool
2763 fold_gimple_cond (gimple stmt)
2764 {
2765 tree result = fold_binary (gimple_cond_code (stmt),
2766 boolean_type_node,
2767 gimple_cond_lhs (stmt),
2768 gimple_cond_rhs (stmt));
2769
2770 if (result)
2771 {
2772 STRIP_USELESS_TYPE_CONVERSION (result);
2773 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2774 {
2775 gimple_cond_set_condition_from_tree (stmt, result);
2776 return true;
2777 }
2778 }
2779
2780 return false;
2781 }
2782
2783
2784 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2785 The statement may be replaced by another statement, e.g., if the call
2786 simplifies to a constant value. Return true if any changes were made.
2787 It is assumed that the operands have been previously folded. */
2788
2789 static bool
2790 fold_gimple_call (gimple_stmt_iterator *gsi)
2791 {
2792 gimple stmt = gsi_stmt (*gsi);
2793
2794 tree callee = gimple_call_fndecl (stmt);
2795
2796 /* Check for builtins that CCP can handle using information not
2797 available in the generic fold routines. */
2798 if (callee && DECL_BUILT_IN (callee))
2799 {
2800 tree result = ccp_fold_builtin (stmt);
2801
2802 if (result)
2803 return update_call_from_tree (gsi, result);
2804 }
2805 else
2806 {
2807 /* Check for a resolvable OBJ_TYPE_REF. The only cases we can resolve
2808 here are those where we've propagated the address of a decl into
2809 the object slot. */
2810 /* ??? Should perhaps do this in fold proper. However, doing it
2811 there requires that we create a new CALL_EXPR, and that requires
2812 copying EH region info to the new node. Easier to just do it
2813 here where we can just smash the call operand. */
2814 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2815 callee = gimple_call_fn (stmt);
2816 if (TREE_CODE (callee) == OBJ_TYPE_REF
2817 && lang_hooks.fold_obj_type_ref
2818 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2819 && DECL_P (TREE_OPERAND
2820 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2821 {
2822 tree t;
2823
2824 /* ??? Caution: Broken ADDR_EXPR semantics means that
2825 looking at the type of the operand of the addr_expr
2826 can yield an array type. See silly exception in
2827 check_pointer_types_r. */
2828 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2829 t = lang_hooks.fold_obj_type_ref (callee, t);
2830 if (t)
2831 {
2832 gimple_call_set_fn (stmt, t);
2833 return true;
2834 }
2835 }
2836 }
2837
2838 return false;
2839 }
2840
2841 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2842 distinguishes the two cases. */
2843
2844 static bool
2845 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2846 {
2847 bool changed = false;
2848 gimple stmt = gsi_stmt (*gsi);
2849 unsigned i;
2850
2851 /* Fold the main computation performed by the statement. */
2852 switch (gimple_code (stmt))
2853 {
2854 case GIMPLE_ASSIGN:
2855 {
2856 unsigned old_num_ops = gimple_num_ops (stmt);
2857 tree new_rhs = fold_gimple_assign (gsi);
2858 if (new_rhs != NULL_TREE
2859 && (!inplace
2860 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2861 {
2862 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2863 changed = true;
2864 }
2865 break;
2866 }
2867
2868 case GIMPLE_COND:
2869 changed |= fold_gimple_cond (stmt);
2870 break;
2871
2872 case GIMPLE_CALL:
2873 /* Fold *& in call arguments. */
2874 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2875 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2876 {
2877 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2878 if (tmp)
2879 {
2880 gimple_call_set_arg (stmt, i, tmp);
2881 changed = true;
2882 }
2883 }
2884 /* The entire statement may be replaced in this case. */
2885 if (!inplace)
2886 changed |= fold_gimple_call (gsi);
2887 break;
2888
2889 case GIMPLE_ASM:
2890 /* Fold *& in asm operands. */
2891 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2892 {
2893 tree link = gimple_asm_output_op (stmt, i);
2894 tree op = TREE_VALUE (link);
2895 if (REFERENCE_CLASS_P (op)
2896 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2897 {
2898 TREE_VALUE (link) = op;
2899 changed = true;
2900 }
2901 }
2902 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2903 {
2904 tree link = gimple_asm_input_op (stmt, i);
2905 tree op = TREE_VALUE (link);
2906 if (REFERENCE_CLASS_P (op)
2907 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
2908 {
2909 TREE_VALUE (link) = op;
2910 changed = true;
2911 }
2912 }
2913 break;
2914
2915 default:;
2916 }
2917
2918 stmt = gsi_stmt (*gsi);
2919
2920 /* Fold *& on the lhs. */
2921 if (gimple_has_lhs (stmt))
2922 {
2923 tree lhs = gimple_get_lhs (stmt);
2924 if (lhs && REFERENCE_CLASS_P (lhs))
2925 {
2926 tree new_lhs = maybe_fold_reference (lhs, true);
2927 if (new_lhs)
2928 {
2929 gimple_set_lhs (stmt, new_lhs);
2930 changed = true;
2931 }
2932 }
2933 }
2934
2935 return changed;
2936 }
2937
2938 /* Fold the statement pointed to by GSI. In some cases, this function may
2939 replace the whole statement with a new one. Returns true iff folding
2940 makes any changes.
2941 The statement pointed to by GSI should be in valid gimple form but may
2942 be in an unfolded state resulting from, for example, constant propagation,
2943 which can produce *&x = 0. */
2944
2945 bool
2946 fold_stmt (gimple_stmt_iterator *gsi)
2947 {
2948 return fold_stmt_1 (gsi, false);
2949 }
2950
2951 /* Perform the minimal folding on statement STMT. Only operations like
2952 *&x created by constant propagation are handled. The statement cannot
2953 be replaced with a new one. Return true if the statement was
2954 changed, false otherwise.
2955 The statement STMT should be in valid gimple form but may
2956 be in an unfolded state resulting from, for example, constant propagation,
2957 which can produce *&x = 0. */
2958
2959 bool
2960 fold_stmt_inplace (gimple stmt)
2961 {
2962 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2963 bool changed = fold_stmt_1 (&gsi, true);
2964 gcc_assert (gsi_stmt (gsi) == stmt);
2965 return changed;
2966 }
2967
2968 /* Try to optimize out __builtin_stack_restore. Optimize it out
2969 if there is another __builtin_stack_restore in the same basic
2970 block and no calls or ASM_EXPRs are in between, or if this block's
2971 only outgoing edge is to EXIT_BLOCK and there are no calls or
2972 ASM_EXPRs after this __builtin_stack_restore. */
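/* Illustrative sketch (not from the original sources, SSA names hypothetical):
   for a block shaped like

     saved_1 = __builtin_stack_save ();
     ...                                  (no calls or asms)
     __builtin_stack_restore (saved_1);
     return;                              (falls through to EXIT)

   the restore is useless because the frame is about to be torn down anyway;
   it is removed and the paired stack_save is rewritten to saved_1 = 0. */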
2973
2974 static tree
2975 optimize_stack_restore (gimple_stmt_iterator i)
2976 {
2977 tree callee, rhs;
2978 gimple stmt, stack_save;
2979 gimple_stmt_iterator stack_save_gsi;
2980
2981 basic_block bb = gsi_bb (i);
2982 gimple call = gsi_stmt (i);
2983
2984 if (gimple_code (call) != GIMPLE_CALL
2985 || gimple_call_num_args (call) != 1
2986 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2987 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2988 return NULL_TREE;
2989
2990 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2991 {
2992 stmt = gsi_stmt (i);
2993 if (gimple_code (stmt) == GIMPLE_ASM)
2994 return NULL_TREE;
2995 if (gimple_code (stmt) != GIMPLE_CALL)
2996 continue;
2997
2998 callee = gimple_call_fndecl (stmt);
2999 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3000 return NULL_TREE;
3001
3002 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3003 break;
3004 }
3005
3006 if (gsi_end_p (i)
3007 && (! single_succ_p (bb)
3008 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3009 return NULL_TREE;
3010
3011 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3012 if (gimple_code (stack_save) != GIMPLE_CALL
3013 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3014 || stmt_could_throw_p (stack_save)
3015 || !has_single_use (gimple_call_arg (call, 0)))
3016 return NULL_TREE;
3017
3018 callee = gimple_call_fndecl (stack_save);
3019 if (!callee
3020 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3021 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3022 || gimple_call_num_args (stack_save) != 0)
3023 return NULL_TREE;
3024
3025 stack_save_gsi = gsi_for_stmt (stack_save);
3026 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3027 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3028 if (!update_call_from_tree (&stack_save_gsi, rhs))
3029 {
3030 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3031 return NULL_TREE;
3032 }
3033 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3034
3035 /* No effect, so the statement will be deleted. */
3036 return integer_zero_node;
3037 }
3038
3039 /* If the va_list type is a simple pointer and nothing special is needed,
3040 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3041 optimize __builtin_va_end (&ap) out as a NOP and turn __builtin_va_copy
3042 into a simple pointer assignment. */
3043
3044 static tree
3045 optimize_stdarg_builtin (gimple call)
3046 {
3047 tree callee, lhs, rhs, cfun_va_list;
3048 bool va_list_simple_ptr;
3049
3050 if (gimple_code (call) != GIMPLE_CALL)
3051 return NULL_TREE;
3052
3053 callee = gimple_call_fndecl (call);
3054
3055 cfun_va_list = targetm.fn_abi_va_list (callee);
3056 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3057 && (TREE_TYPE (cfun_va_list) == void_type_node
3058 || TREE_TYPE (cfun_va_list) == char_type_node);
3059
3060 switch (DECL_FUNCTION_CODE (callee))
3061 {
3062 case BUILT_IN_VA_START:
3063 if (!va_list_simple_ptr
3064 || targetm.expand_builtin_va_start != NULL
3065 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3066 return NULL_TREE;
3067
3068 if (gimple_call_num_args (call) != 2)
3069 return NULL_TREE;
3070
3071 lhs = gimple_call_arg (call, 0);
3072 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3073 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3074 != TYPE_MAIN_VARIANT (cfun_va_list))
3075 return NULL_TREE;
3076
3077 lhs = build_fold_indirect_ref (lhs);
3078 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3079 1, integer_zero_node);
3080 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3081 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3082
3083 case BUILT_IN_VA_COPY:
3084 if (!va_list_simple_ptr)
3085 return NULL_TREE;
3086
3087 if (gimple_call_num_args (call) != 2)
3088 return NULL_TREE;
3089
3090 lhs = gimple_call_arg (call, 0);
3091 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3092 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3093 != TYPE_MAIN_VARIANT (cfun_va_list))
3094 return NULL_TREE;
3095
3096 lhs = build_fold_indirect_ref (lhs);
3097 rhs = gimple_call_arg (call, 1);
3098 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3099 != TYPE_MAIN_VARIANT (cfun_va_list))
3100 return NULL_TREE;
3101
3102 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3103 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3104
3105 case BUILT_IN_VA_END:
3106 /* No effect, so the statement will be deleted. */
3107 return integer_zero_node;
3108
3109 default:
3110 gcc_unreachable ();
3111 }
3112 }
3113
3114 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3115 RHS of an assignment. Insert the necessary statements before
3116 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3117 is replaced. If the call is expected to produce a result, then it
3118 is replaced by an assignment of the new RHS to the result variable.
3119 If the result is to be ignored, then the call is replaced by a
3120 GIMPLE_NOP. */
3121
3122 static void
3123 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3124 {
3125 tree lhs;
3126 tree tmp = NULL_TREE; /* Silence warning. */
3127 gimple stmt, new_stmt;
3128 gimple_stmt_iterator i;
3129 gimple_seq stmts = gimple_seq_alloc ();
3130 struct gimplify_ctx gctx;
3131
3132 stmt = gsi_stmt (*si_p);
3133
3134 gcc_assert (is_gimple_call (stmt));
3135
3136 lhs = gimple_call_lhs (stmt);
3137
3138 push_gimplify_context (&gctx);
3139
3140 if (lhs == NULL_TREE)
3141 gimplify_and_add (expr, &stmts);
3142 else
3143 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3144
3145 pop_gimplify_context (NULL);
3146
3147 if (gimple_has_location (stmt))
3148 annotate_all_with_location (stmts, gimple_location (stmt));
3149
3150 /* The replacement can expose previously unreferenced variables. */
3151 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3152 {
3153 new_stmt = gsi_stmt (i);
3154 find_new_referenced_vars (new_stmt);
3155 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3156 mark_symbols_for_renaming (new_stmt);
3157 gsi_next (si_p);
3158 }
3159
3160 if (lhs == NULL_TREE)
3161 {
3162 new_stmt = gimple_build_nop ();
3163 unlink_stmt_vdef (stmt);
3164 release_defs (stmt);
3165 }
3166 else
3167 {
3168 new_stmt = gimple_build_assign (lhs, tmp);
3169 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3170 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3171 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3172 }
3173
3174 gimple_set_location (new_stmt, gimple_location (stmt));
3175 gsi_replace (si_p, new_stmt, false);
3176 }
3177
3178 /* A simple pass that attempts to fold all builtin functions. This pass
3179 is run after we've propagated as many constants as we can. */
3180
3181 static unsigned int
3182 execute_fold_all_builtins (void)
3183 {
3184 bool cfg_changed = false;
3185 basic_block bb;
3186 unsigned int todoflags = 0;
3187
3188 FOR_EACH_BB (bb)
3189 {
3190 gimple_stmt_iterator i;
3191 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3192 {
3193 gimple stmt, old_stmt;
3194 tree callee, result;
3195 enum built_in_function fcode;
3196
3197 stmt = gsi_stmt (i);
3198
3199 if (gimple_code (stmt) != GIMPLE_CALL)
3200 {
3201 gsi_next (&i);
3202 continue;
3203 }
3204 callee = gimple_call_fndecl (stmt);
3205 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3206 {
3207 gsi_next (&i);
3208 continue;
3209 }
3210 fcode = DECL_FUNCTION_CODE (callee);
3211
3212 result = ccp_fold_builtin (stmt);
3213
3214 if (result)
3215 gimple_remove_stmt_histograms (cfun, stmt);
3216
3217 if (!result)
3218 switch (DECL_FUNCTION_CODE (callee))
3219 {
3220 case BUILT_IN_CONSTANT_P:
3221 /* Resolve __builtin_constant_p. If it hasn't been
3222 folded to integer_one_node by now, it's fairly
3223 certain that the value simply isn't constant. */
3224 result = integer_zero_node;
3225 break;
3226
3227 case BUILT_IN_STACK_RESTORE:
3228 result = optimize_stack_restore (i);
3229 if (result)
3230 break;
3231 gsi_next (&i);
3232 continue;
3233
3234 case BUILT_IN_VA_START:
3235 case BUILT_IN_VA_END:
3236 case BUILT_IN_VA_COPY:
3237 /* These shouldn't be folded before pass_stdarg. */
3238 result = optimize_stdarg_builtin (stmt);
3239 if (result)
3240 break;
3241 /* FALLTHRU */
3242
3243 default:
3244 gsi_next (&i);
3245 continue;
3246 }
3247
3248 if (dump_file && (dump_flags & TDF_DETAILS))
3249 {
3250 fprintf (dump_file, "Simplified\n ");
3251 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3252 }
3253
3254 old_stmt = stmt;
3255 push_stmt_changes (gsi_stmt_ptr (&i));
3256
3257 if (!update_call_from_tree (&i, result))
3258 {
3259 gimplify_and_update_call_from_tree (&i, result);
3260 todoflags |= TODO_update_address_taken;
3261 }
3262
3263 stmt = gsi_stmt (i);
3264 pop_stmt_changes (gsi_stmt_ptr (&i));
3265
3266 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3267 && gimple_purge_dead_eh_edges (bb))
3268 cfg_changed = true;
3269
3270 if (dump_file && (dump_flags & TDF_DETAILS))
3271 {
3272 fprintf (dump_file, "to\n ");
3273 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3274 fprintf (dump_file, "\n");
3275 }
3276
3277 /* Retry the same statement if it changed into another
3278 builtin; there might be new opportunities now. */
3279 if (gimple_code (stmt) != GIMPLE_CALL)
3280 {
3281 gsi_next (&i);
3282 continue;
3283 }
3284 callee = gimple_call_fndecl (stmt);
3285 if (!callee
3286 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3287 || DECL_FUNCTION_CODE (callee) == fcode)
3288 gsi_next (&i);
3289 }
3290 }
3291
3292 /* Delete unreachable blocks. */
3293 if (cfg_changed)
3294 todoflags |= TODO_cleanup_cfg;
3295
3296 return todoflags;
3297 }
3298
3299
3300 struct gimple_opt_pass pass_fold_builtins =
3301 {
3302 {
3303 GIMPLE_PASS,
3304 "fab", /* name */
3305 NULL, /* gate */
3306 execute_fold_all_builtins, /* execute */
3307 NULL, /* sub */
3308 NULL, /* next */
3309 0, /* static_pass_number */
3310 TV_NONE, /* tv_id */
3311 PROP_cfg | PROP_ssa, /* properties_required */
3312 0, /* properties_provided */
3313 0, /* properties_destroyed */
3314 0, /* todo_flags_start */
3315 TODO_dump_func
3316 | TODO_verify_ssa
3317 | TODO_update_ssa /* todo_flags_finish */
3318 }
3319 };