/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* This is used to track the current value of each variable.  */
static value *value_vector;


/* Dump lattice value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
      gcc_assert (DECL_P (var));
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME
      && SSA_NAME_VALUE (var)
      && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.const_val = SSA_NAME_VALUE (var);
    }
  else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
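
/* A minimal sketch of the rules above on hypothetical declarations;
   guarded out of the build since it exists purely for exposition.  */
#if 0
const int limit = 10;   /* Rule 2: readonly with a constant initializer,
                           so limit's SSA names default to CONSTANT 10.  */
static int counter;     /* Rule 3: static but not const, so its SSA
                           names default to UNKNOWN_VAL.  */

int
f (int arg)             /* Rule 1: ARG is a PARM_DECL, so arg_1
                           defaults to VARYING.  */
{
  int t = arg + limit;  /* Rule 4: T is a GIMPLE register defined by a
                           MODIFY_EXPR, so t_2 defaults to UNDEFINED.  */
  return t + counter;
}
#endif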

/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}


/* Set the lattice value for variable VAR to VAL.  Return true if VAL
   is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != CONSTANT);

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      gcc_assert (old->lattice_val != UNKNOWN_VAL);

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      gcc_assert (old->lattice_val != VARYING
                  || get_default_value (var).lattice_val == VARYING);
    }
  else if (val.lattice_val == CONSTANT)
    /* VARYING -> CONSTANT is an invalid state transition, except
       for objects which start off in a VARYING state.  */
    gcc_assert (old->lattice_val != VARYING
                || get_default_value (var).lattice_val == VARYING);

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      *old = val;
      return true;
    }

  return false;
}


/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}


/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  vuse_optype vuses;
  int found_constant = 0;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

      /* There should be no VUSE operands that are UNDEFINED.  */
      gcc_assert (val->lattice_val != UNDEFINED);

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
}
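
/* Illustrative application of the rules above to two hypothetical
   GIMPLE statements (the lattice values shown are assumptions):

     y_2 = x_1 + 4;     UNDEFINED if x_1 is UNDEFINED; CONSTANT if
                        x_1 is CONSTANT (found_constant is set);
                        VARYING otherwise.
     z_3 = 5;           No USE or VUSE operands at all, so CONSTANT.  */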

/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}


/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;
  sbitmap is_may_def;

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* Set of SSA_NAMEs that are defined by a V_MAY_DEF.  */
  is_may_def = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (is_may_def);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      /* Mark all V_MAY_DEF operands VARYING.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool is_varying = false;
          tree stmt = bsi_stmt (i);
          ssa_op_iter iter;
          tree def;

          get_stmt_operands (stmt);

          /* Get the default value for each DEF and V_MUST_DEF.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
                                     (SSA_OP_DEF | SSA_OP_VMUSTDEF))
            {
              if (get_value (def)->lattice_val == VARYING)
                is_varying = true;
            }

          /* Mark all V_MAY_DEF operands VARYING.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
            {
              get_value (def)->lattice_val = VARYING;
              SET_BIT (is_may_def, SSA_NAME_VERSION (def));
            }

          /* Statements other than MODIFY_EXPR, COND_EXPR and
             SWITCH_EXPR are not interesting for constant propagation.
             Mark them VARYING.  */
          if (TREE_CODE (stmt) != MODIFY_EXPR
              && TREE_CODE (stmt) != COND_EXPR
              && TREE_CODE (stmt) != SWITCH_EXPR)
            is_varying = true;

          DONT_SIMULATE_AGAIN (stmt) = is_varying;
        }
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          for (x = 0; x < PHI_NUM_ARGS (phi); x++)
            {
              var = PHI_ARG_DEF (phi, x);

              /* If one argument has a V_MAY_DEF, the result is
                 VARYING.  */
              if (TREE_CODE (var) == SSA_NAME)
                {
                  if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
                    {
                      val->lattice_val = VARYING;
                      SET_BIT (is_may_def, SSA_NAME_VERSION (PHI_RESULT (phi)));
                      break;
                    }
                }
            }

          DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
        }
    }

  sbitmap_free (is_may_def);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}


/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      value *val = get_value (tuse);

      if (val->lattice_val != CONSTANT)
        continue;

      if (TREE_CODE (stmt) == ASM_EXPR
          && !may_propagate_copy_into_asm (tuse))
        continue;

      SET_USE (use, val->const_val);

      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}


/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}


/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;
  unsigned int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
532 "\nSubstituing constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                mark_new_vars_to_rename (stmt, vars_to_rename);

              /* If we cleaned up EH information from the statement,
                 remove EH edges.  */
              if (maybe_clean_eh_stmt (stmt))
                tree_purge_dead_eh_edges (bb);

              modify_stmt (stmt);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* And transfer what we learned from VALUE_VECTOR into the
     SSA_NAMEs themselves.  This probably isn't terribly important
     since we probably constant propagated the values to their
     use sites above.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      value *value;

      if (!name)
        continue;

      value = get_value (name);
      if (value->lattice_val == CONSTANT
          && is_gimple_reg (name)
          && is_gimple_min_invariant (value->const_val))
        SSA_NAME_VALUE (name) = value->const_val;
    }
}


/* Free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  free (value_vector);
}


/* Compute the meet operator between VAL1 and VAL2:

     any M UNDEFINED   = any
     any M VARYING     = VARYING
     any M UNKNOWN_VAL = UNKNOWN_VAL
     Ci  M Cj          = Ci       if (i == j)
     Ci  M Cj          = VARYING  if (i != j)  */

static value
ccp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
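
/* A minimal sketch (guarded out of the build) of the meet operator on
   concrete inputs.  The constants are built with build_int_cst purely
   for exposition.  */
#if 0
static void
ccp_lattice_meet_example (void)
{
  value c3, c4, undef, m;

  c3.lattice_val = CONSTANT;
  c3.const_val = build_int_cst (integer_type_node, 3);
  c4.lattice_val = CONSTANT;
  c4.const_val = build_int_cst (integer_type_node, 4);
  undef.lattice_val = UNDEFINED;
  undef.const_val = NULL_TREE;

  m = ccp_lattice_meet (c3, undef);  /* CONSTANT 3: any M UNDEFINED = any.  */
  m = ccp_lattice_meet (c3, c3);     /* CONSTANT 3: Ci M Ci = Ci.  */
  m = ccp_lattice_meet (c3, c4);     /* VARYING: Ci M Cj, i != j.  */
}
#endif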


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet() with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  value new_val, *old_val;
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_NOT_INTERESTING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
         the new value is UNDEFINED, then we prevent the invalid
         transition by not calling set_lattice_value.  */
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.const_val = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree rdef = PHI_ARG_DEF (phi, i);
          value *rdef_val, val;

          if (is_gimple_min_invariant (rdef))
            {
              val.lattice_val = CONSTANT;
              val.const_val = rdef;
              rdef_val = &val;
            }
          else
            rdef_val = get_value (rdef);

          new_val = ccp_lattice_meet (new_val, *rdef_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, rdef, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}


/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
           || kind == tcc_comparison
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
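
/* Illustrative trace of ccp_fold on a hypothetical statement, assuming
   the lattice currently holds x_1 -> CONSTANT 3:

     y_2 = x_1 + 4;     kind == tcc_binary, so op0 is replaced by 3 and
                        fold_binary_to_constant (PLUS_EXPR, ...) yields
                        the invariant 7, which is returned as the
                        simplified RHS.  */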


/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}


/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

  gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
  gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
              || TREE_CODE (lhs) == SSA_NAME);

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = TREE_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
        && val.lattice_val == CONSTANT)
      {
        tree w = fold (build1 (VIEW_CONVERT_EXPR,
                               TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
                               val.const_val));

        orig_lhs = TREE_OPERAND (orig_lhs, 1);
        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }

    if (val.lattice_val == CONSTANT
        && TREE_CODE (orig_lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
                                 orig_lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  if (set_lattice_value (lhs, val))
    {
      *output_p = lhs;
      if (val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    {
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    def_to_varying (def);

  /* Mark all V_MAY_DEF operands VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    def_to_varying (def);

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.

   This implements the sparse conditional constant propagation algorithm
   of Wegman and Zadeck (see the references at the top of this file).
   ccp_initialize seeds the lattice, ssa_propagate then simulates only
   the statements and PHI nodes reachable over executable edges, calling
   ccp_visit_stmt and ccp_visit_phi_node until a fixed point is reached,
   and ccp_finalize substitutes the discovered constants and folds the
   result.  */

static void
execute_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  execute_ssa_ccp,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,                /* todo_flags_finish */
  0                                     /* letter */
};


/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
                         build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
1300 bits set and a BIT_IOR_EXPR to set the high order bits of the
1301 value. */
1302 for (i = 0, mask = 0; i < (var_size - field_size); i++)
1303 mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
1304
1305 wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
1306 build_int_cst (TREE_TYPE (var), mask));
1307 }
1308
1309 return fold (wide_val);
1310 }
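
/* Worked example for widen_bitfield.  Assume a 4-bit signed bitfield
   FIELD stored in a 32-bit variable VAR, with VAL = 11 (binary 1011,
   i.e. -5 when read as a 4-bit signed value):

     - field_size = 4, var_size = 32.
     - The sign bit (bit 3) of VAL is set and FIELD is signed, so we
       sign extend: mask = bits 31..4 = 0xfffffff0.
     - wide_val = VAL | mask = 0x0000000b | 0xfffffff0 = 0xfffffffb,
       which is -5 as a 32-bit integer.

   Had FIELD been unsigned, we would instead mask with the low four
   bits (0xf) and obtain 11.  */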


/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN_UNIT (elt_type))));
}
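
/* Worked example for the folding above.  Assume "int A[10]" and the
   dereference *(int *)((char *)&A + 8):

     - BASE is A, OFFSET is 8, ORIG_TYPE is int.
     - elt_size = 4, so idx = 8 / 4 = 2 with remainder 0.
     - The domain's low bound is 0, so the result is A[2].

   If OFFSET had been 6, the division by elt_size would leave a
   remainder and we would return NULL_TREE.  */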


/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for an array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      offset = t;
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}


/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}


/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
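
/* Worked example for the addition folding above.  Assume "int A[10]"
   and the GIMPLE expression &A[1] + 8 of type int *:

     - op0 is the ARRAY_REF A[1]; its index converts to the byte
       offset 1 * 4 = 4, so op1 becomes 4 + 8 = 12 and op0 becomes A.
     - maybe_fold_offset_to_array_ref then divides 12 by the element
       size 4 and yields A[3], which we return as &A[3].  */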


/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}


/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case MODIFY_EXPR:
      {
        tree len, rhs;

        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        rhs = TREE_OPERAND (def_stmt, 1);
        STRIP_NOPS (rhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          return get_strlen (rhs, length, visited);

        /* See if the RHS is a constant length.  */
        len = c_strlen (rhs, 1);
        if (len)
          {
            if (*length && simple_cst_equal (len, *length) != 1)
              return false;

            *length = len;
            return true;
          }

        break;
      }

    case PHI_NODE:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        int i;

        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          {
            tree arg = PHI_ARG_DEF (def_stmt, i);

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == PHI_RESULT (def_stmt))
              continue;

            if (!get_strlen (arg, length, visited))
              return false;
          }

        return true;
      }

    default:
      break;
    }

  return false;
}
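
/* Illustrative use-def walks for get_strlen, on hypothetical GIMPLE:

     s_1 = "abc";               MODIFY_EXPR case: c_strlen gives 3.
     s_2 = PHI <s_1, t_3>;      PHI_NODE case: succeeds only if t_3's
                                chain also yields 3; a different or
                                unknown length returns false.  */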


/* Fold builtin call FN in statement STMT.  If it cannot be simplified,
   return NULL_TREE.  Otherwise, return the simplified result.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only to builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (strlen_val, 0, sizeof (strlen_val));
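  /* STRLEN_ARG is a bit mask: bit I set (counting from zero) means the
     Ith call argument is a string whose length we want.  Each iteration
     tests the low bit, then shifts the mask right and advances to the
     next argument, so the loop ends once the mask runs out of bits.  */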
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* We can use the result only if it is a valid gimple value,
             or a cast of a valid gimple value.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
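
/* For example (illustrative only), for a statement such as

     n_2 = strlen (s_1);

   where the use-def chain shows s_1 defined as "hello", get_strlen
   recovers the length from that chain and the BUILT_IN_STRLEN case
   above returns the constant 5, which the caller then substitutes
   for the call:

     n_2 = 5;  */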


/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND
                         (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
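
/* A sketch (illustrative SSA names) of the *&VAR cleanup mentioned
   above: after CCP propagates &a into a dereference,

     p_1 = &a;
     x_2 = *p_1;    becomes    x_2 = *&a;

   and the fold_stmt_r walk then folds the redundant *& pair away,
   leaving simply

     x_2 = a;  */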


/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
    }

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  bsi_insert_before (si_p, stmts, BSI_SAME_STMT);

  return tmp;
}
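
/* Illustrative use (names invented): if a builtin folds to an
   expression that is not a valid GIMPLE right-hand side, say
   len_1 + 1, this routine gimplifies it into a new temporary

     tmp_2 = len_1 + 1;

   inserts that statement before *SI_P, and returns tmp_2 so that the
   caller can substitute the temporary into the original statement.  */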


/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (!set_rhs (stmtp, result))
            {
              result = convert_to_gimple_builtin (&i, result);
              if (result && !set_rhs (stmtp, result))
                abort ();
            }
          modify_stmt (*stmtp);
          if (maybe_clean_eh_stmt (*stmtp)
              && tree_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}
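
/* Example of the __builtin_constant_p resolution above (illustrative
   source, not part of the pass):

     if (__builtin_constant_p (n))
       ...

   If earlier folding never proved the call to be 1, this pass rewrites
   it to 0, so the guarded code can later be removed as unreachable.  */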


struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_rename_vars,                 /* todo_flags_finish */
  0                                     /* letter */
};