1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "timevar.h"
31 #include "diagnostic.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "tree-dump.h"
35 #include "langhooks.h"
36
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
41
42 Note carefully that after propagation the resulting statement
43 must still be a proper gimple statement. Right now we only
44 perform propagations we know will result in valid gimple
45 code. One day we'll want to generalize this code.
46
47 One class of common cases we handle is forward propagating a single use
48 variable into a COND_EXPR.
49
50 bb0:
51 x = a COND b;
52 if (x) goto ... else goto ...
53
54 Will be transformed into:
55
56 bb0:
57 if (a COND b) goto ... else goto ...
58
59 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
60
61 Or (assuming c1 and c2 are constants):
62
63 bb0:
64 x = a + c1;
65 if (x EQ/NEQ c2) goto ... else goto ...
66
67 Will be transformed into:
68
69 bb0:
70 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
71
72 Similarly for x = a - c1.
73
74 Or
75
76 bb0:
77 x = !a
78 if (x) goto ... else goto ...
79
80 Will be transformed into:
81
82 bb0:
83 if (a == 0) goto ... else goto ...
84
85 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
86 For these cases, we propagate A into all COND_EXPRs that use X
87 (possibly more than one).
88
89 Or
90
91 bb0:
92 x = (typecast) a
93 if (x) goto ... else goto ...
94
95 Will be transformed into:
96
97 bb0:
98 if (a != 0) goto ... else goto ...
99
100 (Assuming a is of integral type and x is of boolean type, or x is
101 of integral type and a is of boolean type.)
102
103 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
104 For these cases, we propagate A into all COND_EXPRs that use X
105 (possibly more than one).
106
107 In addition to eliminating the variable and the statement which assigns
108 a value to the variable, we may be able to later thread the jump without
109 adding insane complexity in the dominator optimizer.
110
111 Also note these transformations can cascade. We handle this by having
112 a worklist of COND_EXPR statements to examine. As we make a change to
113 a statement, we put it back on the worklist to examine on the next
114 iteration of the main loop.
115
116 A second class of propagation opportunities arises for ADDR_EXPR
117 nodes.
118
119 ptr = &x->y->z;
120 res = *ptr;
121
122 Will get turned into
123
124 res = x->y->z;
125
126 Or
127
128 ptr = &x[0];
129 ptr2 = ptr + <constant>;
130
131 Will get turned into
132
133 ptr2 = &x[constant/elementsize];
134
135 Or
136
137 ptr = &x[0];
138 offset = index * element_size;
139 offset_p = (pointer) offset;
140 ptr2 = ptr + offset_p
141
142 Will get turned into:
143
144 ptr2 = &x[index];
145
146 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions so
147 that the cast can be removed, and we propagate a {NOT_EXPR,NEG_EXPR}
148 into a subsequent {NOT_EXPR,NEG_EXPR} so that the pair cancels.
149
150 This will (of course) be extended as other needs arise. */
151
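/* As a rough C-level illustration of the first class of transformations
   described above (a hypothetical example; the function and variable
   names are made up and nothing below is part of this pass's interface):

     int
     example (int a, int b)
     {
       int x = a < b;
       if (x)
         return 1;
       return 0;
     }

   After gimplification the temporary holding "a < b" has a single use in
   the branch condition, so the test is rewritten to use "a < b" directly
   and the now-dead assignment to the temporary can be removed.  */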
152
153 /* Set to true if we delete EH edges during the optimization. */
154 static bool cfg_changed;
155
156
157 /* Given an SSA_NAME VAR, return true if and only if VAR is defined by
158 a comparison. */
159
160 static bool
161 ssa_name_defined_by_comparison_p (tree var)
162 {
163 tree def = SSA_NAME_DEF_STMT (var);
164
165 if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
166 {
167 tree rhs = GIMPLE_STMT_OPERAND (def, 1);
168 return COMPARISON_CLASS_P (rhs);
169 }
170
171 return false;
172 }
173
174 /* Forward propagate a single-use variable into COND once. Return a
175 new condition if successful. Return NULL_TREE otherwise. */
176
177 static tree
178 forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
179 {
180 tree new_cond = NULL_TREE;
181 enum tree_code cond_code = TREE_CODE (cond);
182 tree test_var = NULL_TREE;
183 tree def;
184 tree def_rhs;
185
186 /* If the condition is not a lone variable or an equality test of an
187 SSA_NAME against an integral constant, then we do not have an
188 optimizable case.
189
190 Note these conditions also ensure the COND_EXPR has no
191 virtual operands or other side effects. */
192 if (cond_code != SSA_NAME
193 && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
194 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
195 && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
196 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
197 return NULL_TREE;
198
199 /* Extract the single variable used in the test into TEST_VAR. */
200 if (cond_code == SSA_NAME)
201 test_var = cond;
202 else
203 test_var = TREE_OPERAND (cond, 0);
204
205 /* Now get the defining statement for TEST_VAR. Skip this case if
206 it's not defined by some GIMPLE_MODIFY_STMT. */
207 def = SSA_NAME_DEF_STMT (test_var);
208 if (TREE_CODE (def) != GIMPLE_MODIFY_STMT)
209 return NULL_TREE;
210
211 def_rhs = GIMPLE_STMT_OPERAND (def, 1);
212
213 /* If TEST_VAR is set by adding or subtracting a constant
214 from an SSA_NAME, then it is interesting to us as we
215 can adjust the constant in the conditional and thus
216 eliminate the arithmetic operation. */
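/* A hypothetical worked example: if TEST_VAR is set by x = a + 5 and
   the condition is x == 7, the condition can be rewritten as a == 2,
   i.e. the constant 5 is folded into the comparison as 7 - 5.  */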
217 if (TREE_CODE (def_rhs) == PLUS_EXPR
218 || TREE_CODE (def_rhs) == MINUS_EXPR)
219 {
220 tree op0 = TREE_OPERAND (def_rhs, 0);
221 tree op1 = TREE_OPERAND (def_rhs, 1);
222
223 /* The first operand must be an SSA_NAME and the second
224 operand must be a constant. */
225 if (TREE_CODE (op0) != SSA_NAME
226 || !CONSTANT_CLASS_P (op1)
227 || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
228 return NULL_TREE;
229
230 /* Don't propagate if the first operand occurs in
231 an abnormal PHI. */
232 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
233 return NULL_TREE;
234
235 if (has_single_use (test_var))
236 {
237 enum tree_code new_code;
238 tree t;
239
240 /* If the variable was defined via X + C, then we must
241 subtract C from the constant in the conditional.
242 Otherwise we add C to the constant in the
243 conditional. The result must fold into a valid
244 gimple operand to be optimizable. */
245 new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
246 ? MINUS_EXPR : PLUS_EXPR);
247 t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
248 if (!is_gimple_val (t))
249 return NULL_TREE;
250
251 new_cond = build2 (cond_code, boolean_type_node, op0, t);
252 }
253 }
254
255 /* These cases require either a naked SSA_NAME or a
256 comparison of an SSA_NAME against zero or one. */
257 else if (TREE_CODE (cond) == SSA_NAME
258 || integer_zerop (TREE_OPERAND (cond, 1))
259 || integer_onep (TREE_OPERAND (cond, 1)))
260 {
261 /* If TEST_VAR is set from a relational operation
262 between two SSA_NAMEs or a combination of an SSA_NAME
263 and a constant, then it is interesting. */
264 if (COMPARISON_CLASS_P (def_rhs))
265 {
266 tree op0 = TREE_OPERAND (def_rhs, 0);
267 tree op1 = TREE_OPERAND (def_rhs, 1);
268
269 /* Both operands of DEF_RHS must be SSA_NAMEs or
270 constants. */
271 if ((TREE_CODE (op0) != SSA_NAME
272 && !is_gimple_min_invariant (op0))
273 || (TREE_CODE (op1) != SSA_NAME
274 && !is_gimple_min_invariant (op1)))
275 return NULL_TREE;
276
277 /* Don't propagate if the first operand occurs in
278 an abnormal PHI. */
279 if (TREE_CODE (op0) == SSA_NAME
280 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
281 return NULL_TREE;
282
283 /* Don't propagate if the second operand occurs in
284 an abnormal PHI. */
285 if (TREE_CODE (op1) == SSA_NAME
286 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
287 return NULL_TREE;
288
289 if (has_single_use (test_var))
290 {
291 /* TEST_VAR was set from a relational operator. */
292 new_cond = build2 (TREE_CODE (def_rhs),
293 boolean_type_node, op0, op1);
294
295 /* Invert the conditional if necessary. */
296 if ((cond_code == EQ_EXPR
297 && integer_zerop (TREE_OPERAND (cond, 1)))
298 || (cond_code == NE_EXPR
299 && integer_onep (TREE_OPERAND (cond, 1))))
300 {
301 new_cond = invert_truthvalue (new_cond);
302
303 /* If we did not get a simple relational
304 expression or bare SSA_NAME, then we can
305 not optimize this case. */
306 if (!COMPARISON_CLASS_P (new_cond)
307 && TREE_CODE (new_cond) != SSA_NAME)
308 new_cond = NULL_TREE;
309 }
310 }
311 }
312
313 /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
314 is interesting. */
315 else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
316 {
317 enum tree_code new_code;
318
319 def_rhs = TREE_OPERAND (def_rhs, 0);
320
321 /* DEF_RHS must be an SSA_NAME or constant. */
322 if (TREE_CODE (def_rhs) != SSA_NAME
323 && !is_gimple_min_invariant (def_rhs))
324 return NULL_TREE;
325
326 /* Don't propagate if the operand occurs in
327 an abnormal PHI. */
328 if (TREE_CODE (def_rhs) == SSA_NAME
329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
330 return NULL_TREE;
331
332 if (cond_code == SSA_NAME
333 || (cond_code == NE_EXPR
334 && integer_zerop (TREE_OPERAND (cond, 1)))
335 || (cond_code == EQ_EXPR
336 && integer_onep (TREE_OPERAND (cond, 1))))
337 new_code = EQ_EXPR;
338 else
339 new_code = NE_EXPR;
340
341 new_cond = build2 (new_code, boolean_type_node, def_rhs,
342 fold_convert (TREE_TYPE (def_rhs),
343 integer_zero_node));
344 }
345
346 /* If TEST_VAR was set from a cast of an integral type
347 to a boolean type, or a cast of a boolean to an
348 integral type, then it is interesting. */
349 else if (TREE_CODE (def_rhs) == NOP_EXPR
350 || TREE_CODE (def_rhs) == CONVERT_EXPR)
351 {
352 tree outer_type;
353 tree inner_type;
354
355 outer_type = TREE_TYPE (def_rhs);
356 inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));
357
358 if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
359 && INTEGRAL_TYPE_P (inner_type))
360 || (TREE_CODE (inner_type) == BOOLEAN_TYPE
361 && INTEGRAL_TYPE_P (outer_type)))
362 ;
363 else if (INTEGRAL_TYPE_P (outer_type)
364 && INTEGRAL_TYPE_P (inner_type)
365 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
366 && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
367 0)))
368 ;
369 else
370 return NULL_TREE;
371
372 /* Don't propagate if the operand occurs in
373 an abnormal PHI. */
374 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
375 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
376 (def_rhs, 0)))
377 return NULL_TREE;
378
379 if (has_single_use (test_var))
380 {
381 enum tree_code new_code;
382 tree new_arg;
383
384 if (cond_code == SSA_NAME
385 || (cond_code == NE_EXPR
386 && integer_zerop (TREE_OPERAND (cond, 1)))
387 || (cond_code == EQ_EXPR
388 && integer_onep (TREE_OPERAND (cond, 1))))
389 new_code = NE_EXPR;
390 else
391 new_code = EQ_EXPR;
392
393 new_arg = TREE_OPERAND (def_rhs, 0);
394 new_cond = build2 (new_code, boolean_type_node, new_arg,
395 fold_convert (TREE_TYPE (new_arg),
396 integer_zero_node));
397 }
398 }
399 }
400
401 *test_var_p = test_var;
402 return new_cond;
403 }
404
405 /* COND is a condition of the form:
406
407 x == const or x != const
408
409 Look back to x's defining statement and see if x is defined as
410
411 x = (type) y;
412
413 If const is unchanged when we convert it to type, then we can build
414 the equivalent expression:
415
416
417 y == const or y != const
418
419 This may allow further optimizations.
420
421 Return the equivalent comparison or NULL if no such equivalent comparison
422 was found. */
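/* For instance (a hypothetical C-level sketch; the names are made up):

     int
     example (unsigned char y)
     {
       int x = y;
       return x == 3;
     }

   The widening cast from unsigned char to int cannot change the outcome
   of the comparison, because the constant 3 is representable in both
   types, so the test can be rewritten as y == 3.  */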
423
424 static tree
425 find_equivalent_equality_comparison (tree cond)
426 {
427 tree op0 = TREE_OPERAND (cond, 0);
428 tree op1 = TREE_OPERAND (cond, 1);
429 tree def_stmt = SSA_NAME_DEF_STMT (op0);
430
431 while (def_stmt
432 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
433 && TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == SSA_NAME)
434 def_stmt = SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (def_stmt, 1));
435
436 /* OP0 might have been a parameter, so first make sure it
437 was defined by a GIMPLE_MODIFY_STMT. */
438 if (def_stmt && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT)
439 {
440 tree def_rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
441
442 /* If either operand to the comparison is a pointer to
443 a function, then we can not apply this optimization
444 as some targets require function pointers to be
445 canonicalized and in this case this optimization would
446 eliminate a necessary canonicalization. */
447 if ((POINTER_TYPE_P (TREE_TYPE (op0))
448 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
449 || (POINTER_TYPE_P (TREE_TYPE (op1))
450 && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
451 return NULL;
452
453 /* Now make sure the RHS of the GIMPLE_MODIFY_STMT is a typecast. */
454 if ((TREE_CODE (def_rhs) == NOP_EXPR
455 || TREE_CODE (def_rhs) == CONVERT_EXPR)
456 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
457 {
458 tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
459 tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
460 tree new;
461
462 if (TYPE_PRECISION (def_rhs_inner_type)
463 > TYPE_PRECISION (TREE_TYPE (def_rhs)))
464 return NULL;
465
466 /* If the inner type of the conversion is a pointer to
467 a function, then we can not apply this optimization
468 as some targets require function pointers to be
469 canonicalized. This optimization would result in
470 canonicalization of the pointer when it was not originally
471 needed/intended. */
472 if (POINTER_TYPE_P (def_rhs_inner_type)
473 && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
474 return NULL;
475
476 /* What we want to prove is that if we convert OP1 to
477 the type of the object inside the NOP_EXPR, then the
478 result is still equivalent to OP1.
479
480 If that is true, then build and return a new equivalent
481 condition which uses the source of the typecast and the
482 new constant (which has only changed its type). */
483 new = fold_build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
484 STRIP_USELESS_TYPE_CONVERSION (new);
485 if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
486 return build2 (TREE_CODE (cond), TREE_TYPE (cond),
487 def_rhs_inner, new);
488 }
489 }
490 return NULL;
491 }
492
493 /* COND_EXPR is a COND_EXPR.
494 STMT is the statement containing COND_EXPR.
495
496 This routine attempts to find equivalent forms of the condition
497 which we may be able to optimize better. */
498
499 static void
500 simplify_cond (tree cond_expr, tree stmt)
501 {
502 tree cond = COND_EXPR_COND (cond_expr);
503
504 if (COMPARISON_CLASS_P (cond))
505 {
506 tree op0 = TREE_OPERAND (cond, 0);
507 tree op1 = TREE_OPERAND (cond, 1);
508
509 if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
510 {
511 /* First see if we have test of an SSA_NAME against a constant
512 where the SSA_NAME is defined by an earlier typecast which
513 is irrelevant when performing tests against the given
514 constant. */
515 if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
516 {
517 tree new_cond = find_equivalent_equality_comparison (cond);
518
519 if (new_cond)
520 {
521 COND_EXPR_COND (cond_expr) = new_cond;
522 update_stmt (stmt);
523 }
524 }
525 }
526 }
527 }
528
529 /* Forward propagate a single-use variable into COND_EXPR as many
530 times as possible. */
531
532 static void
533 forward_propagate_into_cond (tree cond_expr, tree stmt)
534 {
535 gcc_assert (TREE_CODE (cond_expr) == COND_EXPR);
536
537 while (1)
538 {
539 tree test_var = NULL_TREE;
540 tree cond = COND_EXPR_COND (cond_expr);
541 tree new_cond = forward_propagate_into_cond_1 (cond, &test_var);
542
543 /* Return if unsuccessful. */
544 if (new_cond == NULL_TREE)
545 break;
546
547 /* Dump details. */
548 if (dump_file && (dump_flags & TDF_DETAILS))
549 {
550 fprintf (dump_file, " Replaced '");
551 print_generic_expr (dump_file, cond, dump_flags);
552 fprintf (dump_file, "' with '");
553 print_generic_expr (dump_file, new_cond, dump_flags);
554 fprintf (dump_file, "'\n");
555 }
556
557 COND_EXPR_COND (cond_expr) = new_cond;
558 update_stmt (stmt);
559
560 if (has_zero_uses (test_var))
561 {
562 tree def = SSA_NAME_DEF_STMT (test_var);
563 block_stmt_iterator bsi = bsi_for_stmt (def);
564 bsi_remove (&bsi, true);
565 release_defs (def);
566 }
567 }
568
569 /* There are further simplifications that can be performed
570 on COND_EXPRs. Specifically, when comparing an SSA_NAME
571 against a constant where the SSA_NAME is the result of a
572 conversion. Perhaps this should be folded into the rest
573 of the COND_EXPR simplification code. */
574 simplify_cond (cond_expr, stmt);
575 }
576
577 /* We've just substituted an ADDR_EXPR into stmt. Update all the
578 relevant data structures to match. */
579
580 static void
581 tidy_after_forward_propagate_addr (tree stmt)
582 {
583 /* We may have turned a trapping insn into a non-trapping insn. */
584 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
585 && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
586 cfg_changed = true;
587
588 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
589 recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));
590
591 mark_symbols_for_renaming (stmt);
592 }
593
594 /* STMT defines LHS, which contains the address of the 0th element
595 in an array. USE_STMT uses LHS to compute the address of an
596 arbitrary element within the array. The (variable) byte offset
597 of the element is contained in OFFSET.
598
599 We walk back through the use-def chains of OFFSET to verify that
600 it is indeed computing the offset of an element within the array
601 and extract the index corresponding to the given byte offset.
602
603 We then try to fold the entire address expression into a form
604 &array[index].
605
606 If we are successful, we replace the right hand side of USE_STMT
607 with the new address computation. */
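/* A hypothetical C-level sketch of the pattern this handles (the names
   are made up for illustration):

     long
     example (long x[8], long i)
     {
       long *ptr = &x[0];
       long *ptr2 = ptr + i;
       return *ptr2;
     }

   At the GIMPLE level the pointer addition is expressed roughly as
   offset = i * sizeof (long); ptr2 = ptr + offset.  Once the
   multiplication by the element size is recognized, the address is
   rewritten as &x[i].  */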
608
609 static bool
610 forward_propagate_addr_into_variable_array_index (tree offset, tree lhs,
611 tree stmt, tree use_stmt)
612 {
613 tree index;
614
615 /* The offset must be defined by a simple GIMPLE_MODIFY_STMT statement. */
616 if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
617 return false;
618
619 /* The RHS of the statement which defines OFFSET must be a gimple
620 cast of another SSA_NAME. */
621 offset = GIMPLE_STMT_OPERAND (offset, 1);
622 if (!is_gimple_cast (offset))
623 return false;
624
625 offset = TREE_OPERAND (offset, 0);
626 if (TREE_CODE (offset) != SSA_NAME)
627 return false;
628
629 /* Get the defining statement of the offset before type
630 conversion. */
631 offset = SSA_NAME_DEF_STMT (offset);
632
633 /* The statement which defines OFFSET before type conversion
634 must be a simple GIMPLE_MODIFY_STMT. */
635 if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
636 return false;
637
638 /* The RHS of the statement which defines OFFSET must be a
639 multiplication of an object by the size of the array elements.
640 This implicitly verifies that the size of the array elements
641 is constant. */
642 offset = GIMPLE_STMT_OPERAND (offset, 1);
643 if (TREE_CODE (offset) != MULT_EXPR
644 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
645 || !simple_cst_equal (TREE_OPERAND (offset, 1),
646 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (lhs)))))
647 return false;
648
649 /* The first operand to the MULT_EXPR is the desired index. */
650 index = TREE_OPERAND (offset, 0);
651
652 /* Replace the pointer addition with array indexing. */
653 GIMPLE_STMT_OPERAND (use_stmt, 1)
654 = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
655 TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
656 = index;
657
658 /* That should have created gimple, so there is no need to
659 record information to undo the propagation. */
660 fold_stmt_inplace (use_stmt);
661 tidy_after_forward_propagate_addr (use_stmt);
662 return true;
663 }
664
665 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
666
667 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
668 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
669 node or for recovery of array indexing from pointer arithmetic.
670
671 CHANGED is an optional pointer to a boolean variable set to true if
672 either the LHS or RHS was changed in the USE_STMT.
673
674 Return true if the propagation was successful. Note that even a
675 partially successful propagation may still have changed USE_STMT. */
676
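/* For example (a hypothetical sketch; the struct and names are made up):

     struct s { int y; };

     int
     example (struct s *x)
     {
       int *ptr = &x->y;
       return *ptr;
     }

   Propagating the ADDR_EXPR &x->y into the single dereference lets the
   load be expressed directly as x->y, and the pointer temporary can then
   be removed as dead code.  */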
677 static bool
678 forward_propagate_addr_expr_1 (tree stmt, tree use_stmt, bool *changed)
679 {
680 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
681 tree lhs, rhs, array_ref;
682
683 /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
684 ADDR_EXPR will not appear on the LHS. */
685 lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
686 while (TREE_CODE (lhs) == COMPONENT_REF || TREE_CODE (lhs) == ARRAY_REF)
687 lhs = TREE_OPERAND (lhs, 0);
688
689 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
690 propagate the ADDR_EXPR into the use of NAME and fold the result. */
691 if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
692 {
693 /* This should always succeed in creating gimple, so there is
694 no need to save enough state to undo this propagation. */
695 TREE_OPERAND (lhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
696 fold_stmt_inplace (use_stmt);
697 tidy_after_forward_propagate_addr (use_stmt);
698 if (changed)
699 *changed = true;
700 }
701
702 /* Trivial case. The use statement could be a trivial copy. We
703 go ahead and handle that case here since it's trivial and
704 removes the need to run copy-prop before this pass to get
705 the best results. Also note that by handling this case here
706 we can catch some cascading effects, i.e. the single use is
707 in a copy, and the copy is used later by a single INDIRECT_REF
708 for example. */
709 else if (TREE_CODE (lhs) == SSA_NAME
710 && GIMPLE_STMT_OPERAND (use_stmt, 1) == name)
711 {
712 GIMPLE_STMT_OPERAND (use_stmt, 1)
713 = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
714 tidy_after_forward_propagate_addr (use_stmt);
715 if (changed)
716 *changed = true;
717 return true;
718 }
719
720 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
721 nodes from the RHS. */
722 rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
723 while (TREE_CODE (rhs) == COMPONENT_REF
724 || TREE_CODE (rhs) == ARRAY_REF
725 || TREE_CODE (rhs) == ADDR_EXPR)
726 rhs = TREE_OPERAND (rhs, 0);
727
728 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
729 propagate the ADDR_EXPR into the use of NAME and fold the result. */
730 if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
731 {
732 /* This should always succeed in creating gimple, so there is
733 no need to save enough state to undo this propagation. */
734 TREE_OPERAND (rhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
735 fold_stmt_inplace (use_stmt);
736 tidy_after_forward_propagate_addr (use_stmt);
737 if (changed)
738 *changed = true;
739 return true;
740 }
741
742 /* The remaining cases are all for turning pointer arithmetic into
743 array indexing. They only apply when we have the address of
744 element zero in an array. If that is not the case then there
745 is nothing to do. */
746 array_ref = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0);
747 if (TREE_CODE (array_ref) != ARRAY_REF
748 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
749 || !integer_zerop (TREE_OPERAND (array_ref, 1)))
750 return false;
751
752 /* The use of the ADDR_EXPR must be a PLUS_EXPR, or else there
753 is nothing to do. */
754 if (TREE_CODE (rhs) != PLUS_EXPR)
755 return false;
756
757 /* Try to optimize &x[0] + C where C is a multiple of the size
758 of the elements in X into &x[C/element size]. */
759 if (TREE_OPERAND (rhs, 0) == name
760 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
761 {
762 tree orig = unshare_expr (rhs);
763 TREE_OPERAND (rhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
764
765 /* If folding succeeds, then we have just exposed new variables
766 in USE_STMT which will need to be renamed. If folding fails,
767 then we need to put everything back the way it was. */
768 if (fold_stmt_inplace (use_stmt))
769 {
770 tidy_after_forward_propagate_addr (use_stmt);
771 if (changed)
772 *changed = true;
773 return true;
774 }
775 else
776 {
777 GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
778 update_stmt (use_stmt);
779 return false;
780 }
781 }
782
783 /* Try to optimize &x[0] + OFFSET, where OFFSET is defined by
784 multiplying an index by the size of the array elements and
785 then converting the result into the proper type for the
786 pointer arithmetic. */
787 if (TREE_OPERAND (rhs, 0) == name
788 && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
789 /* Avoid problems with IVopts creating PLUS_EXPRs with a
790 different type than their operands. */
791 && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
792 {
793 bool res;
794 tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
795
796 res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
797 stmt, use_stmt);
798 if (res && changed)
799 *changed = true;
800 return res;
801 }
802
803 /* Same as the previous case, except the operands of the PLUS_EXPR
804 were reversed. */
805 if (TREE_OPERAND (rhs, 1) == name
806 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
807 /* Avoid problems with IVopts creating PLUS_EXPRs with a
808 different type than their operands. */
809 && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
810 {
811 bool res;
812 tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
813 res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
814 stmt, use_stmt);
815 if (res && changed)
816 *changed = true;
817 return res;
818 }
819 return false;
820 }
821
822 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
823 SOME is a pointer to a boolean value indicating whether we
824 propagated the address expression anywhere.
825
826 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
827 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
828 node or for recovery of array indexing from pointer arithmetic.
829 Returns true if all uses have been propagated into. */
830
831 static bool
832 forward_propagate_addr_expr (tree stmt, bool *some)
833 {
834 int stmt_loop_depth = bb_for_stmt (stmt)->loop_depth;
835 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
836 imm_use_iterator iter;
837 tree use_stmt;
838 bool all = true;
839
840 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
841 {
842 bool result;
843
844 /* If the use is not in a simple assignment statement, then
845 there is nothing we can do. */
846 if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
847 {
848 all = false;
849 continue;
850 }
851
852 /* If the use is in a deeper loop nest, then we do not want
853 to propagate the ADDR_EXPR into the loop, as that would likely
854 add expression evaluations inside the loop. */
855 if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
856 {
857 all = false;
858 continue;
859 }
860
861 push_stmt_changes (&use_stmt);
862
863 result = forward_propagate_addr_expr_1 (stmt, use_stmt, some);
864 *some |= result;
865 all &= result;
866
867 pop_stmt_changes (&use_stmt);
868 }
869
870 return all;
871 }
872
873 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
874 If so, we can change STMT into lhs = y which can later be copy
875 propagated. Similarly for negation.
876
877 This could trivially be formulated as a forward propagation
878 to immediate uses. However, we already had an implementation
879 from DOM which used backward propagation via the use-def links.
880
881 It turns out that backward propagation is actually faster as
882 there's less work to do for each NOT/NEG expression we find.
883 Backwards propagation needs to look at the statement in a single
884 backlink. Forward propagation needs to look at potentially more
885 than one forward link. */
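/* A hypothetical worked example:

     int
     example (int b)
     {
       int y = ~b;
       int x = ~y;
       return x;
     }

   The statement computing x can be rewritten as x = b, which copy
   propagation can then clean up.  The same applies to a pair of
   negations.  */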
886
887 static void
888 simplify_not_neg_expr (tree stmt)
889 {
890 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
891 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
892
893 /* See if the RHS_DEF_STMT has the same form as our statement. */
894 if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
895 && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
896 {
897 tree rhs_def_operand =
898 TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);
899
900 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
901 if (TREE_CODE (rhs_def_operand) == SSA_NAME
902 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
903 {
904 GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
905 update_stmt (stmt);
906 }
907 }
908 }
909
910 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
911 the condition which we may be able to optimize better. */
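/* For example (a hypothetical sketch; the names are made up):

     int
     example (unsigned char c)
     {
       switch ((int) c)
         {
         case 1:  return 10;
         case 2:  return 20;
         default: return 0;
         }
     }

   The widening cast from unsigned char to int preserves every value of
   the source type, so the switch can be performed directly on c.  */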
912
913 static void
914 simplify_switch_expr (tree stmt)
915 {
916 tree cond = SWITCH_COND (stmt);
917 tree def, to, ti;
918
919 /* The optimization that we really care about is removing unnecessary
920 casts. That will let us do much better in propagating the inferred
921 constant at the switch target. */
922 if (TREE_CODE (cond) == SSA_NAME)
923 {
924 def = SSA_NAME_DEF_STMT (cond);
925 if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
926 {
927 def = GIMPLE_STMT_OPERAND (def, 1);
928 if (TREE_CODE (def) == NOP_EXPR)
929 {
930 int need_precision;
931 bool fail;
932
933 def = TREE_OPERAND (def, 0);
934
935 #ifdef ENABLE_CHECKING
936 /* ??? Why was Jeff testing this? We are gimple... */
937 gcc_assert (is_gimple_val (def));
938 #endif
939
940 to = TREE_TYPE (cond);
941 ti = TREE_TYPE (def);
942
943 /* If we have an extension that preserves value, then we
944 can copy the source value into the switch. */
945
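/* A hypothetical worked example: for a cast from unsigned char (8 bits)
   to int, need_precision starts at 8 and is bumped to 9 because the
   target type is signed while the source is unsigned; int has at least
   16 bits of precision, so the copy is safe.  A cast from int to
   unsigned int fails the check, since a negative source value would not
   be preserved.  */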
946 need_precision = TYPE_PRECISION (ti);
947 fail = false;
948 if (! INTEGRAL_TYPE_P (ti))
949 fail = true;
950 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
951 fail = true;
952 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
953 need_precision += 1;
954 if (TYPE_PRECISION (to) < need_precision)
955 fail = true;
956
957 if (!fail)
958 {
959 SWITCH_COND (stmt) = def;
960 update_stmt (stmt);
961 }
962 }
963 }
964 }
965 }
966
967 /* Main entry point for the forward propagation optimizer. */
968
969 static unsigned int
970 tree_ssa_forward_propagate_single_use_vars (void)
971 {
972 basic_block bb;
973 unsigned int todoflags = 0;
974
975 cfg_changed = false;
976
977 FOR_EACH_BB (bb)
978 {
979 block_stmt_iterator bsi;
980
981 /* Note we update BSI within the loop as necessary. */
982 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
983 {
984 tree stmt = bsi_stmt (bsi);
985
986 /* If this statement sets an SSA_NAME to an address,
987 try to propagate the address into the uses of the SSA_NAME. */
988 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
989 {
990 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
991 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
992
993
994 if (TREE_CODE (lhs) != SSA_NAME)
995 {
996 bsi_next (&bsi);
997 continue;
998 }
999
1000 if (TREE_CODE (rhs) == ADDR_EXPR)
1001 {
1002 bool some = false;
1003 if (forward_propagate_addr_expr (stmt, &some))
1004 {
1005 release_defs (stmt);
1006 bsi_remove (&bsi, true);
1007 }
1008 else
1009 bsi_next (&bsi);
1010 if (some)
1011 todoflags |= TODO_update_smt_usage;
1012 }
1013 else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
1014 || TREE_CODE (rhs) == NEGATE_EXPR)
1015 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1016 {
1017 simplify_not_neg_expr (stmt);
1018 bsi_next (&bsi);
1019 }
1020 else if (TREE_CODE (rhs) == COND_EXPR)
1021 {
1022 forward_propagate_into_cond (rhs, stmt);
1023 bsi_next (&bsi);
1024 }
1025 else
1026 bsi_next (&bsi);
1027 }
1028 else if (TREE_CODE (stmt) == SWITCH_EXPR)
1029 {
1030 simplify_switch_expr (stmt);
1031 bsi_next (&bsi);
1032 }
1033 else if (TREE_CODE (stmt) == COND_EXPR)
1034 {
1035 forward_propagate_into_cond (stmt, stmt);
1036 bsi_next (&bsi);
1037 }
1038 else
1039 bsi_next (&bsi);
1040 }
1041 }
1042
1043 if (cfg_changed)
1044 todoflags |= TODO_cleanup_cfg;
1045 return todoflags;
1046 }
1047
1048
1049 static bool
1050 gate_forwprop (void)
1051 {
1052 return true;
1053 }
1054
1055 struct tree_opt_pass pass_forwprop = {
1056 "forwprop", /* name */
1057 gate_forwprop, /* gate */
1058 tree_ssa_forward_propagate_single_use_vars, /* execute */
1059 NULL, /* sub */
1060 NULL, /* next */
1061 0, /* static_pass_number */
1062 TV_TREE_FORWPROP, /* tv_id */
1063 PROP_cfg | PROP_ssa, /* properties_required */
1064 0, /* properties_provided */
1065 0, /* properties_destroyed */
1066 0, /* todo_flags_start */
1067 TODO_dump_func
1068 | TODO_ggc_collect
1069 | TODO_update_ssa
1070 | TODO_verify_ssa, /* todo_flags_finish */
1071 0 /* letter */
1072 };