decl.c (elaborate_expression_1): Remove GNAT_EXPR parameter and move check for static...
[gcc.git] / gcc / tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "ggc.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "timevar.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "gimple.h"
37
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
42
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
45
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
49
50 Will be transformed into:
51
52 bb0:
53 if (a COND b) goto ... else goto ...
54
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56
57 Or (assuming c1 and c2 are constants):
58
59 bb0:
60 x = a + c1;
61 if (x EQ/NEQ c2) goto ... else goto ...
62
63 Will be transformed into:
64
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67
68 Similarly for x = a - c1.
69
70 Or
71
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
75
76 Will be transformed into:
77
78 bb0:
79 if (a == 0) goto ... else goto ...
80
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
84
85 Or
86
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
90
91 Will be transformed into:
92
93 bb0:
94 if (a != 0) goto ... else goto ...
95
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
98
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
102
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
106
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
111
112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
114
115 ptr = &x->y->z;
116 res = *ptr;
117
118 Will get turned into
119
120 res = x->y->z;
121
122 Or
123 ptr = (type1*)&type2var;
124 res = *ptr
125
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
129
130 Or
131
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
134
135 Will get turned into
136
137 ptr2 = &x[constant/elementsize];
138
139 Or
140
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
145
146 Will get turned into:
147
148 ptr2 = &x[index];
149
150 Or
151 ssa = (int) decl
152 res = ssa & 1
153
154 Provided that decl has known alignment >= 2, will get turned into
155
156 res = 0
157
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
161
162 This will (of course) be extended as other needs arise. */
163
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
165
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
168
169 static tree rhs_to_tree (tree type, gimple stmt);
170
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
176
177 static gimple
178 get_prop_dest_stmt (tree name, tree *final_name_p)
179 {
180 use_operand_p use;
181 gimple use_stmt;
182
183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
186 return NULL;
187
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
191 break;
192
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
195 } while (1);
196
197 if (final_name_p)
198 *final_name_p = name;
199
200 return use_stmt;
201 }
202
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
210
211 static gimple
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
213 {
214 bool single_use = true;
215
216 do {
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
218
219 if (!has_single_use (name))
220 {
221 single_use = false;
222 if (single_use_only)
223 return NULL;
224 }
225
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
228 return NULL;
229
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
232 {
233 tree rhs;
234
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
237
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
246 else
247 return def_stmt;
248 }
249 else
250 {
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
253 }
254 } while (1);
255 }
256
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
259
260 static bool
261 can_propagate_from (gimple def_stmt)
262 {
263 use_operand_p use_p;
264 ssa_op_iter iter;
265
266 gcc_assert (is_gimple_assign (def_stmt));
267
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
270 return false;
271
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
275 return false;
276
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
280 return true;
281
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
285 return false;
286
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
292 {
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
296 return false;
297 }
298
299 return true;
300 }
301
302 /* Remove a copy chain ending in NAME along the defs but not
303 further or including UP_TO_STMT. If NAME was replaced in
304 its only use then this function can be used to clean up
305 dead stmts. Returns true if UP_TO_STMT can be removed
306 as well, otherwise false. */
307
308 static bool
309 remove_prop_source_from_use (tree name, gimple up_to_stmt)
310 {
311 gimple_stmt_iterator gsi;
312 gimple stmt;
313
314 do {
315 if (!has_zero_uses (name))
316 return false;
317
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (stmt == up_to_stmt)
320 return true;
321
322 gsi = gsi_for_stmt (stmt);
323 release_defs (stmt);
324 gsi_remove (&gsi, true);
325
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
328
329 return false;
330 }
331
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
334
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively . */
338
339 static tree
340 rhs_to_tree (tree type, gimple stmt)
341 {
342 enum tree_code code = gimple_assign_rhs_code (stmt);
343 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
344 return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
345 gimple_assign_rhs2 (stmt));
346 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
347 return build1 (code, type, gimple_assign_rhs1 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
349 return gimple_assign_rhs1 (stmt);
350 else
351 gcc_unreachable ();
352 }
353
354 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
355 the folded result in a form suitable for COND_EXPR_COND or
356 NULL_TREE, if there is no suitable simplified form. If
357 INVARIANT_ONLY is true only gimple_min_invariant results are
358 considered simplified. */
359
360 static tree
361 combine_cond_expr_cond (enum tree_code code, tree type,
362 tree op0, tree op1, bool invariant_only)
363 {
364 tree t;
365
366 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
367
368 t = fold_binary (code, type, op0, op1);
369 if (!t)
370 return NULL_TREE;
371
372 /* Require that we got a boolean type out if we put one in. */
373 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
374
375 /* Canonicalize the combined condition for use in a COND_EXPR. */
376 t = canonicalize_cond_expr_cond (t);
377
378 /* Bail out if we required an invariant but didn't get one. */
379 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
380 return NULL_TREE;
381
382 return t;
383 }
384
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;

  /* Iterate because a successful substitution may expose further
     combining opportunities on the same condition.  */
  do {
    tree tmp = NULL_TREE;
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;
    enum tree_code code = gimple_cond_code (stmt);

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
        && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = gimple_cond_lhs (stmt);
        def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
        if (def_stmt && can_propagate_from (def_stmt))
          {
            tree op1 = gimple_cond_rhs (stmt);
            /* Substitute the definition of the lhs operand; require
               an invariant result unless the chain is single-use
               (otherwise we would duplicate the computation).  */
            rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
            tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
                                          op1, !single_use0_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
          {
            tree op0 = gimple_cond_lhs (stmt);
            name = gimple_cond_rhs (stmt);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
            if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;

            rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
                                          !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
                                        fold_convert (TREE_TYPE (rhs0), rhs1),
                                        !(single_use0_p && single_use1_p));
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            /* Rebuild the old condition as a tree purely for dumping.  */
            tree cond = build2 (gimple_cond_code (stmt),
                                boolean_type_node,
                                gimple_cond_lhs (stmt),
                                gimple_cond_rhs (stmt));
            fprintf (dump_file, " Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        /* A condition folded to an invariant may make an edge dead,
           so signal that cfg_cleanup needs to run.  */
        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
477
478
479 /* Propagate from the ssa name definition statements of COND_EXPR
480 in the rhs of statement STMT into the conditional if that simplifies it.
481 Returns zero if no statement was changed, one if there were
482 changes and two if cfg_cleanup needs to run.
483
484 This must be kept in sync with forward_propagate_into_gimple_cond. */
485
486 static int
487 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
488 {
489 gimple stmt = gsi_stmt (*gsi_p);
490 int did_something = 0;
491
492 do {
493 tree tmp = NULL_TREE;
494 tree cond = gimple_assign_rhs1 (stmt);
495 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
496 gimple def_stmt;
497 bool single_use0_p = false, single_use1_p = false;
498
499 /* We can do tree combining on SSA_NAME and comparison expressions. */
500 if (COMPARISON_CLASS_P (cond)
501 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
502 {
503 /* For comparisons use the first operand, that is likely to
504 simplify comparisons against constants. */
505 name = TREE_OPERAND (cond, 0);
506 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
507 if (def_stmt && can_propagate_from (def_stmt))
508 {
509 tree op1 = TREE_OPERAND (cond, 1);
510 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
511 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
512 rhs0, op1, !single_use0_p);
513 }
514 /* If that wasn't successful, try the second operand. */
515 if (tmp == NULL_TREE
516 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
517 {
518 tree op0 = TREE_OPERAND (cond, 0);
519 name = TREE_OPERAND (cond, 1);
520 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
521 if (!def_stmt || !can_propagate_from (def_stmt))
522 return did_something;
523
524 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
525 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
526 op0, rhs1, !single_use1_p);
527 }
528 /* If that wasn't successful either, try both operands. */
529 if (tmp == NULL_TREE
530 && rhs0 != NULL_TREE
531 && rhs1 != NULL_TREE)
532 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
533 rhs0, fold_convert (TREE_TYPE (rhs0),
534 rhs1),
535 !(single_use0_p && single_use1_p));
536 }
537 else if (TREE_CODE (cond) == SSA_NAME)
538 {
539 name = cond;
540 def_stmt = get_prop_source_stmt (name, true, NULL);
541 if (def_stmt || !can_propagate_from (def_stmt))
542 return did_something;
543
544 rhs0 = gimple_assign_rhs1 (def_stmt);
545 tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
546 build_int_cst (TREE_TYPE (rhs0), 0),
547 false);
548 }
549
550 if (tmp)
551 {
552 if (dump_file && tmp)
553 {
554 fprintf (dump_file, " Replaced '");
555 print_generic_expr (dump_file, cond, 0);
556 fprintf (dump_file, "' with '");
557 print_generic_expr (dump_file, tmp, 0);
558 fprintf (dump_file, "'\n");
559 }
560
561 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
562 stmt = gsi_stmt (*gsi_p);
563 update_stmt (stmt);
564
565 /* Remove defining statements. */
566 remove_prop_source_from_use (name, NULL);
567
568 if (is_gimple_min_invariant (tmp))
569 did_something = 2;
570 else if (did_something == 0)
571 did_something = 1;
572
573 /* Continue combining. */
574 continue;
575 }
576
577 break;
578 } while (1);
579
580 return did_something;
581 }
582
583 /* We've just substituted an ADDR_EXPR into stmt. Update all the
584 relevant data structures to match. */
585
586 static void
587 tidy_after_forward_propagate_addr (gimple stmt)
588 {
589 /* We may have turned a trapping insn into a non-trapping insn. */
590 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
591 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
592 cfg_changed = true;
593
594 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
595 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
596 }
597
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree tmp;

  /* TUNIT is the byte size of one array element; it must be a
     host-representable constant for the divisions below.  */
  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      /* Element size 1: the byte offset IS the index.  */
      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          /* TMP is now CST / tunit; check the variable part is
             idx * tunit so the whole offset is (idx +- TMP) * tunit.  */
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  INDEX may be a
     non-trivial expression, so gimplify it before the use stmt.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  /* Overwrite the index operand of the ARRAY_REF inside the new
     ADDR_EXPR rhs (operand 1 of &array[0]).  */
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
700
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && !is_gimple_min_invariant (def_rhs)))
        return forward_propagate_addr_expr (lhs, def_rhs);

      /* Single use: fold the address directly into this statement,
         turning the copy/conversion into a plain ADDR_EXPR assignment
         or a NOP_EXPR as needed.  */
      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, lhs)
      && (lhsp != gimple_assign_lhs_ptr (use_stmt)
          || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                                        TREE_TYPE (rhs))))
    {
      *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS if this was not the only use.  */
      if (single_use_p)
        return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
         || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either as it could be a
         function descriptor that we want and not the actual function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non volatile loads.  */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
                          TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
    {
      tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
      new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
      if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
        {
          /* If we have folded the VIEW_CONVERT_EXPR then the result is only
             valid if we can replace the whole rhs of the use statement.  */
          if (rhs != gimple_assign_rhs1 (use_stmt))
            return false;
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
                                              true, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
      /* If the defining rhs comes from an indirect reference, then do not
         convert into a VIEW_CONVERT_EXPR.  */
      def_rhs_base = TREE_OPERAND (def_rhs, 0);
      while (handled_component_p (def_rhs_base))
        def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
      if (!INDIRECT_REF_P (def_rhs_base))
        {
          /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
             reference.  Place it there and fold the thing.  */
          *rhsp = new_rhs;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              /* Gimplify a non-invariant address before converting it,
                 since fold_convert of a complex address would not be
                 valid gimple.  */
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && integer_zerop (TREE_OPERAND (array_ref, 1))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
901
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        /* Wrap the propagation in push/pop_stmt_changes so operand
           caches are updated for whatever the subroutine rewrote.  */
        push_stmt_changes (&use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            pop_stmt_changes (&use_stmt);
            use_stmt = gsi_stmt (gsi);
            update_stmt (use_stmt);
          }
        else
          pop_stmt_changes (&use_stmt);
      }
      /* Track whether every use was successfully propagated into.  */
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all;
}
973
974 /* Forward propagate the comparison defined in STMT like
975 cond_1 = x CMP y to uses of the form
976 a_1 = (T')cond_1
977 a_1 = !cond_1
978 a_1 = cond_1 != 0
979 Returns true if stmt is now unused. */
980
static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  /* Replacement expression built for the single use, if any.  */
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	     == tcc_comparison
          || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	{
	  /* Avoid using fold here as that may create a COND_EXPR with
	     non-boolean condition as canonical form.  */
	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
	}
      /* We can propagate the condition into X op CST where op
	 is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	       == tcc_comparison
	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
	{
	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
	  tree cst = gimple_assign_rhs2 (use_stmt);
	  tree cond;

	  cond = build2 (gimple_assign_rhs_code (stmt),
			 TREE_TYPE (cst),
			 gimple_assign_rhs1 (stmt),
			 gimple_assign_rhs2 (stmt));

	  /* Let the combiner decide whether CMP-against-CST simplifies;
	     bail out when it produces nothing.  */
	  tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
	  if (tmp == NULL_TREE)
	    return false;
	}
      /* We can propagate the condition into a statement that
	 computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  /* Inverting a float comparison is only valid when NaNs cannot
	     occur; invert_tree_comparison signals failure by ERROR_MARK.  */
	  bool nans = HONOR_NANS (TYPE_MODE (type));
	  enum tree_code code;
	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
	  if (code == ERROR_MARK)
	    return false;

	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
	}
      else
	return false;

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
	use_stmt = gsi_stmt (gsi);
	update_stmt (use_stmt);
      }

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
				      stmt);
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, old_rhs, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      return true;
    }

  return false;
}
1081
1082 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1083 If so, we can change STMT into lhs = y which can later be copy
1084 propagated. Similarly for negation.
1085
1086 This could trivially be formulated as a forward propagation
1087 to immediate uses. However, we already had an implementation
1088 from DOM which used backward propagation via the use-def links.
1089
1090 It turns out that backward propagation is actually faster as
1091 there's less work to do for each NOT/NEG expression we find.
1092 Backwards propagation needs to look at the statement in a single
1093 backlink. Forward propagation needs to look at potentially more
1094 than one forward link. */
1095
1096 static void
1097 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1098 {
1099 gimple stmt = gsi_stmt (*gsi_p);
1100 tree rhs = gimple_assign_rhs1 (stmt);
1101 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1102
1103 /* See if the RHS_DEF_STMT has the same form as our statement. */
1104 if (is_gimple_assign (rhs_def_stmt)
1105 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1106 {
1107 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1108
1109 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1110 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1111 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1112 {
1113 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1114 stmt = gsi_stmt (*gsi_p);
1115 update_stmt (stmt);
1116 }
1117 }
1118 }
1119
1120 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1121 the condition which we may be able to optimize better. */
1122
1123 static void
1124 simplify_gimple_switch (gimple stmt)
1125 {
1126 tree cond = gimple_switch_index (stmt);
1127 tree def, to, ti;
1128 gimple def_stmt;
1129
1130 /* The optimization that we really care about is removing unnecessary
1131 casts. That will let us do much better in propagating the inferred
1132 constant at the switch target. */
1133 if (TREE_CODE (cond) == SSA_NAME)
1134 {
1135 def_stmt = SSA_NAME_DEF_STMT (cond);
1136 if (is_gimple_assign (def_stmt))
1137 {
1138 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1139 {
1140 int need_precision;
1141 bool fail;
1142
1143 def = gimple_assign_rhs1 (def_stmt);
1144
1145 #ifdef ENABLE_CHECKING
1146 /* ??? Why was Jeff testing this? We are gimple... */
1147 gcc_assert (is_gimple_val (def));
1148 #endif
1149
1150 to = TREE_TYPE (cond);
1151 ti = TREE_TYPE (def);
1152
1153 /* If we have an extension that preserves value, then we
1154 can copy the source value into the switch. */
1155
1156 need_precision = TYPE_PRECISION (ti);
1157 fail = false;
1158 if (! INTEGRAL_TYPE_P (ti))
1159 fail = true;
1160 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1161 fail = true;
1162 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1163 need_precision += 1;
1164 if (TYPE_PRECISION (to) < need_precision)
1165 fail = true;
1166
1167 if (!fail)
1168 {
1169 gimple_switch_set_index (stmt, def);
1170 update_stmt (stmt);
1171 }
1172 }
1173 }
1174 }
1175 }
1176
/* Run bitwise-and assignments through the folder.  If the first argument is
   an ssa name that is itself the result of a typecast of an ADDR_EXPR to an
   integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */
1181
1182 static void
1183 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1184 {
1185 tree res;
1186 tree arg1 = gimple_assign_rhs1 (stmt);
1187 tree arg2 = gimple_assign_rhs2 (stmt);
1188
1189 if (TREE_CODE (arg2) != INTEGER_CST)
1190 return;
1191
1192 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1193 {
1194 gimple def = SSA_NAME_DEF_STMT (arg1);
1195
1196 if (gimple_assign_cast_p (def)
1197 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1198 {
1199 tree op = gimple_assign_rhs1 (def);
1200
1201 if (TREE_CODE (op) == ADDR_EXPR)
1202 arg1 = op;
1203 }
1204 }
1205
1206 res = fold_binary (BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1207 arg1, arg2);
1208 if (res && is_gimple_min_invariant (res))
1209 {
1210 gimple_assign_set_rhs_from_tree (gsi, res);
1211 update_stmt (stmt);
1212 }
1213 return;
1214 }
1215
1216 /* Main entry point for the forward propagation optimizer. */
1217
static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Note we update GSI within the loop as necessary.  Branches that
	 may want to revisit the (possibly transformed) statement do not
	 advance GSI; all others do.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (lhs) != SSA_NAME)
		{
		  gsi_next (&gsi);
		  continue;
		}

	      if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		  /* Handle pointer conversions on invariant addresses
		     as well, as this is valid gimple.  */
		  || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      && TREE_CODE (rhs) == ADDR_EXPR
		      && POINTER_TYPE_P (TREE_TYPE (lhs))))
		{
		  STRIP_NOPS (rhs);
		  /* When every use was propagated into, the definition
		     is dead and can be removed.  */
		  if (!stmt_references_abnormal_ssa_name (stmt)
		      && forward_propagate_addr_expr (lhs, rhs))
		    {
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (stmt);
		  update_stmt (stmt);
		  /* If folding changed the statement code, stay on this
		     statement so it is reconsidered by the dispatch above.  */
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
			|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
		       && TREE_CODE (rhs) == SSA_NAME)
		{
		  simplify_not_neg_expr (&gsi);
		  gsi_next (&gsi);
		}
	      else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
		{
		  /* In this case the entire COND_EXPR is in rhs1.  */
		  int did_something;
		  fold_defer_overflow_warnings ();
		  did_something = forward_propagate_into_cond (&gsi);
		  stmt = gsi_stmt (gsi);
		  /* A return of 2 means the condition folded to a
		     constant and an edge became dead.  */
		  if (did_something == 2)
		    cfg_changed = true;
		  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
		    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  gsi_next (&gsi);
		}
	      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
				== tcc_comparison)
		{
		  /* The comparison definition is dead once it was
		     propagated into its single use.  */
		  if (forward_propagate_comparison (stmt))
		    {
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
		{
		  simplify_bitwise_and (&gsi, stmt);
		  gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_SWITCH)
	    {
	      simplify_gimple_switch (stmt);
	      gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_COND)
	    {
	      int did_something;
	      fold_defer_overflow_warnings ();
	      did_something = forward_propagate_into_gimple_cond (stmt);
	      /* A return of 2 means the condition folded to a constant
		 and an edge became dead.  */
	      if (did_something == 2)
		cfg_changed = true;
	      fold_undefer_overflow_warnings (did_something, stmt,
					      WARN_STRICT_OVERFLOW_CONDITIONAL);
	      gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}
1340
1341
/* Gate function: forward propagation is unconditionally enabled.  */

static bool
gate_forwprop (void)
{
  return true;
}
1347
/* Pass descriptor registered with the pass manager; requires CFG and
   SSA form and reverifies SSA after running.  */
struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};
1369