1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "optabs-query.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "gimple-fold.h"
36 #include "tree-eh.h"
37 #include "gimplify.h"
38 #include "gimple-iterator.h"
39 #include "gimplify-me.h"
40 #include "tree-cfg.h"
41 #include "expr.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa-propagate.h"
44 #include "tree-ssa-dom.h"
45 #include "builtins.h"
46 #include "tree-cfgcleanup.h"
47 #include "cfganal.h"
48 #include "optabs-tree.h"
49
50 /* This pass propagates the RHS of assignment statements into use
51 sites of the LHS of the assignment. It's basically a specialized
52 form of tree combination. It is hoped all of this can disappear
53 when we have a generalized tree combiner.
54
55 One class of common cases we handle is forward propagating a single use
56 variable into a COND_EXPR.
57
58 bb0:
59 x = a COND b;
60 if (x) goto ... else goto ...
61
62 Will be transformed into:
63
64 bb0:
65 if (a COND b) goto ... else goto ...
66
67 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
68
69 Or (assuming c1 and c2 are constants):
70
71 bb0:
72 x = a + c1;
73 if (x EQ/NEQ c2) goto ... else goto ...
74
75 Will be transformed into:
76
77 bb0:
78 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
79
80 Similarly for x = a - c1.
81
82 Or
83
84 bb0:
85 x = !a
86 if (x) goto ... else goto ...
87
88 Will be transformed into:
89
90 bb0:
91 if (a == 0) goto ... else goto ...
92
93 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
94 For these cases, we propagate A into all, possibly more than one,
95 COND_EXPRs that use X.
96
97 Or
98
99 bb0:
100 x = (typecast) a
101 if (x) goto ... else goto ...
102
103 Will be transformed into:
104
105 bb0:
106 if (a != 0) goto ... else goto ...
107
108 (Assuming a is an integral type and x is a boolean or x is an
109 integral and a is a boolean.)
110
111 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
112 For these cases, we propagate A into all, possibly more than one,
113 COND_EXPRs that use X.
114
115 In addition to eliminating the variable and the statement which assigns
116 a value to the variable, we may be able to later thread the jump without
117 adding insane complexity in the dominator optimizer.
118
119 Also note these transformations can cascade. We handle this by having
120 a worklist of COND_EXPR statements to examine. As we make a change to
121 a statement, we put it back on the worklist to examine on the next
122 iteration of the main loop.
123
124 A second class of propagation opportunities arises for ADDR_EXPR
125 nodes.
126
127 ptr = &x->y->z;
128 res = *ptr;
129
130 Will get turned into
131
132 res = x->y->z;
133
134 Or
135 ptr = (type1*)&type2var;
136 res = *ptr
137
138 Will get turned into (if type1 and type2 are the same size
139 and neither has volatile on them):
140 res = VIEW_CONVERT_EXPR<type1>(type2var)
141
142 Or
143
144 ptr = &x[0];
145 ptr2 = ptr + <constant>;
146
147 Will get turned into
148
149 ptr2 = &x[constant/elementsize];
150
151 Or
152
153 ptr = &x[0];
154 offset = index * element_size;
155 offset_p = (pointer) offset;
156 ptr2 = ptr + offset_p
157
158 Will get turned into:
159
160 ptr2 = &x[index];
161
162 Or
163 ssa = (int) decl
164 res = ssa & 1
165
166 Provided that decl has known alignment >= 2, will get turned into
167
168 res = 0
169
170 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
171 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
172 {NOT_EXPR,NEG_EXPR}.
173
174 This will (of course) be extended as other needs arise. */
175
176 static bool forward_propagate_addr_expr (tree, tree, bool);
177
178 /* Set to true if we delete dead edges during the optimization. */
179 static bool cfg_changed;
180
181 static tree rhs_to_tree (tree type, gimple *stmt);
182
183 static bitmap to_purge;
184
185 /* Const-and-copy lattice. */
186 static vec<tree> lattice;
187
188 /* Set the lattice entry for NAME to VAL. */
189 static void
190 fwprop_set_lattice_val (tree name, tree val)
191 {
192 if (TREE_CODE (name) == SSA_NAME)
193 {
194 if (SSA_NAME_VERSION (name) >= lattice.length ())
195 {
196 lattice.reserve (num_ssa_names - lattice.length ());
197 lattice.quick_grow_cleared (num_ssa_names);
198 }
199 lattice[SSA_NAME_VERSION (name)] = val;
200 }
201 }
202
203 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
204 static void
205 fwprop_invalidate_lattice (tree name)
206 {
207 if (name
208 && TREE_CODE (name) == SSA_NAME
209 && SSA_NAME_VERSION (name) < lattice.length ())
210 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
211 }
212
213
214 /* Get the statement we can propagate from into NAME, skipping
215 trivial copies. Returns the statement which defines the
216 propagation source, or NULL if there is no such statement.
217 If SINGLE_USE_ONLY is set, consider only sources which have
218 a single-use chain up to NAME. If SINGLE_USE_P is non-null,
219 it is set to whether the chain to NAME is a single-use chain
220 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
221
222 static gimple *
223 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
224 {
225 bool single_use = true;
226
227 do {
228 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
229
230 if (!has_single_use (name))
231 {
232 single_use = false;
233 if (single_use_only)
234 return NULL;
235 }
236
237 /* If name is defined by a PHI node or is the default def, bail out. */
238 if (!is_gimple_assign (def_stmt))
239 return NULL;
240
241 /* If def_stmt is a simple copy, continue looking. */
242 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
243 name = gimple_assign_rhs1 (def_stmt);
244 else
245 {
246 if (!single_use_only && single_use_p)
247 *single_use_p = single_use;
248
249 return def_stmt;
250 }
251 } while (1);
252 }
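
/* A sketch of the copy skipping done above, with illustrative SSA names:

     b_2 = a_1;
     c_3 = b_2;

   get_prop_source_stmt (c_3, ...) walks through both trivial copies and
   considers the definition of a_1 as the propagation source; if a_1 is
   e.g. a PHI result or a default definition, NULL is returned.  */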
253
254 /* Checks if the destination ssa name in DEF_STMT can be used as
255 propagation source. Returns true if so, otherwise false. */
256
257 static bool
258 can_propagate_from (gimple *def_stmt)
259 {
260 gcc_assert (is_gimple_assign (def_stmt));
261
262 /* If the rhs has side-effects we cannot propagate from it. */
263 if (gimple_has_volatile_ops (def_stmt))
264 return false;
265
266 /* If the rhs is a load we cannot propagate from it. */
267 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
268 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
269 return false;
270
271 /* Constants can be always propagated. */
272 if (gimple_assign_single_p (def_stmt)
273 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
274 return true;
275
276 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
277 if (stmt_references_abnormal_ssa_name (def_stmt))
278 return false;
279
280 /* If the definition is a conversion of a pointer to a function type,
281 then we cannot apply optimizations, as some targets require
282 function pointers to be canonicalized and in this case this
283 optimization could eliminate a necessary canonicalization. */
284 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
285 {
286 tree rhs = gimple_assign_rhs1 (def_stmt);
287 if (POINTER_TYPE_P (TREE_TYPE (rhs))
288 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
289 return false;
290 }
291
292 return true;
293 }
294
295 /* Remove a chain of dead statements starting at the definition of
296 NAME. The chain is linked via the first operand of the defining statements.
297 If NAME was replaced in its only use then this function can be used
298 to clean up dead stmts. The function handles already released SSA
299 names gracefully.
300 Returns true if cleanup-cfg has to run. */
301
302 static bool
303 remove_prop_source_from_use (tree name)
304 {
305 gimple_stmt_iterator gsi;
306 gimple *stmt;
307 bool cfg_changed = false;
308
309 do {
310 basic_block bb;
311
312 if (SSA_NAME_IN_FREE_LIST (name)
313 || SSA_NAME_IS_DEFAULT_DEF (name)
314 || !has_zero_uses (name))
315 return cfg_changed;
316
317 stmt = SSA_NAME_DEF_STMT (name);
318 if (gimple_code (stmt) == GIMPLE_PHI
319 || gimple_has_side_effects (stmt))
320 return cfg_changed;
321
322 bb = gimple_bb (stmt);
323 gsi = gsi_for_stmt (stmt);
324 unlink_stmt_vdef (stmt);
325 if (gsi_remove (&gsi, true))
326 bitmap_set_bit (to_purge, bb->index);
327 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
328 release_defs (stmt);
329
330 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
331 } while (name && TREE_CODE (name) == SSA_NAME);
332
333 return cfg_changed;
334 }
335
336 /* Return the rhs of a gassign *STMT in the form of a single tree,
337 converted to type TYPE.
338
339 This should disappear, but is needed so we can combine expressions and use
340 the fold() interfaces. Long term, we need to develop folding and combine
341 routines that deal with gimple exclusively. */
342
343 static tree
344 rhs_to_tree (tree type, gimple *stmt)
345 {
346 location_t loc = gimple_location (stmt);
347 enum tree_code code = gimple_assign_rhs_code (stmt);
348 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
349 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
350 gimple_assign_rhs2 (stmt),
351 gimple_assign_rhs3 (stmt));
352 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
353 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
354 gimple_assign_rhs2 (stmt));
355 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
356 return build1 (code, type, gimple_assign_rhs1 (stmt));
357 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
358 return gimple_assign_rhs1 (stmt);
359 else
360 gcc_unreachable ();
361 }
362
363 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
364 the folded result in a form suitable for COND_EXPR_COND or
365 NULL_TREE, if there is no suitable simplified form. If
366 INVARIANT_ONLY is true only gimple_min_invariant results are
367 considered simplified. */
368
369 static tree
370 combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
371 tree op0, tree op1, bool invariant_only)
372 {
373 tree t;
374
375 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
376
377 fold_defer_overflow_warnings ();
378 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
379 if (!t)
380 {
381 fold_undefer_overflow_warnings (false, NULL, 0);
382 return NULL_TREE;
383 }
384
385 /* Require that we got a boolean type out if we put one in. */
386 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
387
388 /* Canonicalize the combined condition for use in a COND_EXPR. */
389 t = canonicalize_cond_expr_cond (t);
390
391 /* Bail out if we required an invariant but didn't get one. */
392 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
393 {
394 fold_undefer_overflow_warnings (false, NULL, 0);
395 return NULL_TREE;
396 }
397
398 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
399
400 return t;
401 }
402
403 /* Combine the comparison OP0 CODE OP1 in STMT with the defining statements
404 of its operands. Return a new comparison tree or NULL_TREE if there
405 were no simplifying combines. */
406
407 static tree
408 forward_propagate_into_comparison_1 (gimple *stmt,
409 enum tree_code code, tree type,
410 tree op0, tree op1)
411 {
412 tree tmp = NULL_TREE;
413 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
414 bool single_use0_p = false, single_use1_p = false;
415
416 /* For comparisons use the first operand, as that is likely to
417 simplify comparisons against constants. */
418 if (TREE_CODE (op0) == SSA_NAME)
419 {
420 gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
421 if (def_stmt && can_propagate_from (def_stmt))
422 {
423 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
424 bool invariant_only_p = !single_use0_p;
425
426 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
427
428 /* Always combine comparisons or conversions from booleans. */
429 if (TREE_CODE (op1) == INTEGER_CST
430 && ((CONVERT_EXPR_CODE_P (def_code)
431 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
432 == BOOLEAN_TYPE)
433 || TREE_CODE_CLASS (def_code) == tcc_comparison))
434 invariant_only_p = false;
435
436 tmp = combine_cond_expr_cond (stmt, code, type,
437 rhs0, op1, invariant_only_p);
438 if (tmp)
439 return tmp;
440 }
441 }
442
443 /* If that wasn't successful, try the second operand. */
444 if (TREE_CODE (op1) == SSA_NAME)
445 {
446 gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
447 if (def_stmt && can_propagate_from (def_stmt))
448 {
449 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
450 tmp = combine_cond_expr_cond (stmt, code, type,
451 op0, rhs1, !single_use1_p);
452 if (tmp)
453 return tmp;
454 }
455 }
456
457 /* If that wasn't successful either, try both operands. */
458 if (rhs0 != NULL_TREE
459 && rhs1 != NULL_TREE)
460 tmp = combine_cond_expr_cond (stmt, code, type,
461 rhs0, rhs1,
462 !(single_use0_p && single_use1_p));
463
464 return tmp;
465 }
466
467 /* Propagate from the ssa name definition statements into the
468 comparison on the rhs of the assignment at *GSI if that simplifies it.
469 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
470 otherwise returns 0. */
471
472 static int
473 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
474 {
475 gimple *stmt = gsi_stmt (*gsi);
476 tree tmp;
477 bool cfg_changed = false;
478 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
479 tree rhs1 = gimple_assign_rhs1 (stmt);
480 tree rhs2 = gimple_assign_rhs2 (stmt);
481
482 /* Combine the comparison with defining statements. */
483 tmp = forward_propagate_into_comparison_1 (stmt,
484 gimple_assign_rhs_code (stmt),
485 type, rhs1, rhs2);
486 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
487 {
488 gimple_assign_set_rhs_from_tree (gsi, tmp);
489 fold_stmt (gsi);
490 update_stmt (gsi_stmt (*gsi));
491
492 if (TREE_CODE (rhs1) == SSA_NAME)
493 cfg_changed |= remove_prop_source_from_use (rhs1);
494 if (TREE_CODE (rhs2) == SSA_NAME)
495 cfg_changed |= remove_prop_source_from_use (rhs2);
496 return cfg_changed ? 2 : 1;
497 }
498
499 return 0;
500 }
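
/* An illustrative example of the combining done above, assuming t_1 has
   a single use:

     t_1 = a_2 < b_3;
     res_4 = t_1 != 0;

   forward_propagate_into_comparison substitutes the defining comparison
   and folds the result to

     res_4 = a_2 < b_3;  */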
501
502 /* Propagate from the ssa name definition statements of the operands
503 of GIMPLE_COND statement STMT into the conditional if that simplifies it.
504 Returns zero if no statement was changed, one if there were
505 changes and two if cfg_cleanup needs to run.
506
507 This must be kept in sync with forward_propagate_into_cond. */
508
509 static int
510 forward_propagate_into_gimple_cond (gcond *stmt)
511 {
512 tree tmp;
513 enum tree_code code = gimple_cond_code (stmt);
514 bool cfg_changed = false;
515 tree rhs1 = gimple_cond_lhs (stmt);
516 tree rhs2 = gimple_cond_rhs (stmt);
517
518 /* We can do tree combining on SSA_NAME and comparison expressions. */
519 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
520 return 0;
521
522 tmp = forward_propagate_into_comparison_1 (stmt, code,
523 boolean_type_node,
524 rhs1, rhs2);
525 if (tmp)
526 {
527 if (dump_file)
528 {
529 fprintf (dump_file, " Replaced '");
530 print_gimple_expr (dump_file, stmt, 0);
531 fprintf (dump_file, "' with '");
532 print_generic_expr (dump_file, tmp);
533 fprintf (dump_file, "'\n");
534 }
535
536 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
537 update_stmt (stmt);
538
539 if (TREE_CODE (rhs1) == SSA_NAME)
540 cfg_changed |= remove_prop_source_from_use (rhs1);
541 if (TREE_CODE (rhs2) == SSA_NAME)
542 cfg_changed |= remove_prop_source_from_use (rhs2);
543 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
544 }
545
546 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
547 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
548 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
549 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
550 && ((code == EQ_EXPR
551 && integer_zerop (rhs2))
552 || (code == NE_EXPR
553 && integer_onep (rhs2))))
554 {
555 basic_block bb = gimple_bb (stmt);
556 gimple_cond_set_code (stmt, NE_EXPR);
557 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
558 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
559 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
560 return 1;
561 }
562
563 return 0;
564 }
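
/* E.g. the _Bool canonicalization at the end of the function turns

     if (b_1 == 0) goto bb2; else goto bb3;

   into

     if (b_1 != 0) goto bb3; else goto bb2;

   by setting the condition code to NE_EXPR and exchanging the
   EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags on the two successor edges
   (bb2/bb3 are illustrative).  */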
565
566
567 /* Propagate from the ssa name definition statements of COND_EXPR
568 in the rhs of statement STMT into the conditional if that simplifies it.
569 Returns true if the stmt was changed, otherwise false. */
570
571 static bool
572 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
573 {
574 gimple *stmt = gsi_stmt (*gsi_p);
575 tree tmp = NULL_TREE;
576 tree cond = gimple_assign_rhs1 (stmt);
577 enum tree_code code = gimple_assign_rhs_code (stmt);
578
579 /* We can do tree combining on SSA_NAME and comparison expressions. */
580 if (COMPARISON_CLASS_P (cond))
581 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
582 TREE_TYPE (cond),
583 TREE_OPERAND (cond, 0),
584 TREE_OPERAND (cond, 1));
585 else if (TREE_CODE (cond) == SSA_NAME)
586 {
587 enum tree_code def_code;
588 tree name = cond;
589 gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
590 if (!def_stmt || !can_propagate_from (def_stmt))
591 return false;
592
593 def_code = gimple_assign_rhs_code (def_stmt);
594 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
595 tmp = fold_build2_loc (gimple_location (def_stmt),
596 def_code,
597 TREE_TYPE (cond),
598 gimple_assign_rhs1 (def_stmt),
599 gimple_assign_rhs2 (def_stmt));
600 }
601
602 if (tmp
603 && is_gimple_condexpr (tmp))
604 {
605 if (dump_file)
606 {
607 fprintf (dump_file, " Replaced '");
608 print_generic_expr (dump_file, cond);
609 fprintf (dump_file, "' with '");
610 print_generic_expr (dump_file, tmp);
611 fprintf (dump_file, "'\n");
612 }
613
614 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
615 : integer_onep (tmp))
616 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
617 else if (integer_zerop (tmp))
618 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
619 else
620 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
621 stmt = gsi_stmt (*gsi_p);
622 update_stmt (stmt);
623
624 return true;
625 }
626
627 return false;
628 }
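
/* The common effect of the SSA_NAME case above is to inline a single-use
   comparison into the COND_EXPR, e.g. (with illustrative names)

     t_1 = a_2 < b_3;
     x_4 = t_1 ? c_5 : d_6;

   becomes

     x_4 = a_2 < b_3 ? c_5 : d_6;

   and if the folded condition is a constant, the COND_EXPR is replaced
   by the selected arm directly.  */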
629
630 /* We've just substituted an ADDR_EXPR into stmt. Update all the
631 relevant data structures to match. */
632
633 static void
634 tidy_after_forward_propagate_addr (gimple *stmt)
635 {
636 /* We may have turned a trapping insn into a non-trapping insn. */
637 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
638 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
639
640 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
641 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
642 }
643
644 /* NAME is an SSA_NAME representing DEF_RHS, which is of the form
645 ADDR_EXPR <whatever>.
646
647 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
648 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
649 node or for recovery of array indexing from pointer arithmetic.
650
651 Return true if the propagation was successful (the propagation may
652 not be totally successful, yet things may still have been changed). */
653
654 static bool
655 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
656 gimple_stmt_iterator *use_stmt_gsi,
657 bool single_use_p)
658 {
659 tree lhs, rhs, rhs2, array_ref;
660 gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
661 enum tree_code rhs_code;
662 bool res = true;
663
664 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
665
666 lhs = gimple_assign_lhs (use_stmt);
667 rhs_code = gimple_assign_rhs_code (use_stmt);
668 rhs = gimple_assign_rhs1 (use_stmt);
669
670 /* Do not perform copy-propagation but recurse through copy chains. */
671 if (TREE_CODE (lhs) == SSA_NAME
672 && rhs_code == SSA_NAME)
673 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
674
675 /* The use statement could be a conversion. Recurse to the uses of the
676 lhs as copyprop does not copy through pointer to integer to pointer
677 conversions and FRE does not catch all cases either.
678 Treat the case of a single-use name and
679 a conversion to def_rhs type separately, though. */
680 if (TREE_CODE (lhs) == SSA_NAME
681 && CONVERT_EXPR_CODE_P (rhs_code))
682 {
683 /* If there is a point in a conversion chain where the types match,
684 so we can remove a conversion, re-materialize the address here
685 and stop. */
686 if (single_use_p
687 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
688 {
689 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
690 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
691 return true;
692 }
693
694 /* Else recurse if the conversion preserves the address value. */
695 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
696 || POINTER_TYPE_P (TREE_TYPE (lhs)))
697 && (TYPE_PRECISION (TREE_TYPE (lhs))
698 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
699 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
700
701 return false;
702 }
703
704 /* From this point on this isn't a conversion chain; we can only
705 propagate into compatible pointer contexts. */
706 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
707 return false;
708
709 /* Propagate through constant pointer adjustments. */
710 if (TREE_CODE (lhs) == SSA_NAME
711 && rhs_code == POINTER_PLUS_EXPR
712 && rhs == name
713 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
714 {
715 tree new_def_rhs;
716 /* As we come here with non-invariant addresses in def_rhs we need
717 to make sure we can build a valid constant offsetted address
718 for further propagation. Simply rely on fold building that
719 and check after the fact. */
720 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
721 def_rhs,
722 fold_convert (ptr_type_node,
723 gimple_assign_rhs2 (use_stmt)));
724 if (TREE_CODE (new_def_rhs) == MEM_REF
725 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
726 return false;
727 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
728 TREE_TYPE (rhs));
729
730 /* Recurse. If we could propagate into all uses of lhs do not
731 bother to replace into the current use but just pretend we did. */
732 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
733 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
734 return true;
735
736 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
737 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
738 new_def_rhs);
739 else if (is_gimple_min_invariant (new_def_rhs))
740 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
741 else
742 return false;
743 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
744 update_stmt (use_stmt);
745 return true;
746 }
747
748 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
749 ADDR_EXPR will not appear on the LHS. */
750 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
751 while (handled_component_p (*lhsp))
752 lhsp = &TREE_OPERAND (*lhsp, 0);
753 lhs = *lhsp;
754
755 /* Now see if the LHS node is a MEM_REF using NAME. If so,
756 propagate the ADDR_EXPR into the use of NAME and fold the result. */
757 if (TREE_CODE (lhs) == MEM_REF
758 && TREE_OPERAND (lhs, 0) == name)
759 {
760 tree def_rhs_base;
761 HOST_WIDE_INT def_rhs_offset;
762 /* If the address is invariant we can always fold it. */
763 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
764 &def_rhs_offset)))
765 {
766 offset_int off = mem_ref_offset (lhs);
767 tree new_ptr;
768 off += def_rhs_offset;
769 if (TREE_CODE (def_rhs_base) == MEM_REF)
770 {
771 off += mem_ref_offset (def_rhs_base);
772 new_ptr = TREE_OPERAND (def_rhs_base, 0);
773 }
774 else
775 new_ptr = build_fold_addr_expr (def_rhs_base);
776 TREE_OPERAND (lhs, 0) = new_ptr;
777 TREE_OPERAND (lhs, 1)
778 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
779 tidy_after_forward_propagate_addr (use_stmt);
780 /* Continue propagating into the RHS if this was not the only use. */
781 if (single_use_p)
782 return true;
783 }
784 /* If the LHS is a plain dereference and the value type is the same as
785 that of the pointed-to type of the address we can put the
786 dereferenced address on the LHS preserving the original alias-type. */
787 else if (integer_zerop (TREE_OPERAND (lhs, 1))
788 && ((gimple_assign_lhs (use_stmt) == lhs
789 && useless_type_conversion_p
790 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
791 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
792 || types_compatible_p (TREE_TYPE (lhs),
793 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
794 /* Don't forward anything into clobber stmts if it would result
795 in the lhs no longer being a MEM_REF. */
796 && (!gimple_clobber_p (use_stmt)
797 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
798 {
799 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
800 tree new_offset, new_base, saved, new_lhs;
801 while (handled_component_p (*def_rhs_basep))
802 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
803 saved = *def_rhs_basep;
804 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
805 {
806 new_base = TREE_OPERAND (*def_rhs_basep, 0);
807 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
808 TREE_OPERAND (*def_rhs_basep, 1));
809 }
810 else
811 {
812 new_base = build_fold_addr_expr (*def_rhs_basep);
813 new_offset = TREE_OPERAND (lhs, 1);
814 }
815 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
816 new_base, new_offset);
817 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
818 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
819 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
820 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
821 *lhsp = new_lhs;
822 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
823 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
824 *def_rhs_basep = saved;
825 tidy_after_forward_propagate_addr (use_stmt);
826 /* Continue propagating into the RHS if this was not the
827 only use. */
828 if (single_use_p)
829 return true;
830 }
831 else
832 /* We can have a struct assignment dereferencing our name twice.
833 Note that we didn't propagate into the lhs to not falsely
834 claim we did when propagating into the rhs. */
835 res = false;
836 }
837
838 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
839 nodes from the RHS. */
840 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
841 if (TREE_CODE (*rhsp) == ADDR_EXPR)
842 rhsp = &TREE_OPERAND (*rhsp, 0);
843 while (handled_component_p (*rhsp))
844 rhsp = &TREE_OPERAND (*rhsp, 0);
845 rhs = *rhsp;
846
847 /* Now see if the RHS node is a MEM_REF using NAME. If so,
848 propagate the ADDR_EXPR into the use of NAME and fold the result. */
849 if (TREE_CODE (rhs) == MEM_REF
850 && TREE_OPERAND (rhs, 0) == name)
851 {
852 tree def_rhs_base;
853 HOST_WIDE_INT def_rhs_offset;
854 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
855 &def_rhs_offset)))
856 {
857 offset_int off = mem_ref_offset (rhs);
858 tree new_ptr;
859 off += def_rhs_offset;
860 if (TREE_CODE (def_rhs_base) == MEM_REF)
861 {
862 off += mem_ref_offset (def_rhs_base);
863 new_ptr = TREE_OPERAND (def_rhs_base, 0);
864 }
865 else
866 new_ptr = build_fold_addr_expr (def_rhs_base);
867 TREE_OPERAND (rhs, 0) = new_ptr;
868 TREE_OPERAND (rhs, 1)
869 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
870 fold_stmt_inplace (use_stmt_gsi);
871 tidy_after_forward_propagate_addr (use_stmt);
872 return res;
873 }
874 /* If the RHS is a plain dereference and the value type is the same as
875 that of the pointed-to type of the address we can put the
876 dereferenced address on the RHS preserving the original alias-type. */
877 else if (integer_zerop (TREE_OPERAND (rhs, 1))
878 && ((gimple_assign_rhs1 (use_stmt) == rhs
879 && useless_type_conversion_p
880 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
881 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
882 || types_compatible_p (TREE_TYPE (rhs),
883 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
884 {
885 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
886 tree new_offset, new_base, saved, new_rhs;
887 while (handled_component_p (*def_rhs_basep))
888 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
889 saved = *def_rhs_basep;
890 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
891 {
892 new_base = TREE_OPERAND (*def_rhs_basep, 0);
893 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
894 TREE_OPERAND (*def_rhs_basep, 1));
895 }
896 else
897 {
898 new_base = build_fold_addr_expr (*def_rhs_basep);
899 new_offset = TREE_OPERAND (rhs, 1);
900 }
901 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
902 new_base, new_offset);
903 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
904 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
905 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
906 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
907 *rhsp = new_rhs;
908 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
909 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
910 *def_rhs_basep = saved;
911 fold_stmt_inplace (use_stmt_gsi);
912 tidy_after_forward_propagate_addr (use_stmt);
913 return res;
914 }
915 }
916
917 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
918 is nothing to do. */
919 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
920 || gimple_assign_rhs1 (use_stmt) != name)
921 return false;
922
923 /* The remaining cases are all for turning pointer arithmetic into
924 array indexing. They only apply when we have the address of
925 element zero in an array. If that is not the case then there
926 is nothing to do. */
927 array_ref = TREE_OPERAND (def_rhs, 0);
928 if ((TREE_CODE (array_ref) != ARRAY_REF
929 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
930 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
931 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
932 return false;
933
934 rhs2 = gimple_assign_rhs2 (use_stmt);
935 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
936 if (TREE_CODE (rhs2) == INTEGER_CST)
937 {
938 tree new_rhs = build1_loc (gimple_location (use_stmt),
939 ADDR_EXPR, TREE_TYPE (def_rhs),
940 fold_build2 (MEM_REF,
941 TREE_TYPE (TREE_TYPE (def_rhs)),
942 unshare_expr (def_rhs),
943 fold_convert (ptr_type_node,
944 rhs2)));
945 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
946 use_stmt = gsi_stmt (*use_stmt_gsi);
947 update_stmt (use_stmt);
948 tidy_after_forward_propagate_addr (use_stmt);
949 return true;
950 }
951
952 return false;
953 }
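
/* A sketch of the &x[C1] p+ C2 case above, assuming 4-byte array
   elements and illustrative names:

     ptr_1 = &x[1];
     ptr2_2 = ptr_1 p+ 8;

   is rewritten so that ptr2_2 is computed from &x with the combined
   byte offset C3 = 1 * 4 + 8 == 12.  */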
954
955 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
956
957 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
958 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
959 node or for recovery of array indexing from pointer arithmetic.
960
961 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
962 the single use in the previous invocation. Pass true when calling
963 this at the toplevel.
964
965 Returns true, if all uses have been propagated into. */
966
967 static bool
968 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
969 {
970 imm_use_iterator iter;
971 gimple *use_stmt;
972 bool all = true;
973 bool single_use_p = parent_single_use_p && has_single_use (name);
974
975 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
976 {
977 bool result;
978 tree use_rhs;
979
980 /* If the use is not in a simple assignment statement, then
981 there is nothing we can do. */
982 if (!is_gimple_assign (use_stmt))
983 {
984 if (!is_gimple_debug (use_stmt))
985 all = false;
986 continue;
987 }
988
989 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
990 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
991 single_use_p);
992 /* If the use has moved to a different statement adjust
993 the update machinery for the old statement too. */
994 if (use_stmt != gsi_stmt (gsi))
995 {
996 update_stmt (use_stmt);
997 use_stmt = gsi_stmt (gsi);
998 }
999 update_stmt (use_stmt);
1000 all &= result;
1001
1002 /* Remove intermediate now unused copy and conversion chains. */
1003 use_rhs = gimple_assign_rhs1 (use_stmt);
1004 if (result
1005 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1006 && TREE_CODE (use_rhs) == SSA_NAME
1007 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1008 {
1009 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1010 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1011 release_defs (use_stmt);
1012 gsi_remove (&gsi, true);
1013 }
1014 }
1015
1016 return all && has_zero_uses (name);
1017 }
1018
1019
1020 /* Helper function for simplify_gimple_switch. Remove case labels that
1021 have values outside the range of the new type. */
1022
1023 static void
1024 simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
1025 {
1026 unsigned int branch_num = gimple_switch_num_labels (stmt);
1027 auto_vec<tree> labels (branch_num);
1028 unsigned int i, len;
1029
1030 /* Collect the existing case labels in a VEC, and preprocess it as if
1031 we are gimplifying a GENERIC SWITCH_EXPR. */
1032 for (i = 1; i < branch_num; i++)
1033 labels.quick_push (gimple_switch_label (stmt, i));
1034 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1035
1036 /* If any labels were removed, replace the existing case labels
1037 in the GIMPLE_SWITCH statement with the correct ones.
1038 Note that the type updates were done in-place on the case labels,
1039 so we only have to replace the case labels in the GIMPLE_SWITCH
1040 if the number of labels changed. */
1041 len = labels.length ();
1042 if (len < branch_num - 1)
1043 {
1044 bitmap target_blocks;
1045 edge_iterator ei;
1046 edge e;
1047
1048 /* Corner case: *all* case labels have been removed as being
1049 out-of-range for INDEX_TYPE. Push one label and let the
1050 CFG cleanups deal with this further. */
1051 if (len == 0)
1052 {
1053 tree label, elt;
1054
1055 label = CASE_LABEL (gimple_switch_default_label (stmt));
1056 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1057 labels.quick_push (elt);
1058 len = 1;
1059 }
1060
1061 for (i = 0; i < labels.length (); i++)
1062 gimple_switch_set_label (stmt, i + 1, labels[i]);
1063 for (i++ ; i < branch_num; i++)
1064 gimple_switch_set_label (stmt, i, NULL_TREE);
1065 gimple_switch_set_num_labels (stmt, len + 1);
1066
1067 /* Cleanup any edges that are now dead. */
1068 target_blocks = BITMAP_ALLOC (NULL);
1069 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1070 {
1071 tree elt = gimple_switch_label (stmt, i);
1072 basic_block target = label_to_block (CASE_LABEL (elt));
1073 bitmap_set_bit (target_blocks, target->index);
1074 }
1075 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1076 {
1077 if (! bitmap_bit_p (target_blocks, e->dest->index))
1078 {
1079 remove_edge (e);
1080 cfg_changed = true;
1081 free_dominance_info (CDI_DOMINATORS);
1082 }
1083 else
1084 ei_next (&ei);
1085 }
1086 BITMAP_FREE (target_blocks);
1087 }
1088 }
1089
1090 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1091 the condition which we may be able to optimize better. */
1092
1093 static bool
1094 simplify_gimple_switch (gswitch *stmt)
1095 {
1096 /* The optimization that we really care about is removing unnecessary
1097 casts. That will let us do much better in propagating the inferred
1098 constant at the switch target. */
1099 tree cond = gimple_switch_index (stmt);
1100 if (TREE_CODE (cond) == SSA_NAME)
1101 {
1102 gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1103 if (gimple_assign_cast_p (def_stmt))
1104 {
1105 tree def = gimple_assign_rhs1 (def_stmt);
1106 if (TREE_CODE (def) != SSA_NAME)
1107 return false;
1108
1109 /* If we have an extension or sign-change that preserves the
1110 values we check against then we can copy the source value into
1111 the switch. */
1112 tree ti = TREE_TYPE (def);
1113 if (INTEGRAL_TYPE_P (ti)
1114 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1115 {
1116 size_t n = gimple_switch_num_labels (stmt);
1117 tree min = NULL_TREE, max = NULL_TREE;
1118 if (n > 1)
1119 {
1120 min = CASE_LOW (gimple_switch_label (stmt, 1));
1121 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1122 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1123 else
1124 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1125 }
1126 if ((!min || int_fits_type_p (min, ti))
1127 && (!max || int_fits_type_p (max, ti)))
1128 {
1129 gimple_switch_set_index (stmt, def);
1130 simplify_gimple_switch_label_vec (stmt, ti);
1131 update_stmt (stmt);
1132 return true;
1133 }
1134 }
1135 }
1136 }
1137
1138 return false;
1139 }
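
/* For instance, with an illustrative unsigned char s_1:

     i_2 = (int) s_1;
     switch (i_2) { case 3: ... case 42: ... }

   all case values fit the type of s_1, so the cast is bypassed: the
   switch index is replaced by s_1, and out-of-range labels, had there
   been any, would be dropped by simplify_gimple_switch_label_vec.  */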
1140
1141 /* For pointers p1 and p2 return p2 - p1 if the
1142 difference is known and constant, otherwise return NULL_TREE. */
1143
1144 static tree
1145 constant_pointer_difference (tree p1, tree p2)
1146 {
1147 int i, j;
1148 #define CPD_ITERATIONS 5
1149 tree exps[2][CPD_ITERATIONS];
1150 tree offs[2][CPD_ITERATIONS];
1151 int cnt[2];
1152
1153 for (i = 0; i < 2; i++)
1154 {
1155 tree p = i ? p1 : p2;
1156 tree off = size_zero_node;
1157 gimple *stmt;
1158 enum tree_code code;
1159
1160 /* For each of p1 and p2 we need to iterate at least
1161 twice, to handle ADDR_EXPR directly in p1/p2,
1162 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1163 on the definition's stmt RHS. Iterate a few extra times. */
1164 j = 0;
1165 do
1166 {
1167 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1168 break;
1169 if (TREE_CODE (p) == ADDR_EXPR)
1170 {
1171 tree q = TREE_OPERAND (p, 0);
1172 HOST_WIDE_INT offset;
1173 tree base = get_addr_base_and_unit_offset (q, &offset);
1174 if (base)
1175 {
1176 q = base;
1177 if (offset)
1178 off = size_binop (PLUS_EXPR, off, size_int (offset));
1179 }
1180 if (TREE_CODE (q) == MEM_REF
1181 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1182 {
1183 p = TREE_OPERAND (q, 0);
1184 off = size_binop (PLUS_EXPR, off,
1185 wide_int_to_tree (sizetype,
1186 mem_ref_offset (q)));
1187 }
1188 else
1189 {
1190 exps[i][j] = q;
1191 offs[i][j++] = off;
1192 break;
1193 }
1194 }
1195 if (TREE_CODE (p) != SSA_NAME)
1196 break;
1197 exps[i][j] = p;
1198 offs[i][j++] = off;
1199 if (j == CPD_ITERATIONS)
1200 break;
1201 stmt = SSA_NAME_DEF_STMT (p);
1202 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1203 break;
1204 code = gimple_assign_rhs_code (stmt);
1205 if (code == POINTER_PLUS_EXPR)
1206 {
1207 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1208 break;
1209 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1210 p = gimple_assign_rhs1 (stmt);
1211 }
1212 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1213 p = gimple_assign_rhs1 (stmt);
1214 else
1215 break;
1216 }
1217 while (1);
1218 cnt[i] = j;
1219 }
1220
1221 for (i = 0; i < cnt[0]; i++)
1222 for (j = 0; j < cnt[1]; j++)
1223 if (exps[0][i] == exps[1][j])
1224 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1225
1226 return NULL_TREE;
1227 }
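
/* E.g. for the illustrative sequence

     p1_1 = &buf[4];
     p2_2 = p1_1 p+ 6;

   constant_pointer_difference (p1_1, p2_2) finds the common base and
   returns 6; with no common base within CPD_ITERATIONS steps it
   returns NULL_TREE.  */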
1228
1229 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1230 Optimize
1231 memcpy (p, "abcd", 4);
1232 memset (p + 4, ' ', 3);
1233 into a single
1234 memcpy (p, "abcd   ", 7);
1235 call, if the latter can be stored by pieces during expansion. */
1236
1237 static bool
1238 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1239 {
1240 gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
1241 tree vuse = gimple_vuse (stmt2);
1242 if (vuse == NULL)
1243 return false;
1244 stmt1 = SSA_NAME_DEF_STMT (vuse);
1245
1246 switch (DECL_FUNCTION_CODE (callee2))
1247 {
1248 case BUILT_IN_MEMSET:
1249 if (gimple_call_num_args (stmt2) != 3
1250 || gimple_call_lhs (stmt2)
1251 || CHAR_BIT != 8
1252 || BITS_PER_UNIT != 8)
1253 break;
1254 else
1255 {
1256 tree callee1;
1257 tree ptr1, src1, str1, off1, len1, lhs1;
1258 tree ptr2 = gimple_call_arg (stmt2, 0);
1259 tree val2 = gimple_call_arg (stmt2, 1);
1260 tree len2 = gimple_call_arg (stmt2, 2);
1261 tree diff, vdef, new_str_cst;
1262 gimple *use_stmt;
1263 unsigned int ptr1_align;
1264 unsigned HOST_WIDE_INT src_len;
1265 char *src_buf;
1266 use_operand_p use_p;
1267
1268 if (!tree_fits_shwi_p (val2)
1269 || !tree_fits_uhwi_p (len2)
1270 || compare_tree_int (len2, 1024) == 1)
1271 break;
1272 if (is_gimple_call (stmt1))
1273 {
1274 /* If first stmt is a call, it needs to be memcpy
1275 or mempcpy, with string literal as second argument and
1276 constant length. */
1277 callee1 = gimple_call_fndecl (stmt1);
1278 if (callee1 == NULL_TREE
1279 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1280 || gimple_call_num_args (stmt1) != 3)
1281 break;
1282 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1283 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1284 break;
1285 ptr1 = gimple_call_arg (stmt1, 0);
1286 src1 = gimple_call_arg (stmt1, 1);
1287 len1 = gimple_call_arg (stmt1, 2);
1288 lhs1 = gimple_call_lhs (stmt1);
1289 if (!tree_fits_uhwi_p (len1))
1290 break;
1291 str1 = string_constant (src1, &off1);
1292 if (str1 == NULL_TREE)
1293 break;
1294 if (!tree_fits_uhwi_p (off1)
1295 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1296 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1297 - tree_to_uhwi (off1)) > 0
1298 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1299 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1300 != TYPE_MODE (char_type_node))
1301 break;
1302 }
1303 else if (gimple_assign_single_p (stmt1))
1304 {
1305 /* Otherwise look for length 1 memcpy optimized into
1306 assignment. */
1307 ptr1 = gimple_assign_lhs (stmt1);
1308 src1 = gimple_assign_rhs1 (stmt1);
1309 if (TREE_CODE (ptr1) != MEM_REF
1310 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1311 || !tree_fits_shwi_p (src1))
1312 break;
1313 ptr1 = build_fold_addr_expr (ptr1);
1314 callee1 = NULL_TREE;
1315 len1 = size_one_node;
1316 lhs1 = NULL_TREE;
1317 off1 = size_zero_node;
1318 str1 = NULL_TREE;
1319 }
1320 else
1321 break;
1322
1323 diff = constant_pointer_difference (ptr1, ptr2);
1324 if (diff == NULL && lhs1 != NULL)
1325 {
1326 diff = constant_pointer_difference (lhs1, ptr2);
1327 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1328 && diff != NULL)
1329 diff = size_binop (PLUS_EXPR, diff,
1330 fold_convert (sizetype, len1));
1331 }
1332 /* If the difference between the second and first destination pointer
1333 is not constant, or is bigger than the memcpy length, bail out. */
1334 if (diff == NULL
1335 || !tree_fits_uhwi_p (diff)
1336 || tree_int_cst_lt (len1, diff)
1337 || compare_tree_int (diff, 1024) == 1)
1338 break;
1339
1340 /* Use the maximum of the difference plus the memset length and the
1341 memcpy length as the new memcpy length; if it is too big, bail out. */
1342 src_len = tree_to_uhwi (diff);
1343 src_len += tree_to_uhwi (len2);
1344 if (src_len < tree_to_uhwi (len1))
1345 src_len = tree_to_uhwi (len1);
1346 if (src_len > 1024)
1347 break;
1348
1349 /* If the mempcpy value is used elsewhere, bail out, as mempcpy
1350 with a bigger length will return a different result. */
1351 if (lhs1 != NULL_TREE
1352 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1353 && (TREE_CODE (lhs1) != SSA_NAME
1354 || !single_imm_use (lhs1, &use_p, &use_stmt)
1355 || use_stmt != stmt2))
1356 break;
1357
1358 /* If anything reads memory in between memcpy and memset
1359 call, the modified memcpy call might change it. */
1360 vdef = gimple_vdef (stmt1);
1361 if (vdef != NULL
1362 && (!single_imm_use (vdef, &use_p, &use_stmt)
1363 || use_stmt != stmt2))
1364 break;
1365
1366 ptr1_align = get_pointer_alignment (ptr1);
1367 /* Construct the new source string literal. */
1368 src_buf = XALLOCAVEC (char, src_len + 1);
1369 if (callee1)
1370 memcpy (src_buf,
1371 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1372 tree_to_uhwi (len1));
1373 else
1374 src_buf[0] = tree_to_shwi (src1);
1375 memset (src_buf + tree_to_uhwi (diff),
1376 tree_to_shwi (val2), tree_to_uhwi (len2));
1377 src_buf[src_len] = '\0';
1378 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1379 handles embedded '\0's. */
1380 if (strlen (src_buf) != src_len)
1381 break;
1382 rtl_profile_for_bb (gimple_bb (stmt2));
1383 /* If the new memcpy wouldn't be emitted by storing the literal
1384 by pieces, this optimization might enlarge .rodata too much,
1385 as commonly used string literals couldn't be shared any
1386 longer. */
1387 if (!can_store_by_pieces (src_len,
1388 builtin_strncpy_read_str,
1389 src_buf, ptr1_align, false))
1390 break;
1391
1392 new_str_cst = build_string_literal (src_len, src_buf);
1393 if (callee1)
1394 {
1395 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1396 memset call. */
1397 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1398 gimple_call_set_lhs (stmt1, NULL_TREE);
1399 gimple_call_set_arg (stmt1, 1, new_str_cst);
1400 gimple_call_set_arg (stmt1, 2,
1401 build_int_cst (TREE_TYPE (len1), src_len));
1402 update_stmt (stmt1);
1403 unlink_stmt_vdef (stmt2);
1404 gsi_remove (gsi_p, true);
1405 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1406 release_defs (stmt2);
1407 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1408 {
1409 fwprop_invalidate_lattice (lhs1);
1410 release_ssa_name (lhs1);
1411 }
1412 return true;
1413 }
1414 else
1415 {
1416 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1417 assignment, remove STMT1 and change memset call into
1418 memcpy call. */
1419 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1420
1421 if (!is_gimple_val (ptr1))
1422 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1423 true, GSI_SAME_STMT);
1424 gimple_call_set_fndecl (stmt2,
1425 builtin_decl_explicit (BUILT_IN_MEMCPY));
1426 gimple_call_set_arg (stmt2, 0, ptr1);
1427 gimple_call_set_arg (stmt2, 1, new_str_cst);
1428 gimple_call_set_arg (stmt2, 2,
1429 build_int_cst (TREE_TYPE (len2), src_len));
1430 unlink_stmt_vdef (stmt1);
1431 gsi_remove (&gsi, true);
1432 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1433 release_defs (stmt1);
1434 update_stmt (stmt2);
1435 return false;
1436 }
1437 }
1438 break;
1439 default:
1440 break;
1441 }
1442 return false;
1443 }
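
/* The assignment form handled above covers a length 1 memcpy that was
   already optimized into a plain character store, e.g. (illustrative,
   with 32 == ' ')

     MEM[(char *)p_1] = 32;
     memset (p_1 p+ 1, 32, 3);

   which becomes

     memcpy (p_1, "    ", 4);

   the store is removed and the memset call is rewritten in place.  */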
1444
1445 /* Given an ssa_name in NAME, see if it was defined by an assignment and
1446 if so, set CODE to the code of the rhs, ARG1 to its first operand and
1447 ARG2 to its second operand. */
1448
1449 static inline void
1450 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1451 {
1452 gimple *def;
1453 enum tree_code code1;
1454 tree arg11;
1455 tree arg21;
1456 tree arg31;
1457 enum gimple_rhs_class grhs_class;
1458
1459 code1 = TREE_CODE (name);
1460 arg11 = name;
1461 arg21 = NULL_TREE;
1462 arg31 = NULL_TREE;
1463 grhs_class = get_gimple_rhs_class (code1);
1464
1465 if (code1 == SSA_NAME)
1466 {
1467 def = SSA_NAME_DEF_STMT (name);
1468
1469 if (def && is_gimple_assign (def)
1470 && can_propagate_from (def))
1471 {
1472 code1 = gimple_assign_rhs_code (def);
1473 arg11 = gimple_assign_rhs1 (def);
1474 arg21 = gimple_assign_rhs2 (def);
1475 arg31 = gimple_assign_rhs3 (def);
1476 }
1477 }
1478 else if (grhs_class != GIMPLE_SINGLE_RHS)
1479 code1 = ERROR_MARK;
1480
1481 *code = code1;
1482 *arg1 = arg11;
1483 if (arg2)
1484 *arg2 = arg21;
1485 if (arg31)
1486 *code = ERROR_MARK;
1487 }
1488
1489
1490 /* Recognize rotation patterns. Return true if a transformation
1491 was applied, otherwise return false.
1492
1493 We are looking for X with unsigned type T with bitsize B, OP being
1494 +, | or ^, some type T2 wider than T and
1495 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1496 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1497 (X << Y) OP (X >> (B - Y))
1498 (X << (int) Y) OP (X >> (int) (B - Y))
1499 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1500 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1501 (X << Y) | (X >> ((-Y) & (B - 1)))
1502 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1503 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1504 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1505
1506 and transform these into:
1507 X r<< CNT1
1508 X r<< Y
1509
1510 Note, in the patterns with the T2 type, the type of the OP operands
1511 might even be a signed type, but it should have precision B. */
1512
1513 static bool
1514 simplify_rotate (gimple_stmt_iterator *gsi)
1515 {
1516 gimple *stmt = gsi_stmt (*gsi);
1517 tree arg[2], rtype, rotcnt = NULL_TREE;
1518 tree def_arg1[2], def_arg2[2];
1519 enum tree_code def_code[2];
1520 tree lhs;
1521 int i;
1522 bool swapped_p = false;
1523 gimple *g;
1524
1525 arg[0] = gimple_assign_rhs1 (stmt);
1526 arg[1] = gimple_assign_rhs2 (stmt);
1527 rtype = TREE_TYPE (arg[0]);
1528
1529 /* Only create rotates in complete modes. Other cases are not
1530 expanded properly. */
1531 if (!INTEGRAL_TYPE_P (rtype)
1532 || !type_has_mode_precision_p (rtype))
1533 return false;
1534
1535 for (i = 0; i < 2; i++)
1536 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1537
1538 /* Look through narrowing conversions. */
1539 if (CONVERT_EXPR_CODE_P (def_code[0])
1540 && CONVERT_EXPR_CODE_P (def_code[1])
1541 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1542 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1543 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1544 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1545 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1546 && has_single_use (arg[0])
1547 && has_single_use (arg[1]))
1548 {
1549 for (i = 0; i < 2; i++)
1550 {
1551 arg[i] = def_arg1[i];
1552 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1553 }
1554 }
1555
1556 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1557 for (i = 0; i < 2; i++)
1558 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1559 return false;
1560 else if (!has_single_use (arg[i]))
1561 return false;
1562 if (def_code[0] == def_code[1])
1563 return false;
1564
1565 /* If we've looked through narrowing conversions before, look through
1566 widening conversions from unsigned type with the same precision
1567 as rtype here. */
1568 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1569 for (i = 0; i < 2; i++)
1570 {
1571 tree tem;
1572 enum tree_code code;
1573 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1574 if (!CONVERT_EXPR_CODE_P (code)
1575 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1576 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1577 return false;
1578 def_arg1[i] = tem;
1579 }
1580 /* Both shifts have to use the same first operand. */
1581 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
1582 return false;
1583 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1584 return false;
1585
1586 /* CNT1 + CNT2 == B case above. */
1587 if (tree_fits_uhwi_p (def_arg2[0])
1588 && tree_fits_uhwi_p (def_arg2[1])
1589 && tree_to_uhwi (def_arg2[0])
1590 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1591 rotcnt = def_arg2[0];
1592 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1593 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1594 return false;
1595 else
1596 {
1597 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1598 enum tree_code cdef_code[2];
1599 /* Look through conversion of the shift count argument.
1600 The C/C++ FE casts any shift count argument to integer_type_node.
1601 The only problem might be if the shift count type maximum value
1602 is equal or smaller than number of bits in rtype. */
1603 for (i = 0; i < 2; i++)
1604 {
1605 def_arg2_alt[i] = def_arg2[i];
1606 defcodefor_name (def_arg2[i], &cdef_code[i],
1607 &cdef_arg1[i], &cdef_arg2[i]);
1608 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1609 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1610 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1611 > floor_log2 (TYPE_PRECISION (rtype))
1612 && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
1613 {
1614 def_arg2_alt[i] = cdef_arg1[i];
1615 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1616 &cdef_arg1[i], &cdef_arg2[i]);
1617 }
1618 }
1619 for (i = 0; i < 2; i++)
1620 /* Check for one shift count being Y and the other B - Y,
1621 with optional casts. */
1622 if (cdef_code[i] == MINUS_EXPR
1623 && tree_fits_shwi_p (cdef_arg1[i])
1624 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1625 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1626 {
1627 tree tem;
1628 enum tree_code code;
1629
1630 if (cdef_arg2[i] == def_arg2[1 - i]
1631 || cdef_arg2[i] == def_arg2_alt[1 - i])
1632 {
1633 rotcnt = cdef_arg2[i];
1634 break;
1635 }
1636 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1637 if (CONVERT_EXPR_CODE_P (code)
1638 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1639 && TYPE_PRECISION (TREE_TYPE (tem))
1640 > floor_log2 (TYPE_PRECISION (rtype))
1641 && type_has_mode_precision_p (TREE_TYPE (tem))
1642 && (tem == def_arg2[1 - i]
1643 || tem == def_arg2_alt[1 - i]))
1644 {
1645 rotcnt = tem;
1646 break;
1647 }
1648 }
1649 /* The above sequence isn't safe for Y being 0,
1650 because then one of the shifts triggers undefined behavior.
1651 This alternative is safe even for a rotation count of 0.
1652 One shift count is Y and the other (-Y) & (B - 1). */
1653 else if (cdef_code[i] == BIT_AND_EXPR
1654 && tree_fits_shwi_p (cdef_arg2[i])
1655 && tree_to_shwi (cdef_arg2[i])
1656 == TYPE_PRECISION (rtype) - 1
1657 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1658 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1659 {
1660 tree tem;
1661 enum tree_code code;
1662
1663 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1664 if (CONVERT_EXPR_CODE_P (code)
1665 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1666 && TYPE_PRECISION (TREE_TYPE (tem))
1667 > floor_log2 (TYPE_PRECISION (rtype))
1668 && type_has_mode_precision_p (TREE_TYPE (tem)))
1669 defcodefor_name (tem, &code, &tem, NULL);
1670
1671 if (code == NEGATE_EXPR)
1672 {
1673 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1674 {
1675 rotcnt = tem;
1676 break;
1677 }
1678 defcodefor_name (tem, &code, &tem, NULL);
1679 if (CONVERT_EXPR_CODE_P (code)
1680 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1681 && TYPE_PRECISION (TREE_TYPE (tem))
1682 > floor_log2 (TYPE_PRECISION (rtype))
1683 && type_has_mode_precision_p (TREE_TYPE (tem))
1684 && (tem == def_arg2[1 - i]
1685 || tem == def_arg2_alt[1 - i]))
1686 {
1687 rotcnt = tem;
1688 break;
1689 }
1690 }
1691 }
1692 if (rotcnt == NULL_TREE)
1693 return false;
1694 swapped_p = i != 1;
1695 }
1696
1697 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1698 TREE_TYPE (rotcnt)))
1699 {
1700 g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1701 NOP_EXPR, rotcnt);
1702 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1703 rotcnt = gimple_assign_lhs (g);
1704 }
1705 lhs = gimple_assign_lhs (stmt);
1706 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1707 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1708 g = gimple_build_assign (lhs,
1709 ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1710 ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1711 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1712 {
1713 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1714 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1715 }
1716 gsi_replace (gsi, g, false);
1717 return true;
1718 }
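
/* The simplest instance of the patterns above, for a 32-bit unsigned
   x_2 (illustrative names):

     t1_1 = x_2 << 3;
     t2_3 = x_2 >> 29;
     r_4 = t1_1 | t2_3;

   is replaced by a single rotate

     r_4 = x_2 r<< 3;

   because the two shift counts sum to the precision B == 32.  */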
1719
1720 /* Combine an element access with a shuffle. Returns true if any
1721 changes were made, otherwise returns false. */
1722
1723 static bool
1724 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1725 {
1726 gimple *stmt = gsi_stmt (*gsi);
1727 gimple *def_stmt;
1728 tree op, op0, op1, op2;
1729 tree elem_type;
1730 unsigned idx, n, size;
1731 enum tree_code code;
1732
1733 op = gimple_assign_rhs1 (stmt);
1734 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1735
1736 op0 = TREE_OPERAND (op, 0);
1737 if (TREE_CODE (op0) != SSA_NAME
1738 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1739 return false;
1740
1741 def_stmt = get_prop_source_stmt (op0, false, NULL);
1742 if (!def_stmt || !can_propagate_from (def_stmt))
1743 return false;
1744
1745 op1 = TREE_OPERAND (op, 1);
1746 op2 = TREE_OPERAND (op, 2);
1747 code = gimple_assign_rhs_code (def_stmt);
1748
1749 if (code == CONSTRUCTOR)
1750 {
1751 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1752 gimple_assign_rhs1 (def_stmt), op1, op2);
1753 if (!tem || !valid_gimple_rhs_p (tem))
1754 return false;
1755 gimple_assign_set_rhs_from_tree (gsi, tem);
1756 update_stmt (gsi_stmt (*gsi));
1757 return true;
1758 }
1759
1760 elem_type = TREE_TYPE (TREE_TYPE (op0));
1761 if (TREE_TYPE (op) != elem_type)
1762 return false;
1763
1764 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1765 n = TREE_INT_CST_LOW (op1) / size;
1766 if (n != 1)
1767 return false;
1768 idx = TREE_INT_CST_LOW (op2) / size;
1769
1770 if (code == VEC_PERM_EXPR)
1771 {
1772 tree p, m, tem;
1773 unsigned nelts;
1774 m = gimple_assign_rhs3 (def_stmt);
1775 if (TREE_CODE (m) != VECTOR_CST)
1776 return false;
1777 nelts = VECTOR_CST_NELTS (m);
1778 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1779 idx %= 2 * nelts;
1780 if (idx < nelts)
1781 {
1782 p = gimple_assign_rhs1 (def_stmt);
1783 }
1784 else
1785 {
1786 p = gimple_assign_rhs2 (def_stmt);
1787 idx -= nelts;
1788 }
1789 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1790 unshare_expr (p), op1, bitsize_int (idx * size));
1791 gimple_assign_set_rhs1 (stmt, tem);
1792 fold_stmt (gsi);
1793 update_stmt (gsi_stmt (*gsi));
1794 return true;
1795 }
1796
1797 return false;
1798 }
1799
1800 /* Determine whether applying the two permutations (MASK1 then MASK2)
1801 gives back one of the inputs: returns 1 for the first, 2 for the second, 0 if neither.  */
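/* For instance (illustrative), with mask1 = { 1, 0 } and mask2 = { 1, 0 }
   the combined mask folds to { 0, 1 }: swapping twice is the identity on
   the first input, so the function returns 1 and the caller may replace
   the outer VEC_PERM_EXPR by the inner permutation's first operand.  */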
1802
1803 static int
1804 is_combined_permutation_identity (tree mask1, tree mask2)
1805 {
1806 tree mask;
1807 unsigned int nelts, i, j;
1808 bool maybe_identity1 = true;
1809 bool maybe_identity2 = true;
1810
1811 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1812 && TREE_CODE (mask2) == VECTOR_CST);
1813 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1814 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1815
1816 nelts = VECTOR_CST_NELTS (mask);
1817 for (i = 0; i < nelts; i++)
1818 {
1819 tree val = VECTOR_CST_ELT (mask, i);
1820 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1821 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1822 if (j == i)
1823 maybe_identity2 = false;
1824 else if (j == i + nelts)
1825 maybe_identity1 = false;
1826 else
1827 return 0;
1828 }
1829 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1830 }
1831
1832 /* Combine a shuffle with its arguments.  Returns 1 if any changes were
1833 made, 2 if cfg-cleanup needs to run, and 0 otherwise.  */
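/* A minimal illustrative case is a shuffle of constants:

     x_1 = VEC_PERM_EXPR <{ 1, 2, 3, 4 }, { 1, 2, 3, 4 }, { 3, 2, 1, 0 }>;

   folds via fold_ternary to the VECTOR_CST { 4, 3, 2, 1 }, turning the
   statement into a plain constant assignment.  */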
1834
1835 static int
1836 simplify_permutation (gimple_stmt_iterator *gsi)
1837 {
1838 gimple *stmt = gsi_stmt (*gsi);
1839 gimple *def_stmt;
1840 tree op0, op1, op2, op3, arg0, arg1;
1841 enum tree_code code;
1842 bool single_use_op0 = false;
1843
1844 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1845
1846 op0 = gimple_assign_rhs1 (stmt);
1847 op1 = gimple_assign_rhs2 (stmt);
1848 op2 = gimple_assign_rhs3 (stmt);
1849
1850 if (TREE_CODE (op2) != VECTOR_CST)
1851 return 0;
1852
1853 if (TREE_CODE (op0) == VECTOR_CST)
1854 {
1855 code = VECTOR_CST;
1856 arg0 = op0;
1857 }
1858 else if (TREE_CODE (op0) == SSA_NAME)
1859 {
1860 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1861 if (!def_stmt || !can_propagate_from (def_stmt))
1862 return 0;
1863
1864 code = gimple_assign_rhs_code (def_stmt);
1865 arg0 = gimple_assign_rhs1 (def_stmt);
1866 }
1867 else
1868 return 0;
1869
1870 /* Two consecutive shuffles. */
1871 if (code == VEC_PERM_EXPR)
1872 {
1873 tree orig;
1874 int ident;
1875
1876 if (op0 != op1)
1877 return 0;
1878 op3 = gimple_assign_rhs3 (def_stmt);
1879 if (TREE_CODE (op3) != VECTOR_CST)
1880 return 0;
1881 ident = is_combined_permutation_identity (op3, op2);
1882 if (!ident)
1883 return 0;
1884 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1885 : gimple_assign_rhs2 (def_stmt);
1886 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1887 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1888 gimple_set_num_ops (stmt, 2);
1889 update_stmt (stmt);
1890 return remove_prop_source_from_use (op0) ? 2 : 1;
1891 }
1892
1893 /* Shuffle of a constructor. */
1894 else if (code == CONSTRUCTOR || code == VECTOR_CST)
1895 {
1896 tree opt;
1897 bool ret = false;
1898 if (op0 != op1)
1899 {
1900 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1901 return 0;
1902
1903 if (TREE_CODE (op1) == VECTOR_CST)
1904 arg1 = op1;
1905 else if (TREE_CODE (op1) == SSA_NAME)
1906 {
1907 enum tree_code code2;
1908
1909 gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1910 if (!def_stmt2 || !can_propagate_from (def_stmt2))
1911 return 0;
1912
1913 code2 = gimple_assign_rhs_code (def_stmt2);
1914 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1915 return 0;
1916 arg1 = gimple_assign_rhs1 (def_stmt2);
1917 }
1918 else
1919 return 0;
1920 }
1921 else
1922 {
1923 /* Already used twice in this statement. */
1924 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1925 return 0;
1926 arg1 = arg0;
1927 }
1928 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1929 if (!opt
1930 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1931 return 0;
1932 gimple_assign_set_rhs_from_tree (gsi, opt);
1933 update_stmt (gsi_stmt (*gsi));
1934 if (TREE_CODE (op0) == SSA_NAME)
1935 ret = remove_prop_source_from_use (op0);
1936 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1937 ret |= remove_prop_source_from_use (op1);
1938 return ret ? 2 : 1;
1939 }
1940
1941 return 0;
1942 }
1943
1944 /* Recognize a VEC_PERM_EXPR in a CONSTRUCTOR.  Returns true if any changes were made.  */
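/* Illustrative sketch: a CONSTRUCTOR whose elements are SSA names each
   defined as a one-element BIT_FIELD_REF of the same vector w_1
   (bit offsets 32, 0, 96, 64 with 32-bit elements) yields the selector
   { 1, 0, 3, 2 } and is rewritten as

     v_5 = VEC_PERM_EXPR <w_1, w_1, { 1, 0, 3, 2 }>;

   provided can_vec_perm_p says the target supports that permutation.  */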
1945
1946 static bool
1947 simplify_vector_constructor (gimple_stmt_iterator *gsi)
1948 {
1949 gimple *stmt = gsi_stmt (*gsi);
1950 gimple *def_stmt;
1951 tree op, op2, orig, type, elem_type;
1952 unsigned elem_size, nelts, i;
1953 enum tree_code code, conv_code;
1954 constructor_elt *elt;
1955 unsigned char *sel;
1956 bool maybe_ident;
1957
1958 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
1959
1960 op = gimple_assign_rhs1 (stmt);
1961 type = TREE_TYPE (op);
1962 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
1963
1964 nelts = TYPE_VECTOR_SUBPARTS (type);
1965 elem_type = TREE_TYPE (type);
1966 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1967
1968 sel = XALLOCAVEC (unsigned char, nelts);
1969 orig = NULL;
1970 conv_code = ERROR_MARK;
1971 maybe_ident = true;
1972 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
1973 {
1974 tree ref, op1;
1975
1976 if (i >= nelts)
1977 return false;
1978
1979 if (TREE_CODE (elt->value) != SSA_NAME)
1980 return false;
1981 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
1982 if (!def_stmt)
1983 return false;
1984 code = gimple_assign_rhs_code (def_stmt);
1985 if (code == FLOAT_EXPR
1986 || code == FIX_TRUNC_EXPR)
1987 {
1988 op1 = gimple_assign_rhs1 (def_stmt);
1989 if (conv_code == ERROR_MARK)
1990 {
1991 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value)))
1992 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1))))
1993 return false;
1994 conv_code = code;
1995 }
1996 else if (conv_code != code)
1997 return false;
1998 if (TREE_CODE (op1) != SSA_NAME)
1999 return false;
2000 def_stmt = SSA_NAME_DEF_STMT (op1);
2001 if (! is_gimple_assign (def_stmt))
2002 return false;
2003 code = gimple_assign_rhs_code (def_stmt);
2004 }
2005 if (code != BIT_FIELD_REF)
2006 return false;
2007 op1 = gimple_assign_rhs1 (def_stmt);
2008 ref = TREE_OPERAND (op1, 0);
2009 if (orig)
2010 {
2011 if (ref != orig)
2012 return false;
2013 }
2014 else
2015 {
2016 if (TREE_CODE (ref) != SSA_NAME)
2017 return false;
2018 if (! VECTOR_TYPE_P (TREE_TYPE (ref))
2019 || ! useless_type_conversion_p (TREE_TYPE (op1),
2020 TREE_TYPE (TREE_TYPE (ref))))
2021 return false;
2022 orig = ref;
2023 }
2024 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2025 return false;
2026 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2027 if (sel[i] != i) maybe_ident = false;
2028 }
2029 if (i < nelts)
2030 return false;
2031
2032 if (! VECTOR_TYPE_P (TREE_TYPE (orig))
2033 || (TYPE_VECTOR_SUBPARTS (type)
2034 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig))))
2035 return false;
2036
2037 tree tem;
2038 if (conv_code != ERROR_MARK
2039 && (! supportable_convert_operation (conv_code, type, TREE_TYPE (orig),
2040 &tem, &conv_code)
2041 || conv_code == CALL_EXPR))
2042 return false;
2043
2044 if (maybe_ident)
2045 {
2046 if (conv_code == ERROR_MARK)
2047 gimple_assign_set_rhs_from_tree (gsi, orig);
2048 else
2049 gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
2050 NULL_TREE, NULL_TREE);
2051 }
2052 else
2053 {
2054 tree mask_type;
2055
2056 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2057 return false;
2058 mask_type
2059 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2060 nelts);
2061 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2062 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2063 != GET_MODE_SIZE (TYPE_MODE (type)))
2064 return false;
2065 auto_vec<tree, 32> mask_elts (nelts);
2066 for (i = 0; i < nelts; i++)
2067 mask_elts.quick_push (build_int_cst (TREE_TYPE (mask_type), sel[i]));
2068 op2 = build_vector (mask_type, mask_elts);
2069 if (conv_code == ERROR_MARK)
2070 gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
2071 else
2072 {
2073 gimple *perm
2074 = gimple_build_assign (make_ssa_name (TREE_TYPE (orig)),
2075 VEC_PERM_EXPR, orig, orig, op2);
2076 orig = gimple_assign_lhs (perm);
2077 gsi_insert_before (gsi, perm, GSI_SAME_STMT);
2078 gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
2079 NULL_TREE, NULL_TREE);
2080 }
2081 }
2082 update_stmt (gsi_stmt (*gsi));
2083 return true;
2084 }
2085
2086
2087 /* Primitive "lattice" function for gimple_simplify. */
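/* E.g. if the lattice records that x_3 is a copy of y_1, valueizing x_3
   returns y_1, so gimple_simplify matches patterns against y_1 directly.  */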
2088
2089 static tree
2090 fwprop_ssa_val (tree name)
2091 {
2092 /* First valueize NAME. */
2093 if (TREE_CODE (name) == SSA_NAME
2094 && SSA_NAME_VERSION (name) < lattice.length ())
2095 {
2096 tree val = lattice[SSA_NAME_VERSION (name)];
2097 if (val)
2098 name = val;
2099 }
2100 /* We continue matching along SSA use-def edges for SSA names
2101 that are not single-use. Currently there are no patterns
2102 that would cause any issues with that. */
2103 return name;
2104 }
2105
2106 /* Main entry point for the forward propagation and statement combine
2107 optimizer. */
2108
2109 namespace {
2110
2111 const pass_data pass_data_forwprop =
2112 {
2113 GIMPLE_PASS, /* type */
2114 "forwprop", /* name */
2115 OPTGROUP_NONE, /* optinfo_flags */
2116 TV_TREE_FORWPROP, /* tv_id */
2117 ( PROP_cfg | PROP_ssa ), /* properties_required */
2118 0, /* properties_provided */
2119 0, /* properties_destroyed */
2120 0, /* todo_flags_start */
2121 TODO_update_ssa, /* todo_flags_finish */
2122 };
2123
2124 class pass_forwprop : public gimple_opt_pass
2125 {
2126 public:
2127 pass_forwprop (gcc::context *ctxt)
2128 : gimple_opt_pass (pass_data_forwprop, ctxt)
2129 {}
2130
2131 /* opt_pass methods: */
2132 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2133 virtual bool gate (function *) { return flag_tree_forwprop; }
2134 virtual unsigned int execute (function *);
2135
2136 }; // class pass_forwprop
2137
2138 unsigned int
2139 pass_forwprop::execute (function *fun)
2140 {
2141 unsigned int todoflags = 0;
2142
2143 cfg_changed = false;
2144
2145 /* Combine stmts with the stmts defining their operands. Do that
2146 in an order that guarantees visiting SSA defs before SSA uses. */
2147 lattice.create (num_ssa_names);
2148 lattice.quick_grow_cleared (num_ssa_names);
2149 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2150 int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
2151 postorder, false);
2152 auto_vec<gimple *, 4> to_fixup;
2153 to_purge = BITMAP_ALLOC (NULL);
2154 for (int i = 0; i < postorder_num; ++i)
2155 {
2156 gimple_stmt_iterator gsi;
2157 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2158
2159 /* Propagate into PHIs and record degenerate ones in the lattice. */
2160 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
2161 gsi_next (&si))
2162 {
2163 gphi *phi = si.phi ();
2164 tree res = gimple_phi_result (phi);
2165 if (virtual_operand_p (res))
2166 continue;
2167
2168 use_operand_p use_p;
2169 ssa_op_iter it;
2170 tree first = NULL_TREE;
2171 bool all_same = true;
2172 FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
2173 {
2174 tree use = USE_FROM_PTR (use_p);
2175 tree tem = fwprop_ssa_val (use);
2176 if (! first)
2177 first = tem;
2178 else if (! operand_equal_p (first, tem, 0))
2179 all_same = false;
2180 if (tem != use
2181 && may_propagate_copy (use, tem))
2182 propagate_value (use_p, tem);
2183 }
2184 if (all_same)
2185 fwprop_set_lattice_val (res, first);
2186 }
2187
2188 /* Apply forward propagation to all stmts in the basic-block.
2189 Note we update GSI within the loop as necessary. */
2190 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2191 {
2192 gimple *stmt = gsi_stmt (gsi);
2193 tree lhs, rhs;
2194 enum tree_code code;
2195
2196 if (!is_gimple_assign (stmt))
2197 {
2198 gsi_next (&gsi);
2199 continue;
2200 }
2201
2202 lhs = gimple_assign_lhs (stmt);
2203 rhs = gimple_assign_rhs1 (stmt);
2204 code = gimple_assign_rhs_code (stmt);
2205 if (TREE_CODE (lhs) != SSA_NAME
2206 || has_zero_uses (lhs))
2207 {
2208 gsi_next (&gsi);
2209 continue;
2210 }
2211
2212 /* If this statement sets an SSA_NAME to an address,
2213 try to propagate the address into the uses of the SSA_NAME. */
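/* For instance (illustrative), given  ptr_1 = &a.b;  a dereference
   *ptr_1 in a use statement can be folded into the direct access a.b,
   often leaving ptr_1 dead.  */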
2214 if (code == ADDR_EXPR
2215 /* Handle pointer conversions on invariant addresses
2216 as well, as this is valid gimple. */
2217 || (CONVERT_EXPR_CODE_P (code)
2218 && TREE_CODE (rhs) == ADDR_EXPR
2219 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2220 {
2221 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2222 if ((!base
2223 || !DECL_P (base)
2224 || decl_address_invariant_p (base))
2225 && !stmt_references_abnormal_ssa_name (stmt)
2226 && forward_propagate_addr_expr (lhs, rhs, true))
2227 {
2228 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2229 release_defs (stmt);
2230 gsi_remove (&gsi, true);
2231 }
2232 else
2233 gsi_next (&gsi);
2234 }
2235 else if (code == POINTER_PLUS_EXPR)
2236 {
2237 tree off = gimple_assign_rhs2 (stmt);
2238 if (TREE_CODE (off) == INTEGER_CST
2239 && can_propagate_from (stmt)
2240 && !simple_iv_increment_p (stmt)
2241 /* ??? Better adjust the interface to that function
2242 instead of building new trees here. */
2243 && forward_propagate_addr_expr
2244 (lhs,
2245 build1_loc (gimple_location (stmt),
2246 ADDR_EXPR, TREE_TYPE (rhs),
2247 fold_build2 (MEM_REF,
2248 TREE_TYPE (TREE_TYPE (rhs)),
2249 rhs,
2250 fold_convert (ptr_type_node,
2251 off))), true))
2252 {
2253 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2254 release_defs (stmt);
2255 gsi_remove (&gsi, true);
2256 }
2257 else if (is_gimple_min_invariant (rhs))
2258 {
2259 /* Make sure to fold &a[0] + off_1 here. */
2260 fold_stmt_inplace (&gsi);
2261 update_stmt (stmt);
2262 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2263 gsi_next (&gsi);
2264 }
2265 else
2266 gsi_next (&gsi);
2267 }
2268 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2269 && gimple_assign_load_p (stmt)
2270 && !gimple_has_volatile_ops (stmt)
2271 && (TREE_CODE (gimple_assign_rhs1 (stmt))
2272 != TARGET_MEM_REF)
2273 && !stmt_can_throw_internal (stmt))
2274 {
2275 /* Rewrite loads used only in real/imagpart extractions to
2276 component-wise loads. */
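/* Illustrative sketch:
     tem_1 = MEM[(complex float *)p_2];
     r_3 = REALPART_EXPR <tem_1>;
   becomes
     r_3 = REALPART_EXPR <MEM[(complex float *)p_2]>;
   and the full complex load goes away.  */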
2277 use_operand_p use_p;
2278 imm_use_iterator iter;
2279 bool rewrite = true;
2280 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2281 {
2282 gimple *use_stmt = USE_STMT (use_p);
2283 if (is_gimple_debug (use_stmt))
2284 continue;
2285 if (!is_gimple_assign (use_stmt)
2286 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2287 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
2288 {
2289 rewrite = false;
2290 break;
2291 }
2292 }
2293 if (rewrite)
2294 {
2295 gimple *use_stmt;
2296 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2297 {
2298 if (is_gimple_debug (use_stmt))
2299 {
2300 if (gimple_debug_bind_p (use_stmt))
2301 {
2302 gimple_debug_bind_reset_value (use_stmt);
2303 update_stmt (use_stmt);
2304 }
2305 continue;
2306 }
2307
2308 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2309 TREE_TYPE (TREE_TYPE (rhs)),
2310 unshare_expr (rhs));
2311 gimple *new_stmt
2312 = gimple_build_assign (gimple_assign_lhs (use_stmt),
2313 new_rhs);
2314
2315 location_t loc = gimple_location (use_stmt);
2316 gimple_set_location (new_stmt, loc);
2317 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2318 unlink_stmt_vdef (use_stmt);
2319 gsi_remove (&gsi2, true);
2320
2321 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2322 }
2323
2324 release_defs (stmt);
2325 gsi_remove (&gsi, true);
2326 }
2327 else
2328 gsi_next (&gsi);
2329 }
2330 else if (code == COMPLEX_EXPR)
2331 {
2332 /* Rewrite stores of a single-use complex build expression
2333 to component-wise stores. */
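/* Illustrative sketch:
     x_1 = COMPLEX_EXPR <r_2, i_3>;
     MEM[p_4] = x_1;
   becomes
     REALPART_EXPR <MEM[p_4]> = r_2;
     IMAGPART_EXPR <MEM[p_4]> = i_3;  */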
2334 use_operand_p use_p;
2335 gimple *use_stmt;
2336 if (single_imm_use (lhs, &use_p, &use_stmt)
2337 && gimple_store_p (use_stmt)
2338 && !gimple_has_volatile_ops (use_stmt)
2339 && is_gimple_assign (use_stmt)
2340 && (TREE_CODE (gimple_assign_lhs (use_stmt))
2341 != TARGET_MEM_REF))
2342 {
2343 tree use_lhs = gimple_assign_lhs (use_stmt);
2344 tree new_lhs = build1 (REALPART_EXPR,
2345 TREE_TYPE (TREE_TYPE (use_lhs)),
2346 unshare_expr (use_lhs));
2347 gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
2348 location_t loc = gimple_location (use_stmt);
2349 gimple_set_location (new_stmt, loc);
2350 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2351 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2352 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2353 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2354 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2355 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2356
2357 new_lhs = build1 (IMAGPART_EXPR,
2358 TREE_TYPE (TREE_TYPE (use_lhs)),
2359 unshare_expr (use_lhs));
2360 gimple_assign_set_lhs (use_stmt, new_lhs);
2361 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
2362 update_stmt (use_stmt);
2363
2364 release_defs (stmt);
2365 gsi_remove (&gsi, true);
2366 }
2367 else
2368 gsi_next (&gsi);
2369 }
2370 else
2371 gsi_next (&gsi);
2372 }
2373
2374 /* Combine stmts with the stmts defining their operands.
2375 Note we update GSI within the loop as necessary. */
2376 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2377 {
2378 gimple *stmt = gsi_stmt (gsi);
2379 gimple *orig_stmt = stmt;
2380 bool changed = false;
2381 bool was_noreturn = (is_gimple_call (stmt)
2382 && gimple_call_noreturn_p (stmt));
2383
2384 /* Mark stmt as potentially needing revisiting. */
2385 gimple_set_plf (stmt, GF_PLF_1, false);
2386
2387 if (fold_stmt (&gsi, fwprop_ssa_val))
2388 {
2389 changed = true;
2390 stmt = gsi_stmt (gsi);
2391 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2392 bitmap_set_bit (to_purge, bb->index);
2393 if (!was_noreturn
2394 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2395 to_fixup.safe_push (stmt);
2396 /* Clean up the CFG if we simplified a condition to
2397 true or false. */
2398 if (gcond *cond = dyn_cast <gcond *> (stmt))
2399 if (gimple_cond_true_p (cond)
2400 || gimple_cond_false_p (cond))
2401 cfg_changed = true;
2402 update_stmt (stmt);
2403 }
2404
2405 switch (gimple_code (stmt))
2406 {
2407 case GIMPLE_ASSIGN:
2408 {
2409 tree rhs1 = gimple_assign_rhs1 (stmt);
2410 enum tree_code code = gimple_assign_rhs_code (stmt);
2411
2412 if (code == COND_EXPR
2413 || code == VEC_COND_EXPR)
2414 {
2415 /* In this case the entire COND_EXPR is in rhs1. */
2416 if (forward_propagate_into_cond (&gsi))
2417 {
2418 changed = true;
2419 stmt = gsi_stmt (gsi);
2420 }
2421 }
2422 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2423 {
2424 int did_something;
2425 did_something = forward_propagate_into_comparison (&gsi);
2426 if (did_something == 2)
2427 cfg_changed = true;
2428 changed = did_something != 0;
2429 }
2430 else if ((code == PLUS_EXPR
2431 || code == BIT_IOR_EXPR
2432 || code == BIT_XOR_EXPR)
2433 && simplify_rotate (&gsi))
2434 changed = true;
2435 else if (code == VEC_PERM_EXPR)
2436 {
2437 int did_something = simplify_permutation (&gsi);
2438 if (did_something == 2)
2439 cfg_changed = true;
2440 changed = did_something != 0;
2441 }
2442 else if (code == BIT_FIELD_REF)
2443 changed = simplify_bitfield_ref (&gsi);
2444 else if (code == CONSTRUCTOR
2445 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2446 changed = simplify_vector_constructor (&gsi);
2447 break;
2448 }
2449
2450 case GIMPLE_SWITCH:
2451 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
2452 break;
2453
2454 case GIMPLE_COND:
2455 {
2456 int did_something
2457 = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
2458 if (did_something == 2)
2459 cfg_changed = true;
2460 changed = did_something != 0;
2461 break;
2462 }
2463
2464 case GIMPLE_CALL:
2465 {
2466 tree callee = gimple_call_fndecl (stmt);
2467 if (callee != NULL_TREE
2468 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2469 changed = simplify_builtin_call (&gsi, callee);
2470 break;
2471 }
2472
2473 default:;
2474 }
2475
2476 if (changed)
2477 {
2478 /* If the stmt changed then re-visit it and the statements
2479 inserted before it. */
2480 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2481 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2482 break;
2483 if (gsi_end_p (gsi))
2484 gsi = gsi_start_bb (bb);
2485 else
2486 gsi_next (&gsi);
2487 }
2488 else
2489 {
2490 /* Stmt no longer needs to be revisited. */
2491 gimple_set_plf (stmt, GF_PLF_1, true);
2492
2493 /* Fill up the lattice. */
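/* E.g. for a copy  y_2 = x_1;  record the valueized RHS as the lattice
   value of y_2, so later statements simplify against x_1 directly.  */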
2494 if (gimple_assign_single_p (stmt))
2495 {
2496 tree lhs = gimple_assign_lhs (stmt);
2497 tree rhs = gimple_assign_rhs1 (stmt);
2498 if (TREE_CODE (lhs) == SSA_NAME)
2499 {
2500 tree val = lhs;
2501 if (TREE_CODE (rhs) == SSA_NAME)
2502 val = fwprop_ssa_val (rhs);
2503 else if (is_gimple_min_invariant (rhs))
2504 val = rhs;
2505 fwprop_set_lattice_val (lhs, val);
2506 }
2507 }
2508
2509 gsi_next (&gsi);
2510 }
2511 }
2512 }
2513 free (postorder);
2514 lattice.release ();
2515
2516 /* Fix up stmts that became noreturn calls.  This may require splitting
2517 blocks and thus isn't possible during the walk.  Do this
2518 in reverse order so we don't inadvertently remove a stmt we want to
2519 fix up by visiting a dominating now-noreturn call first.  */
2520 while (!to_fixup.is_empty ())
2521 {
2522 gimple *stmt = to_fixup.pop ();
2523 if (dump_file && dump_flags & TDF_DETAILS)
2524 {
2525 fprintf (dump_file, "Fixing up noreturn call ");
2526 print_gimple_stmt (dump_file, stmt, 0);
2527 fprintf (dump_file, "\n");
2528 }
2529 cfg_changed |= fixup_noreturn_call (stmt);
2530 }
2531
2532 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2533 BITMAP_FREE (to_purge);
2534
2535 if (cfg_changed)
2536 todoflags |= TODO_cleanup_cfg;
2537
2538 return todoflags;
2539 }
2540
2541 } // anon namespace
2542
2543 gimple_opt_pass *
2544 make_pass_forwprop (gcc::context *ctxt)
2545 {
2546 return new pass_forwprop (ctxt);
2547 }