/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean, or x is an
   integral type and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
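
/* As a concrete source-level illustration (hypothetical, not from the
   original sources): given

     int x = a < b;
     if (x)
       f ();

   the first transformation above forwards the comparison into the
   branch, yielding

     if (a < b)
       f ();

   after which X and its defining statement are dead and can be
   removed.  */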

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the SSA name that represents the use is
   written to it.  */
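
/* For example (a hypothetical GIMPLE fragment, for exposition only):
   in the single-use chain

     b_2 = a_1;           <- trivial copy, skipped
     c_3 = b_2;           <- trivial copy, skipped
     if (c_3 != 0) ...    <- suitable propagation destination

   calling this function on a_1 returns the GIMPLE_COND and writes c_3
   to *FINAL_NAME_P, provided each name in the chain has exactly one
   use.  */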

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */
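
/* A hypothetical example: for

     t_1 = a_2 + 4;
     ... t_1 == 6 ...

   substituting the definition of t_1 lets fold_binary_loc reduce the
   combined comparison to a_2 == 2, which is returned.  */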

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */
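
/* For example (hypothetical): x_1 = a_2 < b_3; if (x_1 != 0) becomes
   if (a_2 < b_3).  Failing that, a boolean test such as if (x_1 == 0)
   is canonicalized below to if (x_1 != 0) with the true and false
   edges exchanged.  */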

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells whether, in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this at the top level.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */
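
/* E.g. (a hypothetical fragment): given cond_1 = x_2 < y_3 with
   integral operands and the use a_4 = ~cond_1 in a 1-bit type, the
   use is rewritten to a_4 = x_2 >= y_3 using the inverted comparison
   code, and cond_1's definition is removed if it became unused.  */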

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}


/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   when T is narrower than X's type and C merely masks off the bits
   outside of T and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}


/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */
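
/* E.g. (hypothetical): with x_1 = ~y_2 available, lhs_3 = ~x_1 is
   rewritten to lhs_3 = y_2; likewise t_4 = -u_5 followed by
   lhs_6 = -t_4 becomes lhs_6 = u_5.  */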

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */
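
/* A hypothetical example: for char buf[16], p1 = &buf[2] and p2
   defined by p2_3 = p1 p+ 5, both definition chains reach the common
   base buf with unit offsets 2 and 7, so the function returns 5.
   If no common base is found within CPD_ITERATIONS steps, NULL_TREE
   is returned.  */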
1405
1406 static tree
1407 constant_pointer_difference (tree p1, tree p2)
1408 {
1409 int i, j;
1410 #define CPD_ITERATIONS 5
1411 tree exps[2][CPD_ITERATIONS];
1412 tree offs[2][CPD_ITERATIONS];
1413 int cnt[2];
1414
1415 for (i = 0; i < 2; i++)
1416 {
1417 tree p = i ? p1 : p2;
1418 tree off = size_zero_node;
1419 gimple stmt;
1420 enum tree_code code;
1421
1422 /* For each of p1 and p2 we need to iterate at least
1423 twice, to handle ADDR_EXPR directly in p1/p2,
1424 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1425 on definition's stmt RHS. Iterate a few extra times. */
1426 j = 0;
1427 do
1428 {
1429 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1430 break;
1431 if (TREE_CODE (p) == ADDR_EXPR)
1432 {
1433 tree q = TREE_OPERAND (p, 0);
1434 HOST_WIDE_INT offset;
1435 tree base = get_addr_base_and_unit_offset (q, &offset);
1436 if (base)
1437 {
1438 q = base;
1439 if (offset)
1440 off = size_binop (PLUS_EXPR, off, size_int (offset));
1441 }
1442 if (TREE_CODE (q) == MEM_REF
1443 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1444 {
1445 p = TREE_OPERAND (q, 0);
1446 off = size_binop (PLUS_EXPR, off,
1447 double_int_to_tree (sizetype,
1448 mem_ref_offset (q)));
1449 }
1450 else
1451 {
1452 exps[i][j] = q;
1453 offs[i][j++] = off;
1454 break;
1455 }
1456 }
1457 if (TREE_CODE (p) != SSA_NAME)
1458 break;
1459 exps[i][j] = p;
1460 offs[i][j++] = off;
1461 if (j == CPD_ITERATIONS)
1462 break;
1463 stmt = SSA_NAME_DEF_STMT (p);
1464 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1465 break;
1466 code = gimple_assign_rhs_code (stmt);
1467 if (code == POINTER_PLUS_EXPR)
1468 {
1469 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1470 break;
1471 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1472 p = gimple_assign_rhs1 (stmt);
1473 }
1474 else if (code == ADDR_EXPR || code == NOP_EXPR)
1475 p = gimple_assign_rhs1 (stmt);
1476 else
1477 break;
1478 }
1479 while (1);
1480 cnt[i] = j;
1481 }
1482
1483 for (i = 0; i < cnt[0]; i++)
1484 for (j = 0; j < cnt[1]; j++)
1485 if (exps[0][i] == exps[1][j])
1486 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1487
1488 return NULL_TREE;
1489 }
1490
1491 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1492 Optimize
1493 memcpy (p, "abcd", 4);
1494 memset (p + 4, ' ', 3);
1495 into
1496 memcpy (p, "abcd ", 7);
1497 call if the latter can be stored by pieces during expansion. */
1498
1499 static bool
1500 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1501 {
1502 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1503 tree vuse = gimple_vuse (stmt2);
1504 if (vuse == NULL)
1505 return false;
1506 stmt1 = SSA_NAME_DEF_STMT (vuse);
1507
1508 switch (DECL_FUNCTION_CODE (callee2))
1509 {
1510 case BUILT_IN_MEMSET:
1511 if (gimple_call_num_args (stmt2) != 3
1512 || gimple_call_lhs (stmt2)
1513 || CHAR_BIT != 8
1514 || BITS_PER_UNIT != 8)
1515 break;
1516 else
1517 {
1518 tree callee1;
1519 tree ptr1, src1, str1, off1, len1, lhs1;
1520 tree ptr2 = gimple_call_arg (stmt2, 0);
1521 tree val2 = gimple_call_arg (stmt2, 1);
1522 tree len2 = gimple_call_arg (stmt2, 2);
1523 tree diff, vdef, new_str_cst;
1524 gimple use_stmt;
1525 unsigned int ptr1_align;
1526 unsigned HOST_WIDE_INT src_len;
1527 char *src_buf;
1528 use_operand_p use_p;
1529
1530 if (!tree_fits_shwi_p (val2)
1531 || !tree_fits_uhwi_p (len2))
1532 break;
1533 if (is_gimple_call (stmt1))
1534 {
1535 /* If first stmt is a call, it needs to be memcpy
1536 or mempcpy, with string literal as second argument and
1537 constant length. */
1538 callee1 = gimple_call_fndecl (stmt1);
1539 if (callee1 == NULL_TREE
1540 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1541 || gimple_call_num_args (stmt1) != 3)
1542 break;
1543 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1544 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1545 break;
1546 ptr1 = gimple_call_arg (stmt1, 0);
1547 src1 = gimple_call_arg (stmt1, 1);
1548 len1 = gimple_call_arg (stmt1, 2);
1549 lhs1 = gimple_call_lhs (stmt1);
1550 if (!tree_fits_uhwi_p (len1))
1551 break;
1552 str1 = string_constant (src1, &off1);
1553 if (str1 == NULL_TREE)
1554 break;
1555 if (!tree_fits_uhwi_p (off1)
1556 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1557 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1558 - tree_to_uhwi (off1)) > 0
1559 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1560 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1561 != TYPE_MODE (char_type_node))
1562 break;
1563 }
1564 else if (gimple_assign_single_p (stmt1))
1565 {
1566 /* Otherwise look for length 1 memcpy optimized into
1567 assignment. */
1568 ptr1 = gimple_assign_lhs (stmt1);
1569 src1 = gimple_assign_rhs1 (stmt1);
1570 if (TREE_CODE (ptr1) != MEM_REF
1571 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1572 || !tree_fits_shwi_p (src1))
1573 break;
1574 ptr1 = build_fold_addr_expr (ptr1);
1575 callee1 = NULL_TREE;
1576 len1 = size_one_node;
1577 lhs1 = NULL_TREE;
1578 off1 = size_zero_node;
1579 str1 = NULL_TREE;
1580 }
1581 else
1582 break;
1583
1584 diff = constant_pointer_difference (ptr1, ptr2);
1585 if (diff == NULL && lhs1 != NULL)
1586 {
1587 diff = constant_pointer_difference (lhs1, ptr2);
1588 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1589 && diff != NULL)
1590 diff = size_binop (PLUS_EXPR, diff,
1591 fold_convert (sizetype, len1));
1592 }
1593 /* If the difference between the second and first destination pointer
1594 is not constant, or is bigger than memcpy length, bail out. */
1595 if (diff == NULL
1596 || !tree_fits_uhwi_p (diff)
1597 || tree_int_cst_lt (len1, diff))
1598 break;
1599
1600 /* Use maximum of difference plus memset length and memcpy length
1601 as the new memcpy length, if it is too big, bail out. */
1602 src_len = tree_to_uhwi (diff);
1603 src_len += tree_to_uhwi (len2);
1604 if (src_len < tree_to_uhwi (len1))
1605 src_len = tree_to_uhwi (len1);
1606 if (src_len > 1024)
1607 break;
1608
1609 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1610 with bigger length will return different result. */
1611 if (lhs1 != NULL_TREE
1612 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1613 && (TREE_CODE (lhs1) != SSA_NAME
1614 || !single_imm_use (lhs1, &use_p, &use_stmt)
1615 || use_stmt != stmt2))
1616 break;
1617
1618 /* If anything reads memory in between memcpy and memset
1619 call, the modified memcpy call might change it. */
1620 vdef = gimple_vdef (stmt1);
1621 if (vdef != NULL
1622 && (!single_imm_use (vdef, &use_p, &use_stmt)
1623 || use_stmt != stmt2))
1624 break;
1625
1626 ptr1_align = get_pointer_alignment (ptr1);
1627 /* Construct the new source string literal. */
1628 src_buf = XALLOCAVEC (char, src_len + 1);
1629 if (callee1)
1630 memcpy (src_buf,
1631 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1632 tree_to_uhwi (len1));
1633 else
1634 src_buf[0] = tree_to_shwi (src1);
1635 memset (src_buf + tree_to_uhwi (diff),
1636 tree_to_shwi (val2), tree_to_uhwi (len2));
1637 src_buf[src_len] = '\0';
1638 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1639 handle embedded '\0's. */
1640 if (strlen (src_buf) != src_len)
1641 break;
1642 rtl_profile_for_bb (gimple_bb (stmt2));
1643 /* If the new memcpy wouldn't be emitted by storing the literal
1644 by pieces, this optimization might enlarge .rodata too much,
1645 as commonly used string literals couldn't be shared any
1646 longer. */
1647 if (!can_store_by_pieces (src_len,
1648 builtin_strncpy_read_str,
1649 src_buf, ptr1_align, false))
1650 break;
1651
1652 new_str_cst = build_string_literal (src_len, src_buf);
1653 if (callee1)
1654 {
1655 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1656 memset call. */
1657 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1658 gimple_call_set_lhs (stmt1, NULL_TREE);
1659 gimple_call_set_arg (stmt1, 1, new_str_cst);
1660 gimple_call_set_arg (stmt1, 2,
1661 build_int_cst (TREE_TYPE (len1), src_len));
1662 update_stmt (stmt1);
1663 unlink_stmt_vdef (stmt2);
1664 gsi_remove (gsi_p, true);
1665 release_defs (stmt2);
1666 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1667 release_ssa_name (lhs1);
1668 return true;
1669 }
1670 else
1671 {
1672 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1673 assignment, remove STMT1 and change memset call into
1674 memcpy call. */
1675 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1676
1677 if (!is_gimple_val (ptr1))
1678 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1679 true, GSI_SAME_STMT);
1680 gimple_call_set_fndecl (stmt2,
1681 builtin_decl_explicit (BUILT_IN_MEMCPY));
1682 gimple_call_set_arg (stmt2, 0, ptr1);
1683 gimple_call_set_arg (stmt2, 1, new_str_cst);
1684 gimple_call_set_arg (stmt2, 2,
1685 build_int_cst (TREE_TYPE (len2), src_len));
1686 unlink_stmt_vdef (stmt1);
1687 gsi_remove (&gsi, true);
1688 release_defs (stmt1);
1689 update_stmt (stmt2);
1690 return false;
1691 }
1692 }
1693 break;
1694 default:
1695 break;
1696 }
1697 return false;
1698 }
1699
1700 /* Checks if expression has type of one-bit precision, or is a known
1701 truth-valued expression. */
1702 static bool
1703 truth_valued_ssa_name (tree name)
1704 {
1705 gimple def;
1706 tree type = TREE_TYPE (name);
1707
1708 if (!INTEGRAL_TYPE_P (type))
1709 return false;
1710 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1711 necessarily one and so ~X is not equal to !X. */
1712 if (TYPE_PRECISION (type) == 1)
1713 return true;
1714 def = SSA_NAME_DEF_STMT (name);
1715 if (is_gimple_assign (def))
1716 return truth_value_p (gimple_assign_rhs_code (def));
1717 return false;
1718 }
1719
1720 /* Helper routine for simplify_bitwise_binary_1 function.
1721 Return for the SSA name NAME the expression X if it mets condition
1722 NAME = !X. Otherwise return NULL_TREE.
1723 Detected patterns for NAME = !X are:
1724 !X and X == 0 for X with integral type.
1725 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1726 static tree
1727 lookup_logical_inverted_value (tree name)
1728 {
1729 tree op1, op2;
1730 enum tree_code code;
1731 gimple def;
1732
1733 /* If name has none-intergal type, or isn't a SSA_NAME, then
1734 return. */
1735 if (TREE_CODE (name) != SSA_NAME
1736 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1737 return NULL_TREE;
1738 def = SSA_NAME_DEF_STMT (name);
1739 if (!is_gimple_assign (def))
1740 return NULL_TREE;
1741
1742 code = gimple_assign_rhs_code (def);
1743 op1 = gimple_assign_rhs1 (def);
1744 op2 = NULL_TREE;
1745
1746 /* Fetch the second operand for an EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR.
1747 Any CODE other than these and BIT_NOT_EXPR yields NULL_TREE below. */
1748 if (code == EQ_EXPR || code == NE_EXPR
1749 || code == BIT_XOR_EXPR)
1750 op2 = gimple_assign_rhs2 (def);
1751
1752 switch (code)
1753 {
1754 case BIT_NOT_EXPR:
1755 if (truth_valued_ssa_name (name))
1756 return op1;
1757 break;
1758 case EQ_EXPR:
1759 /* Check if we have X == 0 and X has an integral type. */
1760 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1761 break;
1762 if (integer_zerop (op2))
1763 return op1;
1764 break;
1765 case NE_EXPR:
1766 /* Check if we have X != 1 and X is truth-valued. */
1767 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1768 break;
1769 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1770 return op1;
1771 break;
1772 case BIT_XOR_EXPR:
1773 /* Check if we have X ^ 1 and X is truth valued. */
1774 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1775 return op1;
1776 break;
1777 default:
1778 break;
1779 }
1780
1781 return NULL_TREE;
1782 }
1783
1784 /* Optimize ARG1 CODE ARG2 to a constant for a bitwise binary
1785 operation CODE, if one operand has the logically inverted
1786 value of the other. */
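/* For example (illustrative GIMPLE): with t_2 = !x_1, the statement
   y_3 = x_1 & t_2 folds to 0, and y_3 = x_1 | t_2 folds to 1 when
   x_1 is truth-valued. */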
1787 static tree
1788 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1789 tree arg1, tree arg2)
1790 {
1791 tree anot;
1792
1793 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1794 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1795 && code != BIT_XOR_EXPR)
1796 return NULL_TREE;
1797
1798 /* First check if operands ARG1 and ARG2 are equal. If so
1799 return NULL_TREE as this optimization is handled by fold_stmt. */
1800 if (arg1 == arg2)
1801 return NULL_TREE;
1802 /* See if we have in arguments logical-not patterns. */
1803 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1804 || anot != arg2)
1805 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1806 || anot != arg1))
1807 return NULL_TREE;
1808
1809 /* X & !X -> 0. */
1810 if (code == BIT_AND_EXPR)
1811 return fold_convert (type, integer_zero_node);
1812 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1813 if (truth_valued_ssa_name (anot))
1814 return fold_convert (type, integer_one_node);
1815
1816 /* ??? Otherwise the result is (X != 0 ? X : 1), which is not handled. */
1817 return NULL_TREE;
1818 }
1819
1820 /* Given an SSA name in NAME, see if it was defined by an assignment; if
1821 so, set *CODE to the code on the rhs, *ARG1 to its first operand and
1822 *ARG2 to its second operand. */
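/* For instance (hypothetical names): if NAME is a_1 and a_1 is defined
   by a_1 = b_2 + c_3, this sets *CODE to PLUS_EXPR, *ARG1 to b_2 and
   *ARG2 to c_3; for anything unanalyzable it degenerates to
   *CODE = SSA_NAME and *ARG1 = NAME. */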
1823
1824 static inline void
1825 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1826 {
1827 gimple def;
1828 enum tree_code code1;
1829 tree arg11;
1830 tree arg21;
1831 tree arg31;
1832 enum gimple_rhs_class grhs_class;
1833
1834 code1 = TREE_CODE (name);
1835 arg11 = name;
1836 arg21 = NULL_TREE;
1837 grhs_class = get_gimple_rhs_class (code1);
1838
1839 if (code1 == SSA_NAME)
1840 {
1841 def = SSA_NAME_DEF_STMT (name);
1842
1843 if (def && is_gimple_assign (def)
1844 && can_propagate_from (def))
1845 {
1846 code1 = gimple_assign_rhs_code (def);
1847 arg11 = gimple_assign_rhs1 (def);
1848 arg21 = gimple_assign_rhs2 (def);
1849 arg31 = gimple_assign_rhs3 (def);
1850 }
1851 }
1852 else if (grhs_class == GIMPLE_TERNARY_RHS
1853 || grhs_class == GIMPLE_BINARY_RHS
1854 || grhs_class == GIMPLE_UNARY_RHS
1855 || grhs_class == GIMPLE_SINGLE_RHS)
1856 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1857
1858 *code = code1;
1859 *arg1 = arg11;
1860 if (arg2)
1861 *arg2 = arg21;
1862 /* Ignore arg3 currently. */
1863 }
1864
1865 /* Return true if a conversion of an operand from type FROM to type TO
1866 should be applied after performing the operation instead. */
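/* For example (a sketch): for short s, (int) s & 0xff is better
   computed as (int) (s & 0xff): the AND then happens in the narrower
   16-bit type and the widening conversion is hoisted past it. */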
1867
1868 static bool
1869 hoist_conversion_for_bitop_p (tree to, tree from)
1870 {
1871 /* That's a good idea if the conversion widens the operand, thus
1872 after hoisting the conversion the operation will be narrower. */
1873 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1874 return true;
1875
1876 /* It's also a good idea if the conversion is to a non-integer mode. */
1877 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1878 return true;
1879
1880 /* Or if the precision of TO is not the same as the precision
1881 of its mode. */
1882 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1883 return true;
1884
1885 return false;
1886 }
1887
1888 /* GSI points to a statement of the form
1889
1890 result = OP0 CODE OP1
1891
1892 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1893 BIT_AND_EXPR or BIT_IOR_EXPR.
1894
1895 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1896 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1897 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1898
1899 If a simplification is made, return TRUE, else return FALSE. */
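/* As a worked example: for one-bit unsigned X and Y, ~X & Y is 1
   exactly when X == 0 and Y == 1, i.e. X < Y; likewise ~X | Y is
   X <= Y. The signed one-bit cases flip to GT/GE because the
   nonzero value is -1 there. */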
1900 static bool
1901 simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1902 enum tree_code code,
1903 tree op0, tree op1)
1904 {
1905 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1906
1907 if (!is_gimple_assign (op0_def_stmt)
1908 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1909 return false;
1910
1911 tree x = gimple_assign_rhs1 (op0_def_stmt);
1912 if (TREE_CODE (x) == SSA_NAME
1913 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1914 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1915 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1916 {
1917 enum tree_code newcode;
1918
1919 gimple stmt = gsi_stmt (*gsi);
1920 gimple_assign_set_rhs1 (stmt, x);
1921 gimple_assign_set_rhs2 (stmt, op1);
1922 if (code == BIT_AND_EXPR)
1923 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1924 else
1925 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1926 gimple_assign_set_rhs_code (stmt, newcode);
1927 update_stmt (stmt);
1928 return true;
1929 }
1930 return false;
1932 }
1933
1934 /* Simplify bitwise binary operations.
1935 Return true if a transformation applied, otherwise return false. */
1936
1937 static bool
1938 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1939 {
1940 gimple stmt = gsi_stmt (*gsi);
1941 tree arg1 = gimple_assign_rhs1 (stmt);
1942 tree arg2 = gimple_assign_rhs2 (stmt);
1943 enum tree_code code = gimple_assign_rhs_code (stmt);
1944 tree res;
1945 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1946 enum tree_code def1_code, def2_code;
1947
1948 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1949 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1950
1951 /* Try to fold (type) X op CST -> (type) (X op ((type of X) CST))
1952 when profitable. */
1953 if (TREE_CODE (arg2) == INTEGER_CST
1954 && CONVERT_EXPR_CODE_P (def1_code)
1955 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1956 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1957 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1958 {
1959 gimple newop;
1960 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1961 newop =
1962 gimple_build_assign_with_ops (code, tem, def1_arg1,
1963 fold_convert_loc (gimple_location (stmt),
1964 TREE_TYPE (def1_arg1),
1965 arg2));
1966 gimple_set_location (newop, gimple_location (stmt));
1967 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1968 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1969 tem, NULL_TREE, NULL_TREE);
1970 update_stmt (gsi_stmt (*gsi));
1971 return true;
1972 }
1973
1974 /* For bitwise binary operations apply operand conversions to the
1975 binary operation result instead of to the operands. This allows
1976 to combine successive conversions and bitwise binary operations. */
1977 if (CONVERT_EXPR_CODE_P (def1_code)
1978 && CONVERT_EXPR_CODE_P (def2_code)
1979 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1980 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1981 {
1982 gimple newop;
1983 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1984 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1985 gimple_set_location (newop, gimple_location (stmt));
1986 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1987 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1988 tem, NULL_TREE, NULL_TREE);
1989 update_stmt (gsi_stmt (*gsi));
1990 return true;
1991 }
1992
1994 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1995 if (def1_code == def2_code
1996 && def1_code == BIT_AND_EXPR
1997 && operand_equal_for_phi_arg_p (def1_arg2,
1998 def2_arg2))
1999 {
2000 tree b = def1_arg2;
2001 tree a = def1_arg1;
2002 tree c = def2_arg1;
2003 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2004 /* If A OP0 C (this usually means C is the same as A) folds to 0
2005 then replace the whole statement with that constant. */
2006 if (integer_zerop (inner))
2007 {
2008 gimple_assign_set_rhs_from_tree (gsi, inner);
2009 update_stmt (stmt);
2010 return true;
2011 }
2012 /* If A OP0 C (this usually means C is the same as A) folds to an
2013 SSA name, combine it with B without emitting a new statement. */
2014 else if (TREE_CODE (inner) == SSA_NAME)
2015 {
2016 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2017 inner, b);
2018 gimple_assign_set_rhs_from_tree (gsi, outer);
2019 update_stmt (stmt);
2020 return true;
2021 }
2022 else
2023 {
2024 gimple newop;
2025 tree tem;
2026 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2027 newop = gimple_build_assign_with_ops (code, tem, a, c);
2028 gimple_set_location (newop, gimple_location (stmt));
2029 /* Make sure to re-process the new stmt as the combiner walks upwards. */
2030 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2031 gimple_assign_set_rhs1 (stmt, tem);
2032 gimple_assign_set_rhs2 (stmt, b);
2033 gimple_assign_set_rhs_code (stmt, def1_code);
2034 update_stmt (stmt);
2035 return true;
2036 }
2037 }
2038
2039 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2040 if (code == BIT_AND_EXPR
2041 && def1_code == BIT_IOR_EXPR
2042 && CONSTANT_CLASS_P (arg2)
2043 && CONSTANT_CLASS_P (def1_arg2))
2044 {
2045 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2046 arg2, def1_arg2);
2047 tree tem;
2048 gimple newop;
2049 if (integer_zerop (cst))
2050 {
2051 gimple_assign_set_rhs1 (stmt, def1_arg1);
2052 update_stmt (stmt);
2053 return true;
2054 }
2055 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2056 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2057 tem, def1_arg1, arg2);
2058 gimple_set_location (newop, gimple_location (stmt));
2059 /* Make sure to re-process the new stmt as the combiner walks upwards. */
2060 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2061 gimple_assign_set_rhs1 (stmt, tem);
2062 gimple_assign_set_rhs2 (stmt, cst);
2063 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2064 update_stmt (stmt);
2065 return true;
2066 }
2067
2068 /* Combine successive equal operations with constants. */
2069 if ((code == BIT_AND_EXPR
2070 || code == BIT_IOR_EXPR
2071 || code == BIT_XOR_EXPR)
2072 && def1_code == code
2073 && CONSTANT_CLASS_P (arg2)
2074 && CONSTANT_CLASS_P (def1_arg2))
2075 {
2076 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2077 arg2, def1_arg2);
2078 gimple_assign_set_rhs1 (stmt, def1_arg1);
2079 gimple_assign_set_rhs2 (stmt, cst);
2080 update_stmt (stmt);
2081 return true;
2082 }
2083
2084 /* Canonicalize X ^ ~0 to ~X. */
2085 if (code == BIT_XOR_EXPR
2086 && integer_all_onesp (arg2))
2087 {
2088 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2089 gcc_assert (gsi_stmt (*gsi) == stmt);
2090 update_stmt (stmt);
2091 return true;
2092 }
2093
2094 /* Try simple folding for X op !X, and X op X. */
2095 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2096 if (res != NULL_TREE)
2097 {
2098 gimple_assign_set_rhs_from_tree (gsi, res);
2099 update_stmt (gsi_stmt (*gsi));
2100 return true;
2101 }
2102
2103 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2104 {
2105 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2106 if (def1_code == ocode)
2107 {
2108 tree x = arg2;
2109 enum tree_code coden;
2110 tree a1, a2;
2111 /* ( X | Y) & X -> X */
2112 /* ( X & Y) | X -> X */
2113 if (x == def1_arg1
2114 || x == def1_arg2)
2115 {
2116 gimple_assign_set_rhs_from_tree (gsi, x);
2117 update_stmt (gsi_stmt (*gsi));
2118 return true;
2119 }
2120
2121 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2122 /* (~X | Y) & X -> X & Y */
2123 /* (~X & Y) | X -> X | Y */
2124 if (coden == BIT_NOT_EXPR && a1 == x)
2125 {
2126 gimple_assign_set_rhs_with_ops (gsi, code,
2127 x, def1_arg2);
2128 gcc_assert (gsi_stmt (*gsi) == stmt);
2129 update_stmt (stmt);
2130 return true;
2131 }
2132 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2133 /* (Y | ~X) & X -> X & Y */
2134 /* (Y & ~X) | X -> X | Y */
2135 if (coden == BIT_NOT_EXPR && a1 == x)
2136 {
2137 gimple_assign_set_rhs_with_ops (gsi, code,
2138 x, def1_arg1);
2139 gcc_assert (gsi_stmt (*gsi) == stmt);
2140 update_stmt (stmt);
2141 return true;
2142 }
2143 }
2144 if (def2_code == ocode)
2145 {
2146 enum tree_code coden;
2147 tree a1;
2148 tree x = arg1;
2149 /* X & ( X | Y) -> X */
2150 /* X | ( X & Y) -> X */
2151 if (x == def2_arg1
2152 || x == def2_arg2)
2153 {
2154 gimple_assign_set_rhs_from_tree (gsi, x);
2155 update_stmt (gsi_stmt (*gsi));
2156 return true;
2157 }
2158 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2159 /* X & (~X | Y) -> X & Y */
2160 /* X | (~X & Y) -> X | Y */
2161 if (coden == BIT_NOT_EXPR && a1 == x)
2162 {
2163 gimple_assign_set_rhs_with_ops (gsi, code,
2164 x, def2_arg2);
2165 gcc_assert (gsi_stmt (*gsi) == stmt);
2166 update_stmt (stmt);
2167 return true;
2168 }
2169 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2170 /* X & (Y | ~X) -> X & Y */
2171 /* X | (Y & ~X) -> X | Y */
2172 if (coden == BIT_NOT_EXPR && a1 == x)
2173 {
2174 gimple_assign_set_rhs_with_ops (gsi, code,
2175 x, def2_arg1);
2176 gcc_assert (gsi_stmt (*gsi) == stmt);
2177 update_stmt (stmt);
2178 return true;
2179 }
2180 }
2181
2182 /* If arg1 and arg2 are booleans (or any single bit type)
2183 then try to simplify:
2184
2185 (~X & Y) -> X < Y
2186 (X & ~Y) -> Y < X
2187 (~X | Y) -> X <= Y
2188 (X | ~Y) -> Y <= X
2189
2190 But only do this if our result feeds into a comparison as
2191 this transformation is not always a win, particularly on
2192 targets with and-not instructions. */
2193 if (TREE_CODE (arg1) == SSA_NAME
2194 && TREE_CODE (arg2) == SSA_NAME
2195 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2196 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2197 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2198 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2199 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2200 {
2201 use_operand_p use_p;
2202 gimple use_stmt;
2203
2204 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2205 {
2206 if (gimple_code (use_stmt) == GIMPLE_COND
2207 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2208 && integer_zerop (gimple_cond_rhs (use_stmt))
2209 && gimple_cond_code (use_stmt) == NE_EXPR)
2210 {
2211 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2212 return true;
2213 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2214 return true;
2215 }
2216 }
2217 }
2218 }
2219 return false;
2220 }
2221
2222
2223 /* Recognize rotation patterns. Return true if a transformation
2224 applied, otherwise return false.
2225
2226 We are looking for X with unsigned type T with bitsize B, OP being
2227 +, | or ^, some type T2 wider than T and
2228 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2229 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2230 (X << Y) OP (X >> (B - Y))
2231 (X << (int) Y) OP (X >> (int) (B - Y))
2232 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2233 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2234 (X << Y) | (X >> ((-Y) & (B - 1)))
2235 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2236 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2237 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2238
2239 and transform these into:
2240 X r<< CNT1
2241 X r<< Y
2242
2243 Note, in the patterns with T2 type, the type of the OP operands
2244 may even be a signed type, but should have precision B. */
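/* For instance (unsigned 32-bit x, a sketch): (x << 3) | (x >> 29)
   becomes x r<< 3, and (x << y) | (x >> ((-y) & 31)) becomes x r<< y,
   the latter form being well defined even for y == 0. */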
2245
2246 static bool
2247 simplify_rotate (gimple_stmt_iterator *gsi)
2248 {
2249 gimple stmt = gsi_stmt (*gsi);
2250 tree arg[2], rtype, rotcnt = NULL_TREE;
2251 tree def_arg1[2], def_arg2[2];
2252 enum tree_code def_code[2];
2253 tree lhs;
2254 int i;
2255 bool swapped_p = false;
2256 gimple g;
2257
2258 arg[0] = gimple_assign_rhs1 (stmt);
2259 arg[1] = gimple_assign_rhs2 (stmt);
2260 rtype = TREE_TYPE (arg[0]);
2261
2262 /* Only create rotates in complete modes. Other cases are not
2263 expanded properly. */
2264 if (!INTEGRAL_TYPE_P (rtype)
2265 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2266 return false;
2267
2268 for (i = 0; i < 2; i++)
2269 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2270
2271 /* Look through narrowing conversions. */
2272 if (CONVERT_EXPR_CODE_P (def_code[0])
2273 && CONVERT_EXPR_CODE_P (def_code[1])
2274 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2275 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2276 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2277 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2278 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2279 && has_single_use (arg[0])
2280 && has_single_use (arg[1]))
2281 {
2282 for (i = 0; i < 2; i++)
2283 {
2284 arg[i] = def_arg1[i];
2285 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2286 }
2287 }
2288
2289 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2290 for (i = 0; i < 2; i++)
2291 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2292 return false;
2293 else if (!has_single_use (arg[i]))
2294 return false;
2295 if (def_code[0] == def_code[1])
2296 return false;
2297
2298 /* If we've looked through narrowing conversions before, look through
2299 widening conversions from unsigned type with the same precision
2300 as rtype here. */
2301 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2302 for (i = 0; i < 2; i++)
2303 {
2304 tree tem;
2305 enum tree_code code;
2306 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2307 if (!CONVERT_EXPR_CODE_P (code)
2308 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2309 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2310 return false;
2311 def_arg1[i] = tem;
2312 }
2313 /* Both shifts have to use the same first operand. */
2314 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2315 return false;
2316 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2317 return false;
2318
2319 /* CNT1 + CNT2 == B case above. */
2320 if (tree_fits_uhwi_p (def_arg2[0])
2321 && tree_fits_uhwi_p (def_arg2[1])
2322 && tree_to_uhwi (def_arg2[0])
2323 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
2324 rotcnt = def_arg2[0];
2325 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2326 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2327 return false;
2328 else
2329 {
2330 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2331 enum tree_code cdef_code[2];
2332 /* Look through conversions of the shift count argument.
2333 The C/C++ FEs cast any shift count argument to integer_type_node.
2334 The only problem might be if the shift count type's maximum value
2335 is equal to or smaller than the number of bits in rtype. */
2336 for (i = 0; i < 2; i++)
2337 {
2338 def_arg2_alt[i] = def_arg2[i];
2339 defcodefor_name (def_arg2[i], &cdef_code[i],
2340 &cdef_arg1[i], &cdef_arg2[i]);
2341 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2342 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2343 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2344 > floor_log2 (TYPE_PRECISION (rtype))
2345 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2346 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2347 {
2348 def_arg2_alt[i] = cdef_arg1[i];
2349 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2350 &cdef_arg1[i], &cdef_arg2[i]);
2351 }
2352 }
2353 for (i = 0; i < 2; i++)
2354 /* Check for one shift count being Y and the other B - Y,
2355 with optional casts. */
2356 if (cdef_code[i] == MINUS_EXPR
2357 && tree_fits_shwi_p (cdef_arg1[i])
2358 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
2359 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2360 {
2361 tree tem;
2362 enum tree_code code;
2363
2364 if (cdef_arg2[i] == def_arg2[1 - i]
2365 || cdef_arg2[i] == def_arg2_alt[1 - i])
2366 {
2367 rotcnt = cdef_arg2[i];
2368 break;
2369 }
2370 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2371 if (CONVERT_EXPR_CODE_P (code)
2372 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2373 && TYPE_PRECISION (TREE_TYPE (tem))
2374 > floor_log2 (TYPE_PRECISION (rtype))
2375 && TYPE_PRECISION (TREE_TYPE (tem))
2376 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2377 && (tem == def_arg2[1 - i]
2378 || tem == def_arg2_alt[1 - i]))
2379 {
2380 rotcnt = tem;
2381 break;
2382 }
2383 }
2384 /* The above sequence isn't safe when Y is 0,
2385 because then one of the shifts triggers undefined behavior.
2386 This alternative is safe even for a rotation count of 0.
2387 One shift count is Y and the other (-Y) & (B - 1). */
2388 else if (cdef_code[i] == BIT_AND_EXPR
2389 && tree_fits_shwi_p (cdef_arg2[i])
2390 && tree_to_shwi (cdef_arg2[i])
2391 == TYPE_PRECISION (rtype) - 1
2392 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2393 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2394 {
2395 tree tem;
2396 enum tree_code code;
2397
2398 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2399 if (CONVERT_EXPR_CODE_P (code)
2400 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2401 && TYPE_PRECISION (TREE_TYPE (tem))
2402 > floor_log2 (TYPE_PRECISION (rtype))
2403 && TYPE_PRECISION (TREE_TYPE (tem))
2404 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2405 defcodefor_name (tem, &code, &tem, NULL);
2406
2407 if (code == NEGATE_EXPR)
2408 {
2409 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2410 {
2411 rotcnt = tem;
2412 break;
2413 }
2414 defcodefor_name (tem, &code, &tem, NULL);
2415 if (CONVERT_EXPR_CODE_P (code)
2416 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2417 && TYPE_PRECISION (TREE_TYPE (tem))
2418 > floor_log2 (TYPE_PRECISION (rtype))
2419 && TYPE_PRECISION (TREE_TYPE (tem))
2420 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2421 && (tem == def_arg2[1 - i]
2422 || tem == def_arg2_alt[1 - i]))
2423 {
2424 rotcnt = tem;
2425 break;
2426 }
2427 }
2428 }
2429 if (rotcnt == NULL_TREE)
2430 return false;
2431 swapped_p = i != 1;
2432 }
2433
2434 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2435 TREE_TYPE (rotcnt)))
2436 {
2437 g = gimple_build_assign_with_ops (NOP_EXPR,
2438 make_ssa_name (TREE_TYPE (def_arg2[0]),
2439 NULL),
2440 rotcnt, NULL_TREE);
2441 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2442 rotcnt = gimple_assign_lhs (g);
2443 }
2444 lhs = gimple_assign_lhs (stmt);
2445 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2446 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2447 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2448 ? LROTATE_EXPR : RROTATE_EXPR,
2449 lhs, def_arg1[0], rotcnt);
2450 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2451 {
2452 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2453 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2454 lhs, NULL_TREE);
2455 }
2456 gsi_replace (gsi, g, false);
2457 return true;
2458 }
2459
2460 /* Perform re-associations of the plus or minus statement STMT that are
2461 always permitted. Returns true if the CFG was changed. */
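/* For example (illustrative SSA names): for t_1 = a_2 + b_3;
   x_4 = t_1 - a_2; the second statement is rewritten to x_4 = b_3.
   This is one of the contractions that is valid irrespective of
   overflow, since the plus-minus pair cancels exactly. */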
2462
2463 static bool
2464 associate_plusminus (gimple_stmt_iterator *gsi)
2465 {
2466 gimple stmt = gsi_stmt (*gsi);
2467 tree rhs1 = gimple_assign_rhs1 (stmt);
2468 tree rhs2 = gimple_assign_rhs2 (stmt);
2469 enum tree_code code = gimple_assign_rhs_code (stmt);
2470 bool changed;
2471
2472 /* We can't reassociate at all for saturating types. */
2473 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2474 return false;
2475
2476 /* First contract negates. */
2477 do
2478 {
2479 changed = false;
2480
2481 /* A +- (-B) -> A -+ B. */
2482 if (TREE_CODE (rhs2) == SSA_NAME)
2483 {
2484 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2485 if (is_gimple_assign (def_stmt)
2486 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2487 && can_propagate_from (def_stmt))
2488 {
2489 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2490 gimple_assign_set_rhs_code (stmt, code);
2491 rhs2 = gimple_assign_rhs1 (def_stmt);
2492 gimple_assign_set_rhs2 (stmt, rhs2);
2493 gimple_set_modified (stmt, true);
2494 changed = true;
2495 }
2496 }
2497
2498 /* (-A) + B -> B - A. */
2499 if (TREE_CODE (rhs1) == SSA_NAME
2500 && code == PLUS_EXPR)
2501 {
2502 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2503 if (is_gimple_assign (def_stmt)
2504 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2505 && can_propagate_from (def_stmt))
2506 {
2507 code = MINUS_EXPR;
2508 gimple_assign_set_rhs_code (stmt, code);
2509 rhs1 = rhs2;
2510 gimple_assign_set_rhs1 (stmt, rhs1);
2511 rhs2 = gimple_assign_rhs1 (def_stmt);
2512 gimple_assign_set_rhs2 (stmt, rhs2);
2513 gimple_set_modified (stmt, true);
2514 changed = true;
2515 }
2516 }
2517 }
2518 while (changed);
2519
2520 /* We can't reassociate floating-point or fixed-point plus or minus
2521 because of saturation to +-Inf. */
2522 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2523 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2524 goto out;
2525
2526 /* Second match patterns that allow contracting a plus-minus pair
2527 irrespective of overflow issues.
2528
2529 (A +- B) - A -> +- B
2530 (A +- B) -+ B -> A
2531 (CST +- A) +- CST -> CST +- A
2532 (A +- CST) +- CST -> A +- CST
2533 ~A + A -> -1
2534 ~A + 1 -> -A
2535 A - (A +- B) -> -+ B
2536 A +- (B +- A) -> +- B
2537 CST +- (CST +- A) -> CST +- A
2538 CST +- (A +- CST) -> CST +- A
2539 A + ~A -> -1
2540
2541 by commuting the addition and contracting operations that cancel
2542 to zero through reassociation. */
2543
2544 if (TREE_CODE (rhs1) == SSA_NAME)
2545 {
2546 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2547 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2548 {
2549 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2550 if (def_code == PLUS_EXPR
2551 || def_code == MINUS_EXPR)
2552 {
2553 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2554 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2555 if (operand_equal_p (def_rhs1, rhs2, 0)
2556 && code == MINUS_EXPR)
2557 {
2558 /* (A +- B) - A -> +- B. */
2559 code = ((def_code == PLUS_EXPR)
2560 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2561 rhs1 = def_rhs2;
2562 rhs2 = NULL_TREE;
2563 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2564 gcc_assert (gsi_stmt (*gsi) == stmt);
2565 gimple_set_modified (stmt, true);
2566 }
2567 else if (operand_equal_p (def_rhs2, rhs2, 0)
2568 && code != def_code)
2569 {
2570 /* (A +- B) -+ B -> A. */
2571 code = TREE_CODE (def_rhs1);
2572 rhs1 = def_rhs1;
2573 rhs2 = NULL_TREE;
2574 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2575 gcc_assert (gsi_stmt (*gsi) == stmt);
2576 gimple_set_modified (stmt, true);
2577 }
2578 else if (CONSTANT_CLASS_P (rhs2)
2579 && CONSTANT_CLASS_P (def_rhs1))
2580 {
2581 /* (CST +- A) +- CST -> CST +- A. */
2582 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2583 def_rhs1, rhs2);
2584 if (cst && !TREE_OVERFLOW (cst))
2585 {
2586 code = def_code;
2587 gimple_assign_set_rhs_code (stmt, code);
2588 rhs1 = cst;
2589 gimple_assign_set_rhs1 (stmt, rhs1);
2590 rhs2 = def_rhs2;
2591 gimple_assign_set_rhs2 (stmt, rhs2);
2592 gimple_set_modified (stmt, true);
2593 }
2594 }
2595 else if (CONSTANT_CLASS_P (rhs2)
2596 && CONSTANT_CLASS_P (def_rhs2))
2597 {
2598 /* (A +- CST) +- CST -> A +- CST. */
2599 enum tree_code mix = (code == def_code)
2600 ? PLUS_EXPR : MINUS_EXPR;
2601 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2602 def_rhs2, rhs2);
2603 if (cst && !TREE_OVERFLOW (cst))
2604 {
2605 code = def_code;
2606 gimple_assign_set_rhs_code (stmt, code);
2607 rhs1 = def_rhs1;
2608 gimple_assign_set_rhs1 (stmt, rhs1);
2609 rhs2 = cst;
2610 gimple_assign_set_rhs2 (stmt, rhs2);
2611 gimple_set_modified (stmt, true);
2612 }
2613 }
2614 }
2615 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2616 {
2617 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2618 if (operand_equal_p (def_rhs1, rhs2, 0))
2619 {
2620 /* ~A + A -> -1. */
2621 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2622 rhs2 = NULL_TREE;
2623 code = TREE_CODE (rhs1);
2624 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2625 gcc_assert (gsi_stmt (*gsi) == stmt);
2626 gimple_set_modified (stmt, true);
2627 }
2628 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2629 && integer_onep (rhs2))
2630 || (TREE_CODE (rhs2) == COMPLEX_CST
2631 && integer_onep (TREE_REALPART (rhs2))
2632 && integer_onep (TREE_IMAGPART (rhs2))))
2633 {
2634 /* ~A + 1 -> -A. */
2635 code = NEGATE_EXPR;
2636 rhs1 = def_rhs1;
2637 rhs2 = NULL_TREE;
2638 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2639 gcc_assert (gsi_stmt (*gsi) == stmt);
2640 gimple_set_modified (stmt, true);
2641 }
2642 }
2643 }
2644 }
2645
2646 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2647 {
2648 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2649 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2650 {
2651 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2652 if (def_code == PLUS_EXPR
2653 || def_code == MINUS_EXPR)
2654 {
2655 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2656 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2657 if (operand_equal_p (def_rhs1, rhs1, 0)
2658 && code == MINUS_EXPR)
2659 {
2660 /* A - (A +- B) -> -+ B. */
2661 code = ((def_code == PLUS_EXPR)
2662 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2663 rhs1 = def_rhs2;
2664 rhs2 = NULL_TREE;
2665 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2666 gcc_assert (gsi_stmt (*gsi) == stmt);
2667 gimple_set_modified (stmt, true);
2668 }
2669 else if (operand_equal_p (def_rhs2, rhs1, 0)
2670 && code != def_code)
2671 {
2672 /* A +- (B +- A) -> +- B. */
2673 code = ((code == PLUS_EXPR)
2674 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2675 rhs1 = def_rhs1;
2676 rhs2 = NULL_TREE;
2677 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2678 gcc_assert (gsi_stmt (*gsi) == stmt);
2679 gimple_set_modified (stmt, true);
2680 }
2681 else if (CONSTANT_CLASS_P (rhs1)
2682 && CONSTANT_CLASS_P (def_rhs1))
2683 {
2684 /* CST +- (CST +- A) -> CST +- A. */
2685 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2686 rhs1, def_rhs1);
2687 if (cst && !TREE_OVERFLOW (cst))
2688 {
2689 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2690 gimple_assign_set_rhs_code (stmt, code);
2691 rhs1 = cst;
2692 gimple_assign_set_rhs1 (stmt, rhs1);
2693 rhs2 = def_rhs2;
2694 gimple_assign_set_rhs2 (stmt, rhs2);
2695 gimple_set_modified (stmt, true);
2696 }
2697 }
2698 else if (CONSTANT_CLASS_P (rhs1)
2699 && CONSTANT_CLASS_P (def_rhs2))
2700 {
2701 /* CST +- (A +- CST) -> CST +- A. */
2702 tree cst = fold_binary (def_code == code
2703 ? PLUS_EXPR : MINUS_EXPR,
2704 TREE_TYPE (rhs2),
2705 rhs1, def_rhs2);
2706 if (cst && !TREE_OVERFLOW (cst))
2707 {
2708 rhs1 = cst;
2709 gimple_assign_set_rhs1 (stmt, rhs1);
2710 rhs2 = def_rhs1;
2711 gimple_assign_set_rhs2 (stmt, rhs2);
2712 gimple_set_modified (stmt, true);
2713 }
2714 }
2715 }
2716 else if (def_code == BIT_NOT_EXPR)
2717 {
2718 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2719 if (code == PLUS_EXPR
2720 && operand_equal_p (def_rhs1, rhs1, 0))
2721 {
2722 /* A + ~A -> -1. */
2723 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2724 rhs2 = NULL_TREE;
2725 code = TREE_CODE (rhs1);
2726 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2727 gcc_assert (gsi_stmt (*gsi) == stmt);
2728 gimple_set_modified (stmt, true);
2729 }
2730 }
2731 }
2732 }
2733
2734 out:
2735 if (gimple_modified_p (stmt))
2736 {
2737 fold_stmt_inplace (gsi);
2738 update_stmt (stmt);
2739 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2740 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2741 return true;
2742 }
2743
2744 return false;
2745 }
2746
2747 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2748 true if anything changed, false otherwise. */
2749
2750 static bool
2751 associate_pointerplus (gimple_stmt_iterator *gsi)
2752 {
2753 gimple stmt = gsi_stmt (*gsi);
2754 gimple def_stmt;
2755 tree ptr, rhs, algn;
2756
2757 /* Pattern match
2758 tem = (sizetype) ptr;
2759 tem = tem & algn;
2760 tem = -tem;
2761 ... = ptr p+ tem;
2762 and produce the simpler form, which is also easier to analyze with
2763 respect to alignment: ... = ptr & ~algn; */
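/* E.g. (a sketch) with algn == 15 both forms round ptr down to a
   16-byte boundary. */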
2764 ptr = gimple_assign_rhs1 (stmt);
2765 rhs = gimple_assign_rhs2 (stmt);
2766 if (TREE_CODE (rhs) != SSA_NAME)
2767 return false;
2768 def_stmt = SSA_NAME_DEF_STMT (rhs);
2769 if (!is_gimple_assign (def_stmt)
2770 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2771 return false;
2772 rhs = gimple_assign_rhs1 (def_stmt);
2773 if (TREE_CODE (rhs) != SSA_NAME)
2774 return false;
2775 def_stmt = SSA_NAME_DEF_STMT (rhs);
2776 if (!is_gimple_assign (def_stmt)
2777 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2778 return false;
2779 rhs = gimple_assign_rhs1 (def_stmt);
2780 algn = gimple_assign_rhs2 (def_stmt);
2781 if (TREE_CODE (rhs) != SSA_NAME
2782 || TREE_CODE (algn) != INTEGER_CST)
2783 return false;
2784 def_stmt = SSA_NAME_DEF_STMT (rhs);
2785 if (!is_gimple_assign (def_stmt)
2786 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2787 return false;
2788 if (gimple_assign_rhs1 (def_stmt) != ptr)
2789 return false;
2790
2791 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
2792 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2793 fold_stmt_inplace (gsi);
2794 update_stmt (stmt);
2795
2796 return true;
2797 }
2798
2799 /* Combine two conversions in a row for the second conversion at *GSI.
2800 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2801 run. Else it returns 0. */
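/* For instance (a sketch): for signed char c, the chain
   int i = (int) c; long l = (long) i; combines so that l is
   converted directly from c, since the intermediate widening to int
   changes neither the value nor the signedness. */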
2802
2803 static int
2804 combine_conversions (gimple_stmt_iterator *gsi)
2805 {
2806 gimple stmt = gsi_stmt (*gsi);
2807 gimple def_stmt;
2808 tree op0, lhs;
2809 enum tree_code code = gimple_assign_rhs_code (stmt);
2810 enum tree_code code2;
2811
2812 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2813 || code == FLOAT_EXPR
2814 || code == FIX_TRUNC_EXPR);
2815
2816 lhs = gimple_assign_lhs (stmt);
2817 op0 = gimple_assign_rhs1 (stmt);
2818 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2819 {
2820 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2821 return 1;
2822 }
2823
2824 if (TREE_CODE (op0) != SSA_NAME)
2825 return 0;
2826
2827 def_stmt = SSA_NAME_DEF_STMT (op0);
2828 if (!is_gimple_assign (def_stmt))
2829 return 0;
2830
2831 code2 = gimple_assign_rhs_code (def_stmt);
2832
2833 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2834 {
2835 tree defop0 = gimple_assign_rhs1 (def_stmt);
2836 tree type = TREE_TYPE (lhs);
2837 tree inside_type = TREE_TYPE (defop0);
2838 tree inter_type = TREE_TYPE (op0);
2839 int inside_int = INTEGRAL_TYPE_P (inside_type);
2840 int inside_ptr = POINTER_TYPE_P (inside_type);
2841 int inside_float = FLOAT_TYPE_P (inside_type);
2842 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2843 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2844 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2845 int inter_int = INTEGRAL_TYPE_P (inter_type);
2846 int inter_ptr = POINTER_TYPE_P (inter_type);
2847 int inter_float = FLOAT_TYPE_P (inter_type);
2848 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2849 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2850 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2851 int final_int = INTEGRAL_TYPE_P (type);
2852 int final_ptr = POINTER_TYPE_P (type);
2853 int final_float = FLOAT_TYPE_P (type);
2854 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2855 unsigned int final_prec = TYPE_PRECISION (type);
2856 int final_unsignedp = TYPE_UNSIGNED (type);
2857
2858 /* Don't propagate SSA names that occur in abnormal PHIs. */
2859 if (TREE_CODE (defop0) == SSA_NAME
2860 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2861 return 0;
2862
2863 /* In addition to the cases of two conversions in a row
2864 handled below, if we are converting something to its own
2865 type via an object of identical or wider precision, neither
2866 conversion is needed. */
2867 if (useless_type_conversion_p (type, inside_type)
2868 && (((inter_int || inter_ptr) && final_int)
2869 || (inter_float && final_float))
2870 && inter_prec >= final_prec)
2871 {
2872 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2873 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2874 update_stmt (stmt);
2875 return remove_prop_source_from_use (op0) ? 2 : 1;
2876 }
2877
2878 /* Likewise, if the intermediate and initial types are either both
2879 float or both integer, we don't need the middle conversion if the
2880 former is wider than the latter and doesn't change the signedness
2881 (for integers). Avoid this if the final type is a pointer since
2882 then we sometimes need the middle conversion. Likewise if the
2883 final type has a precision not equal to the size of its mode. */
2884 if (((inter_int && inside_int)
2885 || (inter_float && inside_float)
2886 || (inter_vec && inside_vec))
2887 && inter_prec >= inside_prec
2888 && (inter_float || inter_vec
2889 || inter_unsignedp == inside_unsignedp)
2890 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2892 && ! final_ptr
2893 && (! final_vec || inter_prec == inside_prec))
2894 {
2895 gimple_assign_set_rhs1 (stmt, defop0);
2896 update_stmt (stmt);
2897 return remove_prop_source_from_use (op0) ? 2 : 1;
2898 }
2899
2900 /* If we have a sign-extension of a zero-extended value, we can
2901 replace that by a single zero-extension. Likewise if the
2902 final conversion does not change precision we can drop the
2903 intermediate conversion. */
2904 if (inside_int && inter_int && final_int
2905 && ((inside_prec < inter_prec && inter_prec < final_prec
2906 && inside_unsignedp && !inter_unsignedp)
2907 || final_prec == inter_prec))
2908 {
2909 gimple_assign_set_rhs1 (stmt, defop0);
2910 update_stmt (stmt);
2911 return remove_prop_source_from_use (op0) ? 2 : 1;
2912 }
2913
2914 /* Two conversions in a row are not needed unless:
2915 - some conversion is floating-point (overstrict for now), or
2916 - some conversion is a vector (overstrict for now), or
2917 - the intermediate type is narrower than both initial and
2918 final, or
2919 - the intermediate type and innermost type differ in signedness,
2920 and the outermost type is wider than the intermediate, or
2921 - the initial type is a pointer type and the precisions of the
2922 intermediate and final types differ, or
2923 - the final type is a pointer type and the precisions of the
2924 initial and intermediate types differ. */
2925 if (! inside_float && ! inter_float && ! final_float
2926 && ! inside_vec && ! inter_vec && ! final_vec
2927 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2928 && ! (inside_int && inter_int
2929 && inter_unsignedp != inside_unsignedp
2930 && inter_prec < final_prec)
2931 && ((inter_unsignedp && inter_prec > inside_prec)
2932 == (final_unsignedp && final_prec > inter_prec))
2933 && ! (inside_ptr && inter_prec != final_prec)
2934 && ! (final_ptr && inside_prec != inter_prec)
2935 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2936 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2937 {
2938 gimple_assign_set_rhs1 (stmt, defop0);
2939 update_stmt (stmt);
2940 return remove_prop_source_from_use (op0) ? 2 : 1;
2941 }
2942
2943 /* A truncation to an unsigned type should be canonicalized as
2944 bitwise and of a mask. */
2945 if (final_int && inter_int && inside_int
2946 && final_prec == inside_prec
2947 && final_prec > inter_prec
2948 && inter_unsignedp)
2949 {
2950 tree tem;
2951 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2952 defop0,
2953 double_int_to_tree
2954 (inside_type, double_int::mask (inter_prec)));
2955 if (!useless_type_conversion_p (type, inside_type))
2956 {
2957 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2958 GSI_SAME_STMT);
2959 gimple_assign_set_rhs1 (stmt, tem);
2960 }
2961 else
2962 gimple_assign_set_rhs_from_tree (gsi, tem);
2963 update_stmt (gsi_stmt (*gsi));
2964 return 1;
2965 }
2966
2967 /* If we are converting an integer to a floating-point that can
2968 represent it exactly and back to an integer, we can skip the
2969 floating-point conversion. */
2970 if (inside_int && inter_float && final_int
2971 && (unsigned) significand_size (TYPE_MODE (inter_type))
2972 >= inside_prec - !inside_unsignedp)
2973 {
2974 if (useless_type_conversion_p (type, inside_type))
2975 {
2976 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2977 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2978 update_stmt (stmt);
2979 return remove_prop_source_from_use (op0) ? 2 : 1;
2980 }
2981 else
2982 {
2983 gimple_assign_set_rhs1 (stmt, defop0);
2984 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2985 update_stmt (stmt);
2986 return remove_prop_source_from_use (op0) ? 2 : 1;
2987 }
2988 }
2989 }
2990
2991 return 0;
2992 }
2993
2994 /* Combine VIEW_CONVERT_EXPRs with their defining statement. */
2995
2996 static bool
2997 simplify_vce (gimple_stmt_iterator *gsi)
2998 {
2999 gimple stmt = gsi_stmt (*gsi);
3000 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
3001
3002 /* Drop useless VIEW_CONVERT_EXPRs. */
3003 tree op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3004 if (useless_type_conversion_p (type, TREE_TYPE (op)))
3005 {
3006 gimple_assign_set_rhs1 (stmt, op);
3007 update_stmt (stmt);
3008 return true;
3009 }
3010
3011 if (TREE_CODE (op) != SSA_NAME)
3012 return false;
3013
3014 gimple def_stmt = SSA_NAME_DEF_STMT (op);
3015 if (!is_gimple_assign (def_stmt))
3016 return false;
3017
3018 tree def_op = gimple_assign_rhs1 (def_stmt);
3019 switch (gimple_assign_rhs_code (def_stmt))
3020 {
3021 CASE_CONVERT:
3022 /* Strip integral conversions that do not change the precision. */
3023 if ((INTEGRAL_TYPE_P (TREE_TYPE (op))
3024 || POINTER_TYPE_P (TREE_TYPE (op)))
3025 && (INTEGRAL_TYPE_P (TREE_TYPE (def_op))
3026 || POINTER_TYPE_P (TREE_TYPE (def_op)))
3027 && (TYPE_PRECISION (TREE_TYPE (op))
3028 == TYPE_PRECISION (TREE_TYPE (def_op))))
3029 {
3030 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) = def_op;
3031 update_stmt (stmt);
3032 return true;
3033 }
3034 break;
3035
3036 case VIEW_CONVERT_EXPR:
3037 /* Series of VIEW_CONVERT_EXPRs on register operands can
3038 be contracted. */
3039 if (TREE_CODE (TREE_OPERAND (def_op, 0)) == SSA_NAME)
3040 {
3041 if (useless_type_conversion_p (type,
3042 TREE_TYPE (TREE_OPERAND (def_op, 0))))
3043 gimple_assign_set_rhs1 (stmt, TREE_OPERAND (def_op, 0));
3044 else
3045 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)
3046 = TREE_OPERAND (def_op, 0);
3047 update_stmt (stmt);
3048 return true;
3049 }
3050
3051 default:;
3052 }
3053
3054 return false;
3055 }
3056
3057 /* Combine an element access with a shuffle. Return true if any
3058 changes were made, otherwise return false. */
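/* For example (a sketch): if v_2 = VEC_PERM_EXPR <a_3, b_4, {2,7,0,1}>
   and the statement reads element 1 of v_2 via a BIT_FIELD_REF, that
   is element 7 of the concatenation, i.e. element 3 of b_4, so the
   reference is redirected to b_4 directly. */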
3059
3060 static bool
3061 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
3062 {
3063 gimple stmt = gsi_stmt (*gsi);
3064 gimple def_stmt;
3065 tree op, op0, op1, op2;
3066 tree elem_type;
3067 unsigned idx, n, size;
3068 enum tree_code code;
3069
3070 op = gimple_assign_rhs1 (stmt);
3071 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3072
3073 op0 = TREE_OPERAND (op, 0);
3074 if (TREE_CODE (op0) != SSA_NAME
3075 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3076 return false;
3077
3078 def_stmt = get_prop_source_stmt (op0, false, NULL);
3079 if (!def_stmt || !can_propagate_from (def_stmt))
3080 return false;
3081
3082 op1 = TREE_OPERAND (op, 1);
3083 op2 = TREE_OPERAND (op, 2);
3084 code = gimple_assign_rhs_code (def_stmt);
3085
3086 if (code == CONSTRUCTOR)
3087 {
3088 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3089 gimple_assign_rhs1 (def_stmt), op1, op2);
3090 if (!tem || !valid_gimple_rhs_p (tem))
3091 return false;
3092 gimple_assign_set_rhs_from_tree (gsi, tem);
3093 update_stmt (gsi_stmt (*gsi));
3094 return true;
3095 }
3096
3097 elem_type = TREE_TYPE (TREE_TYPE (op0));
3098 if (TREE_TYPE (op) != elem_type)
3099 return false;
3100
3101 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3102 n = TREE_INT_CST_LOW (op1) / size;
3103 if (n != 1)
3104 return false;
3105 idx = TREE_INT_CST_LOW (op2) / size;
3106
3107 if (code == VEC_PERM_EXPR)
3108 {
3109 tree p, m, index, tem;
3110 unsigned nelts;
3111 m = gimple_assign_rhs3 (def_stmt);
3112 if (TREE_CODE (m) != VECTOR_CST)
3113 return false;
3114 nelts = VECTOR_CST_NELTS (m);
3115 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3116 idx %= 2 * nelts;
3117 if (idx < nelts)
3118 {
3119 p = gimple_assign_rhs1 (def_stmt);
3120 }
3121 else
3122 {
3123 p = gimple_assign_rhs2 (def_stmt);
3124 idx -= nelts;
3125 }
3126 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3127 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3128 unshare_expr (p), op1, index);
3129 gimple_assign_set_rhs1 (stmt, tem);
3130 fold_stmt (gsi);
3131 update_stmt (gsi_stmt (*gsi));
3132 return true;
3133 }
3134
3135 return false;
3136 }
3137
3138 /* Determine whether applying the two permutations (mask1 then mask2)
3139 gives back one of the inputs. */
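/* For instance (a sketch, nelts == 4): reversing twice with
   mask1 == mask2 == {3,2,1,0} composes to the identity {0,1,2,3},
   so the function returns 1 and the first input is used as-is. */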
3140
3141 static int
3142 is_combined_permutation_identity (tree mask1, tree mask2)
3143 {
3144 tree mask;
3145 unsigned int nelts, i, j;
3146 bool maybe_identity1 = true;
3147 bool maybe_identity2 = true;
3148
3149 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3150 && TREE_CODE (mask2) == VECTOR_CST);
3151 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3152 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3153
3154 nelts = VECTOR_CST_NELTS (mask);
3155 for (i = 0; i < nelts; i++)
3156 {
3157 tree val = VECTOR_CST_ELT (mask, i);
3158 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3159 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3160 if (j == i)
3161 maybe_identity2 = false;
3162 else if (j == i + nelts)
3163 maybe_identity1 = false;
3164 else
3165 return 0;
3166 }
3167 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3168 }
3169
3170 /* Combine a shuffle with its arguments. Returns 1 if there were any
3171 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
3172
3173 static int
3174 simplify_permutation (gimple_stmt_iterator *gsi)
3175 {
3176 gimple stmt = gsi_stmt (*gsi);
3177 gimple def_stmt;
3178 tree op0, op1, op2, op3, arg0, arg1;
3179 enum tree_code code;
3180 bool single_use_op0 = false;
3181
3182 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
3183
3184 op0 = gimple_assign_rhs1 (stmt);
3185 op1 = gimple_assign_rhs2 (stmt);
3186 op2 = gimple_assign_rhs3 (stmt);
3187
3188 if (TREE_CODE (op2) != VECTOR_CST)
3189 return 0;
3190
3191 if (TREE_CODE (op0) == VECTOR_CST)
3192 {
3193 code = VECTOR_CST;
3194 arg0 = op0;
3195 }
3196 else if (TREE_CODE (op0) == SSA_NAME)
3197 {
3198 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3199 if (!def_stmt || !can_propagate_from (def_stmt))
3200 return 0;
3201
3202 code = gimple_assign_rhs_code (def_stmt);
3203 arg0 = gimple_assign_rhs1 (def_stmt);
3204 }
3205 else
3206 return 0;
3207
3208 /* Two consecutive shuffles. */
3209 if (code == VEC_PERM_EXPR)
3210 {
3211 tree orig;
3212 int ident;
3213
3214 if (op0 != op1)
3215 return 0;
3216 op3 = gimple_assign_rhs3 (def_stmt);
3217 if (TREE_CODE (op3) != VECTOR_CST)
3218 return 0;
3219 ident = is_combined_permutation_identity (op3, op2);
3220 if (!ident)
3221 return 0;
3222 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3223 : gimple_assign_rhs2 (def_stmt);
3224 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3225 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3226 gimple_set_num_ops (stmt, 2);
3227 update_stmt (stmt);
3228 return remove_prop_source_from_use (op0) ? 2 : 1;
3229 }
3230
3231 /* Shuffle of a constructor. */
3232 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3233 {
3234 tree opt;
3235 bool ret = false;
3236 if (op0 != op1)
3237 {
3238 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
3239 return 0;
3240
3241 if (TREE_CODE (op1) == VECTOR_CST)
3242 arg1 = op1;
3243 else if (TREE_CODE (op1) == SSA_NAME)
3244 {
3245 enum tree_code code2;
3246
3247 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3248 if (!def_stmt2 || !can_propagate_from (def_stmt2))
3249 return 0;
3250
3251 code2 = gimple_assign_rhs_code (def_stmt2);
3252 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3253 return 0;
3254 arg1 = gimple_assign_rhs1 (def_stmt2);
3255 }
3256 else
3257 return 0;
3258 }
3259 else
3260 {
3261 /* Already used twice in this statement. */
3262 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3263 return 0;
3264 arg1 = arg0;
3265 }
3266 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
3267 if (!opt
3268 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
3269 return 0;
3270 gimple_assign_set_rhs_from_tree (gsi, opt);
3271 update_stmt (gsi_stmt (*gsi));
3272 if (TREE_CODE (op0) == SSA_NAME)
3273 ret = remove_prop_source_from_use (op0);
3274 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3275 ret |= remove_prop_source_from_use (op1);
3276 return ret ? 2 : 1;
3277 }
3278
3279 return 0;
3280 }
3281
3282 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
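/* For example (a sketch): a CONSTRUCTOR collecting BIT_FIELD_REFs of
   elements 1, 0, 3, 2 of a single vector v_1 becomes
   VEC_PERM_EXPR <v_1, v_1, {1,0,3,2}>; if the elements appear in
   order 0, 1, 2, 3 the constructor is just a copy of v_1. */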
3283
3284 static bool
3285 simplify_vector_constructor (gimple_stmt_iterator *gsi)
3286 {
3287 gimple stmt = gsi_stmt (*gsi);
3288 gimple def_stmt;
3289 tree op, op2, orig, type, elem_type;
3290 unsigned elem_size, nelts, i;
3291 enum tree_code code;
3292 constructor_elt *elt;
3293 unsigned char *sel;
3294 bool maybe_ident;
3295
3296 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3297
3298 op = gimple_assign_rhs1 (stmt);
3299 type = TREE_TYPE (op);
3300 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3301
3302 nelts = TYPE_VECTOR_SUBPARTS (type);
3303 elem_type = TREE_TYPE (type);
3304 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3305
3306 sel = XALLOCAVEC (unsigned char, nelts);
3307 orig = NULL;
3308 maybe_ident = true;
3309 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
3310 {
3311 tree ref, op1;
3312
3313 if (i >= nelts)
3314 return false;
3315
3316 if (TREE_CODE (elt->value) != SSA_NAME)
3317 return false;
3318 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3319 if (!def_stmt)
3320 return false;
3321 code = gimple_assign_rhs_code (def_stmt);
3322 if (code != BIT_FIELD_REF)
3323 return false;
3324 op1 = gimple_assign_rhs1 (def_stmt);
3325 ref = TREE_OPERAND (op1, 0);
3326 if (orig)
3327 {
3328 if (ref != orig)
3329 return false;
3330 }
3331 else
3332 {
3333 if (TREE_CODE (ref) != SSA_NAME)
3334 return false;
3335 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3336 return false;
3337 orig = ref;
3338 }
3339 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
3340 return false;
3341 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
3342 if (sel[i] != i) maybe_ident = false;
3343 }
3344 if (i < nelts)
3345 return false;
3346
3347 if (maybe_ident)
3348 gimple_assign_set_rhs_from_tree (gsi, orig);
3349 else
3350 {
3351 tree mask_type, *mask_elts;
3352
3353 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3354 return false;
3355 mask_type
3356 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3357 nelts);
3358 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3359 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3360 != GET_MODE_SIZE (TYPE_MODE (type)))
3361 return false;
3362 mask_elts = XALLOCAVEC (tree, nelts);
3363 for (i = 0; i < nelts; i++)
3364 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3365 op2 = build_vector (mask_type, mask_elts);
3366 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3367 }
3368 update_stmt (gsi_stmt (*gsi));
3369 return true;
3370 }
3371
3372 /* Main entry point for the forward propagation and statement combine
3373 optimizer. */
3374
3375 static unsigned int
3376 ssa_forward_propagate_and_combine (void)
3377 {
3378 basic_block bb;
3379 unsigned int todoflags = 0;
3380
3381 cfg_changed = false;
3382
3383 FOR_EACH_BB (bb)
3384 {
3385 gimple_stmt_iterator gsi;
3386
3387 /* Apply forward propagation to all stmts in the basic-block.
3388 Note we update GSI within the loop as necessary. */
3389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3390 {
3391 gimple stmt = gsi_stmt (gsi);
3392 tree lhs, rhs;
3393 enum tree_code code;
3394
3395 if (!is_gimple_assign (stmt))
3396 {
3397 gsi_next (&gsi);
3398 continue;
3399 }
3400
3401 lhs = gimple_assign_lhs (stmt);
3402 rhs = gimple_assign_rhs1 (stmt);
3403 code = gimple_assign_rhs_code (stmt);
3404 if (TREE_CODE (lhs) != SSA_NAME
3405 || has_zero_uses (lhs))
3406 {
3407 gsi_next (&gsi);
3408 continue;
3409 }
3410
3411 /* If this statement sets an SSA_NAME to an address,
3412 try to propagate the address into the uses of the SSA_NAME. */
3413 if (code == ADDR_EXPR
3414 /* Handle pointer conversions on invariant addresses
3415 as well, as this is valid gimple. */
3416 || (CONVERT_EXPR_CODE_P (code)
3417 && TREE_CODE (rhs) == ADDR_EXPR
3418 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3419 {
3420 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3421 if ((!base
3422 || !DECL_P (base)
3423 || decl_address_invariant_p (base))
3424 && !stmt_references_abnormal_ssa_name (stmt)
3425 && forward_propagate_addr_expr (lhs, rhs, true))
3426 {
3427 release_defs (stmt);
3428 gsi_remove (&gsi, true);
3429 }
3430 else
3431 gsi_next (&gsi);
3432 }
3433 else if (code == POINTER_PLUS_EXPR)
3434 {
3435 tree off = gimple_assign_rhs2 (stmt);
3436 if (TREE_CODE (off) == INTEGER_CST
3437 && can_propagate_from (stmt)
3438 && !simple_iv_increment_p (stmt)
3439 /* ??? Better adjust the interface to that function
3440 instead of building new trees here. */
3441 && forward_propagate_addr_expr
3442 (lhs,
3443 build1_loc (gimple_location (stmt),
3444 ADDR_EXPR, TREE_TYPE (rhs),
3445 fold_build2 (MEM_REF,
3446 TREE_TYPE (TREE_TYPE (rhs)),
3447 rhs,
3448 fold_convert (ptr_type_node,
3449 off))), true))
3450 {
3451 release_defs (stmt);
3452 gsi_remove (&gsi, true);
3453 }
3454 else if (is_gimple_min_invariant (rhs))
3455 {
3456 /* Make sure to fold &a[0] + off_1 here. */
3457 fold_stmt_inplace (&gsi);
3458 update_stmt (stmt);
3459 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3460 gsi_next (&gsi);
3461 }
3462 else
3463 gsi_next (&gsi);
3464 }
3465 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3466 {
3467 if (forward_propagate_comparison (&gsi))
3468 cfg_changed = true;
3469 }
3470 else
3471 gsi_next (&gsi);
3472 }
3473
3474 /* Combine stmts with the stmts defining their operands.
3475 Note we update GSI within the loop as necessary. */
3476 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
3477 {
3478 gimple stmt = gsi_stmt (gsi);
3479 bool changed = false;
3480
3481 /* Mark stmt as potentially needing revisiting. */
3482 gimple_set_plf (stmt, GF_PLF_1, false);
3483
3484 switch (gimple_code (stmt))
3485 {
3486 case GIMPLE_ASSIGN:
3487 {
3488 tree rhs1 = gimple_assign_rhs1 (stmt);
3489 enum tree_code code = gimple_assign_rhs_code (stmt);
3490
3491 if ((code == BIT_NOT_EXPR
3492 || code == NEGATE_EXPR)
3493 && TREE_CODE (rhs1) == SSA_NAME)
3494 changed = simplify_not_neg_expr (&gsi);
3495 else if (code == COND_EXPR
3496 || code == VEC_COND_EXPR)
3497 {
3498 /* In this case the entire COND_EXPR is in rhs1. */
3499 if (forward_propagate_into_cond (&gsi)
3500 || combine_cond_exprs (&gsi))
3501 {
3502 changed = true;
3503 stmt = gsi_stmt (gsi);
3504 }
3505 }
3506 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3507 {
3508 int did_something;
3509 did_something = forward_propagate_into_comparison (&gsi);
3510 if (did_something == 2)
3511 cfg_changed = true;
3512 changed = did_something != 0;
3513 }
3514 else if ((code == PLUS_EXPR
3515 || code == BIT_IOR_EXPR
3516 || code == BIT_XOR_EXPR)
3517 && simplify_rotate (&gsi))
3518 changed = true;
3519 else if (code == BIT_AND_EXPR
3520 || code == BIT_IOR_EXPR
3521 || code == BIT_XOR_EXPR)
3522 changed = simplify_bitwise_binary (&gsi);
3523 else if (code == PLUS_EXPR
3524 || code == MINUS_EXPR)
3525 changed = associate_plusminus (&gsi);
3526 else if (code == POINTER_PLUS_EXPR)
3527 changed = associate_pointerplus (&gsi);
3528 else if (CONVERT_EXPR_CODE_P (code)
3529 || code == FLOAT_EXPR
3530 || code == FIX_TRUNC_EXPR)
3531 {
3532 int did_something = combine_conversions (&gsi);
3533 if (did_something == 2)
3534 cfg_changed = true;
3535
3536 /* If we have a narrowing conversion to an integral
3537 type that is fed by a BIT_AND_EXPR, we might be
3538 able to remove the BIT_AND_EXPR if it merely
3539 masks off bits outside the final type (and nothing
3540 else). */
3541 if (! did_something)
3542 {
3543 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3544 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
3545 if (INTEGRAL_TYPE_P (outer_type)
3546 && INTEGRAL_TYPE_P (inner_type)
3547 && (TYPE_PRECISION (outer_type)
3548 <= TYPE_PRECISION (inner_type)))
3549 did_something = simplify_conversion_from_bitmask (&gsi);
3550 }
3551
3552 changed = did_something != 0;
3553 }
3554 else if (code == VIEW_CONVERT_EXPR)
3555 changed = simplify_vce (&gsi);
3556 else if (code == VEC_PERM_EXPR)
3557 {
3558 int did_something = simplify_permutation (&gsi);
3559 if (did_something == 2)
3560 cfg_changed = true;
3561 changed = did_something != 0;
3562 }
3563 else if (code == BIT_FIELD_REF)
3564 changed = simplify_bitfield_ref (&gsi);
3565 else if (code == CONSTRUCTOR
3566 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3567 changed = simplify_vector_constructor (&gsi);
3568 break;
3569 }
3570
3571 case GIMPLE_SWITCH:
3572 changed = simplify_gimple_switch (stmt);
3573 break;
3574
3575 case GIMPLE_COND:
3576 {
3577 int did_something;
3578 did_something = forward_propagate_into_gimple_cond (stmt);
3579 if (did_something == 2)
3580 cfg_changed = true;
3581 changed = did_something != 0;
3582 break;
3583 }
3584
3585 case GIMPLE_CALL:
3586 {
3587 tree callee = gimple_call_fndecl (stmt);
3588 if (callee != NULL_TREE
3589 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3590 changed = simplify_builtin_call (&gsi, callee);
3591 break;
3592 }
3593
3594 default:;
3595 }
3596
3597 if (changed)
3598 {
3599 /* If the stmt changed then re-visit it and the statements
3600 inserted before it. */
3601 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3602 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3603 break;
3604 if (gsi_end_p (gsi))
3605 gsi = gsi_start_bb (bb);
3606 else
3607 gsi_next (&gsi);
3608 }
3609 else
3610 {
3611 /* Stmt no longer needs to be revisited. */
3612 gimple_set_plf (stmt, GF_PLF_1, true);
3613 gsi_next (&gsi);
3614 }
3615 }
3616 }
3617
3618 if (cfg_changed)
3619 todoflags |= TODO_cleanup_cfg;
3620
3621 return todoflags;
3622 }
3623
3624
3625 static bool
3626 gate_forwprop (void)
3627 {
3628 return flag_tree_forwprop;
3629 }
3630
3631 namespace {
3632
3633 const pass_data pass_data_forwprop =
3634 {
3635 GIMPLE_PASS, /* type */
3636 "forwprop", /* name */
3637 OPTGROUP_NONE, /* optinfo_flags */
3638 true, /* has_gate */
3639 true, /* has_execute */
3640 TV_TREE_FORWPROP, /* tv_id */
3641 ( PROP_cfg | PROP_ssa ), /* properties_required */
3642 0, /* properties_provided */
3643 0, /* properties_destroyed */
3644 0, /* todo_flags_start */
3645 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3646 };
3647
3648 class pass_forwprop : public gimple_opt_pass
3649 {
3650 public:
3651 pass_forwprop (gcc::context *ctxt)
3652 : gimple_opt_pass (pass_data_forwprop, ctxt)
3653 {}
3654
3655 /* opt_pass methods: */
3656 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
3657 bool gate () { return gate_forwprop (); }
3658 unsigned int execute () { return ssa_forward_propagate_and_combine (); }
3659
3660 }; // class pass_forwprop
3661
3662 } // anon namespace
3663
3664 gimple_opt_pass *
3665 make_pass_forwprop (gcc::context *ctxt)
3666 {
3667 return new pass_forwprop (ctxt);
3668 }