gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "basic-block.h"
43 #include "tree-pass.h"
44 #include "cfgloop.h"
45 #include "gimple-pretty-print.h"
46 #include "gimple-ssa.h"
47 #include "tree-cfg.h"
48 #include "tree-phinodes.h"
49 #include "ssa-iterators.h"
50 #include "tree-ssanames.h"
51 #include "domwalk.h"
52 #include "pointer-set.h"
53 #include "expmed.h"
54 #include "params.h"
55 #include "hash-table.h"
56 #include "tree-ssa-address.h"
57 \f
58 /* Information about a strength reduction candidate. Each statement
59 in the candidate table represents an expression of one of the
60 following forms (the special case of CAND_REF will be described
61 later):
62
63 (CAND_MULT) S1: X = (B + i) * S
64 (CAND_ADD) S1: X = B + (i * S)
65
66 Here X and B are SSA names, i is an integer constant, and S is
67 either an SSA name or a constant. We call B the "base," i the
68 "index", and S the "stride."
69
70 Any statement S0 that dominates S1 and is of the form:
71
72 (CAND_MULT) S0: Y = (B + i') * S
73 (CAND_ADD) S0: Y = B + (i' * S)
74
75 is called a "basis" for S1. In both cases, S1 may be replaced by
76
77 S1': X = Y + (i - i') * S,
78
79 where (i - i') * S is folded to the extent possible.
80
81 All gimple statements are visited in dominator order, and each
82 statement that may contribute to one of the forms of S1 above is
83 given at least one entry in the candidate table. Such statements
84 include addition, pointer addition, subtraction, multiplication,
85 negation, copies, and nontrivial type casts. If a statement may
86 represent more than one expression of the forms of S1 above,
87 multiple "interpretations" are stored in the table and chained
88 together. Examples:
89
90 * An add of two SSA names may treat either operand as the base.
91 * A multiply of two SSA names, likewise.
92 * A copy or cast may be thought of as either a CAND_MULT with
93 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
94
95 Candidate records are allocated from an obstack. They are addressed
96 both from a hash table keyed on S1, and from a vector of candidate
97 pointers arranged in predominator order.
98
99 Opportunity note
100 ----------------
101 Currently we don't recognize:
102
103 S0: Y = (S * i') - B
104 S1: X = (S * i) - B
105
106 as a strength reduction opportunity, even though this S1 would
107 also be replaceable by the S1' above. This can be added if it
108 comes up in practice.
109
110 Strength reduction in addressing
111 --------------------------------
112 There is another kind of candidate known as CAND_REF. A CAND_REF
113 describes a statement containing a memory reference having
114 complex addressing that might benefit from strength reduction.
115 Specifically, we are interested in references for which
116 get_inner_reference returns a base address, offset, and bitpos as
117 follows:
118
119 base: MEM_REF (T1, C1)
120 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
121 bitpos: C4 * BITS_PER_UNIT
122
123 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
124 arbitrary integer constants. Note that C2 may be zero, in which
125 case the offset will be MULT_EXPR (T2, C3).
126
127 When this pattern is recognized, the original memory reference
128 can be replaced with:
129
130 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
131 C1 + (C2 * C3) + C4)
132
133 which distributes the multiply to allow constant folding. When
134 two or more addressing expressions can be represented by MEM_REFs
135 of this form, differing only in the constants C1, C2, and C4,
136 making this substitution produces more efficient addressing during
137 the RTL phases. When there are not at least two expressions with
138 the same values of T1, T2, and C3, there is nothing to be gained
139 by the replacement.
140
141 Strength reduction of CAND_REFs uses the same infrastructure as
142 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
143 field, MULT_EXPR (T2, C3) in the stride (S) field, and
144 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
145 is thus another CAND_REF with the same B and S values. When at
146 least two CAND_REFs are chained together using the basis relation,
147 each of them is replaced as above, resulting in improved code
148 generation for addressing.
149
150 Conditional candidates
151 ======================
152
153 Conditional candidates are best illustrated with an example.
154 Consider the code sequence:
155
156 (1) x_0 = ...;
157 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
158 if (...)
 159 (3) x_1 = x_0 + 1; ADD (B: x_0; i: 1; S: 1)
 160 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0; i: 0; S: 1)
 161 (5) x_3 = x_2 + 1; ADD (B: x_2; i: 1; S: 1)
 162 (6) a_1 = x_3 * 5; MULT (B: x_2; i: 1; S: 5)
163
164 Here strength reduction is complicated by the uncertain value of x_2.
165 A legitimate transformation is:
166
167 (1) x_0 = ...;
168 (2) a_0 = x_0 * 5;
169 if (...)
170 {
171 (3) [x_1 = x_0 + 1;]
172 (3a) t_1 = a_0 + 5;
173 }
174 (4) [x_2 = PHI <x_0, x_1>;]
175 (4a) t_2 = PHI <a_0, t_1>;
176 (5) [x_3 = x_2 + 1;]
177 (6r) a_1 = t_2 + 5;
178
179 where the bracketed instructions may go dead.
180
181 To recognize this opportunity, we have to observe that statement (6)
182 has a "hidden basis" (2). The hidden basis is unlike a normal basis
183 in that the statement and the hidden basis have different base SSA
184 names (x_2 and x_0, respectively). The relationship is established
185 when a statement's base name (x_2) is defined by a phi statement (4),
186 each argument of which (x_0, x_1) has an identical "derived base name."
187 If the argument is defined by a candidate (as x_1 is by (3)) that is a
188 CAND_ADD having a stride of 1, the derived base name of the argument is
189 the base name of the candidate (x_0). Otherwise, the argument itself
190 is its derived base name (as is the case with argument x_0).
191
192 The hidden basis for statement (6) is the nearest dominating candidate
193 whose base name is the derived base name (x_0) of the feeding phi (4),
194 and whose stride is identical to that of the statement. We can then
195 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
196 allowing the final replacement of (6) by the strength-reduced (6r).
197
198 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
199 A CAND_PHI is not a candidate for replacement, but is maintained in the
200 candidate table to ease discovery of hidden bases. Any phi statement
201 whose arguments share a common derived base name is entered into the
202 table with the derived base name, an (arbitrary) index of zero, and a
203 stride of 1. A statement with a hidden basis can then be detected by
204 simply looking up its feeding phi definition in the candidate table,
205 extracting the derived base name, and searching for a basis in the
206 usual manner after substituting the derived base name.
207
208 Note that the transformation is only valid when the original phi and
209 the statements that define the phi's arguments are all at the same
210 position in the loop hierarchy. */
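
/* For illustration only (the SSA names and constants here are
   arbitrary, not from the original commentary): suppose the pass
   visits

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   where S0 dominates S1.  S0 has the same base B and stride S, so it
   is a basis for S1.  With i' = 2 and i = 7, the difference
   (i - i') = 5, and S1 may be replaced by

     S1': X = Y + 5 * S

   trading a general multiply for an add and a multiply by a small
   constant; when S is itself a constant, 5 * S folds away entirely.  */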
211
212
213 /* Index into the candidate vector, offset by 1. VECs are zero-based,
214 while cand_idx's are one-based, with zero indicating null. */
215 typedef unsigned cand_idx;
216
217 /* The kind of candidate. */
218 enum cand_kind
219 {
220 CAND_MULT,
221 CAND_ADD,
222 CAND_REF,
223 CAND_PHI
224 };
225
226 struct slsr_cand_d
227 {
228 /* The candidate statement S1. */
229 gimple cand_stmt;
230
231 /* The base expression B: often an SSA name, but not always. */
232 tree base_expr;
233
234 /* The stride S. */
235 tree stride;
236
237 /* The index constant i. */
238 double_int index;
239
240 /* The type of the candidate. This is normally the type of base_expr,
241 but casts may have occurred when combining feeding instructions.
242 A candidate can only be a basis for candidates of the same final type.
243 (For CAND_REFs, this is the type to be used for operand 1 of the
244 replacement MEM_REF.) */
245 tree cand_type;
246
247 /* The kind of candidate (CAND_MULT, etc.). */
248 enum cand_kind kind;
249
250 /* Index of this candidate in the candidate vector. */
251 cand_idx cand_num;
252
253 /* Index of the next candidate record for the same statement.
254 A statement may be useful in more than one way (e.g., due to
255 commutativity). So we can have multiple "interpretations"
256 of a statement. */
257 cand_idx next_interp;
258
259 /* Index of the basis statement S0, if any, in the candidate vector. */
260 cand_idx basis;
261
262 /* First candidate for which this candidate is a basis, if one exists. */
263 cand_idx dependent;
264
265 /* Next candidate having the same basis as this one. */
266 cand_idx sibling;
267
268 /* If this is a conditional candidate, the CAND_PHI candidate
269 that defines the base SSA name B. */
270 cand_idx def_phi;
271
272 /* Savings that can be expected from eliminating dead code if this
273 candidate is replaced. */
274 int dead_savings;
275 };
276
277 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
278 typedef const struct slsr_cand_d *const_slsr_cand_t;
279
280 /* Pointers to candidates are chained together as part of a mapping
281 from base expressions to the candidates that use them. */
282
283 struct cand_chain_d
284 {
285 /* Base expression for the chain of candidates: often, but not
286 always, an SSA name. */
287 tree base_expr;
288
289 /* Pointer to a candidate. */
290 slsr_cand_t cand;
291
292 /* Chain pointer. */
293 struct cand_chain_d *next;
294
295 };
296
297 typedef struct cand_chain_d cand_chain, *cand_chain_t;
298 typedef const struct cand_chain_d *const_cand_chain_t;
299
300 /* Information about a unique "increment" associated with candidates
301 having an SSA name for a stride. An increment is the difference
302 between the index of the candidate and the index of its basis,
303 i.e., (i - i') as discussed in the module commentary.
304
305 When we are not going to generate address arithmetic we treat
306 increments that differ only in sign as the same, allowing sharing
307 of the cost of initializers. The absolute value of the increment
308 is stored in the incr_info. */
309
310 struct incr_info_d
311 {
312 /* The increment that relates a candidate to its basis. */
313 double_int incr;
314
315 /* How many times the increment occurs in the candidate tree. */
316 unsigned count;
317
318 /* Cost of replacing candidates using this increment. Negative and
319 zero costs indicate replacement should be performed. */
320 int cost;
321
322 /* If this increment is profitable but is not -1, 0, or 1, it requires
323 an initializer T_0 = stride * incr to be found or introduced in the
324 nearest common dominator of all candidates. This field holds T_0
325 for subsequent use. */
326 tree initializer;
327
328 /* If the initializer was found to already exist, this is the block
329 where it was found. */
330 basic_block init_bb;
331 };
332
333 typedef struct incr_info_d incr_info, *incr_info_t;
334
335 /* Candidates are maintained in a vector. If candidate X dominates
336 candidate Y, then X appears before Y in the vector; but the
337 converse does not necessarily hold. */
338 static vec<slsr_cand_t> cand_vec;
339
340 enum cost_consts
341 {
342 COST_NEUTRAL = 0,
343 COST_INFINITE = 1000
344 };
345
346 enum stride_status
347 {
348 UNKNOWN_STRIDE = 0,
349 KNOWN_STRIDE = 1
350 };
351
352 enum phi_adjust_status
353 {
354 NOT_PHI_ADJUST = 0,
355 PHI_ADJUST = 1
356 };
357
358 enum count_phis_status
359 {
360 DONT_COUNT_PHIS = 0,
361 COUNT_PHIS = 1
362 };
363
364 /* Pointer map embodying a mapping from statements to candidates. */
365 static struct pointer_map_t *stmt_cand_map;
366
367 /* Obstack for candidates. */
368 static struct obstack cand_obstack;
369
370 /* Obstack for candidate chains. */
371 static struct obstack chain_obstack;
372
373 /* An array INCR_VEC of incr_infos is used during analysis of related
374 candidates having an SSA name for a stride. INCR_VEC_LEN describes
375 its current length. MAX_INCR_VEC_LEN is used to avoid costly
376 pathological cases. */
377 static incr_info_t incr_vec;
378 static unsigned incr_vec_len;
379 const int MAX_INCR_VEC_LEN = 16;
380
381 /* For a chain of candidates with unknown stride, indicates whether or not
382 we must generate pointer arithmetic when replacing statements. */
383 static bool address_arithmetic_p;
384
385 /* Forward function declarations. */
386 static slsr_cand_t base_cand_from_table (tree);
387 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
388 static bool legal_cast_p_1 (tree, tree);
389 \f
390 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
391
392 static slsr_cand_t
393 lookup_cand (cand_idx idx)
394 {
395 return cand_vec[idx - 1];
396 }
397
398 /* Helper for hashing a candidate chain header. */
399
400 struct cand_chain_hasher : typed_noop_remove <cand_chain>
401 {
402 typedef cand_chain value_type;
403 typedef cand_chain compare_type;
404 static inline hashval_t hash (const value_type *);
405 static inline bool equal (const value_type *, const compare_type *);
406 };
407
408 inline hashval_t
409 cand_chain_hasher::hash (const value_type *p)
410 {
411 tree base_expr = p->base_expr;
412 return iterative_hash_expr (base_expr, 0);
413 }
414
415 inline bool
416 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
417 {
418 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
419 }
420
421 /* Hash table embodying a mapping from base exprs to chains of candidates. */
422 static hash_table <cand_chain_hasher> base_cand_map;
423 \f
424 /* Look in the candidate table for a CAND_PHI that defines BASE and
425 return it if found; otherwise return NULL. */
426
427 static cand_idx
428 find_phi_def (tree base)
429 {
430 slsr_cand_t c;
431
432 if (TREE_CODE (base) != SSA_NAME)
433 return 0;
434
435 c = base_cand_from_table (base);
436
437 if (!c || c->kind != CAND_PHI)
438 return 0;
439
440 return c->cand_num;
441 }
442
443 /* Helper routine for find_basis_for_candidate. May be called twice:
444 once for the candidate's base expr, and optionally again for the
445 candidate's phi definition. */
446
447 static slsr_cand_t
448 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
449 {
450 cand_chain mapping_key;
451 cand_chain_t chain;
452 slsr_cand_t basis = NULL;
453
454 // Limit potential of N^2 behavior for long candidate chains.
455 int iters = 0;
456 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
457
458 mapping_key.base_expr = base_expr;
459 chain = base_cand_map.find (&mapping_key);
460
461 for (; chain && iters < max_iters; chain = chain->next, ++iters)
462 {
463 slsr_cand_t one_basis = chain->cand;
464
465 if (one_basis->kind != c->kind
466 || one_basis->cand_stmt == c->cand_stmt
467 || !operand_equal_p (one_basis->stride, c->stride, 0)
468 || !types_compatible_p (one_basis->cand_type, c->cand_type)
469 || !dominated_by_p (CDI_DOMINATORS,
470 gimple_bb (c->cand_stmt),
471 gimple_bb (one_basis->cand_stmt)))
472 continue;
473
474 if (!basis || basis->cand_num < one_basis->cand_num)
475 basis = one_basis;
476 }
477
478 return basis;
479 }
480
481 /* Use the base expr from candidate C to look for possible candidates
482 that can serve as a basis for C. Each potential basis must also
483 appear in a block that dominates the candidate statement and have
484 the same stride and type. If more than one possible basis exists,
485 the one with highest index in the vector is chosen; this will be
486 the most immediately dominating basis. */
487
488 static int
489 find_basis_for_candidate (slsr_cand_t c)
490 {
491 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
492
493 /* If a candidate doesn't have a basis using its base expression,
494 it may have a basis hidden by one or more intervening phis. */
495 if (!basis && c->def_phi)
496 {
497 basic_block basis_bb, phi_bb;
498 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
499 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
500
501 if (basis)
502 {
503 /* A hidden basis must dominate the phi-definition of the
504 candidate's base name. */
505 phi_bb = gimple_bb (phi_cand->cand_stmt);
506 basis_bb = gimple_bb (basis->cand_stmt);
507
508 if (phi_bb == basis_bb
509 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
510 {
511 basis = NULL;
512 c->basis = 0;
513 }
514
515 /* If we found a hidden basis, estimate additional dead-code
516 savings if the phi and its feeding statements can be removed. */
517 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
518 c->dead_savings += phi_cand->dead_savings;
519 }
520 }
521
522 if (basis)
523 {
524 c->sibling = basis->dependent;
525 basis->dependent = c->cand_num;
526 return basis->cand_num;
527 }
528
529 return 0;
530 }
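
/* A minimal sketch of the bookkeeping above, with made-up candidate
   numbers: if candidates 9, 13, and 17 each find candidate 4 as their
   basis (in that order), then after the three updates we have

     cand 4:  dependent = 17
     cand 17: sibling   = 13
     cand 13: sibling   = 9
     cand 9:  sibling   = 0

   i.e., the dependents of a basis form a singly-linked list threaded
   through the sibling fields, newest first.  */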
531
532 /* Record a mapping from the base expression of C to C itself, indicating that
533 C may potentially serve as a basis using that base expression. */
534
535 static void
536 record_potential_basis (slsr_cand_t c)
537 {
538 cand_chain_t node;
539 cand_chain **slot;
540
541 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
542 node->base_expr = c->base_expr;
543 node->cand = c;
544 node->next = NULL;
545 slot = base_cand_map.find_slot (node, INSERT);
546
547 if (*slot)
548 {
549 cand_chain_t head = (cand_chain_t) (*slot);
550 node->next = head->next;
551 head->next = node;
552 }
553 else
554 *slot = node;
555 }
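
/* A minimal sketch of the resulting chain order, with made-up
   candidate numbers: if candidates 3, 8, and 12 share a base
   expression and are recorded in that order, the chain for that base
   becomes 3 -> 12 -> 8, since each new node is linked immediately
   after the existing head rather than appended at the tail.  */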
556
557 /* Allocate storage for a new candidate and initialize its fields.
558 Attempt to find a basis for the candidate. */
559
560 static slsr_cand_t
561 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
562 double_int index, tree stride, tree ctype,
563 unsigned savings)
564 {
565 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
566 sizeof (slsr_cand));
567 c->cand_stmt = gs;
568 c->base_expr = base;
569 c->stride = stride;
570 c->index = index;
571 c->cand_type = ctype;
572 c->kind = kind;
573 c->cand_num = cand_vec.length () + 1;
574 c->next_interp = 0;
575 c->dependent = 0;
576 c->sibling = 0;
577 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
578 c->dead_savings = savings;
579
580 cand_vec.safe_push (c);
581
582 if (kind == CAND_PHI)
583 c->basis = 0;
584 else
585 c->basis = find_basis_for_candidate (c);
586
587 record_potential_basis (c);
588
589 return c;
590 }
591
592 /* Determine the target cost of statement GS when compiling according
593 to SPEED. */
594
595 static int
596 stmt_cost (gimple gs, bool speed)
597 {
598 tree lhs, rhs1, rhs2;
599 enum machine_mode lhs_mode;
600
601 gcc_assert (is_gimple_assign (gs));
602 lhs = gimple_assign_lhs (gs);
603 rhs1 = gimple_assign_rhs1 (gs);
604 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
605
606 switch (gimple_assign_rhs_code (gs))
607 {
608 case MULT_EXPR:
609 rhs2 = gimple_assign_rhs2 (gs);
610
611 if (host_integerp (rhs2, 0))
612 return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2), lhs_mode, speed);
613
614 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
615 return mul_cost (speed, lhs_mode);
616
617 case PLUS_EXPR:
618 case POINTER_PLUS_EXPR:
619 case MINUS_EXPR:
620 return add_cost (speed, lhs_mode);
621
622 case NEGATE_EXPR:
623 return neg_cost (speed, lhs_mode);
624
625 case NOP_EXPR:
626 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
627
628 /* Note that we don't assign costs to copies that in most cases
629 will go away. */
630 default:
631 ;
632 }
633
634 gcc_unreachable ();
635 return 0;
636 }
637
638 /* Look up the defining statement for BASE_IN and return a pointer
639 to its candidate in the candidate table, if any; otherwise NULL.
640 Only CAND_ADD and CAND_MULT candidates are returned. */
641
642 static slsr_cand_t
643 base_cand_from_table (tree base_in)
644 {
645 slsr_cand_t *result;
646
647 gimple def = SSA_NAME_DEF_STMT (base_in);
648 if (!def)
649 return (slsr_cand_t) NULL;
650
651 result = (slsr_cand_t *) pointer_map_contains (stmt_cand_map, def);
652
653 if (result && (*result)->kind != CAND_REF)
654 return *result;
655
656 return (slsr_cand_t) NULL;
657 }
658
659 /* Add an entry to the statement-to-candidate mapping. */
660
661 static void
662 add_cand_for_stmt (gimple gs, slsr_cand_t c)
663 {
664 void **slot = pointer_map_insert (stmt_cand_map, gs);
665 gcc_assert (!*slot);
666 *slot = c;
667 }
668 \f
669 /* Given PHI which contains a phi statement, determine whether it
670 satisfies all the requirements of a phi candidate. If so, create
671 a candidate. Note that a CAND_PHI never has a basis itself, but
672 is used to help find a basis for subsequent candidates. */
673
674 static void
675 slsr_process_phi (gimple phi, bool speed)
676 {
677 unsigned i;
678 tree arg0_base = NULL_TREE, base_type;
679 slsr_cand_t c;
680 struct loop *cand_loop = gimple_bb (phi)->loop_father;
681 unsigned savings = 0;
682
683 /* A CAND_PHI requires each of its arguments to have the same
684 derived base name. (See the module header commentary for a
685 definition of derived base names.) Furthermore, all feeding
686 definitions must be in the same position in the loop hierarchy
687 as PHI. */
688
689 for (i = 0; i < gimple_phi_num_args (phi); i++)
690 {
691 slsr_cand_t arg_cand;
692 tree arg = gimple_phi_arg_def (phi, i);
693 tree derived_base_name = NULL_TREE;
694 gimple arg_stmt = NULL;
695 basic_block arg_bb = NULL;
696
697 if (TREE_CODE (arg) != SSA_NAME)
698 return;
699
700 arg_cand = base_cand_from_table (arg);
701
702 if (arg_cand)
703 {
704 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
705 {
706 if (!arg_cand->next_interp)
707 return;
708
709 arg_cand = lookup_cand (arg_cand->next_interp);
710 }
711
712 if (!integer_onep (arg_cand->stride))
713 return;
714
715 derived_base_name = arg_cand->base_expr;
716 arg_stmt = arg_cand->cand_stmt;
717 arg_bb = gimple_bb (arg_stmt);
718
719 /* Gather potential dead code savings if the phi statement
720 can be removed later on. */
721 if (has_single_use (arg))
722 {
723 if (gimple_code (arg_stmt) == GIMPLE_PHI)
724 savings += arg_cand->dead_savings;
725 else
726 savings += stmt_cost (arg_stmt, speed);
727 }
728 }
729 else
730 {
731 derived_base_name = arg;
732
733 if (SSA_NAME_IS_DEFAULT_DEF (arg))
734 arg_bb = single_succ (ENTRY_BLOCK_PTR);
735 else
 736 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
737 }
738
739 if (!arg_bb || arg_bb->loop_father != cand_loop)
740 return;
741
742 if (i == 0)
743 arg0_base = derived_base_name;
744 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
745 return;
746 }
747
748 /* Create the candidate. "alloc_cand_and_find_basis" is named
749 misleadingly for this case, as no basis will be sought for a
750 CAND_PHI. */
751 base_type = TREE_TYPE (arg0_base);
752
753 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base, double_int_zero,
754 integer_one_node, base_type, savings);
755
756 /* Add the candidate to the statement-candidate mapping. */
757 add_cand_for_stmt (phi, c);
758 }
759
760 /* Given PBASE which is a pointer to tree, look up the defining
761 statement for it and check whether the candidate is in the
762 form of:
763
764 X = B + (1 * S), S is integer constant
765 X = B + (i * S), S is integer one
766
767 If so, set PBASE to the candidate's base_expr and return double
768 int (i * S).
769 Otherwise, just return double int zero. */
770
771 static double_int
772 backtrace_base_for_ref (tree *pbase)
773 {
774 tree base_in = *pbase;
775 slsr_cand_t base_cand;
776
777 STRIP_NOPS (base_in);
778
779 /* Strip off widening conversion(s) to handle cases where
780 e.g. 'B' is widened from an 'int' in order to calculate
781 a 64-bit address. */
782 if (CONVERT_EXPR_P (base_in)
783 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
784 base_in = get_unwidened (base_in, NULL_TREE);
785
786 if (TREE_CODE (base_in) != SSA_NAME)
787 return tree_to_double_int (integer_zero_node);
788
789 base_cand = base_cand_from_table (base_in);
790
791 while (base_cand && base_cand->kind != CAND_PHI)
792 {
793 if (base_cand->kind == CAND_ADD
794 && base_cand->index.is_one ()
795 && TREE_CODE (base_cand->stride) == INTEGER_CST)
796 {
797 /* X = B + (1 * S), S is integer constant. */
798 *pbase = base_cand->base_expr;
799 return tree_to_double_int (base_cand->stride);
800 }
801 else if (base_cand->kind == CAND_ADD
802 && TREE_CODE (base_cand->stride) == INTEGER_CST
803 && integer_onep (base_cand->stride))
804 {
805 /* X = B + (i * S), S is integer one. */
806 *pbase = base_cand->base_expr;
807 return base_cand->index;
808 }
809
810 if (base_cand->next_interp)
811 base_cand = lookup_cand (base_cand->next_interp);
812 else
813 base_cand = NULL;
814 }
815
816 return tree_to_double_int (integer_zero_node);
817 }
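
/* For illustration only (names are arbitrary): if the tree pointed to
   by PBASE is an SSA name T2 defined by a candidate of the first form,

     T2 = T2' + (1 * 8)        ADD (B: T2'; i: 1; S: 8)

   then this function rewrites *PBASE to T2' and returns 8, allowing
   the caller to fold 8 * C3 into the constant index of the
   restructured reference instead of recomputing it at runtime.  */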
818
819 /* Look for the following pattern:
820
821 *PBASE: MEM_REF (T1, C1)
822
823 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
824 or
825 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
826 or
827 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
828
829 *PINDEX: C4 * BITS_PER_UNIT
830
831 If not present, leave the input values unchanged and return FALSE.
832 Otherwise, modify the input values as follows and return TRUE:
833
834 *PBASE: T1
835 *POFFSET: MULT_EXPR (T2, C3)
836 *PINDEX: C1 + (C2 * C3) + C4
837
838 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
839 will be further restructured to:
840
841 *PBASE: T1
842 *POFFSET: MULT_EXPR (T2', C3)
843 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
844
845 static bool
846 restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
847 tree *ptype)
848 {
849 tree base = *pbase, offset = *poffset;
850 double_int index = *pindex;
851 double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
852 tree mult_op0, mult_op1, t1, t2, type;
853 double_int c1, c2, c3, c4, c5;
854
855 if (!base
856 || !offset
857 || TREE_CODE (base) != MEM_REF
858 || TREE_CODE (offset) != MULT_EXPR
859 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
860 || !index.umod (bpu, FLOOR_MOD_EXPR).is_zero ())
861 return false;
862
863 t1 = TREE_OPERAND (base, 0);
864 c1 = mem_ref_offset (base);
865 type = TREE_TYPE (TREE_OPERAND (base, 1));
866
867 mult_op0 = TREE_OPERAND (offset, 0);
868 mult_op1 = TREE_OPERAND (offset, 1);
869
870 c3 = tree_to_double_int (mult_op1);
871
872 if (TREE_CODE (mult_op0) == PLUS_EXPR)
873
874 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
875 {
876 t2 = TREE_OPERAND (mult_op0, 0);
877 c2 = tree_to_double_int (TREE_OPERAND (mult_op0, 1));
878 }
879 else
880 return false;
881
882 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
883
884 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
885 {
886 t2 = TREE_OPERAND (mult_op0, 0);
887 c2 = -tree_to_double_int (TREE_OPERAND (mult_op0, 1));
888 }
889 else
890 return false;
891
892 else
893 {
894 t2 = mult_op0;
895 c2 = double_int_zero;
896 }
897
898 c4 = index.udiv (bpu, FLOOR_DIV_EXPR);
899 c5 = backtrace_base_for_ref (&t2);
900
901 *pbase = t1;
902 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
903 double_int_to_tree (sizetype, c3));
904 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
905 *ptype = type;
906
907 return true;
908 }
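
/* A worked example of the restructuring above, using made-up
   constants (and assuming BITS_PER_UNIT == 8):

     *PBASE:   MEM_REF (T1, 8)
     *POFFSET: MULT_EXPR (PLUS_EXPR (T2, 3), 4)
     *PINDEX:  16

   becomes

     *PBASE:   T1
     *POFFSET: MULT_EXPR (T2, 4)
     *PINDEX:  8 + (3 * 4) + 2 = 22

   so C1 = 8, C2 = 3, C3 = 4, and C4 = 2.  If T2 were additionally
   known to be T2' + C5 via backtrace_base_for_ref, another C5 * 4
   would be folded into *PINDEX and *POFFSET would use T2'.  */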
909
910 /* Given GS which contains a data reference, create a CAND_REF entry in
911 the candidate table and attempt to find a basis. */
912
913 static void
914 slsr_process_ref (gimple gs)
915 {
916 tree ref_expr, base, offset, type;
917 HOST_WIDE_INT bitsize, bitpos;
918 enum machine_mode mode;
919 int unsignedp, volatilep;
920 double_int index;
921 slsr_cand_t c;
922
923 if (gimple_vdef (gs))
924 ref_expr = gimple_assign_lhs (gs);
925 else
926 ref_expr = gimple_assign_rhs1 (gs);
927
928 if (!handled_component_p (ref_expr)
929 || TREE_CODE (ref_expr) == BIT_FIELD_REF
930 || (TREE_CODE (ref_expr) == COMPONENT_REF
931 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
932 return;
933
934 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
935 &unsignedp, &volatilep, false);
936 index = double_int::from_uhwi (bitpos);
937
938 if (!restructure_reference (&base, &offset, &index, &type))
939 return;
940
941 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
942 type, 0);
943
944 /* Add the candidate to the statement-candidate mapping. */
945 add_cand_for_stmt (gs, c);
946 }
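
/* A sketch of the kind of source code this is aimed at (the struct,
   function, and variable names here are hypothetical):

     struct x { int a[16], b[16], c[16]; };

     void
     f (struct x *p, unsigned int n)
     {
       p->a[n] = 1;
       p->b[n] = 2;
       p->c[n] = 3;
     }

   Each store restructures to a MEM_REF sharing the same base pointer
   T1, the same index expression derived from n, and C3 = 4, with the
   references differing only in the constant index (0, 64, and 128
   bytes).  Once chained through a common basis, the shared address
   arithmetic can be exposed and reused during the RTL phases.  */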
947
948 /* Create a candidate entry for a statement GS, where GS multiplies
949 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
950 about the two SSA names into the new candidate. Return the new
951 candidate. */
952
953 static slsr_cand_t
954 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
955 {
956 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
957 double_int index;
958 unsigned savings = 0;
959 slsr_cand_t c;
960 slsr_cand_t base_cand = base_cand_from_table (base_in);
961
962 /* Look at all interpretations of the base candidate, if necessary,
963 to find information to propagate into this candidate. */
964 while (base_cand && !base && base_cand->kind != CAND_PHI)
965 {
966
967 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
968 {
969 /* Y = (B + i') * 1
970 X = Y * Z
971 ================
972 X = (B + i') * Z */
973 base = base_cand->base_expr;
974 index = base_cand->index;
975 stride = stride_in;
976 ctype = base_cand->cand_type;
977 if (has_single_use (base_in))
978 savings = (base_cand->dead_savings
979 + stmt_cost (base_cand->cand_stmt, speed));
980 }
981 else if (base_cand->kind == CAND_ADD
982 && TREE_CODE (base_cand->stride) == INTEGER_CST)
983 {
984 /* Y = B + (i' * S), S constant
985 X = Y * Z
986 ============================
987 X = B + ((i' * S) * Z) */
988 base = base_cand->base_expr;
989 index = base_cand->index * tree_to_double_int (base_cand->stride);
990 stride = stride_in;
991 ctype = base_cand->cand_type;
992 if (has_single_use (base_in))
993 savings = (base_cand->dead_savings
994 + stmt_cost (base_cand->cand_stmt, speed));
995 }
996
997 if (base_cand->next_interp)
998 base_cand = lookup_cand (base_cand->next_interp);
999 else
1000 base_cand = NULL;
1001 }
1002
1003 if (!base)
1004 {
1005 /* No interpretations had anything useful to propagate, so
1006 produce X = (Y + 0) * Z. */
1007 base = base_in;
1008 index = double_int_zero;
1009 stride = stride_in;
1010 ctype = TREE_TYPE (base_in);
1011 }
1012
1013 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1014 ctype, savings);
1015 return c;
1016 }
1017
1018 /* Create a candidate entry for a statement GS, where GS multiplies
1019 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1020 information about BASE_IN into the new candidate. Return the new
1021 candidate. */
1022
1023 static slsr_cand_t
1024 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1025 {
1026 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1027 double_int index, temp;
1028 unsigned savings = 0;
1029 slsr_cand_t c;
1030 slsr_cand_t base_cand = base_cand_from_table (base_in);
1031
1032 /* Look at all interpretations of the base candidate, if necessary,
1033 to find information to propagate into this candidate. */
1034 while (base_cand && !base && base_cand->kind != CAND_PHI)
1035 {
1036 if (base_cand->kind == CAND_MULT
1037 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1038 {
1039 /* Y = (B + i') * S, S constant
1040 X = Y * c
1041 ============================
1042 X = (B + i') * (S * c) */
1043 base = base_cand->base_expr;
1044 index = base_cand->index;
1045 temp = tree_to_double_int (base_cand->stride)
1046 * tree_to_double_int (stride_in);
1047 stride = double_int_to_tree (TREE_TYPE (stride_in), temp);
1048 ctype = base_cand->cand_type;
1049 if (has_single_use (base_in))
1050 savings = (base_cand->dead_savings
1051 + stmt_cost (base_cand->cand_stmt, speed));
1052 }
1053 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1054 {
1055 /* Y = B + (i' * 1)
1056 X = Y * c
1057 ===========================
1058 X = (B + i') * c */
1059 base = base_cand->base_expr;
1060 index = base_cand->index;
1061 stride = stride_in;
1062 ctype = base_cand->cand_type;
1063 if (has_single_use (base_in))
1064 savings = (base_cand->dead_savings
1065 + stmt_cost (base_cand->cand_stmt, speed));
1066 }
1067 else if (base_cand->kind == CAND_ADD
1068 && base_cand->index.is_one ()
1069 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1070 {
1071 /* Y = B + (1 * S), S constant
1072 X = Y * c
1073 ===========================
1074 X = (B + S) * c */
1075 base = base_cand->base_expr;
1076 index = tree_to_double_int (base_cand->stride);
1077 stride = stride_in;
1078 ctype = base_cand->cand_type;
1079 if (has_single_use (base_in))
1080 savings = (base_cand->dead_savings
1081 + stmt_cost (base_cand->cand_stmt, speed));
1082 }
1083
1084 if (base_cand->next_interp)
1085 base_cand = lookup_cand (base_cand->next_interp);
1086 else
1087 base_cand = NULL;
1088 }
1089
1090 if (!base)
1091 {
1092 /* No interpretations had anything useful to propagate, so
1093 produce X = (Y + 0) * c. */
1094 base = base_in;
1095 index = double_int_zero;
1096 stride = stride_in;
1097 ctype = TREE_TYPE (base_in);
1098 }
1099
1100 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1101 ctype, savings);
1102 return c;
1103 }
1104
1105 /* Given GS which is a multiply of scalar integers, make an appropriate
1106 entry in the candidate table. If this is a multiply of two SSA names,
1107 create two CAND_MULT interpretations and attempt to find a basis for
1108 each of them. Otherwise, create a single CAND_MULT and attempt to
1109 find a basis. */
1110
1111 static void
1112 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1113 {
1114 slsr_cand_t c, c2;
1115
1116 /* If this is a multiply of an SSA name with itself, it is highly
1117 unlikely that we will get a strength reduction opportunity, so
1118 don't record it as a candidate. This simplifies the logic for
1119 finding a basis, so if this is removed that must be considered. */
1120 if (rhs1 == rhs2)
1121 return;
1122
1123 if (TREE_CODE (rhs2) == SSA_NAME)
1124 {
1125 /* Record an interpretation of this statement in the candidate table
1126 assuming RHS1 is the base expression and RHS2 is the stride. */
1127 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1128
1129 /* Add the first interpretation to the statement-candidate mapping. */
1130 add_cand_for_stmt (gs, c);
1131
1132 /* Record another interpretation of this statement assuming RHS1
1133 is the stride and RHS2 is the base expression. */
1134 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1135 c->next_interp = c2->cand_num;
1136 }
1137 else
1138 {
1139 /* Record an interpretation for the multiply-immediate. */
1140 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1141
1142 /* Add the interpretation to the statement-candidate mapping. */
1143 add_cand_for_stmt (gs, c);
1144 }
1145 }
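
/* For illustration (names arbitrary, and assuming nothing useful is
   known about a or b): a statement x = a * b with two SSA operands is
   entered twice,

     CAND_MULT (B: a; i: 0; S: b)
     CAND_MULT (B: b; i: 0; S: a)

   chained through next_interp; x = a * a is deliberately not recorded
   at all; and x = a * 5 gets the single multiply-immediate
   interpretation from create_mul_imm_cand.  */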
1146
1147 /* Create a candidate entry for a statement GS, where GS adds two
1148 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1149 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1150 information about the two SSA names into the new candidate.
1151 Return the new candidate. */
1152
1153 static slsr_cand_t
1154 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1155 bool subtract_p, bool speed)
1156 {
1157 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1158 double_int index;
1159 unsigned savings = 0;
1160 slsr_cand_t c;
1161 slsr_cand_t base_cand = base_cand_from_table (base_in);
1162 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1163
1164 /* The most useful transformation is a multiply-immediate feeding
1165 an add or subtract. Look for that first. */
1166 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1167 {
1168 if (addend_cand->kind == CAND_MULT
1169 && addend_cand->index.is_zero ()
1170 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1171 {
1172 /* Z = (B + 0) * S, S constant
1173 X = Y +/- Z
1174 ===========================
1175 X = Y + ((+/-1 * S) * B) */
1176 base = base_in;
1177 index = tree_to_double_int (addend_cand->stride);
1178 if (subtract_p)
1179 index = -index;
1180 stride = addend_cand->base_expr;
1181 ctype = TREE_TYPE (base_in);
1182 if (has_single_use (addend_in))
1183 savings = (addend_cand->dead_savings
1184 + stmt_cost (addend_cand->cand_stmt, speed));
1185 }
1186
1187 if (addend_cand->next_interp)
1188 addend_cand = lookup_cand (addend_cand->next_interp);
1189 else
1190 addend_cand = NULL;
1191 }
1192
1193 while (base_cand && !base && base_cand->kind != CAND_PHI)
1194 {
1195 if (base_cand->kind == CAND_ADD
1196 && (base_cand->index.is_zero ()
1197 || operand_equal_p (base_cand->stride,
1198 integer_zero_node, 0)))
1199 {
1200 /* Y = B + (i' * S), i' * S = 0
1201 X = Y +/- Z
1202 ============================
1203 X = B + (+/-1 * Z) */
1204 base = base_cand->base_expr;
1205 index = subtract_p ? double_int_minus_one : double_int_one;
1206 stride = addend_in;
1207 ctype = base_cand->cand_type;
1208 if (has_single_use (base_in))
1209 savings = (base_cand->dead_savings
1210 + stmt_cost (base_cand->cand_stmt, speed));
1211 }
1212 else if (subtract_p)
1213 {
1214 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1215
1216 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1217 {
1218 if (subtrahend_cand->kind == CAND_MULT
1219 && subtrahend_cand->index.is_zero ()
1220 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1221 {
1222 /* Z = (B + 0) * S, S constant
1223 X = Y - Z
1224 ===========================
1225 Value: X = Y + ((-1 * S) * B) */
1226 base = base_in;
1227 index = tree_to_double_int (subtrahend_cand->stride);
1228 index = -index;
1229 stride = subtrahend_cand->base_expr;
1230 ctype = TREE_TYPE (base_in);
1231 if (has_single_use (addend_in))
1232 savings = (subtrahend_cand->dead_savings
1233 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1234 }
1235
1236 if (subtrahend_cand->next_interp)
1237 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1238 else
1239 subtrahend_cand = NULL;
1240 }
1241 }
1242
1243 if (base_cand->next_interp)
1244 base_cand = lookup_cand (base_cand->next_interp);
1245 else
1246 base_cand = NULL;
1247 }
1248
1249 if (!base)
1250 {
1251 /* No interpretations had anything useful to propagate, so
1252 produce X = Y + (1 * Z). */
1253 base = base_in;
1254 index = subtract_p ? double_int_minus_one : double_int_one;
1255 stride = addend_in;
1256 ctype = TREE_TYPE (base_in);
1257 }
1258
1259 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1260 ctype, savings);
1261 return c;
1262 }
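
/* For illustration (names arbitrary, and assuming nothing else is
   known about y): given

     z = b * 4;                 MULT (B: b; i: 0; S: 4)
     x = y + z;

   the addend interpretation above records x as

     CAND_ADD (B: y; i: 4; S: b)

   i.e., x = y + 4 * b, which can later find a basis such as
   x' = y + 3 * b and be rewritten as x = x' + b.  */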
1263
1264 /* Create a candidate entry for a statement GS, where GS adds SSA
1265 name BASE_IN to constant INDEX_IN. Propagate any known information
1266 about BASE_IN into the new candidate. Return the new candidate. */
1267
1268 static slsr_cand_t
1269 create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed)
1270 {
1271 enum cand_kind kind = CAND_ADD;
1272 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1273 double_int index, multiple;
1274 unsigned savings = 0;
1275 slsr_cand_t c;
1276 slsr_cand_t base_cand = base_cand_from_table (base_in);
1277
1278 while (base_cand && !base && base_cand->kind != CAND_PHI)
1279 {
1280 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride));
1281
1282 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1283 && index_in.multiple_of (tree_to_double_int (base_cand->stride),
1284 unsigned_p, &multiple))
1285 {
1286 /* Y = (B + i') * S, S constant, c = kS for some integer k
1287 X = Y + c
1288 ============================
1289 X = (B + (i'+ k)) * S
1290 OR
1291 Y = B + (i' * S), S constant, c = kS for some integer k
1292 X = Y + c
1293 ============================
1294 X = (B + (i'+ k)) * S */
1295 kind = base_cand->kind;
1296 base = base_cand->base_expr;
1297 index = base_cand->index + multiple;
1298 stride = base_cand->stride;
1299 ctype = base_cand->cand_type;
1300 if (has_single_use (base_in))
1301 savings = (base_cand->dead_savings
1302 + stmt_cost (base_cand->cand_stmt, speed));
1303 }
1304
1305 if (base_cand->next_interp)
1306 base_cand = lookup_cand (base_cand->next_interp);
1307 else
1308 base_cand = NULL;
1309 }
1310
1311 if (!base)
1312 {
1313 /* No interpretations had anything useful to propagate, so
1314 produce X = Y + (c * 1). */
1315 kind = CAND_ADD;
1316 base = base_in;
1317 index = index_in;
1318 stride = integer_one_node;
1319 ctype = TREE_TYPE (base_in);
1320 }
1321
1322 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1323 ctype, savings);
1324 return c;
1325 }
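
/* A worked example of the propagation above (names and constants
   arbitrary): given

     y = (b + 1) * 4;           MULT (B: b; i: 1; S: 4)
     x = y + 8;

   the constant 8 is a multiple of the stride (8 = 2 * 4), so x is
   recorded as

     CAND_MULT (B: b; i: 3; S: 4)

   i.e., x = (b + 3) * 4, exposing x in the same form as y so that
   both can participate in the same basis chains.  */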
1326
1327 /* Given GS which is an add or subtract of scalar integers or pointers,
1328 make at least one appropriate entry in the candidate table. */
1329
1330 static void
1331 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1332 {
1333 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1334 slsr_cand_t c = NULL, c2;
1335
1336 if (TREE_CODE (rhs2) == SSA_NAME)
1337 {
1338 /* First record an interpretation assuming RHS1 is the base expression
1339 and RHS2 is the stride. But it doesn't make sense for the
1340 stride to be a pointer, so don't record a candidate in that case. */
1341 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1342 {
1343 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1344
1345 /* Add the first interpretation to the statement-candidate
1346 mapping. */
1347 add_cand_for_stmt (gs, c);
1348 }
1349
1350 /* If the two RHS operands are identical, or this is a subtract,
1351 we're done. */
1352 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1353 return;
1354
1355 /* Otherwise, record another interpretation assuming RHS2 is the
1356 base expression and RHS1 is the stride, again provided that the
1357 stride is not a pointer. */
1358 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1359 {
1360 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1361 if (c)
1362 c->next_interp = c2->cand_num;
1363 else
1364 add_cand_for_stmt (gs, c2);
1365 }
1366 }
1367 else
1368 {
1369 double_int index;
1370
1371 /* Record an interpretation for the add-immediate. */
1372 index = tree_to_double_int (rhs2);
1373 if (subtract_p)
1374 index = -index;
1375
1376 c = create_add_imm_cand (gs, rhs1, index, speed);
1377
1378 /* Add the interpretation to the statement-candidate mapping. */
1379 add_cand_for_stmt (gs, c);
1380 }
1381 }
1382
1383 /* Given GS which is a negate of a scalar integer, make an appropriate
1384 entry in the candidate table. A negate is equivalent to a multiply
1385 by -1. */
1386
1387 static void
1388 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1389 {
1390 /* Record a CAND_MULT interpretation for the multiply by -1. */
1391 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1392
1393 /* Add the interpretation to the statement-candidate mapping. */
1394 add_cand_for_stmt (gs, c);
1395 }
1396
 1397 /* Helper function for legal_cast_p, operating on two trees. Checks
1398 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1399 for more details. */
1400
1401 static bool
1402 legal_cast_p_1 (tree lhs, tree rhs)
1403 {
1404 tree lhs_type, rhs_type;
1405 unsigned lhs_size, rhs_size;
1406 bool lhs_wraps, rhs_wraps;
1407
1408 lhs_type = TREE_TYPE (lhs);
1409 rhs_type = TREE_TYPE (rhs);
1410 lhs_size = TYPE_PRECISION (lhs_type);
1411 rhs_size = TYPE_PRECISION (rhs_type);
1412 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1413 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1414
1415 if (lhs_size < rhs_size
1416 || (rhs_wraps && !lhs_wraps)
1417 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1418 return false;
1419
1420 return true;
1421 }
1422
1423 /* Return TRUE if GS is a statement that defines an SSA name from
1424 a conversion and is legal for us to combine with an add and multiply
1425 in the candidate table. For example, suppose we have:
1426
1427 A = B + i;
1428 C = (type) A;
1429 D = C * S;
1430
1431 Without the type-cast, we would create a CAND_MULT for D with base B,
1432 index i, and stride S. We want to record this candidate only if it
1433 is equivalent to apply the type cast following the multiply:
1434
1435 A = B + i;
1436 E = A * S;
1437 D = (type) E;
1438
1439 We will record the type with the candidate for D. This allows us
1440 to use a similar previous candidate as a basis. If we have earlier seen
1441
1442 A' = B + i';
1443 C' = (type) A';
1444 D' = C' * S;
1445
1446 we can replace D with
1447
1448 D = D' + (i - i') * S;
1449
1450 But if moving the type-cast would change semantics, we mustn't do this.
1451
1452 This is legitimate for casts from a non-wrapping integral type to
1453 any integral type of the same or larger size. It is not legitimate
1454 to convert a wrapping type to a non-wrapping type, or to a wrapping
1455 type of a different size. I.e., with a wrapping type, we must
1456 assume that the addition B + i could wrap, in which case performing
1457 the multiply before or after one of the "illegal" type casts will
1458 have different semantics. */
1459
1460 static bool
1461 legal_cast_p (gimple gs, tree rhs)
1462 {
1463 if (!is_gimple_assign (gs)
1464 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1465 return false;
1466
1467 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1468 }
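
/* A concrete illustration of the restriction above, using made-up
   values: let B be an unsigned char (a wrapping 8-bit type) holding
   255, with i = 1, S = 4, and a widening cast to int.  Casting before
   the multiply gives

     (int) (unsigned char) (255 + 1) * 4  =  0 * 4  =  0,

   while treating the candidate as ((int) B + i) * S, as a basis
   replacement effectively would, gives

     (255 + 1) * 4  =  1024.

   Because the narrow addition may wrap, the cast cannot safely be
   moved past the multiply, so such a cast is not entered in the
   candidate table and later statements cannot look through it.  */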
1469
1470 /* Given GS which is a cast to a scalar integer type, determine whether
1471 the cast is legal for strength reduction. If so, make at least one
1472 appropriate entry in the candidate table. */
1473
1474 static void
1475 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1476 {
1477 tree lhs, ctype;
1478 slsr_cand_t base_cand, c, c2;
1479 unsigned savings = 0;
1480
1481 if (!legal_cast_p (gs, rhs1))
1482 return;
1483
1484 lhs = gimple_assign_lhs (gs);
1485 base_cand = base_cand_from_table (rhs1);
1486 ctype = TREE_TYPE (lhs);
1487
1488 if (base_cand && base_cand->kind != CAND_PHI)
1489 {
1490 while (base_cand)
1491 {
1492 /* Propagate all data from the base candidate except the type,
1493 which comes from the cast, and the base candidate's cast,
1494 which is no longer applicable. */
1495 if (has_single_use (rhs1))
1496 savings = (base_cand->dead_savings
1497 + stmt_cost (base_cand->cand_stmt, speed));
1498
1499 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1500 base_cand->base_expr,
1501 base_cand->index, base_cand->stride,
1502 ctype, savings);
1503 if (base_cand->next_interp)
1504 base_cand = lookup_cand (base_cand->next_interp);
1505 else
1506 base_cand = NULL;
1507 }
1508 }
1509 else
1510 {
1511 /* If nothing is known about the RHS, create fresh CAND_ADD and
1512 CAND_MULT interpretations:
1513
1514 X = Y + (0 * 1)
1515 X = (Y + 0) * 1
1516
1517 The first of these is somewhat arbitrary, but the choice of
1518 1 for the stride simplifies the logic for propagating casts
1519 into their uses. */
1520 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1521 integer_one_node, ctype, 0);
1522 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1523 integer_one_node, ctype, 0);
1524 c->next_interp = c2->cand_num;
1525 }
1526
1527 /* Add the first (or only) interpretation to the statement-candidate
1528 mapping. */
1529 add_cand_for_stmt (gs, c);
1530 }
1531
1532 /* Given GS which is a copy of a scalar integer type, make at least one
1533 appropriate entry in the candidate table.
1534
1535 This interface is included for completeness, but is unnecessary
1536 if this pass immediately follows a pass that performs copy
1537 propagation, such as DOM. */
1538
1539 static void
1540 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1541 {
1542 slsr_cand_t base_cand, c, c2;
1543 unsigned savings = 0;
1544
1545 base_cand = base_cand_from_table (rhs1);
1546
1547 if (base_cand && base_cand->kind != CAND_PHI)
1548 {
1549 while (base_cand)
1550 {
1551 /* Propagate all data from the base candidate. */
1552 if (has_single_use (rhs1))
1553 savings = (base_cand->dead_savings
1554 + stmt_cost (base_cand->cand_stmt, speed));
1555
1556 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1557 base_cand->base_expr,
1558 base_cand->index, base_cand->stride,
1559 base_cand->cand_type, savings);
1560 if (base_cand->next_interp)
1561 base_cand = lookup_cand (base_cand->next_interp);
1562 else
1563 base_cand = NULL;
1564 }
1565 }
1566 else
1567 {
1568 /* If nothing is known about the RHS, create fresh CAND_ADD and
1569 CAND_MULT interpretations:
1570
1571 X = Y + (0 * 1)
1572 X = (Y + 0) * 1
1573
1574 The first of these is somewhat arbitrary, but the choice of
1575 1 for the stride simplifies the logic for propagating casts
1576 into their uses. */
1577 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1578 integer_one_node, TREE_TYPE (rhs1), 0);
1579 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1580 integer_one_node, TREE_TYPE (rhs1), 0);
1581 c->next_interp = c2->cand_num;
1582 }
1583
1584 /* Add the first (or only) interpretation to the statement-candidate
1585 mapping. */
1586 add_cand_for_stmt (gs, c);
1587 }
1588 \f
1589 class find_candidates_dom_walker : public dom_walker
1590 {
1591 public:
1592 find_candidates_dom_walker (cdi_direction direction)
1593 : dom_walker (direction) {}
1594 virtual void before_dom_children (basic_block);
1595 };
1596
1597 /* Find strength-reduction candidates in block BB. */
1598
1599 void
1600 find_candidates_dom_walker::before_dom_children (basic_block bb)
1601 {
1602 bool speed = optimize_bb_for_speed_p (bb);
1603 gimple_stmt_iterator gsi;
1604
1605 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1606 slsr_process_phi (gsi_stmt (gsi), speed);
1607
1608 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1609 {
1610 gimple gs = gsi_stmt (gsi);
1611
1612 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1613 slsr_process_ref (gs);
1614
1615 else if (is_gimple_assign (gs)
1616 && SCALAR_INT_MODE_P
1617 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1618 {
1619 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1620
1621 switch (gimple_assign_rhs_code (gs))
1622 {
1623 case MULT_EXPR:
1624 case PLUS_EXPR:
1625 rhs1 = gimple_assign_rhs1 (gs);
1626 rhs2 = gimple_assign_rhs2 (gs);
1627 /* Should never happen, but currently some buggy situations
1628 in earlier phases put constants in rhs1. */
1629 if (TREE_CODE (rhs1) != SSA_NAME)
1630 continue;
1631 break;
1632
1633 /* Possible future opportunity: rhs1 of a ptr+ can be
1634 an ADDR_EXPR. */
1635 case POINTER_PLUS_EXPR:
1636 case MINUS_EXPR:
1637 rhs2 = gimple_assign_rhs2 (gs);
1638 /* Fall-through. */
1639
1640 case NOP_EXPR:
1641 case MODIFY_EXPR:
1642 case NEGATE_EXPR:
1643 rhs1 = gimple_assign_rhs1 (gs);
1644 if (TREE_CODE (rhs1) != SSA_NAME)
1645 continue;
1646 break;
1647
1648 default:
1649 ;
1650 }
1651
1652 switch (gimple_assign_rhs_code (gs))
1653 {
1654 case MULT_EXPR:
1655 slsr_process_mul (gs, rhs1, rhs2, speed);
1656 break;
1657
1658 case PLUS_EXPR:
1659 case POINTER_PLUS_EXPR:
1660 case MINUS_EXPR:
1661 slsr_process_add (gs, rhs1, rhs2, speed);
1662 break;
1663
1664 case NEGATE_EXPR:
1665 slsr_process_neg (gs, rhs1, speed);
1666 break;
1667
1668 case NOP_EXPR:
1669 slsr_process_cast (gs, rhs1, speed);
1670 break;
1671
1672 case MODIFY_EXPR:
1673 slsr_process_copy (gs, rhs1, speed);
1674 break;
1675
1676 default:
1677 ;
1678 }
1679 }
1680 }
1681 }
1682 \f
1683 /* Dump a candidate for debug. */
1684
1685 static void
1686 dump_candidate (slsr_cand_t c)
1687 {
1688 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1689 gimple_bb (c->cand_stmt)->index);
1690 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1691 switch (c->kind)
1692 {
1693 case CAND_MULT:
1694 fputs (" MULT : (", dump_file);
1695 print_generic_expr (dump_file, c->base_expr, 0);
1696 fputs (" + ", dump_file);
1697 dump_double_int (dump_file, c->index, false);
1698 fputs (") * ", dump_file);
1699 print_generic_expr (dump_file, c->stride, 0);
1700 fputs (" : ", dump_file);
1701 break;
1702 case CAND_ADD:
1703 fputs (" ADD : ", dump_file);
1704 print_generic_expr (dump_file, c->base_expr, 0);
1705 fputs (" + (", dump_file);
1706 dump_double_int (dump_file, c->index, false);
1707 fputs (" * ", dump_file);
1708 print_generic_expr (dump_file, c->stride, 0);
1709 fputs (") : ", dump_file);
1710 break;
1711 case CAND_REF:
1712 fputs (" REF : ", dump_file);
1713 print_generic_expr (dump_file, c->base_expr, 0);
1714 fputs (" + (", dump_file);
1715 print_generic_expr (dump_file, c->stride, 0);
1716 fputs (") + ", dump_file);
1717 dump_double_int (dump_file, c->index, false);
1718 fputs (" : ", dump_file);
1719 break;
1720 case CAND_PHI:
1721 fputs (" PHI : ", dump_file);
1722 print_generic_expr (dump_file, c->base_expr, 0);
1723 fputs (" + (unknown * ", dump_file);
1724 print_generic_expr (dump_file, c->stride, 0);
1725 fputs (") : ", dump_file);
1726 break;
1727 default:
1728 gcc_unreachable ();
1729 }
1730 print_generic_expr (dump_file, c->cand_type, 0);
1731 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1732 c->basis, c->dependent, c->sibling);
1733 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1734 c->next_interp, c->dead_savings);
1735 if (c->def_phi)
1736 fprintf (dump_file, " phi: %d\n", c->def_phi);
1737 fputs ("\n", dump_file);
1738 }
1739
1740 /* Dump the candidate vector for debug. */
1741
1742 static void
1743 dump_cand_vec (void)
1744 {
1745 unsigned i;
1746 slsr_cand_t c;
1747
1748 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1749
1750 FOR_EACH_VEC_ELT (cand_vec, i, c)
1751 dump_candidate (c);
1752 }
1753
1754 /* Callback used to dump the candidate chains hash table. */
1755
1756 int
1757 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1758 {
1759 const_cand_chain_t chain = *slot;
1760 cand_chain_t p;
1761
1762 print_generic_expr (dump_file, chain->base_expr, 0);
1763 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1764
1765 for (p = chain->next; p; p = p->next)
1766 fprintf (dump_file, " -> %d", p->cand->cand_num);
1767
1768 fputs ("\n", dump_file);
1769 return 1;
1770 }
1771
1772 /* Dump the candidate chains. */
1773
1774 static void
1775 dump_cand_chains (void)
1776 {
1777 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1778 base_cand_map.traverse_noresize <void *, ssa_base_cand_dump_callback> (NULL);
1779 fputs ("\n", dump_file);
1780 }
1781
1782 /* Dump the increment vector for debug. */
1783
1784 static void
1785 dump_incr_vec (void)
1786 {
1787 if (dump_file && (dump_flags & TDF_DETAILS))
1788 {
1789 unsigned i;
1790
1791 fprintf (dump_file, "\nIncrement vector:\n\n");
1792
1793 for (i = 0; i < incr_vec_len; i++)
1794 {
1795 fprintf (dump_file, "%3d increment: ", i);
1796 dump_double_int (dump_file, incr_vec[i].incr, false);
1797 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1798 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1799 fputs ("\n initializer: ", dump_file);
1800 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1801 fputs ("\n\n", dump_file);
1802 }
1803 }
1804 }
1805 \f
1806 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1807 data reference. */
1808
1809 static void
1810 replace_ref (tree *expr, slsr_cand_t c)
1811 {
1812 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1813 unsigned HOST_WIDE_INT misalign;
1814 unsigned align;
1815
1816 /* Ensure the memory reference carries the minimum alignment
1817 requirement for the data type. See PR58041. */
1818 get_object_alignment_1 (*expr, &align, &misalign);
1819 if (misalign != 0)
1820 align = (misalign & -misalign);
1821 if (align < TYPE_ALIGN (acc_type))
1822 acc_type = build_aligned_type (acc_type, align);
1823
1824 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1825 c->base_expr, c->stride);
1826 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1827 double_int_to_tree (c->cand_type, c->index));
1828
1829 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1830 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1831 TREE_OPERAND (mem_ref, 0)
1832 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1833 /*simple_p=*/true, NULL,
1834 /*before=*/true, GSI_SAME_STMT);
1835 copy_ref_info (mem_ref, *expr);
1836 *expr = mem_ref;
1837 update_stmt (c->cand_stmt);
1838 }
1839
1840 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1841 dependent of candidate C with an equivalent strength-reduced data
1842 reference. */
1843
1844 static void
1845 replace_refs (slsr_cand_t c)
1846 {
1847 if (gimple_vdef (c->cand_stmt))
1848 {
1849 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1850 replace_ref (lhs, c);
1851 }
1852 else
1853 {
1854 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1855 replace_ref (rhs, c);
1856 }
1857
1858 if (c->sibling)
1859 replace_refs (lookup_cand (c->sibling));
1860
1861 if (c->dependent)
1862 replace_refs (lookup_cand (c->dependent));
1863 }
1864
1865 /* Return TRUE if candidate C is dependent upon a PHI. */
1866
1867 static bool
1868 phi_dependent_cand_p (slsr_cand_t c)
1869 {
1870 /* A candidate is not necessarily dependent upon a PHI just because
1871 it has a phi definition for its base name. It may have a basis
1872 that relies upon the same phi definition, in which case the PHI
1873 is irrelevant to this candidate. */
1874 return (c->def_phi
1875 && c->basis
1876 && lookup_cand (c->basis)->def_phi != c->def_phi);
1877 }
1878
1879 /* Calculate the increment required for candidate C relative to
1880 its basis. */
1881
1882 static double_int
1883 cand_increment (slsr_cand_t c)
1884 {
1885 slsr_cand_t basis;
1886
1887 /* If the candidate doesn't have a basis, just return its own
1888 index. This is useful in record_increments to help us find
1889 an existing initializer. Also, if the candidate's basis is
1890 hidden by a phi, then its own index will be the increment
1891 from the newly introduced phi basis. */
1892 if (!c->basis || phi_dependent_cand_p (c))
1893 return c->index;
1894
1895 basis = lookup_cand (c->basis);
1896 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1897 return c->index - basis->index;
1898 }
1899
1900 /* Calculate the increment required for candidate C relative to
1901 its basis. If we aren't going to generate pointer arithmetic
1902 for this candidate, return the absolute value of that increment
1903 instead. */
1904
1905 static inline double_int
1906 cand_abs_increment (slsr_cand_t c)
1907 {
1908 double_int increment = cand_increment (c);
1909
1910 if (!address_arithmetic_p && increment.is_negative ())
1911 increment = -increment;
1912
1913 return increment;
1914 }
1915
1916 /* Return TRUE iff candidate C has already been replaced under
1917 another interpretation. */
1918
1919 static inline bool
1920 cand_already_replaced (slsr_cand_t c)
1921 {
1922 return (gimple_bb (c->cand_stmt) == 0);
1923 }
1924
1925 /* Common logic used by replace_unconditional_candidate and
1926 replace_conditional_candidate. */
1927
1928 static void
1929 replace_mult_candidate (slsr_cand_t c, tree basis_name, double_int bump)
1930 {
1931 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
1932 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
1933
1934 /* It is highly unlikely, but possible, that the resulting
1935 bump doesn't fit in a HWI. Abandon the replacement
1936 in this case. This does not affect siblings or dependents
1937 of C. Restriction to signed HWI is conservative for unsigned
1938 types but allows for safe negation without twisted logic. */
1939 if (bump.fits_shwi ()
1940 && bump.to_shwi () != HOST_WIDE_INT_MIN
1941 /* It is not useful to replace casts, copies, or adds of
1942 an SSA name and a constant. */
1943 && cand_code != MODIFY_EXPR
1944 && cand_code != NOP_EXPR
1945 && cand_code != PLUS_EXPR
1946 && cand_code != POINTER_PLUS_EXPR
1947 && cand_code != MINUS_EXPR)
1948 {
1949 enum tree_code code = PLUS_EXPR;
1950 tree bump_tree;
1951 gimple stmt_to_print = NULL;
1952
1953 /* If the basis name and the candidate's LHS have incompatible
1954 types, introduce a cast. */
1955 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
1956 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
1957 if (bump.is_negative ())
1958 {
1959 code = MINUS_EXPR;
1960 bump = -bump;
1961 }
1962
1963 bump_tree = double_int_to_tree (target_type, bump);
1964
1965 if (dump_file && (dump_flags & TDF_DETAILS))
1966 {
1967 fputs ("Replacing: ", dump_file);
1968 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1969 }
1970
1971 if (bump.is_zero ())
1972 {
1973 tree lhs = gimple_assign_lhs (c->cand_stmt);
1974 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
1975 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1976 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
1977 gsi_replace (&gsi, copy_stmt, false);
1978 c->cand_stmt = copy_stmt;
1979 if (dump_file && (dump_flags & TDF_DETAILS))
1980 stmt_to_print = copy_stmt;
1981 }
1982 else
1983 {
	  tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
	  if (cand_code != NEGATE_EXPR)
	    {
	      rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	      rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	    }
1989 if (cand_code != NEGATE_EXPR
1990 && ((operand_equal_p (rhs1, basis_name, 0)
1991 && operand_equal_p (rhs2, bump_tree, 0))
1992 || (operand_equal_p (rhs1, bump_tree, 0)
1993 && operand_equal_p (rhs2, basis_name, 0))))
1994 {
1995 if (dump_file && (dump_flags & TDF_DETAILS))
1996 {
1997 fputs ("(duplicate, not actually replacing)", dump_file);
1998 stmt_to_print = c->cand_stmt;
1999 }
2000 }
2001 else
2002 {
2003 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2004 gimple_assign_set_rhs_with_ops (&gsi, code,
2005 basis_name, bump_tree);
2006 update_stmt (gsi_stmt (gsi));
2007 c->cand_stmt = gsi_stmt (gsi);
2008 if (dump_file && (dump_flags & TDF_DETAILS))
2009 stmt_to_print = gsi_stmt (gsi);
2010 }
2011 }
2012
2013 if (dump_file && (dump_flags & TDF_DETAILS))
2014 {
2015 fputs ("With: ", dump_file);
2016 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2017 fputs ("\n", dump_file);
2018 }
2019 }
2020 }
2021
2022 /* Replace candidate C with an add or subtract. Note that we only
2023 operate on CAND_MULTs with known strides, so we will never generate
2024 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2025 X = Y + ((i - i') * S), as described in the module commentary. The
2026 folded value ((i - i') * S) is referred to here as the "bump." */
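
/* A minimal, hypothetical example with constant stride 4:

     x_2 = i_1 * 4;        MULT: (i_1 + 0) * 4   <-- basis
     t_3 = i_1 + 3;
     x_4 = t_3 * 4;        MULT: (i_1 + 3) * 4

   The second multiply is replaced by

     x_4 = x_2 + 12;       bump = (3 - 0) * 4

   The names above are illustrative only.  */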
2027
2028 static void
2029 replace_unconditional_candidate (slsr_cand_t c)
2030 {
2031 slsr_cand_t basis;
2032 double_int stride, bump;
2033
2034 if (cand_already_replaced (c))
2035 return;
2036
2037 basis = lookup_cand (c->basis);
2038 stride = tree_to_double_int (c->stride);
2039 bump = cand_increment (c) * stride;
2040
2041 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2042 }
2043 \f
2044 /* Return the index in the increment vector of the given INCREMENT,
2045 or -1 if not found. The latter can occur if more than
2046 MAX_INCR_VEC_LEN increments have been found. */
2047
2048 static inline int
2049 incr_vec_index (double_int increment)
2050 {
2051 unsigned i;
2052
2053 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2054 ;
2055
2056 if (i < incr_vec_len)
2057 return i;
2058 else
2059 return -1;
2060 }
2061
2062 /* Create a new statement along edge E to add BASIS_NAME to the product
2063 of INCREMENT and the stride of candidate C. Create and return a new
2064    of INCREMENT and the stride of candidate C.  Create and return a new
2065    SSA name to be used as the LHS of the new statement.
2066
2067 static tree
2068 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2069 double_int increment, edge e, location_t loc,
2070 bool known_stride)
2071 {
2072 basic_block insert_bb;
2073 gimple_stmt_iterator gsi;
2074 tree lhs, basis_type;
2075 gimple new_stmt;
2076
2077 /* If the add candidate along this incoming edge has the same
2078 index as C's hidden basis, the hidden basis represents this
2079 edge correctly. */
2080 if (increment.is_zero ())
2081 return basis_name;
2082
2083 basis_type = TREE_TYPE (basis_name);
2084 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2085
2086 if (known_stride)
2087 {
2088 tree bump_tree;
2089 enum tree_code code = PLUS_EXPR;
2090 double_int bump = increment * tree_to_double_int (c->stride);
2091 if (bump.is_negative ())
2092 {
2093 code = MINUS_EXPR;
2094 bump = -bump;
2095 }
2096
2097 bump_tree = double_int_to_tree (basis_type, bump);
2098 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2099 bump_tree);
2100 }
2101 else
2102 {
2103 int i;
2104 bool negate_incr = (!address_arithmetic_p && increment.is_negative ());
2105 i = incr_vec_index (negate_incr ? -increment : increment);
2106 gcc_assert (i >= 0);
2107
2108 if (incr_vec[i].initializer)
2109 {
2110 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2111 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2112 incr_vec[i].initializer);
2113 }
2114 else if (increment.is_one ())
2115 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2116 c->stride);
2117 else if (increment.is_minus_one ())
2118 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2119 c->stride);
2120 else
2121 gcc_unreachable ();
2122 }
2123
2124 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2125 gsi = gsi_last_bb (insert_bb);
2126
2127 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2128 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2129 else
2130 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2131
2132 gimple_set_location (new_stmt, loc);
2133
2134 if (dump_file && (dump_flags & TDF_DETAILS))
2135 {
2136 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2137 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2138 }
2139
2140 return lhs;
2141 }
2142
2143 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2144 is hidden by the phi node FROM_PHI, create a new phi node in the same
2145 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2146 with its phi arguments representing conditional adjustments to the
2147 hidden basis along conditional incoming paths. Those adjustments are
2148 made by creating add statements (and sometimes recursively creating
2149 phis) along those incoming paths. LOC is the location to attach to
2150 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2151 constant. */
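
/* A hypothetical sketch:  suppose the hidden basis is x_3 = a_1 * 4,
   the phi is a_7 = PHI <a_1, a_6> with a_6 = a_1 + 1, and the
   candidate is x_9 = a_7 * 4.  The compensation code introduced is

     slsr_10 = x_3 + 4;            <-- on the edge carrying a_6
     slsr_11 = PHI <x_3, slsr_10>

   and slsr_11 is returned for use as the candidate's new basis.  */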
2152
2153 static tree
2154 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2155 location_t loc, bool known_stride)
2156 {
2157 int i;
2158 tree name, phi_arg;
2159 gimple phi;
2160 vec<tree> phi_args;
2161 slsr_cand_t basis = lookup_cand (c->basis);
2162 int nargs = gimple_phi_num_args (from_phi);
2163 basic_block phi_bb = gimple_bb (from_phi);
2164 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2165 phi_args.create (nargs);
2166
2167 /* Process each argument of the existing phi that represents
2168 conditionally-executed add candidates. */
2169 for (i = 0; i < nargs; i++)
2170 {
2171 edge e = (*phi_bb->preds)[i];
2172 tree arg = gimple_phi_arg_def (from_phi, i);
2173 tree feeding_def;
2174
2175 /* If the phi argument is the base name of the CAND_PHI, then
2176 this incoming arc should use the hidden basis. */
	  if (operand_equal_p (arg, phi_cand->base_expr, 0))
	    {
	      if (basis->index.is_zero ())
		feeding_def = gimple_assign_lhs (basis->cand_stmt);
	      else
		{
		  double_int incr = -basis->index;
		  feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
							     e, loc, known_stride);
		}
	    }
2186 else
2187 {
2188 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2189
2190 /* If there is another phi along this incoming edge, we must
2191 process it in the same fashion to ensure that all basis
2192 adjustments are made along its incoming edges. */
2193 if (gimple_code (arg_def) == GIMPLE_PHI)
2194 feeding_def = create_phi_basis (c, arg_def, basis_name,
2195 loc, known_stride);
2196 else
2197 {
2198 slsr_cand_t arg_cand = base_cand_from_table (arg);
2199 double_int diff = arg_cand->index - basis->index;
2200 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2201 e, loc, known_stride);
2202 }
2203 }
2204
2205 /* Because of recursion, we need to save the arguments in a vector
2206 so we can create the PHI statement all at once. Otherwise the
2207 storage for the half-created PHI can be reclaimed. */
2208 phi_args.safe_push (feeding_def);
2209 }
2210
2211 /* Create the new phi basis. */
2212 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2213 phi = create_phi_node (name, phi_bb);
2214 SSA_NAME_DEF_STMT (name) = phi;
2215
2216 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2217 {
2218 edge e = (*phi_bb->preds)[i];
2219 add_phi_arg (phi, phi_arg, e, loc);
2220 }
2221
2222 update_stmt (phi);
2223
2224 if (dump_file && (dump_flags & TDF_DETAILS))
2225 {
2226 fputs ("Introducing new phi basis: ", dump_file);
2227 print_gimple_stmt (dump_file, phi, 0, 0);
2228 }
2229
2230 return name;
2231 }
2232
2233 /* Given a candidate C whose basis is hidden by at least one intervening
2234 phi, introduce a matching number of new phis to represent its basis
2235 adjusted by conditional increments along possible incoming paths. Then
2236 replace C as though it were an unconditional candidate, using the new
2237 basis. */
2238
2239 static void
2240 replace_conditional_candidate (slsr_cand_t c)
2241 {
2242 tree basis_name, name;
2243 slsr_cand_t basis;
2244 location_t loc;
2245 double_int stride, bump;
2246
2247 /* Look up the LHS SSA name from C's basis. This will be the
2248 RHS1 of the adds we will introduce to create new phi arguments. */
2249 basis = lookup_cand (c->basis);
2250 basis_name = gimple_assign_lhs (basis->cand_stmt);
2251
2252 /* Create a new phi statement which will represent C's true basis
2253 after the transformation is complete. */
2254 loc = gimple_location (c->cand_stmt);
2255 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2256 basis_name, loc, KNOWN_STRIDE);
2257 /* Replace C with an add of the new basis phi and a constant. */
2258 stride = tree_to_double_int (c->stride);
2259 bump = c->index * stride;
2260
2261 replace_mult_candidate (c, name, bump);
2262 }
2263
2264 /* Compute the expected costs of inserting basis adjustments for
2265 candidate C with phi-definition PHI. The cost of inserting
2266 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2267 which are themselves phi results, recursively calculate costs
2268 for those phis as well. */
2269
2270 static int
2271 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2272 {
2273 unsigned i;
2274 int cost = 0;
2275 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2276
2277 /* If we work our way back to a phi that isn't dominated by the hidden
2278 basis, this isn't a candidate for replacement. Indicate this by
2279 returning an unreasonably high cost. It's not easy to detect
2280 these situations when determining the basis, so we defer the
2281 decision until now. */
2282 basic_block phi_bb = gimple_bb (phi);
2283 slsr_cand_t basis = lookup_cand (c->basis);
2284 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2285
2286 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2287 return COST_INFINITE;
2288
2289 for (i = 0; i < gimple_phi_num_args (phi); i++)
2290 {
2291 tree arg = gimple_phi_arg_def (phi, i);
2292
2293 if (arg != phi_cand->base_expr)
2294 {
2295 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2296
2297 if (gimple_code (arg_def) == GIMPLE_PHI)
2298 cost += phi_add_costs (arg_def, c, one_add_cost);
2299 else
2300 {
2301 slsr_cand_t arg_cand = base_cand_from_table (arg);
2302
2303 if (arg_cand->index != c->index)
2304 cost += one_add_cost;
2305 }
2306 }
2307 }
2308
2309 return cost;
2310 }
2311
2312 /* For candidate C, each sibling of candidate C, and each dependent of
2313 candidate C, determine whether the candidate is dependent upon a
2314 phi that hides its basis. If not, replace the candidate unconditionally.
2315 Otherwise, determine whether the cost of introducing compensation code
2316 for the candidate is offset by the gains from strength reduction. If
2317 so, replace the candidate and introduce the compensation code. */
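
/* A hypothetical cost calculation for the conditional case:  if one
   add costs 4 and the phi requires exactly one compensating add, then
   add_costs = 4 + 4 = 8; if the multiply being removed costs 16 and
   there are no dead-code savings, cost = 8 - 16 - 0 = -8, which is
   <= COST_NEUTRAL, so the candidate is replaced.  The numbers are
   illustrative only.  */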
2318
2319 static void
2320 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2321 {
2322 if (phi_dependent_cand_p (c))
2323 {
2324 if (c->kind == CAND_MULT)
2325 {
2326 /* A candidate dependent upon a phi will replace a multiply by
2327 a constant with an add, and will insert at most one add for
2328 each phi argument. Add these costs with the potential dead-code
2329 savings to determine profitability. */
2330 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2331 int mult_savings = stmt_cost (c->cand_stmt, speed);
2332 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2333 tree phi_result = gimple_phi_result (phi);
2334 int one_add_cost = add_cost (speed,
2335 TYPE_MODE (TREE_TYPE (phi_result)));
2336 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2337 int cost = add_costs - mult_savings - c->dead_savings;
2338
2339 if (dump_file && (dump_flags & TDF_DETAILS))
2340 {
2341 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2342 fprintf (dump_file, " add_costs = %d\n", add_costs);
2343 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2344 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2345 fprintf (dump_file, " cost = %d\n", cost);
2346 if (cost <= COST_NEUTRAL)
2347 fputs (" Replacing...\n", dump_file);
2348 else
2349 fputs (" Not replaced.\n", dump_file);
2350 }
2351
2352 if (cost <= COST_NEUTRAL)
2353 replace_conditional_candidate (c);
2354 }
2355 }
2356 else
2357 replace_unconditional_candidate (c);
2358
2359 if (c->sibling)
2360 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2361
2362 if (c->dependent)
2363 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2364 }
2365 \f
2366 /* Count the number of candidates in the tree rooted at C that have
2367 not already been replaced under other interpretations. */
2368
2369 static int
2370 count_candidates (slsr_cand_t c)
2371 {
2372 unsigned count = cand_already_replaced (c) ? 0 : 1;
2373
2374 if (c->sibling)
2375 count += count_candidates (lookup_cand (c->sibling));
2376
2377 if (c->dependent)
2378 count += count_candidates (lookup_cand (c->dependent));
2379
2380 return count;
2381 }
2382
2383 /* Increase the count of INCREMENT by one in the increment vector.
2384 INCREMENT is associated with candidate C. If INCREMENT is to be
2385 conditionally executed as part of a conditional candidate replacement,
2386 IS_PHI_ADJUST is true, otherwise false. If an initializer
2387 T_0 = stride * I is provided by a candidate that dominates all
2388 candidates with the same increment, also record T_0 for subsequent use. */
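
/* A hypothetical example of an initializer:  if candidate
   x_4 = y_2 + t_3 has the interpretation ADD: y_2 + (5 * _1), i.e.
   index 5 and stride _1, then t_3 already holds _1 * 5 and is recorded
   as T_0 for increment 5, provided its definition dominates the other
   candidates using that increment.  */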
2389
2390 static void
2391 record_increment (slsr_cand_t c, double_int increment, bool is_phi_adjust)
2392 {
2393 bool found = false;
2394 unsigned i;
2395
2396 /* Treat increments that differ only in sign as identical so as to
2397 share initializers, unless we are generating pointer arithmetic. */
2398 if (!address_arithmetic_p && increment.is_negative ())
2399 increment = -increment;
2400
2401 for (i = 0; i < incr_vec_len; i++)
2402 {
2403 if (incr_vec[i].incr == increment)
2404 {
2405 incr_vec[i].count++;
2406 found = true;
2407
2408 /* If we previously recorded an initializer that doesn't
2409 dominate this candidate, it's not going to be useful to
2410 us after all. */
2411 if (incr_vec[i].initializer
2412 && !dominated_by_p (CDI_DOMINATORS,
2413 gimple_bb (c->cand_stmt),
2414 incr_vec[i].init_bb))
2415 {
2416 incr_vec[i].initializer = NULL_TREE;
2417 incr_vec[i].init_bb = NULL;
2418 }
2419
2420 break;
2421 }
2422 }
2423
2424 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2425 {
2426 /* The first time we see an increment, create the entry for it.
2427 If this is the root candidate which doesn't have a basis, set
2428 the count to zero. We're only processing it so it can possibly
2429 provide an initializer for other candidates. */
2430 incr_vec[incr_vec_len].incr = increment;
2431 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2432 incr_vec[incr_vec_len].cost = COST_INFINITE;
2433
2434 /* Optimistically record the first occurrence of this increment
2435 as providing an initializer (if it does); we will revise this
2436 opinion later if it doesn't dominate all other occurrences.
2437 Exception: increments of -1, 0, 1 never need initializers;
2438 and phi adjustments don't ever provide initializers. */
2439 if (c->kind == CAND_ADD
2440 && !is_phi_adjust
2441 && c->index == increment
2442 && (increment.sgt (double_int_one)
2443 || increment.slt (double_int_minus_one))
2444 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2445 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2446 {
2447 tree t0 = NULL_TREE;
2448 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2449 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2450 if (operand_equal_p (rhs1, c->base_expr, 0))
2451 t0 = rhs2;
2452 else if (operand_equal_p (rhs2, c->base_expr, 0))
2453 t0 = rhs1;
2454 if (t0
2455 && SSA_NAME_DEF_STMT (t0)
2456 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2457 {
2458 incr_vec[incr_vec_len].initializer = t0;
2459 incr_vec[incr_vec_len++].init_bb
2460 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2461 }
2462 else
2463 {
2464 incr_vec[incr_vec_len].initializer = NULL_TREE;
2465 incr_vec[incr_vec_len++].init_bb = NULL;
2466 }
2467 }
2468 else
2469 {
2470 incr_vec[incr_vec_len].initializer = NULL_TREE;
2471 incr_vec[incr_vec_len++].init_bb = NULL;
2472 }
2473 }
2474 }
2475
2476 /* Given phi statement PHI that hides a candidate from its BASIS, find
2477 the increments along each incoming arc (recursively handling additional
2478 phis that may be present) and record them. These increments are the
2479 difference in index between the index-adjusting statements and the
2480 index of the basis. */
2481
2482 static void
2483 record_phi_increments (slsr_cand_t basis, gimple phi)
2484 {
2485 unsigned i;
2486 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2487
2488 for (i = 0; i < gimple_phi_num_args (phi); i++)
2489 {
2490 tree arg = gimple_phi_arg_def (phi, i);
2491
2492 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2493 {
2494 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2495
2496 if (gimple_code (arg_def) == GIMPLE_PHI)
2497 record_phi_increments (basis, arg_def);
2498 else
2499 {
2500 slsr_cand_t arg_cand = base_cand_from_table (arg);
2501 double_int diff = arg_cand->index - basis->index;
2502 record_increment (arg_cand, diff, PHI_ADJUST);
2503 }
2504 }
2505 }
2506 }
2507
2508 /* Determine how many times each unique increment occurs in the set
2509 of candidates rooted at C's parent, recording the data in the
2510 increment vector. For each unique increment I, if an initializer
2511 T_0 = stride * I is provided by a candidate that dominates all
2512 candidates with the same increment, also record T_0 for subsequent
2513 use. */
2514
2515 static void
2516 record_increments (slsr_cand_t c)
2517 {
2518 if (!cand_already_replaced (c))
2519 {
2520 if (!phi_dependent_cand_p (c))
2521 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2522 else
2523 {
2524 /* A candidate with a basis hidden by a phi will have one
2525 increment for its relationship to the index represented by
2526 the phi, and potentially additional increments along each
2527 incoming edge. For the root of the dependency tree (which
2528 has no basis), process just the initial index in case it has
2529 an initializer that can be used by subsequent candidates. */
2530 record_increment (c, c->index, NOT_PHI_ADJUST);
2531
2532 if (c->basis)
2533 record_phi_increments (lookup_cand (c->basis),
2534 lookup_cand (c->def_phi)->cand_stmt);
2535 }
2536 }
2537
2538 if (c->sibling)
2539 record_increments (lookup_cand (c->sibling));
2540
2541 if (c->dependent)
2542 record_increments (lookup_cand (c->dependent));
2543 }
2544
2545 /* Add up and return the costs of introducing add statements that
2546 require the increment INCR on behalf of candidate C and phi
2547 statement PHI. Accumulate into *SAVINGS the potential savings
2548 from removing existing statements that feed PHI and have no other
2549 uses. */
2550
2551 static int
2552 phi_incr_cost (slsr_cand_t c, double_int incr, gimple phi, int *savings)
2553 {
2554 unsigned i;
2555 int cost = 0;
2556 slsr_cand_t basis = lookup_cand (c->basis);
2557 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2558
2559 for (i = 0; i < gimple_phi_num_args (phi); i++)
2560 {
2561 tree arg = gimple_phi_arg_def (phi, i);
2562
2563 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2564 {
2565 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2566
2567 if (gimple_code (arg_def) == GIMPLE_PHI)
2568 {
2569 int feeding_savings = 0;
2570 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2571 if (has_single_use (gimple_phi_result (arg_def)))
2572 *savings += feeding_savings;
2573 }
2574 else
2575 {
2576 slsr_cand_t arg_cand = base_cand_from_table (arg);
2577 double_int diff = arg_cand->index - basis->index;
2578
2579 if (incr == diff)
2580 {
2581 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2582 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2583 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2584 if (has_single_use (lhs))
2585 *savings += stmt_cost (arg_cand->cand_stmt, true);
2586 }
2587 }
2588 }
2589 }
2590
2591 return cost;
2592 }
2593
2594 /* Return the first candidate in the tree rooted at C that has not
2595 already been replaced, favoring siblings over dependents. */
2596
2597 static slsr_cand_t
2598 unreplaced_cand_in_tree (slsr_cand_t c)
2599 {
2600 if (!cand_already_replaced (c))
2601 return c;
2602
2603 if (c->sibling)
2604 {
2605 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2606 if (sib)
2607 return sib;
2608 }
2609
2610 if (c->dependent)
2611 {
2612 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2613 if (dep)
2614 return dep;
2615 }
2616
2617 return NULL;
2618 }
2619
2620 /* Return TRUE if the candidates in the tree rooted at C should be
2621 optimized for speed, else FALSE. We estimate this based on the block
2622 containing the most dominant candidate in the tree that has not yet
2623 been replaced. */
2624
2625 static bool
2626 optimize_cands_for_speed_p (slsr_cand_t c)
2627 {
2628 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2629 gcc_assert (c2);
2630 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2631 }
2632
2633 /* Add COST_IN to the lowest cost of any dependent path starting at
2634 candidate C or any of its siblings, counting only candidates along
2635 such paths with increment INCR. Assume that replacing a candidate
2636 reduces cost by REPL_SAVINGS. Also account for savings from any
2637 statements that would go dead. If COUNT_PHIS is true, include
2638 costs of introducing feeding statements for conditional candidates. */
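
/* A hypothetical walk:  with COST_IN 6 (say, the cost of an
   initializer), REPL_SAVINGS 2, and a dependent chain containing two
   candidates that use INCR and have no dead-code savings, the path
   cost is 6 - 2 - 2 = 2; that value is later compared against
   COST_NEUTRAL to decide profitability.  The numbers are illustrative
   only.  */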
2639
2640 static int
2641 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2642 double_int incr, bool count_phis)
2643 {
2644 int local_cost, sib_cost, savings = 0;
2645 double_int cand_incr = cand_abs_increment (c);
2646
2647 if (cand_already_replaced (c))
2648 local_cost = cost_in;
2649 else if (incr == cand_incr)
2650 local_cost = cost_in - repl_savings - c->dead_savings;
2651 else
2652 local_cost = cost_in - c->dead_savings;
2653
2654 if (count_phis
2655 && phi_dependent_cand_p (c)
2656 && !cand_already_replaced (c))
2657 {
2658 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2659 local_cost += phi_incr_cost (c, incr, phi, &savings);
2660
2661 if (has_single_use (gimple_phi_result (phi)))
2662 local_cost -= savings;
2663 }
2664
2665 if (c->dependent)
2666 local_cost = lowest_cost_path (local_cost, repl_savings,
2667 lookup_cand (c->dependent), incr,
2668 count_phis);
2669
2670 if (c->sibling)
2671 {
2672 sib_cost = lowest_cost_path (cost_in, repl_savings,
2673 lookup_cand (c->sibling), incr,
2674 count_phis);
2675 local_cost = MIN (local_cost, sib_cost);
2676 }
2677
2678 return local_cost;
2679 }
2680
2681 /* Compute the total savings that would accrue from all replacements
2682 in the candidate tree rooted at C, counting only candidates with
2683 increment INCR. Assume that replacing a candidate reduces cost
2684 by REPL_SAVINGS. Also account for savings from statements that
2685 would go dead. */
2686
2687 static int
2688 total_savings (int repl_savings, slsr_cand_t c, double_int incr,
2689 bool count_phis)
2690 {
2691 int savings = 0;
2692 double_int cand_incr = cand_abs_increment (c);
2693
2694 if (incr == cand_incr && !cand_already_replaced (c))
2695 savings += repl_savings + c->dead_savings;
2696
2697 if (count_phis
2698 && phi_dependent_cand_p (c)
2699 && !cand_already_replaced (c))
2700 {
2701 int phi_savings = 0;
2702 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2703 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2704
2705 if (has_single_use (gimple_phi_result (phi)))
2706 savings += phi_savings;
2707 }
2708
2709 if (c->dependent)
2710 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2711 count_phis);
2712
2713 if (c->sibling)
2714 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2715 count_phis);
2716
2717 return savings;
2718 }
2719
2720 /* Use target-specific costs to determine and record which increments
2721 in the current candidate tree are profitable to replace, assuming
2722 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2723 the candidate tree.
2724
2725 One slight limitation here is that we don't account for the possible
2726 introduction of casts in some cases. See replace_one_candidate for
2727 the cases where these are introduced. This should probably be cleaned
2728 up sometime. */
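
/* A hypothetical reading of the CAND_MULT case below:  for an increment
   of 10, the fixed cost is that of computing stride * 10 for the
   initializer, and each replaced candidate recovers roughly
   (mul_cost - add_cost).  When optimizing for speed, the cheapest
   single dependency path must pay for the initializer; when optimizing
   for size, the total savings over all replacements must do so.  */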
2729
2730 static void
2731 analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
2732 {
2733 unsigned i;
2734
2735 for (i = 0; i < incr_vec_len; i++)
2736 {
2737 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2738
2739 /* If somehow this increment is bigger than a HWI, we won't
2740 be optimizing candidates that use it. And if the increment
2741 has a count of zero, nothing will be done with it. */
2742 if (!incr_vec[i].incr.fits_shwi () || !incr_vec[i].count)
2743 incr_vec[i].cost = COST_INFINITE;
2744
2745 /* Increments of 0, 1, and -1 are always profitable to replace,
2746 because they always replace a multiply or add with an add or
2747 copy, and may cause one or more existing instructions to go
2748 dead. Exception: -1 can't be assumed to be profitable for
2749 pointer addition. */
2750 else if (incr == 0
2751 || incr == 1
2752 || (incr == -1
2753 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2754 != POINTER_PLUS_EXPR)))
2755 incr_vec[i].cost = COST_NEUTRAL;
2756
2757 /* FORNOW: If we need to add an initializer, give up if a cast from
2758 the candidate's type to its stride's type can lose precision.
2759 This could eventually be handled better by expressly retaining the
2760 result of a cast to a wider type in the stride. Example:
2761
2762 short int _1;
2763 _2 = (int) _1;
2764 _3 = _2 * 10;
2765 _4 = x + _3; ADD: x + (10 * _1) : int
2766 _5 = _2 * 15;
2767	 _6 = x + _5;    ADD: x + (15 * _1) : int
2768
2769 Right now replacing _6 would cause insertion of an initializer
2770 of the form "short int T = _1 * 5;" followed by a cast to
2771 int, which could overflow incorrectly. Had we recorded _2 or
2772 (int)_1 as the stride, this wouldn't happen. However, doing
2773 this breaks other opportunities, so this will require some
2774 care. */
2775 else if (!incr_vec[i].initializer
2776 && TREE_CODE (first_dep->stride) != INTEGER_CST
2777 && !legal_cast_p_1 (first_dep->stride,
2778 gimple_assign_lhs (first_dep->cand_stmt)))
2779
2780 incr_vec[i].cost = COST_INFINITE;
2781
2782 /* If we need to add an initializer, make sure we don't introduce
2783 a multiply by a pointer type, which can happen in certain cast
2784 scenarios. FIXME: When cleaning up these cast issues, we can
2785 afford to introduce the multiply provided we cast out to an
2786 unsigned int of appropriate size. */
2787 else if (!incr_vec[i].initializer
2788 && TREE_CODE (first_dep->stride) != INTEGER_CST
2789 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2790
2791 incr_vec[i].cost = COST_INFINITE;
2792
2793 /* For any other increment, if this is a multiply candidate, we
2794 must introduce a temporary T and initialize it with
2795 T_0 = stride * increment. When optimizing for speed, walk the
2796 candidate tree to calculate the best cost reduction along any
2797 path; if it offsets the fixed cost of inserting the initializer,
2798 replacing the increment is profitable. When optimizing for
2799 size, instead calculate the total cost reduction from replacing
2800 all candidates with this increment. */
2801 else if (first_dep->kind == CAND_MULT)
2802 {
2803 int cost = mult_by_coeff_cost (incr, mode, speed);
2804 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2805 if (speed)
2806 cost = lowest_cost_path (cost, repl_savings, first_dep,
2807 incr_vec[i].incr, COUNT_PHIS);
2808 else
2809 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2810 COUNT_PHIS);
2811
2812 incr_vec[i].cost = cost;
2813 }
2814
2815 /* If this is an add candidate, the initializer may already
2816 exist, so only calculate the cost of the initializer if it
2817 doesn't. We are replacing one add with another here, so the
2818 known replacement savings is zero. We will account for removal
2819 of dead instructions in lowest_cost_path or total_savings. */
2820 else
2821 {
2822 int cost = 0;
2823 if (!incr_vec[i].initializer)
2824 cost = mult_by_coeff_cost (incr, mode, speed);
2825
2826 if (speed)
2827 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2828 DONT_COUNT_PHIS);
2829 else
2830 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2831 DONT_COUNT_PHIS);
2832
2833 incr_vec[i].cost = cost;
2834 }
2835 }
2836 }
2837
2838 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2839 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2840 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2841 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2842 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2843
2844 static basic_block
2845 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2846 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2847 {
2848 basic_block ncd;
2849
2850 if (!bb1)
2851 {
2852 *where = c2;
2853 return bb2;
2854 }
2855
2856 if (!bb2)
2857 {
2858 *where = c1;
2859 return bb1;
2860 }
2861
2862 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2863
2864 /* If both candidates are in the same block, the earlier
2865 candidate wins. */
2866 if (bb1 == ncd && bb2 == ncd)
2867 {
2868 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2869 *where = c2;
2870 else
2871 *where = c1;
2872 }
2873
2874   /* Otherwise, if exactly one of the blocks is itself the nearest
2875      common dominator, the candidate from that block wins.  */
2876 else if (bb1 == ncd)
2877 *where = c1;
2878
2879 else if (bb2 == ncd)
2880 *where = c2;
2881
2882 /* If neither matches the dominator, neither wins. */
2883 else
2884 *where = NULL;
2885
2886 return ncd;
2887 }
2888
2889 /* Consider all candidates that feed PHI. Find the nearest common
2890 dominator of those candidates requiring the given increment INCR.
2891 Further find and return the nearest common dominator of this result
2892 with block NCD. If the returned block contains one or more of the
2893 candidates, return the earliest candidate in the block in *WHERE. */
2894
2895 static basic_block
2896 ncd_with_phi (slsr_cand_t c, double_int incr, gimple phi,
2897 basic_block ncd, slsr_cand_t *where)
2898 {
2899 unsigned i;
2900 slsr_cand_t basis = lookup_cand (c->basis);
2901 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2902
2903 for (i = 0; i < gimple_phi_num_args (phi); i++)
2904 {
2905 tree arg = gimple_phi_arg_def (phi, i);
2906
2907 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2908 {
2909 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2910
2911 if (gimple_code (arg_def) == GIMPLE_PHI)
2912 ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
2913 else
2914 {
2915 slsr_cand_t arg_cand = base_cand_from_table (arg);
2916 double_int diff = arg_cand->index - basis->index;
2917
2918 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2919 ncd = ncd_for_two_cands (ncd, gimple_bb (arg_cand->cand_stmt),
2920 *where, arg_cand, where);
2921 }
2922 }
2923 }
2924
2925 return ncd;
2926 }
2927
2928 /* Consider the candidate C together with any candidates that feed
2929 C's phi dependence (if any). Find and return the nearest common
2930 dominator of those candidates requiring the given increment INCR.
2931 If the returned block contains one or more of the candidates,
2932 return the earliest candidate in the block in *WHERE. */
2933
2934 static basic_block
2935 ncd_of_cand_and_phis (slsr_cand_t c, double_int incr, slsr_cand_t *where)
2936 {
2937 basic_block ncd = NULL;
2938
2939 if (cand_abs_increment (c) == incr)
2940 {
2941 ncd = gimple_bb (c->cand_stmt);
2942 *where = c;
2943 }
2944
2945 if (phi_dependent_cand_p (c))
2946 ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
2947 ncd, where);
2948
2949 return ncd;
2950 }
2951
2952 /* Consider all candidates in the tree rooted at C for which INCR
2953 represents the required increment of C relative to its basis.
2954 Find and return the basic block that most nearly dominates all
2955 such candidates. If the returned block contains one or more of
2956 the candidates, return the earliest candidate in the block in
2957 *WHERE. */
2958
2959 static basic_block
2960 nearest_common_dominator_for_cands (slsr_cand_t c, double_int incr,
2961 slsr_cand_t *where)
2962 {
2963 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
2964 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
2965
2966 /* First find the NCD of all siblings and dependents. */
2967 if (c->sibling)
2968 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
2969 incr, &sib_where);
2970 if (c->dependent)
2971 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
2972 incr, &dep_where);
2973 if (!sib_ncd && !dep_ncd)
2974 {
2975 new_where = NULL;
2976 ncd = NULL;
2977 }
2978 else if (sib_ncd && !dep_ncd)
2979 {
2980 new_where = sib_where;
2981 ncd = sib_ncd;
2982 }
2983 else if (dep_ncd && !sib_ncd)
2984 {
2985 new_where = dep_where;
2986 ncd = dep_ncd;
2987 }
2988 else
2989 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
2990 dep_where, &new_where);
2991
2992   /* If the candidate's increment doesn't match the one we're interested
2993      in (and neither do any increments for feeding defs of a phi-dependence),
2994      then the result depends only on siblings and dependents.  */
2995 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
2996
2997 if (!this_ncd || cand_already_replaced (c))
2998 {
2999 *where = new_where;
3000 return ncd;
3001 }
3002
3003 /* Otherwise, compare this candidate with the result from all siblings
3004 and dependents. */
3005 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3006
3007 return ncd;
3008 }
3009
3010 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3011
3012 static inline bool
3013 profitable_increment_p (unsigned index)
3014 {
3015 return (incr_vec[index].cost <= COST_NEUTRAL);
3016 }
3017
3018 /* For each profitable increment in the increment vector not equal to
3019 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3020 dominator of all statements in the candidate chain rooted at C
3021 that require that increment, and insert an initializer
3022 T_0 = stride * increment at that location. Record T_0 with the
3023 increment record. */
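
/* A hypothetical example:  for a profitable increment of 5 with common
   stride _6, an initializer

     slsr_7 = _6 * 5;

   is inserted in the block that most nearly dominates all candidates
   requiring that increment, and slsr_7 is recorded for reuse by each
   such replacement.  The names are illustrative only.  */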
3024
3025 static void
3026 insert_initializers (slsr_cand_t c)
3027 {
3028 unsigned i;
3029
3030 for (i = 0; i < incr_vec_len; i++)
3031 {
3032 basic_block bb;
3033 slsr_cand_t where = NULL;
3034 gimple init_stmt;
3035 tree stride_type, new_name, incr_tree;
3036 double_int incr = incr_vec[i].incr;
3037
3038 if (!profitable_increment_p (i)
3039 || incr.is_one ()
3040 || (incr.is_minus_one ()
3041 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3042 || incr.is_zero ())
3043 continue;
3044
3045 /* We may have already identified an existing initializer that
3046 will suffice. */
3047 if (incr_vec[i].initializer)
3048 {
3049 if (dump_file && (dump_flags & TDF_DETAILS))
3050 {
3051 fputs ("Using existing initializer: ", dump_file);
3052 print_gimple_stmt (dump_file,
3053 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3054 0, 0);
3055 }
3056 continue;
3057 }
3058
3059 /* Find the block that most closely dominates all candidates
3060 with this increment. If there is at least one candidate in
3061 that block, the earliest one will be returned in WHERE. */
3062 bb = nearest_common_dominator_for_cands (c, incr, &where);
3063
3064 /* Create a new SSA name to hold the initializer's value. */
3065 stride_type = TREE_TYPE (c->stride);
3066 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3067 incr_vec[i].initializer = new_name;
3068
3069 /* Create the initializer and insert it in the latest possible
3070 dominating position. */
3071 incr_tree = double_int_to_tree (stride_type, incr);
3072 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3073 c->stride, incr_tree);
3074 if (where)
3075 {
3076 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3077 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3078 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3079 }
3080 else
3081 {
3082 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3083 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3084
3085 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3086 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3087 else
3088 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3089
3090 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3091 }
3092
3093 if (dump_file && (dump_flags & TDF_DETAILS))
3094 {
3095 fputs ("Inserting initializer: ", dump_file);
3096 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3097 }
3098 }
3099 }
3100
3101 /* Return TRUE iff all required increments for candidates feeding PHI
3102 are profitable to replace on behalf of candidate C. */
3103
3104 static bool
3105 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3106 {
3107 unsigned i;
3108 slsr_cand_t basis = lookup_cand (c->basis);
3109 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3110
3111 for (i = 0; i < gimple_phi_num_args (phi); i++)
3112 {
3113 tree arg = gimple_phi_arg_def (phi, i);
3114
3115 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3116 {
3117 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3118
3119 if (gimple_code (arg_def) == GIMPLE_PHI)
3120 {
3121 if (!all_phi_incrs_profitable (c, arg_def))
3122 return false;
3123 }
3124 else
3125 {
3126 int j;
3127 slsr_cand_t arg_cand = base_cand_from_table (arg);
3128 double_int increment = arg_cand->index - basis->index;
3129
3130 if (!address_arithmetic_p && increment.is_negative ())
3131 increment = -increment;
3132
3133 j = incr_vec_index (increment);
3134
3135 if (dump_file && (dump_flags & TDF_DETAILS))
3136 {
3137 fprintf (dump_file, " Conditional candidate %d, phi: ",
3138 c->cand_num);
3139 print_gimple_stmt (dump_file, phi, 0, 0);
3140 fputs (" increment: ", dump_file);
3141 dump_double_int (dump_file, increment, false);
3142 if (j < 0)
3143 fprintf (dump_file,
3144 "\n Not replaced; incr_vec overflow.\n");
	      else
		{
		  fprintf (dump_file, "\n  cost: %d\n", incr_vec[j].cost);
		  if (profitable_increment_p (j))
		    fputs ("  Replacing...\n", dump_file);
		  else
		    fputs ("  Not replaced.\n", dump_file);
		}
3152 }
3153
3154 if (j < 0 || !profitable_increment_p (j))
3155 return false;
3156 }
3157 }
3158 }
3159
3160 return true;
3161 }
3162
3163 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3164 type TO_TYPE, and insert it in front of the statement represented
3165    by candidate C.  Return the new SSA name holding the result
3166    of the cast. */
3167
3168 static tree
3169 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3170 {
3171 tree cast_lhs;
3172 gimple cast_stmt;
3173 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3174
3175 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3176 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
3177 from_expr, NULL_TREE);
3178 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3179 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3180
3181 if (dump_file && (dump_flags & TDF_DETAILS))
3182 {
3183 fputs (" Inserting: ", dump_file);
3184 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3185 }
3186
3187 return cast_lhs;
3188 }
3189
3190 /* Replace the RHS of the statement represented by candidate C with
3191 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3192 leave C unchanged or just interchange its operands. The original
3193 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3194 If the replacement was made and we are doing a details dump,
3195 return the revised statement, else NULL. */
3196
3197 static gimple
3198 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3199 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3200 slsr_cand_t c)
3201 {
3202 if (new_code != old_code
3203 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3204 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3205 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3206 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3207 {
3208 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3209 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3210 update_stmt (gsi_stmt (gsi));
3211 c->cand_stmt = gsi_stmt (gsi);
3212
3213 if (dump_file && (dump_flags & TDF_DETAILS))
3214 return gsi_stmt (gsi);
3215 }
3216
3217 else if (dump_file && (dump_flags & TDF_DETAILS))
3218 fputs (" (duplicate, not actually replacing)\n", dump_file);
3219
3220 return NULL;
3221 }
3222
3223 /* Strength-reduce the statement represented by candidate C by replacing
3224 it with an equivalent addition or subtraction. I is the index into
3225    the increment vector identifying C's increment.  A new SSA name is
3226    created if a cast needs to be introduced.  BASIS_NAME is the rhs1
3227    to use in creating the add/subtract. */
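
/* A hypothetical replacement using an initializer:  with basis
   x_2 = i_1 * _6, initializer slsr_7 = _6 * 5, and increment 5, the
   candidate

     x_4 = t_3 * _6;       MULT: (i_1 + 5) * _6

   is rewritten in place as

     x_4 = x_2 + slsr_7;

   The names are illustrative only.  */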
3228
3229 static void
3230 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3231 {
3232 gimple stmt_to_print = NULL;
3233 tree orig_rhs1, orig_rhs2;
3234 tree rhs2;
3235 enum tree_code orig_code, repl_code;
3236 double_int cand_incr;
3237
3238 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3239 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3240 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3241 cand_incr = cand_increment (c);
3242
3243 if (dump_file && (dump_flags & TDF_DETAILS))
3244 {
3245 fputs ("Replacing: ", dump_file);
3246 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3247 stmt_to_print = c->cand_stmt;
3248 }
3249
3250 if (address_arithmetic_p)
3251 repl_code = POINTER_PLUS_EXPR;
3252 else
3253 repl_code = PLUS_EXPR;
3254
3255 /* If the increment has an initializer T_0, replace the candidate
3256 statement with an add of the basis name and the initializer. */
3257 if (incr_vec[i].initializer)
3258 {
3259 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3260 tree orig_type = TREE_TYPE (orig_rhs2);
3261
3262 if (types_compatible_p (orig_type, init_type))
3263 rhs2 = incr_vec[i].initializer;
3264 else
3265 rhs2 = introduce_cast_before_cand (c, orig_type,
3266 incr_vec[i].initializer);
3267
3268 if (incr_vec[i].incr != cand_incr)
3269 {
3270 gcc_assert (repl_code == PLUS_EXPR);
3271 repl_code = MINUS_EXPR;
3272 }
3273
3274 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3275 orig_code, orig_rhs1, orig_rhs2,
3276 c);
3277 }
3278
3279 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3280 with a subtract of the stride from the basis name, a copy
3281 from the basis name, or an add of the stride to the basis
3282 name, respectively. It may be necessary to introduce a
3283 cast (or reuse an existing cast). */
3284 else if (cand_incr.is_one ())
3285 {
3286 tree stride_type = TREE_TYPE (c->stride);
3287 tree orig_type = TREE_TYPE (orig_rhs2);
3288
3289 if (types_compatible_p (orig_type, stride_type))
3290 rhs2 = c->stride;
3291 else
3292 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3293
3294 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3295 orig_code, orig_rhs1, orig_rhs2,
3296 c);
3297 }
3298
3299 else if (cand_incr.is_minus_one ())
3300 {
3301 tree stride_type = TREE_TYPE (c->stride);
3302 tree orig_type = TREE_TYPE (orig_rhs2);
3303 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3304
3305 if (types_compatible_p (orig_type, stride_type))
3306 rhs2 = c->stride;
3307 else
3308 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3309
3310 if (orig_code != MINUS_EXPR
3311 || !operand_equal_p (basis_name, orig_rhs1, 0)
3312 || !operand_equal_p (rhs2, orig_rhs2, 0))
3313 {
3314 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3315 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3316 update_stmt (gsi_stmt (gsi));
3317 c->cand_stmt = gsi_stmt (gsi);
3318
3319 if (dump_file && (dump_flags & TDF_DETAILS))
3320 stmt_to_print = gsi_stmt (gsi);
3321 }
3322 else if (dump_file && (dump_flags & TDF_DETAILS))
3323 fputs (" (duplicate, not actually replacing)\n", dump_file);
3324 }
3325
3326 else if (cand_incr.is_zero ())
3327 {
3328 tree lhs = gimple_assign_lhs (c->cand_stmt);
3329 tree lhs_type = TREE_TYPE (lhs);
3330 tree basis_type = TREE_TYPE (basis_name);
3331
3332 if (types_compatible_p (lhs_type, basis_type))
3333 {
3334 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
3335 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3336 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3337 gsi_replace (&gsi, copy_stmt, false);
3338 c->cand_stmt = copy_stmt;
3339
3340 if (dump_file && (dump_flags & TDF_DETAILS))
3341 stmt_to_print = copy_stmt;
3342 }
3343 else
3344 {
3345 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3346 gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
3347 basis_name,
3348 NULL_TREE);
3349 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3350 gsi_replace (&gsi, cast_stmt, false);
3351 c->cand_stmt = cast_stmt;
3352
3353 if (dump_file && (dump_flags & TDF_DETAILS))
3354 stmt_to_print = cast_stmt;
3355 }
3356 }
3357 else
3358 gcc_unreachable ();
3359
3360 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3361 {
3362 fputs ("With: ", dump_file);
3363 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3364 fputs ("\n", dump_file);
3365 }
3366 }
3367
3368 /* For each candidate in the tree rooted at C, replace it with
3369 an increment if such has been shown to be profitable. */
3370
3371 static void
3372 replace_profitable_candidates (slsr_cand_t c)
3373 {
3374 if (!cand_already_replaced (c))
3375 {
3376 double_int increment = cand_abs_increment (c);
3377 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3378 int i;
3379
3380 i = incr_vec_index (increment);
3381
3382 /* Only process profitable increments. Nothing useful can be done
3383 to a cast or copy. */
3384 if (i >= 0
3385 && profitable_increment_p (i)
3386 && orig_code != MODIFY_EXPR
3387 && orig_code != NOP_EXPR)
3388 {
3389 if (phi_dependent_cand_p (c))
3390 {
3391 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3392
3393 if (all_phi_incrs_profitable (c, phi))
3394 {
3395 /* Look up the LHS SSA name from C's basis. This will be
3396 the RHS1 of the adds we will introduce to create new
3397 phi arguments. */
3398 slsr_cand_t basis = lookup_cand (c->basis);
3399 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3400
3401 /* Create a new phi statement that will represent C's true
3402 basis after the transformation is complete. */
3403 location_t loc = gimple_location (c->cand_stmt);
3404 tree name = create_phi_basis (c, phi, basis_name,
3405 loc, UNKNOWN_STRIDE);
3406
3407 /* Replace C with an add of the new basis phi and the
3408 increment. */
3409 replace_one_candidate (c, i, name);
3410 }
3411 }
3412 else
3413 {
3414 slsr_cand_t basis = lookup_cand (c->basis);
3415 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3416 replace_one_candidate (c, i, basis_name);
3417 }
3418 }
3419 }
3420
3421 if (c->sibling)
3422 replace_profitable_candidates (lookup_cand (c->sibling));
3423
3424 if (c->dependent)
3425 replace_profitable_candidates (lookup_cand (c->dependent));
3426 }
3427 \f
3428 /* Analyze costs of related candidates in the candidate vector,
3429 and make beneficial replacements. */
3430
3431 static void
3432 analyze_candidates_and_replace (void)
3433 {
3434 unsigned i;
3435 slsr_cand_t c;
3436
3437 /* Each candidate that has a null basis and a non-null
3438 dependent is the root of a tree of related statements.
3439 Analyze each tree to determine a subset of those
3440 statements that can be replaced with maximum benefit. */
3441 FOR_EACH_VEC_ELT (cand_vec, i, c)
3442 {
3443 slsr_cand_t first_dep;
3444
3445 if (c->basis != 0 || c->dependent == 0)
3446 continue;
3447
3448 if (dump_file && (dump_flags & TDF_DETAILS))
3449 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3450 c->cand_num);
3451
3452 first_dep = lookup_cand (c->dependent);
3453
3454 /* If this is a chain of CAND_REFs, unconditionally replace
3455 each of them with a strength-reduced data reference. */
3456 if (c->kind == CAND_REF)
3457 replace_refs (c);
3458
3459 /* If the common stride of all related candidates is a known
3460 constant, each candidate without a phi-dependence can be
3461 profitably replaced. Each replaces a multiply by a single
3462 add, with the possibility that a feeding add also goes dead.
3463 A candidate with a phi-dependence is replaced only if the
3464 compensation code it requires is offset by the strength
3465 reduction savings. */
3466 else if (TREE_CODE (c->stride) == INTEGER_CST)
3467 replace_uncond_cands_and_profitable_phis (first_dep);
3468
3469 /* When the stride is an SSA name, it may still be profitable
3470 to replace some or all of the dependent candidates, depending
3471 on whether the introduced increments can be reused, or are
3472 less expensive to calculate than the replaced statements. */
3473 else
3474 {
3475 enum machine_mode mode;
3476 bool speed;
3477
3478 /* Determine whether we'll be generating pointer arithmetic
3479 when replacing candidates. */
3480 address_arithmetic_p = (c->kind == CAND_ADD
3481 && POINTER_TYPE_P (c->cand_type));
3482
3483 /* If all candidates have already been replaced under other
3484 interpretations, nothing remains to be done. */
3485 if (!count_candidates (c))
3486 continue;
3487
3488 /* Construct an array of increments for this candidate chain. */
3489 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3490 incr_vec_len = 0;
3491 record_increments (c);
3492
3493 /* Determine which increments are profitable to replace. */
3494 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3495 speed = optimize_cands_for_speed_p (c);
3496 analyze_increments (first_dep, mode, speed);
3497
3498 /* Insert initializers of the form T_0 = stride * increment
3499 for use in profitable replacements. */
3500 insert_initializers (first_dep);
3501 dump_incr_vec ();
3502
3503 /* Perform the replacements. */
3504 replace_profitable_candidates (first_dep);
3505 free (incr_vec);
3506 }
3507 }
3508 }
3509
3510 static unsigned
3511 execute_strength_reduction (void)
3512 {
3513 /* Create the obstack where candidates will reside. */
3514 gcc_obstack_init (&cand_obstack);
3515
3516 /* Allocate the candidate vector. */
3517 cand_vec.create (128);
3518
3519 /* Allocate the mapping from statements to candidate indices. */
3520 stmt_cand_map = pointer_map_create ();
3521
3522 /* Create the obstack where candidate chains will reside. */
3523 gcc_obstack_init (&chain_obstack);
3524
3525 /* Allocate the mapping from base expressions to candidate chains. */
3526 base_cand_map.create (500);
3527
3528 /* Initialize the loop optimizer. We need to detect flow across
3529 back edges, and this gives us dominator information as well. */
3530 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3531
3532 /* Walk the CFG in predominator order looking for strength reduction
3533 candidates. */
3534 find_candidates_dom_walker (CDI_DOMINATORS)
3535 .walk (cfun->cfg->x_entry_block_ptr);
3536
3537 if (dump_file && (dump_flags & TDF_DETAILS))
3538 {
3539 dump_cand_vec ();
3540 dump_cand_chains ();
3541 }
3542
3543 /* Analyze costs and make appropriate replacements. */
3544 analyze_candidates_and_replace ();
3545
3546 loop_optimizer_finalize ();
3547 base_cand_map.dispose ();
3548 obstack_free (&chain_obstack, NULL);
3549 pointer_map_destroy (stmt_cand_map);
3550 cand_vec.release ();
3551 obstack_free (&cand_obstack, NULL);
3552
3553 return 0;
3554 }
3555
3556 static bool
3557 gate_strength_reduction (void)
3558 {
3559 return flag_tree_slsr;
3560 }
3561
3562 namespace {
3563
3564 const pass_data pass_data_strength_reduction =
3565 {
3566 GIMPLE_PASS, /* type */
3567 "slsr", /* name */
3568 OPTGROUP_NONE, /* optinfo_flags */
3569 true, /* has_gate */
3570 true, /* has_execute */
3571 TV_GIMPLE_SLSR, /* tv_id */
3572 ( PROP_cfg | PROP_ssa ), /* properties_required */
3573 0, /* properties_provided */
3574 0, /* properties_destroyed */
3575 0, /* todo_flags_start */
3576 TODO_verify_ssa, /* todo_flags_finish */
3577 };
3578
3579 class pass_strength_reduction : public gimple_opt_pass
3580 {
3581 public:
3582 pass_strength_reduction (gcc::context *ctxt)
3583 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3584 {}
3585
3586 /* opt_pass methods: */
3587 bool gate () { return gate_strength_reduction (); }
3588 unsigned int execute () { return execute_strength_reduction (); }
3589
3590 }; // class pass_strength_reduction
3591
3592 } // anon namespace
3593
3594 gimple_opt_pass *
3595 make_pass_strength_reduction (gcc::context *ctxt)
3596 {
3597 return new pass_strength_reduction (ctxt);
3598 }