1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
59 \f
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
64
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
67
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
71
72 Any statement S0 that dominates S1 and is of the form:
73
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
76
77 is called a "basis" for S1. In both cases, S1 may be replaced by
78
79 S1': X = Y + (i - i') * S,
80
81 where (i - i') * S is folded to the extent possible.
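
   For example (a sketch with made-up SSA names), if

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   and S0 dominates S1, then S1 may be rewritten as

     S1': X = Y + 5 * S.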
82
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
91
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
96
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
100
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
104
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
107
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
111
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
120
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
124
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
128
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
131
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
134
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
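
   For example (a sketch): with C1 = 0, C2 = 1, C3 = 4 and C4 = 0, a
   reference whose base is MEM_REF (T1, 0) and whose offset is
   MULT_EXPR (PLUS_EXPR (T2, 1), 4) is rewritten as
   MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 4)), 4).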
142
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
151
152 Conditional candidates
153 ======================
154
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
157
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
165
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
168
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
172 {
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
175 }
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
180
181 where the bracketed instructions may go dead.
182
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
193
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
199
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
209
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
213
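/* A small end-to-end sketch (not taken from the sources, and the
   exact GIMPLE shapes will vary):

     struct S { int a[100]; };
     void f (struct S *p, int n)
     {
       p->a[n] = 1;
       p->a[n + 2] = 2;
     }

   Both stores give rise to CAND_REFs with base p and stride n * 4
   (assuming 4-byte ints), with indices 0 and 8 respectively, so both
   can be rewritten as MEM_REFs off the common address p + n * 4 at
   byte offsets 0 and 8, which later phases can then share.  */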
214
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
218
219 /* The kind of candidate. */
220 enum cand_kind
221 {
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
226 };
227
228 struct slsr_cand_d
229 {
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
232
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
235
236 /* The stride S. */
237 tree stride;
238
239 /* The index constant i. */
240 widest_int index;
241
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
248
249 /* The kind of candidate (CAND_MULT, etc.). */
250 enum cand_kind kind;
251
252 /* Index of this candidate in the candidate vector. */
253 cand_idx cand_num;
254
255 /* Index of the next candidate record for the same statement.
256 A statement may be useful in more than one way (e.g., due to
257 commutativity). So we can have multiple "interpretations"
258 of a statement. */
259 cand_idx next_interp;
260
261 /* Index of the basis statement S0, if any, in the candidate vector. */
262 cand_idx basis;
263
264 /* First candidate for which this candidate is a basis, if one exists. */
265 cand_idx dependent;
266
267 /* Next candidate having the same basis as this one. */
268 cand_idx sibling;
269
270 /* If this is a conditional candidate, the CAND_PHI candidate
271 that defines the base SSA name B. */
272 cand_idx def_phi;
273
274 /* Savings that can be expected from eliminating dead code if this
275 candidate is replaced. */
276 int dead_savings;
277 };
278
279 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
280 typedef const struct slsr_cand_d *const_slsr_cand_t;
281
282 /* Pointers to candidates are chained together as part of a mapping
283 from base expressions to the candidates that use them. */
284
285 struct cand_chain_d
286 {
287 /* Base expression for the chain of candidates: often, but not
288 always, an SSA name. */
289 tree base_expr;
290
291 /* Pointer to a candidate. */
292 slsr_cand_t cand;
293
294 /* Chain pointer. */
295 struct cand_chain_d *next;
296
297 };
298
299 typedef struct cand_chain_d cand_chain, *cand_chain_t;
300 typedef const struct cand_chain_d *const_cand_chain_t;
301
302 /* Information about a unique "increment" associated with candidates
303 having an SSA name for a stride. An increment is the difference
304 between the index of the candidate and the index of its basis,
305 i.e., (i - i') as discussed in the module commentary.
306
307 When we are not going to generate address arithmetic we treat
308 increments that differ only in sign as the same, allowing sharing
309 of the cost of initializers. The absolute value of the increment
310 is stored in the incr_info. */
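
/* For example (a sketch): if one candidate needs increment 3 and
   another needs -3, and no address arithmetic will be generated, both
   are recorded under the increment 3 and can share the cost of a
   single initializer T_0 = stride * 3.  */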
311
312 struct incr_info_d
313 {
314 /* The increment that relates a candidate to its basis. */
315 widest_int incr;
316
317 /* How many times the increment occurs in the candidate tree. */
318 unsigned count;
319
320 /* Cost of replacing candidates using this increment. Negative and
321 zero costs indicate replacement should be performed. */
322 int cost;
323
324 /* If this increment is profitable but is not -1, 0, or 1, it requires
325 an initializer T_0 = stride * incr to be found or introduced in the
326 nearest common dominator of all candidates. This field holds T_0
327 for subsequent use. */
328 tree initializer;
329
330 /* If the initializer was found to already exist, this is the block
331 where it was found. */
332 basic_block init_bb;
333 };
334
335 typedef struct incr_info_d incr_info, *incr_info_t;
336
337 /* Candidates are maintained in a vector. If candidate X dominates
338 candidate Y, then X appears before Y in the vector; but the
339 converse does not necessarily hold. */
340 static vec<slsr_cand_t> cand_vec;
341
342 enum cost_consts
343 {
344 COST_NEUTRAL = 0,
345 COST_INFINITE = 1000
346 };
347
348 enum stride_status
349 {
350 UNKNOWN_STRIDE = 0,
351 KNOWN_STRIDE = 1
352 };
353
354 enum phi_adjust_status
355 {
356 NOT_PHI_ADJUST = 0,
357 PHI_ADJUST = 1
358 };
359
360 enum count_phis_status
361 {
362 DONT_COUNT_PHIS = 0,
363 COUNT_PHIS = 1
364 };
365
366 /* Pointer map embodying a mapping from statements to candidates. */
367 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
368
369 /* Obstack for candidates. */
370 static struct obstack cand_obstack;
371
372 /* Obstack for candidate chains. */
373 static struct obstack chain_obstack;
374
375 /* An array INCR_VEC of incr_infos is used during analysis of related
376 candidates having an SSA name for a stride. INCR_VEC_LEN describes
377 its current length. MAX_INCR_VEC_LEN is used to avoid costly
378 pathological cases. */
379 static incr_info_t incr_vec;
380 static unsigned incr_vec_len;
381 const int MAX_INCR_VEC_LEN = 16;
382
383 /* For a chain of candidates with unknown stride, indicates whether or not
384 we must generate pointer arithmetic when replacing statements. */
385 static bool address_arithmetic_p;
386
387 /* Forward function declarations. */
388 static slsr_cand_t base_cand_from_table (tree);
389 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
390 static bool legal_cast_p_1 (tree, tree);
391 \f
392 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
393
394 static slsr_cand_t
395 lookup_cand (cand_idx idx)
396 {
397 return cand_vec[idx - 1];
398 }
399
400 /* Helper for hashing a candidate chain header. */
401
402 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
403 {
404 static inline hashval_t hash (const cand_chain *);
405 static inline bool equal (const cand_chain *, const cand_chain *);
406 };
407
408 inline hashval_t
409 cand_chain_hasher::hash (const cand_chain *p)
410 {
411 tree base_expr = p->base_expr;
412 return iterative_hash_expr (base_expr, 0);
413 }
414
415 inline bool
416 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
417 {
418 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
419 }
420
421 /* Hash table embodying a mapping from base exprs to chains of candidates. */
422 static hash_table<cand_chain_hasher> *base_cand_map;
423 \f
424 /* Pointer map used by tree_to_aff_combination_expand. */
425 static hash_map<tree, name_expansion *> *name_expansions;
426 /* Pointer map embodying a mapping from bases to alternative bases. */
427 static hash_map<tree, tree> *alt_base_map;
428
429 /* Given BASE, use the tree affine combination facilities to
430 find the underlying tree expression for BASE, with any
431 immediate offset excluded.
432
433 N.B. we should eliminate this backtracking with better forward
434 analysis in a future release. */
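
/* For example (a sketch): if BASE expands to the affine form T + 16,
   the mapping BASE -> T is cached and T is returned; if stripping the
   constant offset changes nothing, NULL is cached and returned
   instead.  */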
435
436 static tree
437 get_alternative_base (tree base)
438 {
439 tree *result = alt_base_map->get (base);
440
441 if (result == NULL)
442 {
443 tree expr;
444 aff_tree aff;
445
446 tree_to_aff_combination_expand (base, TREE_TYPE (base),
447 &aff, &name_expansions);
448 aff.offset = 0;
449 expr = aff_combination_to_tree (&aff);
450
451 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
452
453 return expr == base ? NULL : expr;
454 }
455
456 return *result;
457 }
458
459 /* Look in the candidate table for a CAND_PHI that defines BASE and
460 return it if found; otherwise return NULL. */
461
462 static cand_idx
463 find_phi_def (tree base)
464 {
465 slsr_cand_t c;
466
467 if (TREE_CODE (base) != SSA_NAME)
468 return 0;
469
470 c = base_cand_from_table (base);
471
472 if (!c || c->kind != CAND_PHI)
473 return 0;
474
475 return c->cand_num;
476 }
477
478 /* Helper routine for find_basis_for_candidate. May be called twice:
479 once for the candidate's base expr, and optionally again either for
480 the candidate's phi definition or for a CAND_REF's alternative base
481 expression. */
482
483 static slsr_cand_t
484 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
485 {
486 cand_chain mapping_key;
487 cand_chain_t chain;
488 slsr_cand_t basis = NULL;
489
490 // Limit potential of N^2 behavior for long candidate chains.
491 int iters = 0;
492 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
493
494 mapping_key.base_expr = base_expr;
495 chain = base_cand_map->find (&mapping_key);
496
497 for (; chain && iters < max_iters; chain = chain->next, ++iters)
498 {
499 slsr_cand_t one_basis = chain->cand;
500
501 if (one_basis->kind != c->kind
502 || one_basis->cand_stmt == c->cand_stmt
503 || !operand_equal_p (one_basis->stride, c->stride, 0)
504 || !types_compatible_p (one_basis->cand_type, c->cand_type)
505 || !dominated_by_p (CDI_DOMINATORS,
506 gimple_bb (c->cand_stmt),
507 gimple_bb (one_basis->cand_stmt)))
508 continue;
509
510 if (!basis || basis->cand_num < one_basis->cand_num)
511 basis = one_basis;
512 }
513
514 return basis;
515 }
516
517 /* Use the base expr from candidate C to look for possible candidates
518 that can serve as a basis for C. Each potential basis must also
519 appear in a block that dominates the candidate statement and have
520 the same stride and type. If more than one possible basis exists,
521 the one with highest index in the vector is chosen; this will be
522 the most immediately dominating basis. */
523
524 static int
525 find_basis_for_candidate (slsr_cand_t c)
526 {
527 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
528
529 /* If a candidate doesn't have a basis using its base expression,
530 it may have a basis hidden by one or more intervening phis. */
531 if (!basis && c->def_phi)
532 {
533 basic_block basis_bb, phi_bb;
534 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
535 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
536
537 if (basis)
538 {
539 /* A hidden basis must dominate the phi-definition of the
540 candidate's base name. */
541 phi_bb = gimple_bb (phi_cand->cand_stmt);
542 basis_bb = gimple_bb (basis->cand_stmt);
543
544 if (phi_bb == basis_bb
545 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
546 {
547 basis = NULL;
548 c->basis = 0;
549 }
550
551 /* If we found a hidden basis, estimate additional dead-code
552 savings if the phi and its feeding statements can be removed. */
553 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
554 c->dead_savings += phi_cand->dead_savings;
555 }
556 }
557
558 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
559 {
560 tree alt_base_expr = get_alternative_base (c->base_expr);
561 if (alt_base_expr)
562 basis = find_basis_for_base_expr (c, alt_base_expr);
563 }
564
565 if (basis)
566 {
567 c->sibling = basis->dependent;
568 basis->dependent = c->cand_num;
569 return basis->cand_num;
570 }
571
572 return 0;
573 }
574
575 /* Record a mapping from BASE to C, indicating that C may potentially serve
576 as a basis using that base expression. BASE may be the same as
577 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
578 underlying expression of C->BASE_EXPR. */
579
580 static void
581 record_potential_basis (slsr_cand_t c, tree base)
582 {
583 cand_chain_t node;
584 cand_chain **slot;
585
586 gcc_assert (base);
587
588 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
589 node->base_expr = base;
590 node->cand = c;
591 node->next = NULL;
592 slot = base_cand_map->find_slot (node, INSERT);
593
594 if (*slot)
595 {
596 cand_chain_t head = (cand_chain_t) (*slot);
597 node->next = head->next;
598 head->next = node;
599 }
600 else
601 *slot = node;
602 }
603
604 /* Allocate storage for a new candidate and initialize its fields.
605 Attempt to find a basis for the candidate.
606
607 For CAND_REF, an alternative base may also be recorded and used
608 to find a basis. This helps cases where the expression hidden
609 behind BASE (which is usually an SSA_NAME) has immediate offset,
610 e.g.
611
612 a2[i][j] = 1;
613 a2[i + 20][j] = 2; */
614
615 static slsr_cand_t
616 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
617 const widest_int &index, tree stride, tree ctype,
618 unsigned savings)
619 {
620 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
621 sizeof (slsr_cand));
622 c->cand_stmt = gs;
623 c->base_expr = base;
624 c->stride = stride;
625 c->index = index;
626 c->cand_type = ctype;
627 c->kind = kind;
628 c->cand_num = cand_vec.length () + 1;
629 c->next_interp = 0;
630 c->dependent = 0;
631 c->sibling = 0;
632 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
633 c->dead_savings = savings;
634
635 cand_vec.safe_push (c);
636
637 if (kind == CAND_PHI)
638 c->basis = 0;
639 else
640 c->basis = find_basis_for_candidate (c);
641
642 record_potential_basis (c, base);
643 if (flag_expensive_optimizations && kind == CAND_REF)
644 {
645 tree alt_base = get_alternative_base (base);
646 if (alt_base)
647 record_potential_basis (c, alt_base);
648 }
649
650 return c;
651 }
652
653 /* Determine the target cost of statement GS when compiling according
654 to SPEED. */
655
656 static int
657 stmt_cost (gimple *gs, bool speed)
658 {
659 tree lhs, rhs1, rhs2;
660 machine_mode lhs_mode;
661
662 gcc_assert (is_gimple_assign (gs));
663 lhs = gimple_assign_lhs (gs);
664 rhs1 = gimple_assign_rhs1 (gs);
665 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
666
667 switch (gimple_assign_rhs_code (gs))
668 {
669 case MULT_EXPR:
670 rhs2 = gimple_assign_rhs2 (gs);
671
672 if (tree_fits_shwi_p (rhs2))
673 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
674
675 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
676 return mul_cost (speed, lhs_mode);
677
678 case PLUS_EXPR:
679 case POINTER_PLUS_EXPR:
680 case MINUS_EXPR:
681 return add_cost (speed, lhs_mode);
682
683 case NEGATE_EXPR:
684 return neg_cost (speed, lhs_mode);
685
686 CASE_CONVERT:
687 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
688
689 /* Note that we don't assign costs to copies that in most cases
690 will go away. */
691 default:
692 ;
693 }
694
695 gcc_unreachable ();
696 return 0;
697 }
698
699 /* Look up the defining statement for BASE_IN and return a pointer
700 to its candidate in the candidate table, if any; otherwise NULL.
701 Only CAND_ADD and CAND_MULT candidates are returned. */
702
703 static slsr_cand_t
704 base_cand_from_table (tree base_in)
705 {
706 slsr_cand_t *result;
707
708 gimple *def = SSA_NAME_DEF_STMT (base_in);
709 if (!def)
710 return (slsr_cand_t) NULL;
711
712 result = stmt_cand_map->get (def);
713
714 if (result && (*result)->kind != CAND_REF)
715 return *result;
716
717 return (slsr_cand_t) NULL;
718 }
719
720 /* Add an entry to the statement-to-candidate mapping. */
721
722 static void
723 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
724 {
725 gcc_assert (!stmt_cand_map->put (gs, c));
726 }
727 \f
728 /* Given PHI which contains a phi statement, determine whether it
729 satisfies all the requirements of a phi candidate. If so, create
730 a candidate. Note that a CAND_PHI never has a basis itself, but
731 is used to help find a basis for subsequent candidates. */
732
733 static void
734 slsr_process_phi (gphi *phi, bool speed)
735 {
736 unsigned i;
737 tree arg0_base = NULL_TREE, base_type;
738 slsr_cand_t c;
739 struct loop *cand_loop = gimple_bb (phi)->loop_father;
740 unsigned savings = 0;
741
742 /* A CAND_PHI requires each of its arguments to have the same
743 derived base name. (See the module header commentary for a
744 definition of derived base names.) Furthermore, all feeding
745 definitions must be in the same position in the loop hierarchy
746 as PHI. */
747
748 for (i = 0; i < gimple_phi_num_args (phi); i++)
749 {
750 slsr_cand_t arg_cand;
751 tree arg = gimple_phi_arg_def (phi, i);
752 tree derived_base_name = NULL_TREE;
753 gimple *arg_stmt = NULL;
754 basic_block arg_bb = NULL;
755
756 if (TREE_CODE (arg) != SSA_NAME)
757 return;
758
759 arg_cand = base_cand_from_table (arg);
760
761 if (arg_cand)
762 {
763 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
764 {
765 if (!arg_cand->next_interp)
766 return;
767
768 arg_cand = lookup_cand (arg_cand->next_interp);
769 }
770
771 if (!integer_onep (arg_cand->stride))
772 return;
773
774 derived_base_name = arg_cand->base_expr;
775 arg_stmt = arg_cand->cand_stmt;
776 arg_bb = gimple_bb (arg_stmt);
777
778 /* Gather potential dead code savings if the phi statement
779 can be removed later on. */
780 if (has_single_use (arg))
781 {
782 if (gimple_code (arg_stmt) == GIMPLE_PHI)
783 savings += arg_cand->dead_savings;
784 else
785 savings += stmt_cost (arg_stmt, speed);
786 }
787 }
788 else
789 {
790 derived_base_name = arg;
791
792 if (SSA_NAME_IS_DEFAULT_DEF (arg))
793 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
794 else
795 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
796 }
797
798 if (!arg_bb || arg_bb->loop_father != cand_loop)
799 return;
800
801 if (i == 0)
802 arg0_base = derived_base_name;
803 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
804 return;
805 }
806
807 /* Create the candidate. "alloc_cand_and_find_basis" is named
808 misleadingly for this case, as no basis will be sought for a
809 CAND_PHI. */
810 base_type = TREE_TYPE (arg0_base);
811
812 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
813 0, integer_one_node, base_type, savings);
814
815 /* Add the candidate to the statement-candidate mapping. */
816 add_cand_for_stmt (phi, c);
817 }
818
819 /* Given PBASE which is a pointer to tree, look up the defining
820 statement for it and check whether the candidate is in the
821 form of:
822
823 X = B + (1 * S), S is integer constant
824 X = B + (i * S), S is integer one
825
826 If so, set PBASE to the candidate's base_expr and return the
827 widest_int (i * S).
828 Otherwise, just return a widest_int of zero. */
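
/* For example (a sketch): if *PBASE is an SSA name X defined by
   X = B + 8 (a CAND_ADD with index 8 and stride 1), *PBASE is
   rewritten to B and 8 is returned; if no such candidate exists,
   zero is returned and *PBASE is left unchanged.  */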
829
830 static widest_int
831 backtrace_base_for_ref (tree *pbase)
832 {
833 tree base_in = *pbase;
834 slsr_cand_t base_cand;
835
836 STRIP_NOPS (base_in);
837
838 /* Strip off widening conversion(s) to handle cases where
839 e.g. 'B' is widened from an 'int' in order to calculate
840 a 64-bit address. */
841 if (CONVERT_EXPR_P (base_in)
842 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
843 base_in = get_unwidened (base_in, NULL_TREE);
844
845 if (TREE_CODE (base_in) != SSA_NAME)
846 return 0;
847
848 base_cand = base_cand_from_table (base_in);
849
850 while (base_cand && base_cand->kind != CAND_PHI)
851 {
852 if (base_cand->kind == CAND_ADD
853 && base_cand->index == 1
854 && TREE_CODE (base_cand->stride) == INTEGER_CST)
855 {
856 /* X = B + (1 * S), S is integer constant. */
857 *pbase = base_cand->base_expr;
858 return wi::to_widest (base_cand->stride);
859 }
860 else if (base_cand->kind == CAND_ADD
861 && TREE_CODE (base_cand->stride) == INTEGER_CST
862 && integer_onep (base_cand->stride))
863 {
864 /* X = B + (i * S), S is integer one. */
865 *pbase = base_cand->base_expr;
866 return base_cand->index;
867 }
868
869 if (base_cand->next_interp)
870 base_cand = lookup_cand (base_cand->next_interp);
871 else
872 base_cand = NULL;
873 }
874
875 return 0;
876 }
877
878 /* Look for the following pattern:
879
880 *PBASE: MEM_REF (T1, C1)
881
882 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
883 or
884 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
885 or
886 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
887
888 *PINDEX: C4 * BITS_PER_UNIT
889
890 If not present, leave the input values unchanged and return FALSE.
891 Otherwise, modify the input values as follows and return TRUE:
892
893 *PBASE: T1
894 *POFFSET: MULT_EXPR (T2, C3)
895 *PINDEX: C1 + (C2 * C3) + C4
896
897 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
898 will be further restructured to:
899
900 *PBASE: T1
901 *POFFSET: MULT_EXPR (T2', C3)
902 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
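
/* For example (a sketch, assuming 8-bit units): given
   *PBASE = MEM_REF (T1, 4), *POFFSET = MULT_EXPR (PLUS_EXPR (T2, 2), 8)
   and *PINDEX = 16, we have C1 = 4, C2 = 2, C3 = 8 and C4 = 2, so the
   outputs are *PBASE = T1, *POFFSET = MULT_EXPR (T2, 8) and
   *PINDEX = 4 + 16 + 2 = 22 (with C5 = 0 when T2 has no useful
   defining candidate).  */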
903
904 static bool
905 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
906 tree *ptype)
907 {
908 tree base = *pbase, offset = *poffset;
909 widest_int index = *pindex;
910 tree mult_op0, t1, t2, type;
911 widest_int c1, c2, c3, c4, c5;
912
913 if (!base
914 || !offset
915 || TREE_CODE (base) != MEM_REF
916 || TREE_CODE (offset) != MULT_EXPR
917 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
918 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
919 return false;
920
921 t1 = TREE_OPERAND (base, 0);
922 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
923 type = TREE_TYPE (TREE_OPERAND (base, 1));
924
925 mult_op0 = TREE_OPERAND (offset, 0);
926 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
927
928 if (TREE_CODE (mult_op0) == PLUS_EXPR)
929 {
930 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
931 {
932 t2 = TREE_OPERAND (mult_op0, 0);
933 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
934 }
935 else
936 return false;
937 }
938 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
939 {
940 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
941 {
942 t2 = TREE_OPERAND (mult_op0, 0);
943 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
944 }
945 else
946 return false;
947 }
948 else
949 {
950 t2 = mult_op0;
951 c2 = 0;
952 }
953
954 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
955 c5 = backtrace_base_for_ref (&t2);
956
957 *pbase = t1;
958 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
959 wide_int_to_tree (sizetype, c3));
960 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
961 *ptype = type;
962
963 return true;
964 }
965
966 /* Given GS which contains a data reference, create a CAND_REF entry in
967 the candidate table and attempt to find a basis. */
968
969 static void
970 slsr_process_ref (gimple *gs)
971 {
972 tree ref_expr, base, offset, type;
973 HOST_WIDE_INT bitsize, bitpos;
974 machine_mode mode;
975 int unsignedp, reversep, volatilep;
976 slsr_cand_t c;
977
978 if (gimple_vdef (gs))
979 ref_expr = gimple_assign_lhs (gs);
980 else
981 ref_expr = gimple_assign_rhs1 (gs);
982
983 if (!handled_component_p (ref_expr)
984 || TREE_CODE (ref_expr) == BIT_FIELD_REF
985 || (TREE_CODE (ref_expr) == COMPONENT_REF
986 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
987 return;
988
989 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
990 &unsignedp, &reversep, &volatilep, false);
991 if (reversep)
992 return;
993 widest_int index = bitpos;
994
995 if (!restructure_reference (&base, &offset, &index, &type))
996 return;
997
998 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
999 type, 0);
1000
1001 /* Add the candidate to the statement-candidate mapping. */
1002 add_cand_for_stmt (gs, c);
1003 }
1004
1005 /* Create a candidate entry for a statement GS, where GS multiplies
1006 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1007 about the two SSA names into the new candidate. Return the new
1008 candidate. */
1009
1010 static slsr_cand_t
1011 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1012 {
1013 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1014 widest_int index;
1015 unsigned savings = 0;
1016 slsr_cand_t c;
1017 slsr_cand_t base_cand = base_cand_from_table (base_in);
1018
1019 /* Look at all interpretations of the base candidate, if necessary,
1020 to find information to propagate into this candidate. */
1021 while (base_cand && !base && base_cand->kind != CAND_PHI)
1022 {
1023
1024 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1025 {
1026 /* Y = (B + i') * 1
1027 X = Y * Z
1028 ================
1029 X = (B + i') * Z */
1030 base = base_cand->base_expr;
1031 index = base_cand->index;
1032 stride = stride_in;
1033 ctype = base_cand->cand_type;
1034 if (has_single_use (base_in))
1035 savings = (base_cand->dead_savings
1036 + stmt_cost (base_cand->cand_stmt, speed));
1037 }
1038 else if (base_cand->kind == CAND_ADD
1039 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1040 {
1041 /* Y = B + (i' * S), S constant
1042 X = Y * Z
1043 ============================
1044 X = B + ((i' * S) * Z) */
1045 base = base_cand->base_expr;
1046 index = base_cand->index * wi::to_widest (base_cand->stride);
1047 stride = stride_in;
1048 ctype = base_cand->cand_type;
1049 if (has_single_use (base_in))
1050 savings = (base_cand->dead_savings
1051 + stmt_cost (base_cand->cand_stmt, speed));
1052 }
1053
1054 if (base_cand->next_interp)
1055 base_cand = lookup_cand (base_cand->next_interp);
1056 else
1057 base_cand = NULL;
1058 }
1059
1060 if (!base)
1061 {
1062 /* No interpretations had anything useful to propagate, so
1063 produce X = (Y + 0) * Z. */
1064 base = base_in;
1065 index = 0;
1066 stride = stride_in;
1067 ctype = TREE_TYPE (base_in);
1068 }
1069
1070 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1071 ctype, savings);
1072 return c;
1073 }
1074
1075 /* Create a candidate entry for a statement GS, where GS multiplies
1076 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1077 information about BASE_IN into the new candidate. Return the new
1078 candidate. */
1079
1080 static slsr_cand_t
1081 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1082 {
1083 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1084 widest_int index, temp;
1085 unsigned savings = 0;
1086 slsr_cand_t c;
1087 slsr_cand_t base_cand = base_cand_from_table (base_in);
1088
1089 /* Look at all interpretations of the base candidate, if necessary,
1090 to find information to propagate into this candidate. */
1091 while (base_cand && !base && base_cand->kind != CAND_PHI)
1092 {
1093 if (base_cand->kind == CAND_MULT
1094 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1095 {
1096 /* Y = (B + i') * S, S constant
1097 X = Y * c
1098 ============================
1099 X = (B + i') * (S * c) */
1100 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1101 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1102 {
1103 base = base_cand->base_expr;
1104 index = base_cand->index;
1105 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1106 ctype = base_cand->cand_type;
1107 if (has_single_use (base_in))
1108 savings = (base_cand->dead_savings
1109 + stmt_cost (base_cand->cand_stmt, speed));
1110 }
1111 }
1112 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1113 {
1114 /* Y = B + (i' * 1)
1115 X = Y * c
1116 ===========================
1117 X = (B + i') * c */
1118 base = base_cand->base_expr;
1119 index = base_cand->index;
1120 stride = stride_in;
1121 ctype = base_cand->cand_type;
1122 if (has_single_use (base_in))
1123 savings = (base_cand->dead_savings
1124 + stmt_cost (base_cand->cand_stmt, speed));
1125 }
1126 else if (base_cand->kind == CAND_ADD
1127 && base_cand->index == 1
1128 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1129 {
1130 /* Y = B + (1 * S), S constant
1131 X = Y * c
1132 ===========================
1133 X = (B + S) * c */
1134 base = base_cand->base_expr;
1135 index = wi::to_widest (base_cand->stride);
1136 stride = stride_in;
1137 ctype = base_cand->cand_type;
1138 if (has_single_use (base_in))
1139 savings = (base_cand->dead_savings
1140 + stmt_cost (base_cand->cand_stmt, speed));
1141 }
1142
1143 if (base_cand->next_interp)
1144 base_cand = lookup_cand (base_cand->next_interp);
1145 else
1146 base_cand = NULL;
1147 }
1148
1149 if (!base)
1150 {
1151 /* No interpretations had anything useful to propagate, so
1152 produce X = (Y + 0) * c. */
1153 base = base_in;
1154 index = 0;
1155 stride = stride_in;
1156 ctype = TREE_TYPE (base_in);
1157 }
1158
1159 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1160 ctype, savings);
1161 return c;
1162 }
1163
1164 /* Given GS which is a multiply of scalar integers, make an appropriate
1165 entry in the candidate table. If this is a multiply of two SSA names,
1166 create two CAND_MULT interpretations and attempt to find a basis for
1167 each of them. Otherwise, create a single CAND_MULT and attempt to
1168 find a basis. */
1169
1170 static void
1171 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1172 {
1173 slsr_cand_t c, c2;
1174
1175 /* If this is a multiply of an SSA name with itself, it is highly
1176 unlikely that we will get a strength reduction opportunity, so
1177 don't record it as a candidate. This simplifies the logic for
1178 finding a basis, so if this is removed that must be considered. */
1179 if (rhs1 == rhs2)
1180 return;
1181
1182 if (TREE_CODE (rhs2) == SSA_NAME)
1183 {
1184 /* Record an interpretation of this statement in the candidate table
1185 assuming RHS1 is the base expression and RHS2 is the stride. */
1186 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1187
1188 /* Add the first interpretation to the statement-candidate mapping. */
1189 add_cand_for_stmt (gs, c);
1190
1191 /* Record another interpretation of this statement assuming RHS1
1192 is the stride and RHS2 is the base expression. */
1193 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1194 c->next_interp = c2->cand_num;
1195 }
1196 else
1197 {
1198 /* Record an interpretation for the multiply-immediate. */
1199 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1200
1201 /* Add the interpretation to the statement-candidate mapping. */
1202 add_cand_for_stmt (gs, c);
1203 }
1204 }
1205
1206 /* Create a candidate entry for a statement GS, where GS adds two
1207 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1208 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1209 information about the two SSA names into the new candidate.
1210 Return the new candidate. */
1211
1212 static slsr_cand_t
1213 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1214 bool subtract_p, bool speed)
1215 {
1216 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1217 widest_int index;
1218 unsigned savings = 0;
1219 slsr_cand_t c;
1220 slsr_cand_t base_cand = base_cand_from_table (base_in);
1221 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1222
1223 /* The most useful transformation is a multiply-immediate feeding
1224 an add or subtract. Look for that first. */
1225 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1226 {
1227 if (addend_cand->kind == CAND_MULT
1228 && addend_cand->index == 0
1229 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1230 {
1231 /* Z = (B + 0) * S, S constant
1232 X = Y +/- Z
1233 ===========================
1234 X = Y + ((+/-1 * S) * B) */
1235 base = base_in;
1236 index = wi::to_widest (addend_cand->stride);
1237 if (subtract_p)
1238 index = -index;
1239 stride = addend_cand->base_expr;
1240 ctype = TREE_TYPE (base_in);
1241 if (has_single_use (addend_in))
1242 savings = (addend_cand->dead_savings
1243 + stmt_cost (addend_cand->cand_stmt, speed));
1244 }
1245
1246 if (addend_cand->next_interp)
1247 addend_cand = lookup_cand (addend_cand->next_interp);
1248 else
1249 addend_cand = NULL;
1250 }
1251
1252 while (base_cand && !base && base_cand->kind != CAND_PHI)
1253 {
1254 if (base_cand->kind == CAND_ADD
1255 && (base_cand->index == 0
1256 || operand_equal_p (base_cand->stride,
1257 integer_zero_node, 0)))
1258 {
1259 /* Y = B + (i' * S), i' * S = 0
1260 X = Y +/- Z
1261 ============================
1262 X = B + (+/-1 * Z) */
1263 base = base_cand->base_expr;
1264 index = subtract_p ? -1 : 1;
1265 stride = addend_in;
1266 ctype = base_cand->cand_type;
1267 if (has_single_use (base_in))
1268 savings = (base_cand->dead_savings
1269 + stmt_cost (base_cand->cand_stmt, speed));
1270 }
1271 else if (subtract_p)
1272 {
1273 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1274
1275 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1276 {
1277 if (subtrahend_cand->kind == CAND_MULT
1278 && subtrahend_cand->index == 0
1279 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1280 {
1281 /* Z = (B + 0) * S, S constant
1282 X = Y - Z
1283 ===========================
1284 Value: X = Y + ((-1 * S) * B) */
1285 base = base_in;
1286 index = wi::to_widest (subtrahend_cand->stride);
1287 index = -index;
1288 stride = subtrahend_cand->base_expr;
1289 ctype = TREE_TYPE (base_in);
1290 if (has_single_use (addend_in))
1291 savings = (subtrahend_cand->dead_savings
1292 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1293 }
1294
1295 if (subtrahend_cand->next_interp)
1296 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1297 else
1298 subtrahend_cand = NULL;
1299 }
1300 }
1301
1302 if (base_cand->next_interp)
1303 base_cand = lookup_cand (base_cand->next_interp);
1304 else
1305 base_cand = NULL;
1306 }
1307
1308 if (!base)
1309 {
1310 /* No interpretations had anything useful to propagate, so
1311 produce X = Y + (1 * Z). */
1312 base = base_in;
1313 index = subtract_p ? -1 : 1;
1314 stride = addend_in;
1315 ctype = TREE_TYPE (base_in);
1316 }
1317
1318 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1319 ctype, savings);
1320 return c;
1321 }
1322
1323 /* Create a candidate entry for a statement GS, where GS adds SSA
1324 name BASE_IN to constant INDEX_IN. Propagate any known information
1325 about BASE_IN into the new candidate. Return the new candidate. */
1326
1327 static slsr_cand_t
1328 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1329 bool speed)
1330 {
1331 enum cand_kind kind = CAND_ADD;
1332 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1333 widest_int index, multiple;
1334 unsigned savings = 0;
1335 slsr_cand_t c;
1336 slsr_cand_t base_cand = base_cand_from_table (base_in);
1337
1338 while (base_cand && !base && base_cand->kind != CAND_PHI)
1339 {
1340 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1341
1342 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1343 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1344 sign, &multiple))
1345 {
1346 /* Y = (B + i') * S, S constant, c = kS for some integer k
1347 X = Y + c
1348 ============================
1349 X = (B + (i'+ k)) * S
1350 OR
1351 Y = B + (i' * S), S constant, c = kS for some integer k
1352 X = Y + c
1353 ============================
1354 X = (B + (i'+ k)) * S */
1355 kind = base_cand->kind;
1356 base = base_cand->base_expr;
1357 index = base_cand->index + multiple;
1358 stride = base_cand->stride;
1359 ctype = base_cand->cand_type;
1360 if (has_single_use (base_in))
1361 savings = (base_cand->dead_savings
1362 + stmt_cost (base_cand->cand_stmt, speed));
1363 }
1364
1365 if (base_cand->next_interp)
1366 base_cand = lookup_cand (base_cand->next_interp);
1367 else
1368 base_cand = NULL;
1369 }
1370
1371 if (!base)
1372 {
1373 /* No interpretations had anything useful to propagate, so
1374 produce X = Y + (c * 1). */
1375 kind = CAND_ADD;
1376 base = base_in;
1377 index = index_in;
1378 stride = integer_one_node;
1379 ctype = TREE_TYPE (base_in);
1380 }
1381
1382 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1383 ctype, savings);
1384 return c;
1385 }
1386
1387 /* Given GS which is an add or subtract of scalar integers or pointers,
1388 make at least one appropriate entry in the candidate table. */
1389
1390 static void
1391 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1392 {
1393 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1394 slsr_cand_t c = NULL, c2;
1395
1396 if (TREE_CODE (rhs2) == SSA_NAME)
1397 {
1398 /* First record an interpretation assuming RHS1 is the base expression
1399 and RHS2 is the stride. But it doesn't make sense for the
1400 stride to be a pointer, so don't record a candidate in that case. */
1401 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1402 {
1403 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1404
1405 /* Add the first interpretation to the statement-candidate
1406 mapping. */
1407 add_cand_for_stmt (gs, c);
1408 }
1409
1410 /* If the two RHS operands are identical, or this is a subtract,
1411 we're done. */
1412 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1413 return;
1414
1415 /* Otherwise, record another interpretation assuming RHS2 is the
1416 base expression and RHS1 is the stride, again provided that the
1417 stride is not a pointer. */
1418 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1419 {
1420 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1421 if (c)
1422 c->next_interp = c2->cand_num;
1423 else
1424 add_cand_for_stmt (gs, c2);
1425 }
1426 }
1427 else
1428 {
1429 /* Record an interpretation for the add-immediate. */
1430 widest_int index = wi::to_widest (rhs2);
1431 if (subtract_p)
1432 index = -index;
1433
1434 c = create_add_imm_cand (gs, rhs1, index, speed);
1435
1436 /* Add the interpretation to the statement-candidate mapping. */
1437 add_cand_for_stmt (gs, c);
1438 }
1439 }
1440
1441 /* Given GS which is a negate of a scalar integer, make an appropriate
1442 entry in the candidate table. A negate is equivalent to a multiply
1443 by -1. */
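
/* For example (a sketch): when nothing useful is known about y, the
   statement x = -y is recorded as the CAND_MULT x = (y + 0) * -1.  */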
1444
1445 static void
1446 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1447 {
1448 /* Record a CAND_MULT interpretation for the multiply by -1. */
1449 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1450
1451 /* Add the interpretation to the statement-candidate mapping. */
1452 add_cand_for_stmt (gs, c);
1453 }
1454
1455 /* Help function for legal_cast_p, operating on two trees. Checks
1456 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1457 for more details. */
1458
1459 static bool
1460 legal_cast_p_1 (tree lhs, tree rhs)
1461 {
1462 tree lhs_type, rhs_type;
1463 unsigned lhs_size, rhs_size;
1464 bool lhs_wraps, rhs_wraps;
1465
1466 lhs_type = TREE_TYPE (lhs);
1467 rhs_type = TREE_TYPE (rhs);
1468 lhs_size = TYPE_PRECISION (lhs_type);
1469 rhs_size = TYPE_PRECISION (rhs_type);
1470 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1471 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1472
1473 if (lhs_size < rhs_size
1474 || (rhs_wraps && !lhs_wraps)
1475 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1476 return false;
1477
1478 return true;
1479 }
1480
1481 /* Return TRUE if GS is a statement that defines an SSA name from
1482 a conversion and is legal for us to combine with an add and multiply
1483 in the candidate table. For example, suppose we have:
1484
1485 A = B + i;
1486 C = (type) A;
1487 D = C * S;
1488
1489 Without the type-cast, we would create a CAND_MULT for D with base B,
1490 index i, and stride S. We want to record this candidate only if it
1491 is equivalent to apply the type cast following the multiply:
1492
1493 A = B + i;
1494 E = A * S;
1495 D = (type) E;
1496
1497 We will record the type with the candidate for D. This allows us
1498 to use a similar previous candidate as a basis. If we have earlier seen
1499
1500 A' = B + i';
1501 C' = (type) A';
1502 D' = C' * S;
1503
1504 we can replace D with
1505
1506 D = D' + (i - i') * S;
1507
1508 But if moving the type-cast would change semantics, we mustn't do this.
1509
1510 This is legitimate for casts from a non-wrapping integral type to
1511 any integral type of the same or larger size. It is not legitimate
1512 to convert a wrapping type to a non-wrapping type, or to a wrapping
1513 type of a different size. I.e., with a wrapping type, we must
1514 assume that the addition B + i could wrap, in which case performing
1515 the multiply before or after one of the "illegal" type casts will
1516 have different semantics. */
1517
1518 static bool
1519 legal_cast_p (gimple *gs, tree rhs)
1520 {
1521 if (!is_gimple_assign (gs)
1522 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1523 return false;
1524
1525 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1526 }
1527
1528 /* Given GS which is a cast to a scalar integer type, determine whether
1529 the cast is legal for strength reduction. If so, make at least one
1530 appropriate entry in the candidate table. */
1531
1532 static void
1533 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1534 {
1535 tree lhs, ctype;
1536 slsr_cand_t base_cand, c, c2;
1537 unsigned savings = 0;
1538
1539 if (!legal_cast_p (gs, rhs1))
1540 return;
1541
1542 lhs = gimple_assign_lhs (gs);
1543 base_cand = base_cand_from_table (rhs1);
1544 ctype = TREE_TYPE (lhs);
1545
1546 if (base_cand && base_cand->kind != CAND_PHI)
1547 {
1548 while (base_cand)
1549 {
1550 /* Propagate all data from the base candidate except the type,
1551 which comes from the cast, and the base candidate's cast,
1552 which is no longer applicable. */
1553 if (has_single_use (rhs1))
1554 savings = (base_cand->dead_savings
1555 + stmt_cost (base_cand->cand_stmt, speed));
1556
1557 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1558 base_cand->base_expr,
1559 base_cand->index, base_cand->stride,
1560 ctype, savings);
1561 if (base_cand->next_interp)
1562 base_cand = lookup_cand (base_cand->next_interp);
1563 else
1564 base_cand = NULL;
1565 }
1566 }
1567 else
1568 {
1569 /* If nothing is known about the RHS, create fresh CAND_ADD and
1570 CAND_MULT interpretations:
1571
1572 X = Y + (0 * 1)
1573 X = (Y + 0) * 1
1574
1575 The first of these is somewhat arbitrary, but the choice of
1576 1 for the stride simplifies the logic for propagating casts
1577 into their uses. */
1578 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1579 0, integer_one_node, ctype, 0);
1580 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1581 0, integer_one_node, ctype, 0);
1582 c->next_interp = c2->cand_num;
1583 }
1584
1585 /* Add the first (or only) interpretation to the statement-candidate
1586 mapping. */
1587 add_cand_for_stmt (gs, c);
1588 }
1589
1590 /* Given GS which is a copy of a scalar integer type, make at least one
1591 appropriate entry in the candidate table.
1592
1593 This interface is included for completeness, but is unnecessary
1594 if this pass immediately follows a pass that performs copy
1595 propagation, such as DOM. */
1596
1597 static void
1598 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1599 {
1600 slsr_cand_t base_cand, c, c2;
1601 unsigned savings = 0;
1602
1603 base_cand = base_cand_from_table (rhs1);
1604
1605 if (base_cand && base_cand->kind != CAND_PHI)
1606 {
1607 while (base_cand)
1608 {
1609 /* Propagate all data from the base candidate. */
1610 if (has_single_use (rhs1))
1611 savings = (base_cand->dead_savings
1612 + stmt_cost (base_cand->cand_stmt, speed));
1613
1614 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1615 base_cand->base_expr,
1616 base_cand->index, base_cand->stride,
1617 base_cand->cand_type, savings);
1618 if (base_cand->next_interp)
1619 base_cand = lookup_cand (base_cand->next_interp);
1620 else
1621 base_cand = NULL;
1622 }
1623 }
1624 else
1625 {
1626 /* If nothing is known about the RHS, create fresh CAND_ADD and
1627 CAND_MULT interpretations:
1628
1629 X = Y + (0 * 1)
1630 X = (Y + 0) * 1
1631
1632 The first of these is somewhat arbitrary, but the choice of
1633 1 for the stride simplifies the logic for propagating casts
1634 into their uses. */
1635 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1636 0, integer_one_node, TREE_TYPE (rhs1), 0);
1637 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1638 0, integer_one_node, TREE_TYPE (rhs1), 0);
1639 c->next_interp = c2->cand_num;
1640 }
1641
1642 /* Add the first (or only) interpretation to the statement-candidate
1643 mapping. */
1644 add_cand_for_stmt (gs, c);
1645 }
1646 \f
1647 class find_candidates_dom_walker : public dom_walker
1648 {
1649 public:
1650 find_candidates_dom_walker (cdi_direction direction)
1651 : dom_walker (direction) {}
1652 virtual edge before_dom_children (basic_block);
1653 };
1654
1655 /* Find strength-reduction candidates in block BB. */
1656
1657 edge
1658 find_candidates_dom_walker::before_dom_children (basic_block bb)
1659 {
1660 bool speed = optimize_bb_for_speed_p (bb);
1661
1662 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1663 gsi_next (&gsi))
1664 slsr_process_phi (gsi.phi (), speed);
1665
1666 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1667 gsi_next (&gsi))
1668 {
1669 gimple *gs = gsi_stmt (gsi);
1670
1671 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1672 slsr_process_ref (gs);
1673
1674 else if (is_gimple_assign (gs)
1675 && SCALAR_INT_MODE_P
1676 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1677 {
1678 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1679
1680 switch (gimple_assign_rhs_code (gs))
1681 {
1682 case MULT_EXPR:
1683 case PLUS_EXPR:
1684 rhs1 = gimple_assign_rhs1 (gs);
1685 rhs2 = gimple_assign_rhs2 (gs);
1686 /* Should never happen, but currently some buggy situations
1687 in earlier phases put constants in rhs1. */
1688 if (TREE_CODE (rhs1) != SSA_NAME)
1689 continue;
1690 break;
1691
1692 /* Possible future opportunity: rhs1 of a ptr+ can be
1693 an ADDR_EXPR. */
1694 case POINTER_PLUS_EXPR:
1695 case MINUS_EXPR:
1696 rhs2 = gimple_assign_rhs2 (gs);
1697 /* Fall-through. */
1698
1699 CASE_CONVERT:
1700 case MODIFY_EXPR:
1701 case NEGATE_EXPR:
1702 rhs1 = gimple_assign_rhs1 (gs);
1703 if (TREE_CODE (rhs1) != SSA_NAME)
1704 continue;
1705 break;
1706
1707 default:
1708 ;
1709 }
1710
1711 switch (gimple_assign_rhs_code (gs))
1712 {
1713 case MULT_EXPR:
1714 slsr_process_mul (gs, rhs1, rhs2, speed);
1715 break;
1716
1717 case PLUS_EXPR:
1718 case POINTER_PLUS_EXPR:
1719 case MINUS_EXPR:
1720 slsr_process_add (gs, rhs1, rhs2, speed);
1721 break;
1722
1723 case NEGATE_EXPR:
1724 slsr_process_neg (gs, rhs1, speed);
1725 break;
1726
1727 CASE_CONVERT:
1728 slsr_process_cast (gs, rhs1, speed);
1729 break;
1730
1731 case MODIFY_EXPR:
1732 slsr_process_copy (gs, rhs1, speed);
1733 break;
1734
1735 default:
1736 ;
1737 }
1738 }
1739 }
1740 return NULL;
1741 }
1742 \f
1743 /* Dump a candidate for debug. */
1744
1745 static void
1746 dump_candidate (slsr_cand_t c)
1747 {
1748 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1749 gimple_bb (c->cand_stmt)->index);
1750 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1751 switch (c->kind)
1752 {
1753 case CAND_MULT:
1754 fputs (" MULT : (", dump_file);
1755 print_generic_expr (dump_file, c->base_expr, 0);
1756 fputs (" + ", dump_file);
1757 print_decs (c->index, dump_file);
1758 fputs (") * ", dump_file);
1759 print_generic_expr (dump_file, c->stride, 0);
1760 fputs (" : ", dump_file);
1761 break;
1762 case CAND_ADD:
1763 fputs (" ADD : ", dump_file);
1764 print_generic_expr (dump_file, c->base_expr, 0);
1765 fputs (" + (", dump_file);
1766 print_decs (c->index, dump_file);
1767 fputs (" * ", dump_file);
1768 print_generic_expr (dump_file, c->stride, 0);
1769 fputs (") : ", dump_file);
1770 break;
1771 case CAND_REF:
1772 fputs (" REF : ", dump_file);
1773 print_generic_expr (dump_file, c->base_expr, 0);
1774 fputs (" + (", dump_file);
1775 print_generic_expr (dump_file, c->stride, 0);
1776 fputs (") + ", dump_file);
1777 print_decs (c->index, dump_file);
1778 fputs (" : ", dump_file);
1779 break;
1780 case CAND_PHI:
1781 fputs (" PHI : ", dump_file);
1782 print_generic_expr (dump_file, c->base_expr, 0);
1783 fputs (" + (unknown * ", dump_file);
1784 print_generic_expr (dump_file, c->stride, 0);
1785 fputs (") : ", dump_file);
1786 break;
1787 default:
1788 gcc_unreachable ();
1789 }
1790 print_generic_expr (dump_file, c->cand_type, 0);
1791 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1792 c->basis, c->dependent, c->sibling);
1793 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1794 c->next_interp, c->dead_savings);
1795 if (c->def_phi)
1796 fprintf (dump_file, " phi: %d\n", c->def_phi);
1797 fputs ("\n", dump_file);
1798 }
1799
1800 /* Dump the candidate vector for debug. */
1801
1802 static void
1803 dump_cand_vec (void)
1804 {
1805 unsigned i;
1806 slsr_cand_t c;
1807
1808 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1809
1810 FOR_EACH_VEC_ELT (cand_vec, i, c)
1811 dump_candidate (c);
1812 }
1813
1814 /* Callback used to dump the candidate chains hash table. */
1815
1816 int
1817 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1818 {
1819 const_cand_chain_t chain = *slot;
1820 cand_chain_t p;
1821
1822 print_generic_expr (dump_file, chain->base_expr, 0);
1823 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1824
1825 for (p = chain->next; p; p = p->next)
1826 fprintf (dump_file, " -> %d", p->cand->cand_num);
1827
1828 fputs ("\n", dump_file);
1829 return 1;
1830 }
1831
1832 /* Dump the candidate chains. */
1833
1834 static void
1835 dump_cand_chains (void)
1836 {
1837 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1838 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1839 (NULL);
1840 fputs ("\n", dump_file);
1841 }
1842
1843 /* Dump the increment vector for debug. */
1844
1845 static void
1846 dump_incr_vec (void)
1847 {
1848 if (dump_file && (dump_flags & TDF_DETAILS))
1849 {
1850 unsigned i;
1851
1852 fprintf (dump_file, "\nIncrement vector:\n\n");
1853
1854 for (i = 0; i < incr_vec_len; i++)
1855 {
1856 fprintf (dump_file, "%3d increment: ", i);
1857 print_decs (incr_vec[i].incr, dump_file);
1858 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1859 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1860 fputs ("\n initializer: ", dump_file);
1861 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1862 fputs ("\n\n", dump_file);
1863 }
1864 }
1865 }
1866 \f
1867 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1868 data reference. */
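
/* A minimal sketch of the rewrite, writing B, S, and i for the
   candidate's base, stride, and index (illustrative notation only):

       *EXPR   ==>   MEM_REF [(B p+ S), i]

   so the constant index i becomes the MEM_REF offset and only the
   pointer addition B p+ S remains to be gimplified.  */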
1869
1870 static void
1871 replace_ref (tree *expr, slsr_cand_t c)
1872 {
1873 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1874 unsigned HOST_WIDE_INT misalign;
1875 unsigned align;
1876
1877 /* Ensure the memory reference carries the minimum alignment
1878 requirement for the data type. See PR58041. */
1879 get_object_alignment_1 (*expr, &align, &misalign);
1880 if (misalign != 0)
1881 align = (misalign & -misalign);
1882 if (align < TYPE_ALIGN (acc_type))
1883 acc_type = build_aligned_type (acc_type, align);
1884
1885 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1886 c->base_expr, c->stride);
1887 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1888 wide_int_to_tree (c->cand_type, c->index));
1889
1890 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1891 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1892 TREE_OPERAND (mem_ref, 0)
1893 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1894 /*simple_p=*/true, NULL,
1895 /*before=*/true, GSI_SAME_STMT);
1896 copy_ref_info (mem_ref, *expr);
1897 *expr = mem_ref;
1898 update_stmt (c->cand_stmt);
1899 }
1900
1901 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1902 dependent of candidate C with an equivalent strength-reduced data
1903 reference. */
1904
1905 static void
1906 replace_refs (slsr_cand_t c)
1907 {
1908 if (dump_file && (dump_flags & TDF_DETAILS))
1909 {
1910 fputs ("Replacing reference: ", dump_file);
1911 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1912 }
1913
1914 if (gimple_vdef (c->cand_stmt))
1915 {
1916 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1917 replace_ref (lhs, c);
1918 }
1919 else
1920 {
1921 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1922 replace_ref (rhs, c);
1923 }
1924
1925 if (dump_file && (dump_flags & TDF_DETAILS))
1926 {
1927 fputs ("With: ", dump_file);
1928 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1929 fputs ("\n", dump_file);
1930 }
1931
1932 if (c->sibling)
1933 replace_refs (lookup_cand (c->sibling));
1934
1935 if (c->dependent)
1936 replace_refs (lookup_cand (c->dependent));
1937 }
1938
1939 /* Return TRUE if candidate C is dependent upon a PHI. */
1940
1941 static bool
1942 phi_dependent_cand_p (slsr_cand_t c)
1943 {
1944 /* A candidate is not necessarily dependent upon a PHI just because
1945 it has a phi definition for its base name. It may have a basis
1946 that relies upon the same phi definition, in which case the PHI
1947 is irrelevant to this candidate. */
1948 return (c->def_phi
1949 && c->basis
1950 && lookup_cand (c->basis)->def_phi != c->def_phi);
1951 }
1952
1953 /* Calculate the increment required for candidate C relative to
1954 its basis. */
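
/* For example (hypothetical values): a basis recorded with index 2 and
   a candidate recorded with index 7 over the same base and stride give
   an increment of 7 - 2 = 5.  */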
1955
1956 static widest_int
1957 cand_increment (slsr_cand_t c)
1958 {
1959 slsr_cand_t basis;
1960
1961 /* If the candidate doesn't have a basis, just return its own
1962 index. This is useful in record_increments to help us find
1963 an existing initializer. Also, if the candidate's basis is
1964 hidden by a phi, then its own index will be the increment
1965 from the newly introduced phi basis. */
1966 if (!c->basis || phi_dependent_cand_p (c))
1967 return c->index;
1968
1969 basis = lookup_cand (c->basis);
1970 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1971 return c->index - basis->index;
1972 }
1973
1974 /* Calculate the increment required for candidate C relative to
1975 its basis. If we aren't going to generate pointer arithmetic
1976 for this candidate, return the absolute value of that increment
1977 instead. */
1978
1979 static inline widest_int
1980 cand_abs_increment (slsr_cand_t c)
1981 {
1982 widest_int increment = cand_increment (c);
1983
1984 if (!address_arithmetic_p && wi::neg_p (increment))
1985 increment = -increment;
1986
1987 return increment;
1988 }
1989
1990 /* Return TRUE iff candidate C has already been replaced under
1991 another interpretation. */
1992
1993 static inline bool
1994 cand_already_replaced (slsr_cand_t c)
1995 {
1996 return (gimple_bb (c->cand_stmt) == 0);
1997 }
1998
1999 /* Common logic used by replace_unconditional_candidate and
2000 replace_conditional_candidate. */
2001
2002 static void
2003 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2004 {
2005 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2006 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2007
2008 /* It is highly unlikely, but possible, that the resulting
2009 bump doesn't fit in a HWI. Abandon the replacement
2010 in this case. This does not affect siblings or dependents
2011 of C. Restriction to signed HWI is conservative for unsigned
2012 types but allows for safe negation without twisted logic. */
2013 if (wi::fits_shwi_p (bump)
2014 && bump.to_shwi () != HOST_WIDE_INT_MIN
2015 /* It is not useful to replace casts, copies, or adds of
2016 an SSA name and a constant. */
2017 && cand_code != MODIFY_EXPR
2018 && !CONVERT_EXPR_CODE_P (cand_code)
2019 && cand_code != PLUS_EXPR
2020 && cand_code != POINTER_PLUS_EXPR
2021 && cand_code != MINUS_EXPR)
2022 {
2023 enum tree_code code = PLUS_EXPR;
2024 tree bump_tree;
2025 gimple *stmt_to_print = NULL;
2026
2027 /* If the basis name and the candidate's LHS have incompatible
2028 types, introduce a cast. */
2029 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2030 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2031 if (wi::neg_p (bump))
2032 {
2033 code = MINUS_EXPR;
2034 bump = -bump;
2035 }
2036
2037 bump_tree = wide_int_to_tree (target_type, bump);
2038
2039 if (dump_file && (dump_flags & TDF_DETAILS))
2040 {
2041 fputs ("Replacing: ", dump_file);
2042 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2043 }
2044
2045 if (bump == 0)
2046 {
2047 tree lhs = gimple_assign_lhs (c->cand_stmt);
2048 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2049 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2050 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2051 gsi_replace (&gsi, copy_stmt, false);
2052 c->cand_stmt = copy_stmt;
2053 if (dump_file && (dump_flags & TDF_DETAILS))
2054 stmt_to_print = copy_stmt;
2055 }
2056 else
2057 {
2058 tree rhs1, rhs2;
	  if (cand_code != NEGATE_EXPR)
	    {
	      rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	      rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	    }
2063 if (cand_code != NEGATE_EXPR
2064 && ((operand_equal_p (rhs1, basis_name, 0)
2065 && operand_equal_p (rhs2, bump_tree, 0))
2066 || (operand_equal_p (rhs1, bump_tree, 0)
2067 && operand_equal_p (rhs2, basis_name, 0))))
2068 {
2069 if (dump_file && (dump_flags & TDF_DETAILS))
2070 {
2071 fputs ("(duplicate, not actually replacing)", dump_file);
2072 stmt_to_print = c->cand_stmt;
2073 }
2074 }
2075 else
2076 {
2077 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2078 gimple_assign_set_rhs_with_ops (&gsi, code,
2079 basis_name, bump_tree);
2080 update_stmt (gsi_stmt (gsi));
2081 c->cand_stmt = gsi_stmt (gsi);
2082 if (dump_file && (dump_flags & TDF_DETAILS))
2083 stmt_to_print = gsi_stmt (gsi);
2084 }
2085 }
2086
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 {
2089 fputs ("With: ", dump_file);
2090 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2091 fputs ("\n", dump_file);
2092 }
2093 }
2094 }
2095
2096 /* Replace candidate C with an add or subtract. Note that we only
2097 operate on CAND_MULTs with known strides, so we will never generate
2098 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2099 X = Y + ((i - i') * S), as described in the module commentary. The
2100 folded value ((i - i') * S) is referred to here as the "bump." */
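
/* As a hypothetical example with constant stride 4:

       Y = (B + 2) * 4       basis
       X = (B + 7) * 4       candidate

   the candidate is rewritten as  X = Y + 20,  the bump being
   (7 - 2) * 4 = 20.  */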
2101
2102 static void
2103 replace_unconditional_candidate (slsr_cand_t c)
2104 {
2105 slsr_cand_t basis;
2106
2107 if (cand_already_replaced (c))
2108 return;
2109
2110 basis = lookup_cand (c->basis);
2111 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2112
2113 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2114 }
2115 \f
2116 /* Return the index in the increment vector of the given INCREMENT,
2117 or -1 if not found. The latter can occur if more than
2118 MAX_INCR_VEC_LEN increments have been found. */
2119
2120 static inline int
2121 incr_vec_index (const widest_int &increment)
2122 {
2123 unsigned i;
2124
2125 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2126 ;
2127
2128 if (i < incr_vec_len)
2129 return i;
2130 else
2131 return -1;
2132 }
2133
2134 /* Create a new statement along edge E to add BASIS_NAME to the product
2135 of INCREMENT and the stride of candidate C. Create and return a new
2136    SSA name to be used as the LHS of the new statement.
2137 KNOWN_STRIDE is true iff C's stride is a constant. */
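
/* For instance, with a known stride of 4 and an increment of 3, the
   statement inserted along E is (SSA names hypothetical):

       slsr_1 = basis_name + 12;

   With an unknown stride, the recorded initializer or the stride itself
   is used instead, as selected below.  */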
2138
2139 static tree
2140 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2141 widest_int increment, edge e, location_t loc,
2142 bool known_stride)
2143 {
2144 basic_block insert_bb;
2145 gimple_stmt_iterator gsi;
2146 tree lhs, basis_type;
2147 gassign *new_stmt;
2148
2149 /* If the add candidate along this incoming edge has the same
2150 index as C's hidden basis, the hidden basis represents this
2151 edge correctly. */
2152 if (increment == 0)
2153 return basis_name;
2154
2155 basis_type = TREE_TYPE (basis_name);
2156 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2157
2158 if (known_stride)
2159 {
2160 tree bump_tree;
2161 enum tree_code code = PLUS_EXPR;
2162 widest_int bump = increment * wi::to_widest (c->stride);
2163 if (wi::neg_p (bump))
2164 {
2165 code = MINUS_EXPR;
2166 bump = -bump;
2167 }
2168
2169 bump_tree = wide_int_to_tree (basis_type, bump);
2170 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2171 }
2172 else
2173 {
2174 int i;
2175 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2176 i = incr_vec_index (negate_incr ? -increment : increment);
2177 gcc_assert (i >= 0);
2178
2179 if (incr_vec[i].initializer)
2180 {
2181 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2182 new_stmt = gimple_build_assign (lhs, code, basis_name,
2183 incr_vec[i].initializer);
2184 }
2185 else if (increment == 1)
2186 new_stmt = gimple_build_assign (lhs, PLUS_EXPR, basis_name, c->stride);
2187 else if (increment == -1)
2188 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2189 c->stride);
2190 else
2191 gcc_unreachable ();
2192 }
2193
2194 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2195 gsi = gsi_last_bb (insert_bb);
2196
2197 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2198 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2199 else
2200 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2201
2202 gimple_set_location (new_stmt, loc);
2203
2204 if (dump_file && (dump_flags & TDF_DETAILS))
2205 {
2206 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2207 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2208 }
2209
2210 return lhs;
2211 }
2212
2213 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2214 is hidden by the phi node FROM_PHI, create a new phi node in the same
2215 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2216 with its phi arguments representing conditional adjustments to the
2217 hidden basis along conditional incoming paths. Those adjustments are
2218 made by creating add statements (and sometimes recursively creating
2219 phis) along those incoming paths. LOC is the location to attach to
2220 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2221 constant. */
2222
2223 static tree
2224 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2225 location_t loc, bool known_stride)
2226 {
2227 int i;
2228 tree name, phi_arg;
2229 gphi *phi;
2230 slsr_cand_t basis = lookup_cand (c->basis);
2231 int nargs = gimple_phi_num_args (from_phi);
2232 basic_block phi_bb = gimple_bb (from_phi);
2233 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2234 auto_vec<tree> phi_args (nargs);
2235
2236 /* Process each argument of the existing phi that represents
2237 conditionally-executed add candidates. */
2238 for (i = 0; i < nargs; i++)
2239 {
2240 edge e = (*phi_bb->preds)[i];
2241 tree arg = gimple_phi_arg_def (from_phi, i);
2242 tree feeding_def;
2243
2244 /* If the phi argument is the base name of the CAND_PHI, then
2245 this incoming arc should use the hidden basis. */
2246 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2247 if (basis->index == 0)
2248 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2249 else
2250 {
2251 widest_int incr = -basis->index;
2252 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2253 e, loc, known_stride);
2254 }
2255 else
2256 {
2257 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2258
2259 /* If there is another phi along this incoming edge, we must
2260 process it in the same fashion to ensure that all basis
2261 adjustments are made along its incoming edges. */
2262 if (gimple_code (arg_def) == GIMPLE_PHI)
2263 feeding_def = create_phi_basis (c, arg_def, basis_name,
2264 loc, known_stride);
2265 else
2266 {
2267 slsr_cand_t arg_cand = base_cand_from_table (arg);
2268 widest_int diff = arg_cand->index - basis->index;
2269 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2270 e, loc, known_stride);
2271 }
2272 }
2273
2274 /* Because of recursion, we need to save the arguments in a vector
2275 so we can create the PHI statement all at once. Otherwise the
2276 storage for the half-created PHI can be reclaimed. */
2277 phi_args.safe_push (feeding_def);
2278 }
2279
2280 /* Create the new phi basis. */
2281 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2282 phi = create_phi_node (name, phi_bb);
2283 SSA_NAME_DEF_STMT (name) = phi;
2284
2285 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2286 {
2287 edge e = (*phi_bb->preds)[i];
2288 add_phi_arg (phi, phi_arg, e, loc);
2289 }
2290
2291 update_stmt (phi);
2292
2293 if (dump_file && (dump_flags & TDF_DETAILS))
2294 {
2295 fputs ("Introducing new phi basis: ", dump_file);
2296 print_gimple_stmt (dump_file, phi, 0, 0);
2297 }
2298
2299 return name;
2300 }
2301
2302 /* Given a candidate C whose basis is hidden by at least one intervening
2303 phi, introduce a matching number of new phis to represent its basis
2304 adjusted by conditional increments along possible incoming paths. Then
2305 replace C as though it were an unconditional candidate, using the new
2306 basis. */
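
/* A hypothetical example with known stride 5: given a basis Y = B * 5,
   a phi  B1 = PHI <B, B2>  where  B2 = B + 4  on one incoming arc, and
   the candidate  X = (B1 + 1) * 5,  a new basis phi

       T = PHI <Y, T1>,  with  T1 = Y + 20  inserted on the B2 arc,

   is created, after which X is replaced by  X = T + 5.  */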
2307
2308 static void
2309 replace_conditional_candidate (slsr_cand_t c)
2310 {
2311 tree basis_name, name;
2312 slsr_cand_t basis;
2313 location_t loc;
2314
2315 /* Look up the LHS SSA name from C's basis. This will be the
2316 RHS1 of the adds we will introduce to create new phi arguments. */
2317 basis = lookup_cand (c->basis);
2318 basis_name = gimple_assign_lhs (basis->cand_stmt);
2319
2320 /* Create a new phi statement which will represent C's true basis
2321 after the transformation is complete. */
2322 loc = gimple_location (c->cand_stmt);
2323 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2324 basis_name, loc, KNOWN_STRIDE);
2325 /* Replace C with an add of the new basis phi and a constant. */
2326 widest_int bump = c->index * wi::to_widest (c->stride);
2327
2328 replace_mult_candidate (c, name, bump);
2329 }
2330
2331 /* Compute the expected costs of inserting basis adjustments for
2332 candidate C with phi-definition PHI. The cost of inserting
2333 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2334 which are themselves phi results, recursively calculate costs
2335 for those phis as well. */
2336
2337 static int
2338 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2339 {
2340 unsigned i;
2341 int cost = 0;
2342 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2343
2344 /* If we work our way back to a phi that isn't dominated by the hidden
2345 basis, this isn't a candidate for replacement. Indicate this by
2346 returning an unreasonably high cost. It's not easy to detect
2347 these situations when determining the basis, so we defer the
2348 decision until now. */
2349 basic_block phi_bb = gimple_bb (phi);
2350 slsr_cand_t basis = lookup_cand (c->basis);
2351 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2352
2353 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2354 return COST_INFINITE;
2355
2356 for (i = 0; i < gimple_phi_num_args (phi); i++)
2357 {
2358 tree arg = gimple_phi_arg_def (phi, i);
2359
2360 if (arg != phi_cand->base_expr)
2361 {
2362 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2363
2364 if (gimple_code (arg_def) == GIMPLE_PHI)
2365 cost += phi_add_costs (arg_def, c, one_add_cost);
2366 else
2367 {
2368 slsr_cand_t arg_cand = base_cand_from_table (arg);
2369
2370 if (arg_cand->index != c->index)
2371 cost += one_add_cost;
2372 }
2373 }
2374 }
2375
2376 return cost;
2377 }
2378
2379 /* For candidate C, each sibling of candidate C, and each dependent of
2380 candidate C, determine whether the candidate is dependent upon a
2381 phi that hides its basis. If not, replace the candidate unconditionally.
2382 Otherwise, determine whether the cost of introducing compensation code
2383 for the candidate is offset by the gains from strength reduction. If
2384 so, replace the candidate and introduce the compensation code. */
2385
2386 static void
2387 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2388 {
2389 if (phi_dependent_cand_p (c))
2390 {
2391 if (c->kind == CAND_MULT)
2392 {
2393 /* A candidate dependent upon a phi will replace a multiply by
2394 a constant with an add, and will insert at most one add for
2395 each phi argument. Add these costs with the potential dead-code
2396 savings to determine profitability. */
2397 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2398 int mult_savings = stmt_cost (c->cand_stmt, speed);
2399 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2400 tree phi_result = gimple_phi_result (phi);
2401 int one_add_cost = add_cost (speed,
2402 TYPE_MODE (TREE_TYPE (phi_result)));
2403 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2404 int cost = add_costs - mult_savings - c->dead_savings;
2405
2406 if (dump_file && (dump_flags & TDF_DETAILS))
2407 {
2408 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2409 fprintf (dump_file, " add_costs = %d\n", add_costs);
2410 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2411 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2412 fprintf (dump_file, " cost = %d\n", cost);
2413 if (cost <= COST_NEUTRAL)
2414 fputs (" Replacing...\n", dump_file);
2415 else
2416 fputs (" Not replaced.\n", dump_file);
2417 }
2418
2419 if (cost <= COST_NEUTRAL)
2420 replace_conditional_candidate (c);
2421 }
2422 }
2423 else
2424 replace_unconditional_candidate (c);
2425
2426 if (c->sibling)
2427 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2428
2429 if (c->dependent)
2430 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2431 }
2432 \f
2433 /* Count the number of candidates in the tree rooted at C that have
2434 not already been replaced under other interpretations. */
2435
2436 static int
2437 count_candidates (slsr_cand_t c)
2438 {
2439 unsigned count = cand_already_replaced (c) ? 0 : 1;
2440
2441 if (c->sibling)
2442 count += count_candidates (lookup_cand (c->sibling));
2443
2444 if (c->dependent)
2445 count += count_candidates (lookup_cand (c->dependent));
2446
2447 return count;
2448 }
2449
2450 /* Increase the count of INCREMENT by one in the increment vector.
2451 INCREMENT is associated with candidate C. If INCREMENT is to be
2452 conditionally executed as part of a conditional candidate replacement,
2453 IS_PHI_ADJUST is true, otherwise false. If an initializer
2454 T_0 = stride * I is provided by a candidate that dominates all
2455 candidates with the same increment, also record T_0 for subsequent use. */
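
/* The initializer case can be pictured as follows (hypothetical names):

       t_5 = s_2 * 4;
       x_6 = b_1 + t_5;      CAND_ADD interpreted as b_1 + (4 * s_2)

   Here t_5 already computes stride * 4, so it is recorded as a possible
   initializer for increment 4, to be discarded later if its definition
   fails to dominate some other candidate with that increment.  */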
2456
2457 static void
2458 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2459 {
2460 bool found = false;
2461 unsigned i;
2462
2463 /* Treat increments that differ only in sign as identical so as to
2464 share initializers, unless we are generating pointer arithmetic. */
2465 if (!address_arithmetic_p && wi::neg_p (increment))
2466 increment = -increment;
2467
2468 for (i = 0; i < incr_vec_len; i++)
2469 {
2470 if (incr_vec[i].incr == increment)
2471 {
2472 incr_vec[i].count++;
2473 found = true;
2474
2475 /* If we previously recorded an initializer that doesn't
2476 dominate this candidate, it's not going to be useful to
2477 us after all. */
2478 if (incr_vec[i].initializer
2479 && !dominated_by_p (CDI_DOMINATORS,
2480 gimple_bb (c->cand_stmt),
2481 incr_vec[i].init_bb))
2482 {
2483 incr_vec[i].initializer = NULL_TREE;
2484 incr_vec[i].init_bb = NULL;
2485 }
2486
2487 break;
2488 }
2489 }
2490
2491 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2492 {
2493 /* The first time we see an increment, create the entry for it.
2494 If this is the root candidate which doesn't have a basis, set
2495 the count to zero. We're only processing it so it can possibly
2496 provide an initializer for other candidates. */
2497 incr_vec[incr_vec_len].incr = increment;
2498 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2499 incr_vec[incr_vec_len].cost = COST_INFINITE;
2500
2501 /* Optimistically record the first occurrence of this increment
2502 as providing an initializer (if it does); we will revise this
2503 opinion later if it doesn't dominate all other occurrences.
2504 Exception: increments of -1, 0, 1 never need initializers;
2505 and phi adjustments don't ever provide initializers. */
2506 if (c->kind == CAND_ADD
2507 && !is_phi_adjust
2508 && c->index == increment
2509 && (wi::gts_p (increment, 1)
2510 || wi::lts_p (increment, -1))
2511 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2512 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2513 {
2514 tree t0 = NULL_TREE;
2515 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2516 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2517 if (operand_equal_p (rhs1, c->base_expr, 0))
2518 t0 = rhs2;
2519 else if (operand_equal_p (rhs2, c->base_expr, 0))
2520 t0 = rhs1;
2521 if (t0
2522 && SSA_NAME_DEF_STMT (t0)
2523 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2524 {
2525 incr_vec[incr_vec_len].initializer = t0;
2526 incr_vec[incr_vec_len++].init_bb
2527 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2528 }
2529 else
2530 {
2531 incr_vec[incr_vec_len].initializer = NULL_TREE;
2532 incr_vec[incr_vec_len++].init_bb = NULL;
2533 }
2534 }
2535 else
2536 {
2537 incr_vec[incr_vec_len].initializer = NULL_TREE;
2538 incr_vec[incr_vec_len++].init_bb = NULL;
2539 }
2540 }
2541 }
2542
2543 /* Given phi statement PHI that hides a candidate from its BASIS, find
2544 the increments along each incoming arc (recursively handling additional
2545 phis that may be present) and record them. These increments are the
2546 difference in index between the index-adjusting statements and the
2547 index of the basis. */
2548
2549 static void
2550 record_phi_increments (slsr_cand_t basis, gimple *phi)
2551 {
2552 unsigned i;
2553 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2554
2555 for (i = 0; i < gimple_phi_num_args (phi); i++)
2556 {
2557 tree arg = gimple_phi_arg_def (phi, i);
2558
2559 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2560 {
2561 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2562
2563 if (gimple_code (arg_def) == GIMPLE_PHI)
2564 record_phi_increments (basis, arg_def);
2565 else
2566 {
2567 slsr_cand_t arg_cand = base_cand_from_table (arg);
2568 widest_int diff = arg_cand->index - basis->index;
2569 record_increment (arg_cand, diff, PHI_ADJUST);
2570 }
2571 }
2572 }
2573 }
2574
2575 /* Determine how many times each unique increment occurs in the set
2576 of candidates rooted at C's parent, recording the data in the
2577 increment vector. For each unique increment I, if an initializer
2578 T_0 = stride * I is provided by a candidate that dominates all
2579 candidates with the same increment, also record T_0 for subsequent
2580 use. */
2581
2582 static void
2583 record_increments (slsr_cand_t c)
2584 {
2585 if (!cand_already_replaced (c))
2586 {
2587 if (!phi_dependent_cand_p (c))
2588 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2589 else
2590 {
2591 /* A candidate with a basis hidden by a phi will have one
2592 increment for its relationship to the index represented by
2593 the phi, and potentially additional increments along each
2594 incoming edge. For the root of the dependency tree (which
2595 has no basis), process just the initial index in case it has
2596 an initializer that can be used by subsequent candidates. */
2597 record_increment (c, c->index, NOT_PHI_ADJUST);
2598
2599 if (c->basis)
2600 record_phi_increments (lookup_cand (c->basis),
2601 lookup_cand (c->def_phi)->cand_stmt);
2602 }
2603 }
2604
2605 if (c->sibling)
2606 record_increments (lookup_cand (c->sibling));
2607
2608 if (c->dependent)
2609 record_increments (lookup_cand (c->dependent));
2610 }
2611
2612 /* Add up and return the costs of introducing add statements that
2613 require the increment INCR on behalf of candidate C and phi
2614 statement PHI. Accumulate into *SAVINGS the potential savings
2615 from removing existing statements that feed PHI and have no other
2616 uses. */
2617
2618 static int
2619 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2620 int *savings)
2621 {
2622 unsigned i;
2623 int cost = 0;
2624 slsr_cand_t basis = lookup_cand (c->basis);
2625 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2626
2627 for (i = 0; i < gimple_phi_num_args (phi); i++)
2628 {
2629 tree arg = gimple_phi_arg_def (phi, i);
2630
2631 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2632 {
2633 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2634
2635 if (gimple_code (arg_def) == GIMPLE_PHI)
2636 {
2637 int feeding_savings = 0;
2638 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2639 if (has_single_use (gimple_phi_result (arg_def)))
2640 *savings += feeding_savings;
2641 }
2642 else
2643 {
2644 slsr_cand_t arg_cand = base_cand_from_table (arg);
2645 widest_int diff = arg_cand->index - basis->index;
2646
2647 if (incr == diff)
2648 {
2649 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2650 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2651 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2652 if (has_single_use (lhs))
2653 *savings += stmt_cost (arg_cand->cand_stmt, true);
2654 }
2655 }
2656 }
2657 }
2658
2659 return cost;
2660 }
2661
2662 /* Return the first candidate in the tree rooted at C that has not
2663 already been replaced, favoring siblings over dependents. */
2664
2665 static slsr_cand_t
2666 unreplaced_cand_in_tree (slsr_cand_t c)
2667 {
2668 if (!cand_already_replaced (c))
2669 return c;
2670
2671 if (c->sibling)
2672 {
2673 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2674 if (sib)
2675 return sib;
2676 }
2677
2678 if (c->dependent)
2679 {
2680 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2681 if (dep)
2682 return dep;
2683 }
2684
2685 return NULL;
2686 }
2687
2688 /* Return TRUE if the candidates in the tree rooted at C should be
2689 optimized for speed, else FALSE. We estimate this based on the block
2690 containing the most dominant candidate in the tree that has not yet
2691 been replaced. */
2692
2693 static bool
2694 optimize_cands_for_speed_p (slsr_cand_t c)
2695 {
2696 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2697 gcc_assert (c2);
2698 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2699 }
2700
2701 /* Add COST_IN to the lowest cost of any dependent path starting at
2702 candidate C or any of its siblings, counting only candidates along
2703 such paths with increment INCR. Assume that replacing a candidate
2704 reduces cost by REPL_SAVINGS. Also account for savings from any
2705 statements that would go dead. If COUNT_PHIS is true, include
2706 costs of introducing feeding statements for conditional candidates. */
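
/* As a small worked example (hypothetical numbers): with COST_IN = 6,
   REPL_SAVINGS = 4, and a chain of two dependents that both use INCR and
   have no dead-code savings, the cheapest path costs 6 - 4 - 4 = -2,
   i.e. the replacements more than pay for the initializer.  */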
2707
2708 static int
2709 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2710 const widest_int &incr, bool count_phis)
2711 {
2712 int local_cost, sib_cost, savings = 0;
2713 widest_int cand_incr = cand_abs_increment (c);
2714
2715 if (cand_already_replaced (c))
2716 local_cost = cost_in;
2717 else if (incr == cand_incr)
2718 local_cost = cost_in - repl_savings - c->dead_savings;
2719 else
2720 local_cost = cost_in - c->dead_savings;
2721
2722 if (count_phis
2723 && phi_dependent_cand_p (c)
2724 && !cand_already_replaced (c))
2725 {
2726 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2727 local_cost += phi_incr_cost (c, incr, phi, &savings);
2728
2729 if (has_single_use (gimple_phi_result (phi)))
2730 local_cost -= savings;
2731 }
2732
2733 if (c->dependent)
2734 local_cost = lowest_cost_path (local_cost, repl_savings,
2735 lookup_cand (c->dependent), incr,
2736 count_phis);
2737
2738 if (c->sibling)
2739 {
2740 sib_cost = lowest_cost_path (cost_in, repl_savings,
2741 lookup_cand (c->sibling), incr,
2742 count_phis);
2743 local_cost = MIN (local_cost, sib_cost);
2744 }
2745
2746 return local_cost;
2747 }
2748
2749 /* Compute the total savings that would accrue from all replacements
2750 in the candidate tree rooted at C, counting only candidates with
2751 increment INCR. Assume that replacing a candidate reduces cost
2752 by REPL_SAVINGS. Also account for savings from statements that
2753 would go dead. */
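
/* For a size comparison (hypothetical numbers): two replaceable
   candidates with REPL_SAVINGS = 3 and no dead-code savings yield a
   total savings of 6, which analyze_increments weighs against the
   one-time cost of the initializer.  */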
2754
2755 static int
2756 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2757 bool count_phis)
2758 {
2759 int savings = 0;
2760 widest_int cand_incr = cand_abs_increment (c);
2761
2762 if (incr == cand_incr && !cand_already_replaced (c))
2763 savings += repl_savings + c->dead_savings;
2764
2765 if (count_phis
2766 && phi_dependent_cand_p (c)
2767 && !cand_already_replaced (c))
2768 {
2769 int phi_savings = 0;
2770 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2771 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2772
2773 if (has_single_use (gimple_phi_result (phi)))
2774 savings += phi_savings;
2775 }
2776
2777 if (c->dependent)
2778 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2779 count_phis);
2780
2781 if (c->sibling)
2782 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2783 count_phis);
2784
2785 return savings;
2786 }
2787
2788 /* Use target-specific costs to determine and record which increments
2789 in the current candidate tree are profitable to replace, assuming
2790 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2791 the candidate tree.
2792
2793 One slight limitation here is that we don't account for the possible
2794 introduction of casts in some cases. See replace_one_candidate for
2795 the cases where these are introduced. This should probably be cleaned
2796 up sometime. */
2797
2798 static void
2799 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2800 {
2801 unsigned i;
2802
2803 for (i = 0; i < incr_vec_len; i++)
2804 {
2805 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2806
2807 /* If somehow this increment is bigger than a HWI, we won't
2808 be optimizing candidates that use it. And if the increment
2809 has a count of zero, nothing will be done with it. */
2810 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2811 incr_vec[i].cost = COST_INFINITE;
2812
2813 /* Increments of 0, 1, and -1 are always profitable to replace,
2814 because they always replace a multiply or add with an add or
2815 copy, and may cause one or more existing instructions to go
2816 dead. Exception: -1 can't be assumed to be profitable for
2817 pointer addition. */
2818 else if (incr == 0
2819 || incr == 1
2820 || (incr == -1
2821 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2822 != POINTER_PLUS_EXPR)))
2823 incr_vec[i].cost = COST_NEUTRAL;
2824
2825 /* FORNOW: If we need to add an initializer, give up if a cast from
2826 the candidate's type to its stride's type can lose precision.
2827 This could eventually be handled better by expressly retaining the
2828 result of a cast to a wider type in the stride. Example:
2829
2830 short int _1;
2831 _2 = (int) _1;
2832 _3 = _2 * 10;
2833 _4 = x + _3; ADD: x + (10 * _1) : int
2834 _5 = _2 * 15;
2835 	 _6 = x + _5;      ADD: x + (15 * _1) : int
2836
2837 Right now replacing _6 would cause insertion of an initializer
2838 of the form "short int T = _1 * 5;" followed by a cast to
2839 int, which could overflow incorrectly. Had we recorded _2 or
2840 (int)_1 as the stride, this wouldn't happen. However, doing
2841 this breaks other opportunities, so this will require some
2842 care. */
2843 else if (!incr_vec[i].initializer
2844 && TREE_CODE (first_dep->stride) != INTEGER_CST
2845 && !legal_cast_p_1 (first_dep->stride,
2846 gimple_assign_lhs (first_dep->cand_stmt)))
2847
2848 incr_vec[i].cost = COST_INFINITE;
2849
2850 /* If we need to add an initializer, make sure we don't introduce
2851 a multiply by a pointer type, which can happen in certain cast
2852 scenarios. FIXME: When cleaning up these cast issues, we can
2853 afford to introduce the multiply provided we cast out to an
2854 unsigned int of appropriate size. */
2855 else if (!incr_vec[i].initializer
2856 && TREE_CODE (first_dep->stride) != INTEGER_CST
2857 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2858
2859 incr_vec[i].cost = COST_INFINITE;
2860
2861 /* For any other increment, if this is a multiply candidate, we
2862 must introduce a temporary T and initialize it with
2863 T_0 = stride * increment. When optimizing for speed, walk the
2864 candidate tree to calculate the best cost reduction along any
2865 path; if it offsets the fixed cost of inserting the initializer,
2866 replacing the increment is profitable. When optimizing for
2867 size, instead calculate the total cost reduction from replacing
2868 all candidates with this increment. */
2869 else if (first_dep->kind == CAND_MULT)
2870 {
2871 int cost = mult_by_coeff_cost (incr, mode, speed);
2872 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2873 if (speed)
2874 cost = lowest_cost_path (cost, repl_savings, first_dep,
2875 incr_vec[i].incr, COUNT_PHIS);
2876 else
2877 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2878 COUNT_PHIS);
2879
2880 incr_vec[i].cost = cost;
2881 }
2882
2883 /* If this is an add candidate, the initializer may already
2884 exist, so only calculate the cost of the initializer if it
2885 doesn't. We are replacing one add with another here, so the
2886 known replacement savings is zero. We will account for removal
2887 of dead instructions in lowest_cost_path or total_savings. */
2888 else
2889 {
2890 int cost = 0;
2891 if (!incr_vec[i].initializer)
2892 cost = mult_by_coeff_cost (incr, mode, speed);
2893
2894 if (speed)
2895 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2896 DONT_COUNT_PHIS);
2897 else
2898 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2899 DONT_COUNT_PHIS);
2900
2901 incr_vec[i].cost = cost;
2902 }
2903 }
2904 }
2905
2906 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2907 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2908 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2909 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2910 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2911
2912 static basic_block
2913 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2914 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2915 {
2916 basic_block ncd;
2917
2918 if (!bb1)
2919 {
2920 *where = c2;
2921 return bb2;
2922 }
2923
2924 if (!bb2)
2925 {
2926 *where = c1;
2927 return bb1;
2928 }
2929
2930 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2931
2932 /* If both candidates are in the same block, the earlier
2933 candidate wins. */
2934 if (bb1 == ncd && bb2 == ncd)
2935 {
2936 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2937 *where = c2;
2938 else
2939 *where = c1;
2940 }
2941
2942 /* Otherwise, if one of them produced a candidate in the
2943 dominator, that one wins. */
2944 else if (bb1 == ncd)
2945 *where = c1;
2946
2947 else if (bb2 == ncd)
2948 *where = c2;
2949
2950 /* If neither matches the dominator, neither wins. */
2951 else
2952 *where = NULL;
2953
2954 return ncd;
2955 }
2956
2957 /* Consider all candidates that feed PHI. Find the nearest common
2958 dominator of those candidates requiring the given increment INCR.
2959 Further find and return the nearest common dominator of this result
2960 with block NCD. If the returned block contains one or more of the
2961 candidates, return the earliest candidate in the block in *WHERE. */
2962
2963 static basic_block
2964 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2965 basic_block ncd, slsr_cand_t *where)
2966 {
2967 unsigned i;
2968 slsr_cand_t basis = lookup_cand (c->basis);
2969 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2970
2971 for (i = 0; i < gimple_phi_num_args (phi); i++)
2972 {
2973 tree arg = gimple_phi_arg_def (phi, i);
2974
2975 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2976 {
2977 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2978
2979 if (gimple_code (arg_def) == GIMPLE_PHI)
2980 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
2981 where);
2982 else
2983 {
2984 slsr_cand_t arg_cand = base_cand_from_table (arg);
2985 widest_int diff = arg_cand->index - basis->index;
2986 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2987
2988 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2989 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
2990 }
2991 }
2992 }
2993
2994 return ncd;
2995 }
2996
2997 /* Consider the candidate C together with any candidates that feed
2998 C's phi dependence (if any). Find and return the nearest common
2999 dominator of those candidates requiring the given increment INCR.
3000 If the returned block contains one or more of the candidates,
3001 return the earliest candidate in the block in *WHERE. */
3002
3003 static basic_block
3004 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3005 {
3006 basic_block ncd = NULL;
3007
3008 if (cand_abs_increment (c) == incr)
3009 {
3010 ncd = gimple_bb (c->cand_stmt);
3011 *where = c;
3012 }
3013
3014 if (phi_dependent_cand_p (c))
3015 ncd = ncd_with_phi (c, incr,
3016 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3017 ncd, where);
3018
3019 return ncd;
3020 }
3021
3022 /* Consider all candidates in the tree rooted at C for which INCR
3023 represents the required increment of C relative to its basis.
3024 Find and return the basic block that most nearly dominates all
3025 such candidates. If the returned block contains one or more of
3026 the candidates, return the earliest candidate in the block in
3027 *WHERE. */
3028
3029 static basic_block
3030 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3031 slsr_cand_t *where)
3032 {
3033 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3034 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3035
3036 /* First find the NCD of all siblings and dependents. */
3037 if (c->sibling)
3038 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3039 incr, &sib_where);
3040 if (c->dependent)
3041 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3042 incr, &dep_where);
3043 if (!sib_ncd && !dep_ncd)
3044 {
3045 new_where = NULL;
3046 ncd = NULL;
3047 }
3048 else if (sib_ncd && !dep_ncd)
3049 {
3050 new_where = sib_where;
3051 ncd = sib_ncd;
3052 }
3053 else if (dep_ncd && !sib_ncd)
3054 {
3055 new_where = dep_where;
3056 ncd = dep_ncd;
3057 }
3058 else
3059 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3060 dep_where, &new_where);
3061
3062 /* If the candidate's increment doesn't match the one we're interested
3063      in (and neither do any increments for feeding defs of a phi-dependence),
3064 then the result depends only on siblings and dependents. */
3065 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3066
3067 if (!this_ncd || cand_already_replaced (c))
3068 {
3069 *where = new_where;
3070 return ncd;
3071 }
3072
3073 /* Otherwise, compare this candidate with the result from all siblings
3074 and dependents. */
3075 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3076
3077 return ncd;
3078 }
3079
3080 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3081
3082 static inline bool
3083 profitable_increment_p (unsigned index)
3084 {
3085 return (incr_vec[index].cost <= COST_NEUTRAL);
3086 }
3087
3088 /* For each profitable increment in the increment vector not equal to
3089 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3090 dominator of all statements in the candidate chain rooted at C
3091 that require that increment, and insert an initializer
3092 T_0 = stride * increment at that location. Record T_0 with the
3093 increment record. */
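
/* For example, with stride s_2 and a profitable increment of 6, the
   statement inserted at the nearest common dominator is (hypothetical
   SSA name):

       slsr_7 = s_2 * 6;

   after which each affected candidate is replaced by an add (or
   subtract) of its basis and slsr_7.  */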
3094
3095 static void
3096 insert_initializers (slsr_cand_t c)
3097 {
3098 unsigned i;
3099
3100 for (i = 0; i < incr_vec_len; i++)
3101 {
3102 basic_block bb;
3103 slsr_cand_t where = NULL;
3104 gassign *init_stmt;
3105 tree stride_type, new_name, incr_tree;
3106 widest_int incr = incr_vec[i].incr;
3107
3108 if (!profitable_increment_p (i)
3109 || incr == 1
3110 || (incr == -1
3111 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3112 || incr == 0)
3113 continue;
3114
3115 /* We may have already identified an existing initializer that
3116 will suffice. */
3117 if (incr_vec[i].initializer)
3118 {
3119 if (dump_file && (dump_flags & TDF_DETAILS))
3120 {
3121 fputs ("Using existing initializer: ", dump_file);
3122 print_gimple_stmt (dump_file,
3123 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3124 0, 0);
3125 }
3126 continue;
3127 }
3128
3129 /* Find the block that most closely dominates all candidates
3130 with this increment. If there is at least one candidate in
3131 that block, the earliest one will be returned in WHERE. */
3132 bb = nearest_common_dominator_for_cands (c, incr, &where);
3133
3134 /* Create a new SSA name to hold the initializer's value. */
3135 stride_type = TREE_TYPE (c->stride);
3136 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3137 incr_vec[i].initializer = new_name;
3138
3139 /* Create the initializer and insert it in the latest possible
3140 dominating position. */
3141 incr_tree = wide_int_to_tree (stride_type, incr);
3142 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3143 c->stride, incr_tree);
3144 if (where)
3145 {
3146 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3147 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3148 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3149 }
3150 else
3151 {
3152 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3153 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3154
3155 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3156 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3157 else
3158 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3159
3160 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3161 }
3162
3163 if (dump_file && (dump_flags & TDF_DETAILS))
3164 {
3165 fputs ("Inserting initializer: ", dump_file);
3166 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3167 }
3168 }
3169 }
3170
3171 /* Return TRUE iff all required increments for candidates feeding PHI
3172 are profitable to replace on behalf of candidate C. */
3173
3174 static bool
3175 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3176 {
3177 unsigned i;
3178 slsr_cand_t basis = lookup_cand (c->basis);
3179 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3180
3181 for (i = 0; i < gimple_phi_num_args (phi); i++)
3182 {
3183 tree arg = gimple_phi_arg_def (phi, i);
3184
3185 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3186 {
3187 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3188
3189 if (gimple_code (arg_def) == GIMPLE_PHI)
3190 {
3191 if (!all_phi_incrs_profitable (c, arg_def))
3192 return false;
3193 }
3194 else
3195 {
3196 int j;
3197 slsr_cand_t arg_cand = base_cand_from_table (arg);
3198 widest_int increment = arg_cand->index - basis->index;
3199
3200 if (!address_arithmetic_p && wi::neg_p (increment))
3201 increment = -increment;
3202
3203 j = incr_vec_index (increment);
3204
3205 if (dump_file && (dump_flags & TDF_DETAILS))
3206 {
3207 fprintf (dump_file, " Conditional candidate %d, phi: ",
3208 c->cand_num);
3209 print_gimple_stmt (dump_file, phi, 0, 0);
3210 fputs (" increment: ", dump_file);
3211 print_decs (increment, dump_file);
3212 if (j < 0)
3213 fprintf (dump_file,
3214 "\n Not replaced; incr_vec overflow.\n");
		  else
		    {
		      fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
		      if (profitable_increment_p (j))
			fputs (" Replacing...\n", dump_file);
		      else
			fputs (" Not replaced.\n", dump_file);
		    }
3222 }
3223
3224 if (j < 0 || !profitable_increment_p (j))
3225 return false;
3226 }
3227 }
3228 }
3229
3230 return true;
3231 }
3232
3233 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3234 type TO_TYPE, and insert it in front of the statement represented
3235    by candidate C.  Return the new SSA name.  */
3237
3238 static tree
3239 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3240 {
3241 tree cast_lhs;
3242 gassign *cast_stmt;
3243 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3244
3245 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3246 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3247 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3248 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3249
3250 if (dump_file && (dump_flags & TDF_DETAILS))
3251 {
3252 fputs (" Inserting: ", dump_file);
3253 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3254 }
3255
3256 return cast_lhs;
3257 }
3258
3259 /* Replace the RHS of the statement represented by candidate C with
3260 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3261 leave C unchanged or just interchange its operands. The original
3262 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3263 If the replacement was made and we are doing a details dump,
3264 return the revised statement, else NULL. */
3265
3266 static gimple *
3267 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3268 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3269 slsr_cand_t c)
3270 {
3271 if (new_code != old_code
3272 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3273 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3274 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3275 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3276 {
3277 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3278 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3279 update_stmt (gsi_stmt (gsi));
3280 c->cand_stmt = gsi_stmt (gsi);
3281
3282 if (dump_file && (dump_flags & TDF_DETAILS))
3283 return gsi_stmt (gsi);
3284 }
3285
3286 else if (dump_file && (dump_flags & TDF_DETAILS))
3287 fputs (" (duplicate, not actually replacing)\n", dump_file);
3288
3289 return NULL;
3290 }
3291
3292 /* Strength-reduce the statement represented by candidate C by replacing
3293 it with an equivalent addition or subtraction. I is the index into
3294    the increment vector identifying C's increment.  BASIS_NAME
3296 is the rhs1 to use in creating the add/subtract. */
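
/* Concretely (hypothetical SSA names): for an increment with a recorded
   initializer t_0 the candidate becomes an add (or subtract) of basis_3
   and t_0; for an increment of 1 it becomes  x_4 = basis_3 + s_2;  for
   -1,  x_4 = basis_3 - s_2;  and for 0 it becomes a copy or cast of
   basis_3, as handled case by case below.  */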
3297
3298 static void
3299 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3300 {
3301 gimple *stmt_to_print = NULL;
3302 tree orig_rhs1, orig_rhs2;
3303 tree rhs2;
3304 enum tree_code orig_code, repl_code;
3305 widest_int cand_incr;
3306
3307 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3308 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3309 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3310 cand_incr = cand_increment (c);
3311
3312 if (dump_file && (dump_flags & TDF_DETAILS))
3313 {
3314 fputs ("Replacing: ", dump_file);
3315 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3316 stmt_to_print = c->cand_stmt;
3317 }
3318
3319 if (address_arithmetic_p)
3320 repl_code = POINTER_PLUS_EXPR;
3321 else
3322 repl_code = PLUS_EXPR;
3323
3324 /* If the increment has an initializer T_0, replace the candidate
3325 statement with an add of the basis name and the initializer. */
3326 if (incr_vec[i].initializer)
3327 {
3328 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3329 tree orig_type = TREE_TYPE (orig_rhs2);
3330
3331 if (types_compatible_p (orig_type, init_type))
3332 rhs2 = incr_vec[i].initializer;
3333 else
3334 rhs2 = introduce_cast_before_cand (c, orig_type,
3335 incr_vec[i].initializer);
3336
3337 if (incr_vec[i].incr != cand_incr)
3338 {
3339 gcc_assert (repl_code == PLUS_EXPR);
3340 repl_code = MINUS_EXPR;
3341 }
3342
3343 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3344 orig_code, orig_rhs1, orig_rhs2,
3345 c);
3346 }
3347
3348 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3349 with a subtract of the stride from the basis name, a copy
3350 from the basis name, or an add of the stride to the basis
3351 name, respectively. It may be necessary to introduce a
3352 cast (or reuse an existing cast). */
3353 else if (cand_incr == 1)
3354 {
3355 tree stride_type = TREE_TYPE (c->stride);
3356 tree orig_type = TREE_TYPE (orig_rhs2);
3357
3358 if (types_compatible_p (orig_type, stride_type))
3359 rhs2 = c->stride;
3360 else
3361 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3362
3363 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3364 orig_code, orig_rhs1, orig_rhs2,
3365 c);
3366 }
3367
3368 else if (cand_incr == -1)
3369 {
3370 tree stride_type = TREE_TYPE (c->stride);
3371 tree orig_type = TREE_TYPE (orig_rhs2);
3372 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3373
3374 if (types_compatible_p (orig_type, stride_type))
3375 rhs2 = c->stride;
3376 else
3377 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3378
3379 if (orig_code != MINUS_EXPR
3380 || !operand_equal_p (basis_name, orig_rhs1, 0)
3381 || !operand_equal_p (rhs2, orig_rhs2, 0))
3382 {
3383 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3384 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3385 update_stmt (gsi_stmt (gsi));
3386 c->cand_stmt = gsi_stmt (gsi);
3387
3388 if (dump_file && (dump_flags & TDF_DETAILS))
3389 stmt_to_print = gsi_stmt (gsi);
3390 }
3391 else if (dump_file && (dump_flags & TDF_DETAILS))
3392 fputs (" (duplicate, not actually replacing)\n", dump_file);
3393 }
3394
3395 else if (cand_incr == 0)
3396 {
3397 tree lhs = gimple_assign_lhs (c->cand_stmt);
3398 tree lhs_type = TREE_TYPE (lhs);
3399 tree basis_type = TREE_TYPE (basis_name);
3400
3401 if (types_compatible_p (lhs_type, basis_type))
3402 {
3403 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3404 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3405 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3406 gsi_replace (&gsi, copy_stmt, false);
3407 c->cand_stmt = copy_stmt;
3408
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3410 stmt_to_print = copy_stmt;
3411 }
3412 else
3413 {
3414 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3415 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3416 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3417 gsi_replace (&gsi, cast_stmt, false);
3418 c->cand_stmt = cast_stmt;
3419
3420 if (dump_file && (dump_flags & TDF_DETAILS))
3421 stmt_to_print = cast_stmt;
3422 }
3423 }
3424 else
3425 gcc_unreachable ();
3426
3427 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3428 {
3429 fputs ("With: ", dump_file);
3430 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3431 fputs ("\n", dump_file);
3432 }
3433 }
3434
3435 /* For each candidate in the tree rooted at C, replace it with an add of
3436    its basis and increment when that increment has been shown profitable. */
3437
3438 static void
3439 replace_profitable_candidates (slsr_cand_t c)
3440 {
3441 if (!cand_already_replaced (c))
3442 {
3443 widest_int increment = cand_abs_increment (c);
3444 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3445 int i;
3446
3447 i = incr_vec_index (increment);
3448
3449 /* Only process profitable increments. Nothing useful can be done
3450 to a cast or copy. */
3451 if (i >= 0
3452 && profitable_increment_p (i)
3453 && orig_code != MODIFY_EXPR
3454 && !CONVERT_EXPR_CODE_P (orig_code))
3455 {
3456 if (phi_dependent_cand_p (c))
3457 {
3458 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3459
3460 if (all_phi_incrs_profitable (c, phi))
3461 {
3462 /* Look up the LHS SSA name from C's basis. This will be
3463 the RHS1 of the adds we will introduce to create new
3464 phi arguments. */
3465 slsr_cand_t basis = lookup_cand (c->basis);
3466 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3467
3468 /* Create a new phi statement that will represent C's true
3469 basis after the transformation is complete. */
3470 location_t loc = gimple_location (c->cand_stmt);
3471 tree name = create_phi_basis (c, phi, basis_name,
3472 loc, UNKNOWN_STRIDE);
3473
3474 /* Replace C with an add of the new basis phi and the
3475 increment. */
3476 replace_one_candidate (c, i, name);
3477 }
3478 }
3479 else
3480 {
3481 slsr_cand_t basis = lookup_cand (c->basis);
3482 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3483 replace_one_candidate (c, i, basis_name);
3484 }
3485 }
3486 }
3487
3488 if (c->sibling)
3489 replace_profitable_candidates (lookup_cand (c->sibling));
3490
3491 if (c->dependent)
3492 replace_profitable_candidates (lookup_cand (c->dependent));
3493 }
3494 \f
3495 /* Analyze costs of related candidates in the candidate vector,
3496 and make beneficial replacements. */
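
/* In outline, each root candidate tree below is handled in one of three
   ways: CAND_REF chains are replaced unconditionally; trees with a
   constant stride go through replace_uncond_cands_and_profitable_phis;
   and trees with an SSA-name stride go through the record_increments,
   analyze_increments, insert_initializers, and
   replace_profitable_candidates sequence.  */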
3497
3498 static void
3499 analyze_candidates_and_replace (void)
3500 {
3501 unsigned i;
3502 slsr_cand_t c;
3503
3504 /* Each candidate that has a null basis and a non-null
3505 dependent is the root of a tree of related statements.
3506 Analyze each tree to determine a subset of those
3507 statements that can be replaced with maximum benefit. */
3508 FOR_EACH_VEC_ELT (cand_vec, i, c)
3509 {
3510 slsr_cand_t first_dep;
3511
3512 if (c->basis != 0 || c->dependent == 0)
3513 continue;
3514
3515 if (dump_file && (dump_flags & TDF_DETAILS))
3516 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3517 c->cand_num);
3518
3519 first_dep = lookup_cand (c->dependent);
3520
3521 /* If this is a chain of CAND_REFs, unconditionally replace
3522 each of them with a strength-reduced data reference. */
3523 if (c->kind == CAND_REF)
3524 replace_refs (c);
3525
3526 /* If the common stride of all related candidates is a known
3527 constant, each candidate without a phi-dependence can be
3528 profitably replaced. Each replaces a multiply by a single
3529 add, with the possibility that a feeding add also goes dead.
3530 A candidate with a phi-dependence is replaced only if the
3531 compensation code it requires is offset by the strength
3532 reduction savings. */
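/* For example (an illustrative sketch): with a known stride of 4,
   a candidate X = (B + 2) * 4 whose basis is Y = (B + 1) * 4 can be
   rewritten as X = Y + 4, trading the multiply for a single add.  */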
3533 else if (TREE_CODE (c->stride) == INTEGER_CST)
3534 replace_uncond_cands_and_profitable_phis (first_dep);
3535
3536 /* When the stride is an SSA name, it may still be profitable
3537 to replace some or all of the dependent candidates, depending
3538 on whether the introduced increments can be reused, or are
3539 less expensive to calculate than the replaced statements. */
3540 else
3541 {
3542 machine_mode mode;
3543 bool speed;
3544
3545 /* Determine whether we'll be generating pointer arithmetic
3546 when replacing candidates. */
3547 address_arithmetic_p = (c->kind == CAND_ADD
3548 && POINTER_TYPE_P (c->cand_type));
3549
3550 /* If all candidates have already been replaced under other
3551 interpretations, nothing remains to be done. */
3552 if (!count_candidates (c))
3553 continue;
3554
3555 /* Construct an array of increments for this candidate chain. */
3556 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3557 incr_vec_len = 0;
3558 record_increments (c);
3559
3560 /* Determine which increments are profitable to replace. */
3561 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3562 speed = optimize_cands_for_speed_p (c);
3563 analyze_increments (first_dep, mode, speed);
3564
3565 /* Insert initializers of the form T_0 = stride * increment
3566 for use in profitable replacements. */
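/* A sketch of the intent (identifiers invented for this comment):
   for an increment of 2 and stride S, an initializer

     t_0 = S * 2;

   is inserted at a point dominating its uses, and a replaced
   candidate can then take the form X = Y + t_0.  */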
3567 insert_initializers (first_dep);
3568 dump_incr_vec ();
3569
3570 /* Perform the replacements. */
3571 replace_profitable_candidates (first_dep);
3572 free (incr_vec);
3573 }
3574 }
3575 }
3576
3577 namespace {
3578
3579 const pass_data pass_data_strength_reduction =
3580 {
3581 GIMPLE_PASS, /* type */
3582 "slsr", /* name */
3583 OPTGROUP_NONE, /* optinfo_flags */
3584 TV_GIMPLE_SLSR, /* tv_id */
3585 ( PROP_cfg | PROP_ssa ), /* properties_required */
3586 0, /* properties_provided */
3587 0, /* properties_destroyed */
3588 0, /* todo_flags_start */
3589 0, /* todo_flags_finish */
3590 };
3591
3592 class pass_strength_reduction : public gimple_opt_pass
3593 {
3594 public:
3595 pass_strength_reduction (gcc::context *ctxt)
3596 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3597 {}
3598
3599 /* opt_pass methods: */
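/* The pass is gated on -ftree-slsr.  */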
3600 virtual bool gate (function *) { return flag_tree_slsr; }
3601 virtual unsigned int execute (function *);
3602
3603 }; // class pass_strength_reduction
3604
3605 unsigned
3606 pass_strength_reduction::execute (function *fun)
3607 {
3608 /* Create the obstack where candidates will reside. */
3609 gcc_obstack_init (&cand_obstack);
3610
3611 /* Allocate the candidate vector. */
3612 cand_vec.create (128);
3613
3614 /* Allocate the mapping from statements to candidate indices. */
3615 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3616
3617 /* Create the obstack where candidate chains will reside. */
3618 gcc_obstack_init (&chain_obstack);
3619
3620 /* Allocate the mapping from base expressions to candidate chains. */
3621 base_cand_map = new hash_table<cand_chain_hasher> (500);
3622
3623 /* Allocate the mapping from bases to alternative bases. */
3624 alt_base_map = new hash_map<tree, tree>;
3625
3626 /* Initialize the loop optimizer. We need to detect flow across
3627 back edges, and this gives us dominator information as well. */
3628 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3629
3630 /* Walk the CFG in predominator order looking for strength reduction
3631 candidates. */
3632 find_candidates_dom_walker (CDI_DOMINATORS)
3633 .walk (fun->cfg->x_entry_block_ptr);
3634
3635 if (dump_file && (dump_flags & TDF_DETAILS))
3636 {
3637 dump_cand_vec ();
3638 dump_cand_chains ();
3639 }
3640
3641 delete alt_base_map;
3642 free_affine_expand_cache (&name_expansions);
3643
3644 /* Analyze costs and make appropriate replacements. */
3645 analyze_candidates_and_replace ();
3646
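/* Finalize the loop optimizer and free the data structures
   allocated above.  */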
3647 loop_optimizer_finalize ();
3648 delete base_cand_map;
3649 base_cand_map = NULL;
3650 obstack_free (&chain_obstack, NULL);
3651 delete stmt_cand_map;
3652 cand_vec.release ();
3653 obstack_free (&cand_obstack, NULL);
3654
3655 return 0;
3656 }
3657
3658 } // anon namespace
3659
3660 gimple_opt_pass *
3661 make_pass_strength_reduction (gcc::context *ctxt)
3662 {
3663 return new pass_strength_reduction (ctxt);
3664 }