1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
59 \f
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
64
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
67
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
71
72 Any statement S0 that dominates S1 and is of the form:
73
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
76
77 is called a "basis" for S1. In both cases, S1 may be replaced by
78
79 S1': X = Y + (i - i') * S,
80
81 where (i - i') * S is folded to the extent possible.
82
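   As a purely illustrative instance of this replacement, with made-up
   indices i' = 2 and i = 7:

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   S1 may be rewritten as

     S1': X = Y + 5 * S

   where 5 * S folds to a constant whenever S is a constant.
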
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
91
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
96
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
100
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
104
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
107
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
111
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
120
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
124
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
128
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
131
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
134
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
142
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
151
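   As a purely illustrative instance (made-up constants), suppose two
   such references share T1, T2, and C3 = 4:

     ref A: base MEM_REF (T1, 8), offset MULT_EXPR (T2, 4), bitpos 0
     ref B: base MEM_REF (T1, 8),
            offset MULT_EXPR (PLUS_EXPR (T2, 5), 4), bitpos 32

   Both are then rewritten in the distributed form:

     ref A: MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 4)), 8)
     ref B: MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 4)), 32)

   so the common address computation can be shared by the RTL phases.
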
152 Conditional candidates
153 ======================
154
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
157
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
165
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
168
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
172 {
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
175 }
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
180
181 where the bracketed instructions may go dead.
182
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
193
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
199
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
209
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
213
214
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
218
219 /* The kind of candidate. */
220 enum cand_kind
221 {
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
226 };
227
228 struct slsr_cand_d
229 {
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
232
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
235
236 /* The stride S. */
237 tree stride;
238
239 /* The index constant i. */
240 widest_int index;
241
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
248
249 /* The type to be used to interpret the stride field when the stride
250 is not a constant. Normally the same as the type of the recorded
251 stride, but when the stride has been cast we need to maintain that
252 knowledge in order to make legal substitutions without losing
253 precision. When the stride is a constant, this will be sizetype. */
254 tree stride_type;
255
256 /* The kind of candidate (CAND_MULT, etc.). */
257 enum cand_kind kind;
258
259 /* Index of this candidate in the candidate vector. */
260 cand_idx cand_num;
261
262 /* Index of the next candidate record for the same statement.
263 A statement may be useful in more than one way (e.g., due to
264 commutativity). So we can have multiple "interpretations"
265 of a statement. */
266 cand_idx next_interp;
267
268 /* Index of the basis statement S0, if any, in the candidate vector. */
269 cand_idx basis;
270
271 /* First candidate for which this candidate is a basis, if one exists. */
272 cand_idx dependent;
273
274 /* Next candidate having the same basis as this one. */
275 cand_idx sibling;
276
277 /* If this is a conditional candidate, the CAND_PHI candidate
278 that defines the base SSA name B. */
279 cand_idx def_phi;
280
281 /* Savings that can be expected from eliminating dead code if this
282 candidate is replaced. */
283 int dead_savings;
284 };
285
286 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
287 typedef const struct slsr_cand_d *const_slsr_cand_t;
288
289 /* Pointers to candidates are chained together as part of a mapping
290 from base expressions to the candidates that use them. */
291
292 struct cand_chain_d
293 {
294 /* Base expression for the chain of candidates: often, but not
295 always, an SSA name. */
296 tree base_expr;
297
298 /* Pointer to a candidate. */
299 slsr_cand_t cand;
300
301 /* Chain pointer. */
302 struct cand_chain_d *next;
303
304 };
305
306 typedef struct cand_chain_d cand_chain, *cand_chain_t;
307 typedef const struct cand_chain_d *const_cand_chain_t;
308
309 /* Information about a unique "increment" associated with candidates
310 having an SSA name for a stride. An increment is the difference
311 between the index of the candidate and the index of its basis,
312 i.e., (i - i') as discussed in the module commentary.
313
314 When we are not going to generate address arithmetic we treat
315 increments that differ only in sign as the same, allowing sharing
316 of the cost of initializers. The absolute value of the increment
317 is stored in the incr_info. */
318
319 struct incr_info_d
320 {
321 /* The increment that relates a candidate to its basis. */
322 widest_int incr;
323
324 /* How many times the increment occurs in the candidate tree. */
325 unsigned count;
326
327 /* Cost of replacing candidates using this increment. Negative and
328 zero costs indicate replacement should be performed. */
329 int cost;
330
331 /* If this increment is profitable but is not -1, 0, or 1, it requires
332 an initializer T_0 = stride * incr to be found or introduced in the
333 nearest common dominator of all candidates. This field holds T_0
334 for subsequent use. */
335 tree initializer;
336
337 /* If the initializer was found to already exist, this is the block
338 where it was found. */
339 basic_block init_bb;
340 };
341
342 typedef struct incr_info_d incr_info, *incr_info_t;
343
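/* Illustrative note (made-up values, not part of the original sources):
   if several candidates with an unknown stride S each differ from their
   basis by an index difference of 6, they share one incr_info with
   incr = 6, and a single initializer

     t_0 = S * 6;

   may be introduced in the nearest common dominator so that each
   replacement becomes a simple addition of t_0.  */
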
344 /* Candidates are maintained in a vector. If candidate X dominates
345 candidate Y, then X appears before Y in the vector; but the
346 converse does not necessarily hold. */
347 static vec<slsr_cand_t> cand_vec;
348
349 enum cost_consts
350 {
351 COST_NEUTRAL = 0,
352 COST_INFINITE = 1000
353 };
354
355 enum stride_status
356 {
357 UNKNOWN_STRIDE = 0,
358 KNOWN_STRIDE = 1
359 };
360
361 enum phi_adjust_status
362 {
363 NOT_PHI_ADJUST = 0,
364 PHI_ADJUST = 1
365 };
366
367 enum count_phis_status
368 {
369 DONT_COUNT_PHIS = 0,
370 COUNT_PHIS = 1
371 };
372
373 /* Pointer map embodying a mapping from statements to candidates. */
374 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
375
376 /* Obstack for candidates. */
377 static struct obstack cand_obstack;
378
379 /* Obstack for candidate chains. */
380 static struct obstack chain_obstack;
381
382 /* An array INCR_VEC of incr_infos is used during analysis of related
383 candidates having an SSA name for a stride. INCR_VEC_LEN describes
384 its current length. MAX_INCR_VEC_LEN is used to avoid costly
385 pathological cases. */
386 static incr_info_t incr_vec;
387 static unsigned incr_vec_len;
388 const int MAX_INCR_VEC_LEN = 16;
389
390 /* For a chain of candidates with unknown stride, indicates whether or not
391 we must generate pointer arithmetic when replacing statements. */
392 static bool address_arithmetic_p;
393
394 /* Forward function declarations. */
395 static slsr_cand_t base_cand_from_table (tree);
396 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
397 static bool legal_cast_p_1 (tree, tree);
398 \f
399 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
400
401 static slsr_cand_t
402 lookup_cand (cand_idx idx)
403 {
404 return cand_vec[idx - 1];
405 }
406
407 /* Helper for hashing a candidate chain header. */
408
409 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
410 {
411 static inline hashval_t hash (const cand_chain *);
412 static inline bool equal (const cand_chain *, const cand_chain *);
413 };
414
415 inline hashval_t
416 cand_chain_hasher::hash (const cand_chain *p)
417 {
418 tree base_expr = p->base_expr;
419 return iterative_hash_expr (base_expr, 0);
420 }
421
422 inline bool
423 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
424 {
425 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
426 }
427
428 /* Hash table embodying a mapping from base exprs to chains of candidates. */
429 static hash_table<cand_chain_hasher> *base_cand_map;
430 \f
431 /* Pointer map used by tree_to_aff_combination_expand. */
432 static hash_map<tree, name_expansion *> *name_expansions;
433 /* Pointer map embodying a mapping from bases to alternative bases. */
434 static hash_map<tree, tree> *alt_base_map;
435
436 /* Given BASE, use the tree affine combination facilities to
437 find the underlying tree expression for BASE, with any
438 immediate offset excluded.
439
440 N.B. we should eliminate this backtracking with better forward
441 analysis in a future release. */
442
443 static tree
444 get_alternative_base (tree base)
445 {
446 tree *result = alt_base_map->get (base);
447
448 if (result == NULL)
449 {
450 tree expr;
451 aff_tree aff;
452
453 tree_to_aff_combination_expand (base, TREE_TYPE (base),
454 &aff, &name_expansions);
455 aff.offset = 0;
456 expr = aff_combination_to_tree (&aff);
457
458 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
459
460 return expr == base ? NULL : expr;
461 }
462
463 return *result;
464 }
465
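/* Illustrative example (made-up trees, not part of the original sources):
   if BASE expands to the affine combination  T + i * 4 + 32, then
   get_alternative_base (above) returns a tree for  T + i * 4, i.e. the
   same combination with its constant offset zeroed out.  */
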
466 /* Look in the candidate table for a CAND_PHI that defines BASE and
467 return it if found; otherwise return NULL. */
468
469 static cand_idx
470 find_phi_def (tree base)
471 {
472 slsr_cand_t c;
473
474 if (TREE_CODE (base) != SSA_NAME)
475 return 0;
476
477 c = base_cand_from_table (base);
478
479 if (!c || c->kind != CAND_PHI)
480 return 0;
481
482 return c->cand_num;
483 }
484
485 /* Helper routine for find_basis_for_candidate. May be called twice:
486 once for the candidate's base expr, and optionally again either for
487 the candidate's phi definition or for a CAND_REF's alternative base
488 expression. */
489
490 static slsr_cand_t
491 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
492 {
493 cand_chain mapping_key;
494 cand_chain_t chain;
495 slsr_cand_t basis = NULL;
496
497 // Limit potential of N^2 behavior for long candidate chains.
498 int iters = 0;
499 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
500
501 mapping_key.base_expr = base_expr;
502 chain = base_cand_map->find (&mapping_key);
503
504 for (; chain && iters < max_iters; chain = chain->next, ++iters)
505 {
506 slsr_cand_t one_basis = chain->cand;
507
508 if (one_basis->kind != c->kind
509 || one_basis->cand_stmt == c->cand_stmt
510 || !operand_equal_p (one_basis->stride, c->stride, 0)
511 || !types_compatible_p (one_basis->cand_type, c->cand_type)
512 || !types_compatible_p (one_basis->stride_type, c->stride_type)
513 || !dominated_by_p (CDI_DOMINATORS,
514 gimple_bb (c->cand_stmt),
515 gimple_bb (one_basis->cand_stmt)))
516 continue;
517
518 if (!basis || basis->cand_num < one_basis->cand_num)
519 basis = one_basis;
520 }
521
522 return basis;
523 }
524
525 /* Use the base expr from candidate C to look for possible candidates
526 that can serve as a basis for C. Each potential basis must also
527 appear in a block that dominates the candidate statement and have
528 the same stride and type. If more than one possible basis exists,
529 the one with highest index in the vector is chosen; this will be
530 the most immediately dominating basis. */
531
532 static int
533 find_basis_for_candidate (slsr_cand_t c)
534 {
535 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
536
537 /* If a candidate doesn't have a basis using its base expression,
538 it may have a basis hidden by one or more intervening phis. */
539 if (!basis && c->def_phi)
540 {
541 basic_block basis_bb, phi_bb;
542 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
543 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
544
545 if (basis)
546 {
547 /* A hidden basis must dominate the phi-definition of the
548 candidate's base name. */
549 phi_bb = gimple_bb (phi_cand->cand_stmt);
550 basis_bb = gimple_bb (basis->cand_stmt);
551
552 if (phi_bb == basis_bb
553 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
554 {
555 basis = NULL;
556 c->basis = 0;
557 }
558
559 /* If we found a hidden basis, estimate additional dead-code
560 savings if the phi and its feeding statements can be removed. */
561 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
562 c->dead_savings += phi_cand->dead_savings;
563 }
564 }
565
566 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
567 {
568 tree alt_base_expr = get_alternative_base (c->base_expr);
569 if (alt_base_expr)
570 basis = find_basis_for_base_expr (c, alt_base_expr);
571 }
572
573 if (basis)
574 {
575 c->sibling = basis->dependent;
576 basis->dependent = c->cand_num;
577 return basis->cand_num;
578 }
579
580 return 0;
581 }
582
583 /* Record a mapping from BASE to C, indicating that C may potentially serve
584 as a basis using that base expression. BASE may be the same as
585 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
586 underlying expression of C->BASE_EXPR. */
587
588 static void
589 record_potential_basis (slsr_cand_t c, tree base)
590 {
591 cand_chain_t node;
592 cand_chain **slot;
593
594 gcc_assert (base);
595
596 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
597 node->base_expr = base;
598 node->cand = c;
599 node->next = NULL;
600 slot = base_cand_map->find_slot (node, INSERT);
601
602 if (*slot)
603 {
604 cand_chain_t head = (cand_chain_t) (*slot);
605 node->next = head->next;
606 head->next = node;
607 }
608 else
609 *slot = node;
610 }
611
612 /* Allocate storage for a new candidate and initialize its fields.
613 Attempt to find a basis for the candidate.
614
615 For CAND_REF, an alternative base may also be recorded and used
616 to find a basis. This helps cases where the expression hidden
617 behind BASE (which is usually an SSA_NAME) has an immediate offset,
618 e.g.
619
620 a2[i][j] = 1;
621 a2[i + 20][j] = 2; */
622
623 static slsr_cand_t
624 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
625 const widest_int &index, tree stride, tree ctype,
626 tree stype, unsigned savings)
627 {
628 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
629 sizeof (slsr_cand));
630 c->cand_stmt = gs;
631 c->base_expr = base;
632 c->stride = stride;
633 c->index = index;
634 c->cand_type = ctype;
635 c->stride_type = stype;
636 c->kind = kind;
637 c->cand_num = cand_vec.length () + 1;
638 c->next_interp = 0;
639 c->dependent = 0;
640 c->sibling = 0;
641 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
642 c->dead_savings = savings;
643
644 cand_vec.safe_push (c);
645
646 if (kind == CAND_PHI)
647 c->basis = 0;
648 else
649 c->basis = find_basis_for_candidate (c);
650
651 record_potential_basis (c, base);
652 if (flag_expensive_optimizations && kind == CAND_REF)
653 {
654 tree alt_base = get_alternative_base (base);
655 if (alt_base)
656 record_potential_basis (c, alt_base);
657 }
658
659 return c;
660 }
661
662 /* Determine the target cost of statement GS when compiling according
663 to SPEED. */
664
665 static int
666 stmt_cost (gimple *gs, bool speed)
667 {
668 tree lhs, rhs1, rhs2;
669 machine_mode lhs_mode;
670
671 gcc_assert (is_gimple_assign (gs));
672 lhs = gimple_assign_lhs (gs);
673 rhs1 = gimple_assign_rhs1 (gs);
674 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
675
676 switch (gimple_assign_rhs_code (gs))
677 {
678 case MULT_EXPR:
679 rhs2 = gimple_assign_rhs2 (gs);
680
681 if (tree_fits_shwi_p (rhs2))
682 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
683
684 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
685 return mul_cost (speed, lhs_mode);
686
687 case PLUS_EXPR:
688 case POINTER_PLUS_EXPR:
689 case MINUS_EXPR:
690 return add_cost (speed, lhs_mode);
691
692 case NEGATE_EXPR:
693 return neg_cost (speed, lhs_mode);
694
695 CASE_CONVERT:
696 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
697
698 /* Note that we don't assign costs to copies that in most cases
699 will go away. */
700 case SSA_NAME:
701 return 0;
702
703 default:
704 ;
705 }
706
707 gcc_unreachable ();
708 return 0;
709 }
710
711 /* Look up the defining statement for BASE_IN and return a pointer
712 to its candidate in the candidate table, if any; otherwise NULL.
713 Only CAND_MULT, CAND_ADD, and CAND_PHI candidates are returned. */
714
715 static slsr_cand_t
716 base_cand_from_table (tree base_in)
717 {
718 slsr_cand_t *result;
719
720 gimple *def = SSA_NAME_DEF_STMT (base_in);
721 if (!def)
722 return (slsr_cand_t) NULL;
723
724 result = stmt_cand_map->get (def);
725
726 if (result && (*result)->kind != CAND_REF)
727 return *result;
728
729 return (slsr_cand_t) NULL;
730 }
731
732 /* Add an entry to the statement-to-candidate mapping. */
733
734 static void
735 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
736 {
737 gcc_assert (!stmt_cand_map->put (gs, c));
738 }
739 \f
740 /* Given a phi statement PHI, determine whether it
741 satisfies all the requirements of a phi candidate. If so, create
742 a candidate. Note that a CAND_PHI never has a basis itself, but
743 is used to help find a basis for subsequent candidates. */
744
745 static void
746 slsr_process_phi (gphi *phi, bool speed)
747 {
748 unsigned i;
749 tree arg0_base = NULL_TREE, base_type;
750 slsr_cand_t c;
751 struct loop *cand_loop = gimple_bb (phi)->loop_father;
752 unsigned savings = 0;
753
754 /* A CAND_PHI requires each of its arguments to have the same
755 derived base name. (See the module header commentary for a
756 definition of derived base names.) Furthermore, all feeding
757 definitions must be in the same position in the loop hierarchy
758 as PHI. */
759
760 for (i = 0; i < gimple_phi_num_args (phi); i++)
761 {
762 slsr_cand_t arg_cand;
763 tree arg = gimple_phi_arg_def (phi, i);
764 tree derived_base_name = NULL_TREE;
765 gimple *arg_stmt = NULL;
766 basic_block arg_bb = NULL;
767
768 if (TREE_CODE (arg) != SSA_NAME)
769 return;
770
771 arg_cand = base_cand_from_table (arg);
772
773 if (arg_cand)
774 {
775 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
776 {
777 if (!arg_cand->next_interp)
778 return;
779
780 arg_cand = lookup_cand (arg_cand->next_interp);
781 }
782
783 if (!integer_onep (arg_cand->stride))
784 return;
785
786 derived_base_name = arg_cand->base_expr;
787 arg_stmt = arg_cand->cand_stmt;
788 arg_bb = gimple_bb (arg_stmt);
789
790 /* Gather potential dead code savings if the phi statement
791 can be removed later on. */
792 if (has_single_use (arg))
793 {
794 if (gimple_code (arg_stmt) == GIMPLE_PHI)
795 savings += arg_cand->dead_savings;
796 else
797 savings += stmt_cost (arg_stmt, speed);
798 }
799 }
800 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
801 {
802 derived_base_name = arg;
803 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
804 }
805
806 if (!arg_bb || arg_bb->loop_father != cand_loop)
807 return;
808
809 if (i == 0)
810 arg0_base = derived_base_name;
811 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
812 return;
813 }
814
815 /* Create the candidate. "alloc_cand_and_find_basis" is named
816 misleadingly for this case, as no basis will be sought for a
817 CAND_PHI. */
818 base_type = TREE_TYPE (arg0_base);
819
820 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
821 0, integer_one_node, base_type,
822 sizetype, savings);
823
824 /* Add the candidate to the statement-candidate mapping. */
825 add_cand_for_stmt (phi, c);
826 }
827
828 /* Given PBASE which is a pointer to tree, look up the defining
829 statement for it and check whether the candidate is in the
830 form of:
831
832 X = B + (1 * S), S is integer constant
833 X = B + (i * S), S is integer one
834
835 If so, set PBASE to the candidate's base_expr and return the
836 widest_int (i * S).
837 Otherwise, just return a zero widest_int. */
838
839 static widest_int
840 backtrace_base_for_ref (tree *pbase)
841 {
842 tree base_in = *pbase;
843 slsr_cand_t base_cand;
844
845 STRIP_NOPS (base_in);
846
847 /* Strip off widening conversion(s) to handle cases where
848 e.g. 'B' is widened from an 'int' in order to calculate
849 a 64-bit address. */
850 if (CONVERT_EXPR_P (base_in)
851 && legal_cast_p_1 (TREE_TYPE (base_in),
852 TREE_TYPE (TREE_OPERAND (base_in, 0))))
853 base_in = get_unwidened (base_in, NULL_TREE);
854
855 if (TREE_CODE (base_in) != SSA_NAME)
856 return 0;
857
858 base_cand = base_cand_from_table (base_in);
859
860 while (base_cand && base_cand->kind != CAND_PHI)
861 {
862 if (base_cand->kind == CAND_ADD
863 && base_cand->index == 1
864 && TREE_CODE (base_cand->stride) == INTEGER_CST)
865 {
866 /* X = B + (1 * S), S is integer constant. */
867 *pbase = base_cand->base_expr;
868 return wi::to_widest (base_cand->stride);
869 }
870 else if (base_cand->kind == CAND_ADD
871 && TREE_CODE (base_cand->stride) == INTEGER_CST
872 && integer_onep (base_cand->stride))
873 {
874 /* X = B + (i * S), S is integer one. */
875 *pbase = base_cand->base_expr;
876 return base_cand->index;
877 }
878
879 if (base_cand->next_interp)
880 base_cand = lookup_cand (base_cand->next_interp);
881 else
882 base_cand = NULL;
883 }
884
885 return 0;
886 }
887
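/* Illustrative example (made-up names, not part of the original sources):
   if *PBASE is an SSA name T2 recorded as the CAND_ADD

     T2 = T2' + 5     (base T2', index 5, stride 1)

   then backtrace_base_for_ref sets *PBASE to T2' and returns 5, which
   the caller (restructure_reference, below) folds into the index as
   C5 * C3.  */
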
888 /* Look for the following pattern:
889
890 *PBASE: MEM_REF (T1, C1)
891
892 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
893 or
894 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
895 or
896 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
897
898 *PINDEX: C4 * BITS_PER_UNIT
899
900 If not present, leave the input values unchanged and return FALSE.
901 Otherwise, modify the input values as follows and return TRUE:
902
903 *PBASE: T1
904 *POFFSET: MULT_EXPR (T2, C3)
905 *PINDEX: C1 + (C2 * C3) + C4
906
907 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
908 will be further restructured to:
909
910 *PBASE: T1
911 *POFFSET: MULT_EXPR (T2', C3)
912 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
913
914 static bool
915 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
916 tree *ptype)
917 {
918 tree base = *pbase, offset = *poffset;
919 widest_int index = *pindex;
920 tree mult_op0, t1, t2, type;
921 widest_int c1, c2, c3, c4, c5;
922
923 if (!base
924 || !offset
925 || TREE_CODE (base) != MEM_REF
926 || TREE_CODE (offset) != MULT_EXPR
927 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
928 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
929 return false;
930
931 t1 = TREE_OPERAND (base, 0);
932 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
933 type = TREE_TYPE (TREE_OPERAND (base, 1));
934
935 mult_op0 = TREE_OPERAND (offset, 0);
936 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
937
938 if (TREE_CODE (mult_op0) == PLUS_EXPR)
939
940 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
941 {
942 t2 = TREE_OPERAND (mult_op0, 0);
943 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
944 }
945 else
946 return false;
947
948 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
949
950 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
951 {
952 t2 = TREE_OPERAND (mult_op0, 0);
953 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
954 }
955 else
956 return false;
957
958 else
959 {
960 t2 = mult_op0;
961 c2 = 0;
962 }
963
964 c4 = index >> LOG2_BITS_PER_UNIT;
965 c5 = backtrace_base_for_ref (&t2);
966
967 *pbase = t1;
968 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
969 wide_int_to_tree (sizetype, c3));
970 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
971 *ptype = type;
972
973 return true;
974 }
975
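/* Illustrative worked example for restructure_reference (made-up
   constants, not part of the original sources): given

     *PBASE:   MEM_REF (T1, 8)                    [C1 = 8]
     *POFFSET: MULT_EXPR (PLUS_EXPR (T2, 3), 4)   [C2 = 3, C3 = 4]
     *PINDEX:  32                                 [C4 = 32 / BITS_PER_UNIT = 4]

   and assuming backtrace_base_for_ref finds nothing to fold (C5 = 0),
   the outputs are

     *PBASE:   T1
     *POFFSET: MULT_EXPR (T2, 4)
     *PINDEX:  8 + 3 * 4 + 4 = 24  */
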
976 /* Given GS which contains a data reference, create a CAND_REF entry in
977 the candidate table and attempt to find a basis. */
978
979 static void
980 slsr_process_ref (gimple *gs)
981 {
982 tree ref_expr, base, offset, type;
983 HOST_WIDE_INT bitsize, bitpos;
984 machine_mode mode;
985 int unsignedp, reversep, volatilep;
986 slsr_cand_t c;
987
988 if (gimple_vdef (gs))
989 ref_expr = gimple_assign_lhs (gs);
990 else
991 ref_expr = gimple_assign_rhs1 (gs);
992
993 if (!handled_component_p (ref_expr)
994 || TREE_CODE (ref_expr) == BIT_FIELD_REF
995 || (TREE_CODE (ref_expr) == COMPONENT_REF
996 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
997 return;
998
999 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1000 &unsignedp, &reversep, &volatilep);
1001 if (reversep)
1002 return;
1003 widest_int index = bitpos;
1004
1005 if (!restructure_reference (&base, &offset, &index, &type))
1006 return;
1007
1008 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1009 type, sizetype, 0);
1010
1011 /* Add the candidate to the statement-candidate mapping. */
1012 add_cand_for_stmt (gs, c);
1013 }
1014
1015 /* Create a candidate entry for a statement GS, where GS multiplies
1016 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1017 about the two SSA names into the new candidate. Return the new
1018 candidate. */
1019
1020 static slsr_cand_t
1021 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1022 {
1023 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1024 tree stype = NULL_TREE;
1025 widest_int index;
1026 unsigned savings = 0;
1027 slsr_cand_t c;
1028 slsr_cand_t base_cand = base_cand_from_table (base_in);
1029
1030 /* Look at all interpretations of the base candidate, if necessary,
1031 to find information to propagate into this candidate. */
1032 while (base_cand && !base && base_cand->kind != CAND_PHI)
1033 {
1034
1035 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1036 {
1037 /* Y = (B + i') * 1
1038 X = Y * Z
1039 ================
1040 X = (B + i') * Z */
1041 base = base_cand->base_expr;
1042 index = base_cand->index;
1043 stride = stride_in;
1044 ctype = base_cand->cand_type;
1045 stype = TREE_TYPE (stride_in);
1046 if (has_single_use (base_in))
1047 savings = (base_cand->dead_savings
1048 + stmt_cost (base_cand->cand_stmt, speed));
1049 }
1050 else if (base_cand->kind == CAND_ADD
1051 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1052 {
1053 /* Y = B + (i' * S), S constant
1054 X = Y * Z
1055 ============================
1056 X = B + ((i' * S) * Z) */
1057 base = base_cand->base_expr;
1058 index = base_cand->index * wi::to_widest (base_cand->stride);
1059 stride = stride_in;
1060 ctype = base_cand->cand_type;
1061 stype = TREE_TYPE (stride_in);
1062 if (has_single_use (base_in))
1063 savings = (base_cand->dead_savings
1064 + stmt_cost (base_cand->cand_stmt, speed));
1065 }
1066
1067 if (base_cand->next_interp)
1068 base_cand = lookup_cand (base_cand->next_interp);
1069 else
1070 base_cand = NULL;
1071 }
1072
1073 if (!base)
1074 {
1075 /* No interpretations had anything useful to propagate, so
1076 produce X = (Y + 0) * Z. */
1077 base = base_in;
1078 index = 0;
1079 stride = stride_in;
1080 ctype = TREE_TYPE (base_in);
1081 stype = TREE_TYPE (stride_in);
1082 }
1083
1084 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1085 ctype, stype, savings);
1086 return c;
1087 }
1088
1089 /* Create a candidate entry for a statement GS, where GS multiplies
1090 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1091 information about BASE_IN into the new candidate. Return the new
1092 candidate. */
1093
1094 static slsr_cand_t
1095 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1096 {
1097 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1098 widest_int index, temp;
1099 unsigned savings = 0;
1100 slsr_cand_t c;
1101 slsr_cand_t base_cand = base_cand_from_table (base_in);
1102
1103 /* Look at all interpretations of the base candidate, if necessary,
1104 to find information to propagate into this candidate. */
1105 while (base_cand && !base && base_cand->kind != CAND_PHI)
1106 {
1107 if (base_cand->kind == CAND_MULT
1108 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1109 {
1110 /* Y = (B + i') * S, S constant
1111 X = Y * c
1112 ============================
1113 X = (B + i') * (S * c) */
1114 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1115 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1116 {
1117 base = base_cand->base_expr;
1118 index = base_cand->index;
1119 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1120 ctype = base_cand->cand_type;
1121 if (has_single_use (base_in))
1122 savings = (base_cand->dead_savings
1123 + stmt_cost (base_cand->cand_stmt, speed));
1124 }
1125 }
1126 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1127 {
1128 /* Y = B + (i' * 1)
1129 X = Y * c
1130 ===========================
1131 X = (B + i') * c */
1132 base = base_cand->base_expr;
1133 index = base_cand->index;
1134 stride = stride_in;
1135 ctype = base_cand->cand_type;
1136 if (has_single_use (base_in))
1137 savings = (base_cand->dead_savings
1138 + stmt_cost (base_cand->cand_stmt, speed));
1139 }
1140 else if (base_cand->kind == CAND_ADD
1141 && base_cand->index == 1
1142 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1143 {
1144 /* Y = B + (1 * S), S constant
1145 X = Y * c
1146 ===========================
1147 X = (B + S) * c */
1148 base = base_cand->base_expr;
1149 index = wi::to_widest (base_cand->stride);
1150 stride = stride_in;
1151 ctype = base_cand->cand_type;
1152 if (has_single_use (base_in))
1153 savings = (base_cand->dead_savings
1154 + stmt_cost (base_cand->cand_stmt, speed));
1155 }
1156
1157 if (base_cand->next_interp)
1158 base_cand = lookup_cand (base_cand->next_interp);
1159 else
1160 base_cand = NULL;
1161 }
1162
1163 if (!base)
1164 {
1165 /* No interpretations had anything useful to propagate, so
1166 produce X = (Y + 0) * c. */
1167 base = base_in;
1168 index = 0;
1169 stride = stride_in;
1170 ctype = TREE_TYPE (base_in);
1171 }
1172
1173 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1174 ctype, sizetype, savings);
1175 return c;
1176 }
1177
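/* Illustrative example (made-up values, not part of the original
   sources): if BASE_IN is recorded as the CAND_MULT  Y = (B + 1) * 2
   and GS is  X = Y * 3, create_mul_imm_cand records X as the CAND_MULT
   X = (B + 1) * 6, provided 6 fits in the type of the stride.  */
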
1178 /* Given GS which is a multiply of scalar integers, make an appropriate
1179 entry in the candidate table. If this is a multiply of two SSA names,
1180 create two CAND_MULT interpretations and attempt to find a basis for
1181 each of them. Otherwise, create a single CAND_MULT and attempt to
1182 find a basis. */
1183
1184 static void
1185 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1186 {
1187 slsr_cand_t c, c2;
1188
1189 /* If this is a multiply of an SSA name with itself, it is highly
1190 unlikely that we will get a strength reduction opportunity, so
1191 don't record it as a candidate. This also simplifies the logic for
1192 finding a basis, which must be revisited if this restriction is removed.
1193 if (rhs1 == rhs2)
1194 return;
1195
1196 if (TREE_CODE (rhs2) == SSA_NAME)
1197 {
1198 /* Record an interpretation of this statement in the candidate table
1199 assuming RHS1 is the base expression and RHS2 is the stride. */
1200 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1201
1202 /* Add the first interpretation to the statement-candidate mapping. */
1203 add_cand_for_stmt (gs, c);
1204
1205 /* Record another interpretation of this statement assuming RHS1
1206 is the stride and RHS2 is the base expression. */
1207 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1208 c->next_interp = c2->cand_num;
1209 }
1210 else
1211 {
1212 /* Record an interpretation for the multiply-immediate. */
1213 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1214
1215 /* Add the interpretation to the statement-candidate mapping. */
1216 add_cand_for_stmt (gs, c);
1217 }
1218 }
1219
1220 /* Create a candidate entry for a statement GS, where GS adds two
1221 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1222 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1223 information about the two SSA names into the new candidate.
1224 Return the new candidate. */
1225
1226 static slsr_cand_t
1227 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1228 bool subtract_p, bool speed)
1229 {
1230 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1231 tree stype = NULL_TREE;
1232 widest_int index;
1233 unsigned savings = 0;
1234 slsr_cand_t c;
1235 slsr_cand_t base_cand = base_cand_from_table (base_in);
1236 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1237
1238 /* The most useful transformation is a multiply-immediate feeding
1239 an add or subtract. Look for that first. */
1240 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1241 {
1242 if (addend_cand->kind == CAND_MULT
1243 && addend_cand->index == 0
1244 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1245 {
1246 /* Z = (B + 0) * S, S constant
1247 X = Y +/- Z
1248 ===========================
1249 X = Y + ((+/-1 * S) * B) */
1250 base = base_in;
1251 index = wi::to_widest (addend_cand->stride);
1252 if (subtract_p)
1253 index = -index;
1254 stride = addend_cand->base_expr;
1255 ctype = TREE_TYPE (base_in);
1256 stype = addend_cand->cand_type;
1257 if (has_single_use (addend_in))
1258 savings = (addend_cand->dead_savings
1259 + stmt_cost (addend_cand->cand_stmt, speed));
1260 }
1261
1262 if (addend_cand->next_interp)
1263 addend_cand = lookup_cand (addend_cand->next_interp);
1264 else
1265 addend_cand = NULL;
1266 }
1267
1268 while (base_cand && !base && base_cand->kind != CAND_PHI)
1269 {
1270 if (base_cand->kind == CAND_ADD
1271 && (base_cand->index == 0
1272 || operand_equal_p (base_cand->stride,
1273 integer_zero_node, 0)))
1274 {
1275 /* Y = B + (i' * S), i' * S = 0
1276 X = Y +/- Z
1277 ============================
1278 X = B + (+/-1 * Z) */
1279 base = base_cand->base_expr;
1280 index = subtract_p ? -1 : 1;
1281 stride = addend_in;
1282 ctype = base_cand->cand_type;
1283 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1284 : TREE_TYPE (addend_in));
1285 if (has_single_use (base_in))
1286 savings = (base_cand->dead_savings
1287 + stmt_cost (base_cand->cand_stmt, speed));
1288 }
1289 else if (subtract_p)
1290 {
1291 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1292
1293 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1294 {
1295 if (subtrahend_cand->kind == CAND_MULT
1296 && subtrahend_cand->index == 0
1297 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1298 {
1299 /* Z = (B + 0) * S, S constant
1300 X = Y - Z
1301 ===========================
1302 Value: X = Y + ((-1 * S) * B) */
1303 base = base_in;
1304 index = wi::to_widest (subtrahend_cand->stride);
1305 index = -index;
1306 stride = subtrahend_cand->base_expr;
1307 ctype = TREE_TYPE (base_in);
1308 stype = subtrahend_cand->cand_type;
1309 if (has_single_use (addend_in))
1310 savings = (subtrahend_cand->dead_savings
1311 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1312 }
1313
1314 if (subtrahend_cand->next_interp)
1315 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1316 else
1317 subtrahend_cand = NULL;
1318 }
1319 }
1320
1321 if (base_cand->next_interp)
1322 base_cand = lookup_cand (base_cand->next_interp);
1323 else
1324 base_cand = NULL;
1325 }
1326
1327 if (!base)
1328 {
1329 /* No interpretations had anything useful to propagate, so
1330 produce X = Y + (1 * Z). */
1331 base = base_in;
1332 index = subtract_p ? -1 : 1;
1333 stride = addend_in;
1334 ctype = TREE_TYPE (base_in);
1335 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1336 : TREE_TYPE (addend_in));
1337 }
1338
1339 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1340 ctype, stype, savings);
1341 return c;
1342 }
1343
1344 /* Create a candidate entry for a statement GS, where GS adds SSA
1345 name BASE_IN to constant INDEX_IN. Propagate any known information
1346 about BASE_IN into the new candidate. Return the new candidate. */
1347
1348 static slsr_cand_t
1349 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1350 bool speed)
1351 {
1352 enum cand_kind kind = CAND_ADD;
1353 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1354 tree stype = NULL_TREE;
1355 widest_int index, multiple;
1356 unsigned savings = 0;
1357 slsr_cand_t c;
1358 slsr_cand_t base_cand = base_cand_from_table (base_in);
1359
1360 while (base_cand && !base && base_cand->kind != CAND_PHI)
1361 {
1362 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1363
1364 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1365 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1366 sign, &multiple))
1367 {
1368 /* Y = (B + i') * S, S constant, c = kS for some integer k
1369 X = Y + c
1370 ============================
1371 X = (B + (i'+ k)) * S
1372 OR
1373 Y = B + (i' * S), S constant, c = kS for some integer k
1374 X = Y + c
1375 ============================
1376 X = (B + (i'+ k)) * S */
1377 kind = base_cand->kind;
1378 base = base_cand->base_expr;
1379 index = base_cand->index + multiple;
1380 stride = base_cand->stride;
1381 ctype = base_cand->cand_type;
1382 stype = base_cand->stride_type;
1383 if (has_single_use (base_in))
1384 savings = (base_cand->dead_savings
1385 + stmt_cost (base_cand->cand_stmt, speed));
1386 }
1387
1388 if (base_cand->next_interp)
1389 base_cand = lookup_cand (base_cand->next_interp);
1390 else
1391 base_cand = NULL;
1392 }
1393
1394 if (!base)
1395 {
1396 /* No interpretations had anything useful to propagate, so
1397 produce X = Y + (c * 1). */
1398 kind = CAND_ADD;
1399 base = base_in;
1400 index = index_in;
1401 stride = integer_one_node;
1402 ctype = TREE_TYPE (base_in);
1403 stype = sizetype;
1404 }
1405
1406 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1407 ctype, stype, savings);
1408 return c;
1409 }
1410
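/* Illustrative example (made-up values, not part of the original
   sources): if BASE_IN is recorded as the CAND_MULT  Y = (B + 2) * 4
   and GS is  X = Y + 12, then 12 is a multiple of the stride (k = 3),
   so create_add_imm_cand records X as the CAND_MULT  X = (B + 5) * 4.  */
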
1411 /* Given GS which is an add or subtract of scalar integers or pointers,
1412 make at least one appropriate entry in the candidate table. */
1413
1414 static void
1415 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1416 {
1417 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1418 slsr_cand_t c = NULL, c2;
1419
1420 if (TREE_CODE (rhs2) == SSA_NAME)
1421 {
1422 /* First record an interpretation assuming RHS1 is the base expression
1423 and RHS2 is the stride. But it doesn't make sense for the
1424 stride to be a pointer, so don't record a candidate in that case. */
1425 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1426 {
1427 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1428
1429 /* Add the first interpretation to the statement-candidate
1430 mapping. */
1431 add_cand_for_stmt (gs, c);
1432 }
1433
1434 /* If the two RHS operands are identical, or this is a subtract,
1435 we're done. */
1436 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1437 return;
1438
1439 /* Otherwise, record another interpretation assuming RHS2 is the
1440 base expression and RHS1 is the stride, again provided that the
1441 stride is not a pointer. */
1442 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1443 {
1444 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1445 if (c)
1446 c->next_interp = c2->cand_num;
1447 else
1448 add_cand_for_stmt (gs, c2);
1449 }
1450 }
1451 else
1452 {
1453 /* Record an interpretation for the add-immediate. */
1454 widest_int index = wi::to_widest (rhs2);
1455 if (subtract_p)
1456 index = -index;
1457
1458 c = create_add_imm_cand (gs, rhs1, index, speed);
1459
1460 /* Add the interpretation to the statement-candidate mapping. */
1461 add_cand_for_stmt (gs, c);
1462 }
1463 }
1464
1465 /* Given GS which is a negate of a scalar integer, make an appropriate
1466 entry in the candidate table. A negate is equivalent to a multiply
1467 by -1. */
1468
1469 static void
1470 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1471 {
1472 /* Record a CAND_MULT interpretation for the multiply by -1. */
1473 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1474
1475 /* Add the interpretation to the statement-candidate mapping. */
1476 add_cand_for_stmt (gs, c);
1477 }
1478
1479 /* Helper function for legal_cast_p, operating on two trees. Checks
1480 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1481 for more details. */
1482
1483 static bool
1484 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1485 {
1486 unsigned lhs_size, rhs_size;
1487 bool lhs_wraps, rhs_wraps;
1488
1489 lhs_size = TYPE_PRECISION (lhs_type);
1490 rhs_size = TYPE_PRECISION (rhs_type);
1491 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1492 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1493
1494 if (lhs_size < rhs_size
1495 || (rhs_wraps && !lhs_wraps)
1496 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1497 return false;
1498
1499 return true;
1500 }
1501
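/* Illustrative examples for legal_cast_p_1 (not part of the original
   sources), assuming a 32-bit int, a 64-bit long, and the usual
   overflow semantics (signed types do not wrap, unsigned types do):

     (long) <int expr>            legal: widening, neither type wraps
     (int) <unsigned int expr>    not legal: source wraps, target does not
     (unsigned long) <unsigned int expr>
                                  not legal: both wrap but sizes differ
     (unsigned int) <int expr>    legal: same size, source does not wrap  */
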
1502 /* Return TRUE if GS is a statement that defines an SSA name from
1503 a conversion and is legal for us to combine with an add and multiply
1504 in the candidate table. For example, suppose we have:
1505
1506 A = B + i;
1507 C = (type) A;
1508 D = C * S;
1509
1510 Without the type-cast, we would create a CAND_MULT for D with base B,
1511 index i, and stride S. We want to record this candidate only if
1512 doing so is equivalent to applying the type cast after the multiply:
1513
1514 A = B + i;
1515 E = A * S;
1516 D = (type) E;
1517
1518 We will record the type with the candidate for D. This allows us
1519 to use a similar previous candidate as a basis. If we have earlier seen
1520
1521 A' = B + i';
1522 C' = (type) A';
1523 D' = C' * S;
1524
1525 we can replace D with
1526
1527 D = D' + (i - i') * S;
1528
1529 But if moving the type-cast would change semantics, we mustn't do this.
1530
1531 This is legitimate for casts from a non-wrapping integral type to
1532 any integral type of the same or larger size. It is not legitimate
1533 to convert a wrapping type to a non-wrapping type, or to a wrapping
1534 type of a different size. I.e., with a wrapping type, we must
1535 assume that the addition B + i could wrap, in which case performing
1536 the multiply before or after one of the "illegal" type casts will
1537 have different semantics. */
1538
1539 static bool
1540 legal_cast_p (gimple *gs, tree rhs)
1541 {
1542 if (!is_gimple_assign (gs)
1543 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1544 return false;
1545
1546 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
1547 }
1548
1549 /* Given GS which is a cast to a scalar integer type, determine whether
1550 the cast is legal for strength reduction. If so, make at least one
1551 appropriate entry in the candidate table. */
1552
1553 static void
1554 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1555 {
1556 tree lhs, ctype;
1557 slsr_cand_t base_cand, c = NULL, c2;
1558 unsigned savings = 0;
1559
1560 if (!legal_cast_p (gs, rhs1))
1561 return;
1562
1563 lhs = gimple_assign_lhs (gs);
1564 base_cand = base_cand_from_table (rhs1);
1565 ctype = TREE_TYPE (lhs);
1566
1567 if (base_cand && base_cand->kind != CAND_PHI)
1568 {
1569 while (base_cand)
1570 {
1571 /* Propagate all data from the base candidate except the type,
1572 which comes from the cast, and the base candidate's cast,
1573 which is no longer applicable. */
1574 if (has_single_use (rhs1))
1575 savings = (base_cand->dead_savings
1576 + stmt_cost (base_cand->cand_stmt, speed));
1577
1578 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1579 base_cand->base_expr,
1580 base_cand->index, base_cand->stride,
1581 ctype, base_cand->stride_type,
1582 savings);
1583 if (base_cand->next_interp)
1584 base_cand = lookup_cand (base_cand->next_interp);
1585 else
1586 base_cand = NULL;
1587 }
1588 }
1589 else
1590 {
1591 /* If nothing is known about the RHS, create fresh CAND_ADD and
1592 CAND_MULT interpretations:
1593
1594 X = Y + (0 * 1)
1595 X = (Y + 0) * 1
1596
1597 The first of these is somewhat arbitrary, but the choice of
1598 1 for the stride simplifies the logic for propagating casts
1599 into their uses. */
1600 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1601 integer_one_node, ctype, sizetype, 0);
1602 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1603 integer_one_node, ctype, sizetype, 0);
1604 c->next_interp = c2->cand_num;
1605 }
1606
1607 /* Add the first (or only) interpretation to the statement-candidate
1608 mapping. */
1609 add_cand_for_stmt (gs, c);
1610 }
1611
1612 /* Given GS which is a copy of a scalar integer type, make at least one
1613 appropriate entry in the candidate table.
1614
1615 This interface is included for completeness, but is unnecessary
1616 if this pass immediately follows a pass that performs copy
1617 propagation, such as DOM. */
1618
1619 static void
1620 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1621 {
1622 slsr_cand_t base_cand, c = NULL, c2;
1623 unsigned savings = 0;
1624
1625 base_cand = base_cand_from_table (rhs1);
1626
1627 if (base_cand && base_cand->kind != CAND_PHI)
1628 {
1629 while (base_cand)
1630 {
1631 /* Propagate all data from the base candidate. */
1632 if (has_single_use (rhs1))
1633 savings = (base_cand->dead_savings
1634 + stmt_cost (base_cand->cand_stmt, speed));
1635
1636 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1637 base_cand->base_expr,
1638 base_cand->index, base_cand->stride,
1639 base_cand->cand_type,
1640 base_cand->stride_type, savings);
1641 if (base_cand->next_interp)
1642 base_cand = lookup_cand (base_cand->next_interp);
1643 else
1644 base_cand = NULL;
1645 }
1646 }
1647 else
1648 {
1649 /* If nothing is known about the RHS, create fresh CAND_ADD and
1650 CAND_MULT interpretations:
1651
1652 X = Y + (0 * 1)
1653 X = (Y + 0) * 1
1654
1655 The first of these is somewhat arbitrary, but the choice of
1656 1 for the stride simplifies the logic for propagating casts
1657 into their uses. */
1658 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1659 integer_one_node, TREE_TYPE (rhs1),
1660 sizetype, 0);
1661 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1662 integer_one_node, TREE_TYPE (rhs1),
1663 sizetype, 0);
1664 c->next_interp = c2->cand_num;
1665 }
1666
1667 /* Add the first (or only) interpretation to the statement-candidate
1668 mapping. */
1669 add_cand_for_stmt (gs, c);
1670 }
1671 \f
1672 class find_candidates_dom_walker : public dom_walker
1673 {
1674 public:
1675 find_candidates_dom_walker (cdi_direction direction)
1676 : dom_walker (direction) {}
1677 virtual edge before_dom_children (basic_block);
1678 };
1679
1680 /* Find strength-reduction candidates in block BB. */
1681
1682 edge
1683 find_candidates_dom_walker::before_dom_children (basic_block bb)
1684 {
1685 bool speed = optimize_bb_for_speed_p (bb);
1686
1687 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1688 gsi_next (&gsi))
1689 slsr_process_phi (gsi.phi (), speed);
1690
1691 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1692 gsi_next (&gsi))
1693 {
1694 gimple *gs = gsi_stmt (gsi);
1695
1696 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1697 slsr_process_ref (gs);
1698
1699 else if (is_gimple_assign (gs)
1700 && SCALAR_INT_MODE_P
1701 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1702 {
1703 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1704
1705 switch (gimple_assign_rhs_code (gs))
1706 {
1707 case MULT_EXPR:
1708 case PLUS_EXPR:
1709 rhs1 = gimple_assign_rhs1 (gs);
1710 rhs2 = gimple_assign_rhs2 (gs);
1711 /* Should never happen, but currently some buggy situations
1712 in earlier phases put constants in rhs1. */
1713 if (TREE_CODE (rhs1) != SSA_NAME)
1714 continue;
1715 break;
1716
1717 /* Possible future opportunity: rhs1 of a ptr+ can be
1718 an ADDR_EXPR. */
1719 case POINTER_PLUS_EXPR:
1720 case MINUS_EXPR:
1721 rhs2 = gimple_assign_rhs2 (gs);
1722 gcc_fallthrough ();
1723
1724 CASE_CONVERT:
1725 case SSA_NAME:
1726 case NEGATE_EXPR:
1727 rhs1 = gimple_assign_rhs1 (gs);
1728 if (TREE_CODE (rhs1) != SSA_NAME)
1729 continue;
1730 break;
1731
1732 default:
1733 ;
1734 }
1735
1736 switch (gimple_assign_rhs_code (gs))
1737 {
1738 case MULT_EXPR:
1739 slsr_process_mul (gs, rhs1, rhs2, speed);
1740 break;
1741
1742 case PLUS_EXPR:
1743 case POINTER_PLUS_EXPR:
1744 case MINUS_EXPR:
1745 slsr_process_add (gs, rhs1, rhs2, speed);
1746 break;
1747
1748 case NEGATE_EXPR:
1749 slsr_process_neg (gs, rhs1, speed);
1750 break;
1751
1752 CASE_CONVERT:
1753 slsr_process_cast (gs, rhs1, speed);
1754 break;
1755
1756 case SSA_NAME:
1757 slsr_process_copy (gs, rhs1, speed);
1758 break;
1759
1760 default:
1761 ;
1762 }
1763 }
1764 }
1765 return NULL;
1766 }
1767 \f
1768 /* Dump a candidate for debug. */
1769
1770 static void
1771 dump_candidate (slsr_cand_t c)
1772 {
1773 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1774 gimple_bb (c->cand_stmt)->index);
1775 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1776 switch (c->kind)
1777 {
1778 case CAND_MULT:
1779 fputs (" MULT : (", dump_file);
1780 print_generic_expr (dump_file, c->base_expr, 0);
1781 fputs (" + ", dump_file);
1782 print_decs (c->index, dump_file);
1783 fputs (") * ", dump_file);
1784 if (TREE_CODE (c->stride) != INTEGER_CST
1785 && c->stride_type != TREE_TYPE (c->stride))
1786 {
1787 fputs ("(", dump_file);
1788 print_generic_expr (dump_file, c->stride_type, 0);
1789 fputs (")", dump_file);
1790 }
1791 print_generic_expr (dump_file, c->stride, 0);
1792 fputs (" : ", dump_file);
1793 break;
1794 case CAND_ADD:
1795 fputs (" ADD : ", dump_file);
1796 print_generic_expr (dump_file, c->base_expr, 0);
1797 fputs (" + (", dump_file);
1798 print_decs (c->index, dump_file);
1799 fputs (" * ", dump_file);
1800 if (TREE_CODE (c->stride) != INTEGER_CST
1801 && c->stride_type != TREE_TYPE (c->stride))
1802 {
1803 fputs ("(", dump_file);
1804 print_generic_expr (dump_file, c->stride_type, 0);
1805 fputs (")", dump_file);
1806 }
1807 print_generic_expr (dump_file, c->stride, 0);
1808 fputs (") : ", dump_file);
1809 break;
1810 case CAND_REF:
1811 fputs (" REF : ", dump_file);
1812 print_generic_expr (dump_file, c->base_expr, 0);
1813 fputs (" + (", dump_file);
1814 print_generic_expr (dump_file, c->stride, 0);
1815 fputs (") + ", dump_file);
1816 print_decs (c->index, dump_file);
1817 fputs (" : ", dump_file);
1818 break;
1819 case CAND_PHI:
1820 fputs (" PHI : ", dump_file);
1821 print_generic_expr (dump_file, c->base_expr, 0);
1822 fputs (" + (unknown * ", dump_file);
1823 print_generic_expr (dump_file, c->stride, 0);
1824 fputs (") : ", dump_file);
1825 break;
1826 default:
1827 gcc_unreachable ();
1828 }
1829 print_generic_expr (dump_file, c->cand_type, 0);
1830 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1831 c->basis, c->dependent, c->sibling);
1832 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1833 c->next_interp, c->dead_savings);
1834 if (c->def_phi)
1835 fprintf (dump_file, " phi: %d\n", c->def_phi);
1836 fputs ("\n", dump_file);
1837 }
1838
1839 /* Dump the candidate vector for debug. */
1840
1841 static void
1842 dump_cand_vec (void)
1843 {
1844 unsigned i;
1845 slsr_cand_t c;
1846
1847 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1848
1849 FOR_EACH_VEC_ELT (cand_vec, i, c)
1850 dump_candidate (c);
1851 }
1852
1853 /* Callback used to dump the candidate chains hash table. */
1854
1855 int
1856 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1857 {
1858 const_cand_chain_t chain = *slot;
1859 cand_chain_t p;
1860
1861 print_generic_expr (dump_file, chain->base_expr, 0);
1862 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1863
1864 for (p = chain->next; p; p = p->next)
1865 fprintf (dump_file, " -> %d", p->cand->cand_num);
1866
1867 fputs ("\n", dump_file);
1868 return 1;
1869 }
1870
1871 /* Dump the candidate chains. */
1872
1873 static void
1874 dump_cand_chains (void)
1875 {
1876 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1877 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1878 (NULL);
1879 fputs ("\n", dump_file);
1880 }
1881
1882 /* Dump the increment vector for debug. */
1883
1884 static void
1885 dump_incr_vec (void)
1886 {
1887 if (dump_file && (dump_flags & TDF_DETAILS))
1888 {
1889 unsigned i;
1890
1891 fprintf (dump_file, "\nIncrement vector:\n\n");
1892
1893 for (i = 0; i < incr_vec_len; i++)
1894 {
1895 fprintf (dump_file, "%3d increment: ", i);
1896 print_decs (incr_vec[i].incr, dump_file);
1897 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1898 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1899 fputs ("\n initializer: ", dump_file);
1900 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1901 fputs ("\n\n", dump_file);
1902 }
1903 }
1904 }
1905 \f
1906 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1907 data reference. */
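/* For example (an illustrative sketch; SSA names are arbitrary), a
   CAND_REF recorded as p_1 + (_3) + 16 causes the original memory
   reference to be rewritten as a MEM_REF whose address is p_1 + _3
   and whose constant offset is 16, with the addressing expression
   gimplified just before the candidate statement.  */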
1908
1909 static void
1910 replace_ref (tree *expr, slsr_cand_t c)
1911 {
1912 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1913 unsigned HOST_WIDE_INT misalign;
1914 unsigned align;
1915
1916 /* Ensure the memory reference carries the minimum alignment
1917 requirement for the data type. See PR58041. */
1918 get_object_alignment_1 (*expr, &align, &misalign);
1919 if (misalign != 0)
1920 align = least_bit_hwi (misalign);
1921 if (align < TYPE_ALIGN (acc_type))
1922 acc_type = build_aligned_type (acc_type, align);
1923
1924 add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
1925 c->base_expr, c->stride);
1926 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1927 wide_int_to_tree (c->cand_type, c->index));
1928
1929 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1930 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1931 TREE_OPERAND (mem_ref, 0)
1932 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1933 /*simple_p=*/true, NULL,
1934 /*before=*/true, GSI_SAME_STMT);
1935 copy_ref_info (mem_ref, *expr);
1936 *expr = mem_ref;
1937 update_stmt (c->cand_stmt);
1938 }
1939
1940 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1941 dependent of candidate C with an equivalent strength-reduced data
1942 reference. */
1943
1944 static void
1945 replace_refs (slsr_cand_t c)
1946 {
1947 if (dump_file && (dump_flags & TDF_DETAILS))
1948 {
1949 fputs ("Replacing reference: ", dump_file);
1950 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1951 }
1952
1953 if (gimple_vdef (c->cand_stmt))
1954 {
1955 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1956 replace_ref (lhs, c);
1957 }
1958 else
1959 {
1960 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1961 replace_ref (rhs, c);
1962 }
1963
1964 if (dump_file && (dump_flags & TDF_DETAILS))
1965 {
1966 fputs ("With: ", dump_file);
1967 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1968 fputs ("\n", dump_file);
1969 }
1970
1971 if (c->sibling)
1972 replace_refs (lookup_cand (c->sibling));
1973
1974 if (c->dependent)
1975 replace_refs (lookup_cand (c->dependent));
1976 }
1977
1978 /* Return TRUE if candidate C is dependent upon a PHI. */
1979
1980 static bool
1981 phi_dependent_cand_p (slsr_cand_t c)
1982 {
1983 /* A candidate is not necessarily dependent upon a PHI just because
1984 it has a phi definition for its base name. It may have a basis
1985 that relies upon the same phi definition, in which case the PHI
1986 is irrelevant to this candidate. */
1987 return (c->def_phi
1988 && c->basis
1989 && lookup_cand (c->basis)->def_phi != c->def_phi);
1990 }
1991
1992 /* Calculate the increment required for candidate C relative to
1993 its basis. */
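/* For instance (illustrative), a candidate with index 7 whose basis
   has index 2 yields an increment of 5.  */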
1994
1995 static widest_int
1996 cand_increment (slsr_cand_t c)
1997 {
1998 slsr_cand_t basis;
1999
2000 /* If the candidate doesn't have a basis, just return its own
2001 index. This is useful in record_increments to help us find
2002 an existing initializer. Also, if the candidate's basis is
2003 hidden by a phi, then its own index will be the increment
2004 from the newly introduced phi basis. */
2005 if (!c->basis || phi_dependent_cand_p (c))
2006 return c->index;
2007
2008 basis = lookup_cand (c->basis);
2009 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2010 return c->index - basis->index;
2011 }
2012
2013 /* Calculate the increment required for candidate C relative to
2014 its basis. If we aren't going to generate pointer arithmetic
2015 for this candidate, return the absolute value of that increment
2016 instead. */
2017
2018 static inline widest_int
2019 cand_abs_increment (slsr_cand_t c)
2020 {
2021 widest_int increment = cand_increment (c);
2022
2023 if (!address_arithmetic_p && wi::neg_p (increment))
2024 increment = -increment;
2025
2026 return increment;
2027 }
2028
2029 /* Return TRUE iff candidate C has already been replaced under
2030 another interpretation. */
2031
2032 static inline bool
2033 cand_already_replaced (slsr_cand_t c)
2034 {
2035 return (gimple_bb (c->cand_stmt) == 0);
2036 }
2037
2038 /* Common logic used by replace_unconditional_candidate and
2039 replace_conditional_candidate. */
2040
2041 static void
2042 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2043 {
2044 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2045 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2046
2047 /* It is highly unlikely, but possible, that the resulting
2048 bump doesn't fit in a HWI. Abandon the replacement
2049 in this case. This does not affect siblings or dependents
2050 of C. Restriction to signed HWI is conservative for unsigned
2051 types but allows for safe negation without twisted logic. */
2052 if (wi::fits_shwi_p (bump)
2053 && bump.to_shwi () != HOST_WIDE_INT_MIN
2054 /* It is not useful to replace casts, copies, or adds of
2055 an SSA name and a constant. */
2056 && cand_code != SSA_NAME
2057 && !CONVERT_EXPR_CODE_P (cand_code)
2058 && cand_code != PLUS_EXPR
2059 && cand_code != POINTER_PLUS_EXPR
2060 && cand_code != MINUS_EXPR)
2061 {
2062 enum tree_code code = PLUS_EXPR;
2063 tree bump_tree;
2064 gimple *stmt_to_print = NULL;
2065
2066 /* If the basis name and the candidate's LHS have incompatible
2067 types, introduce a cast. */
2068 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2069 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2070 if (wi::neg_p (bump))
2071 {
2072 code = MINUS_EXPR;
2073 bump = -bump;
2074 }
2075
2076 bump_tree = wide_int_to_tree (target_type, bump);
2077
2078 if (dump_file && (dump_flags & TDF_DETAILS))
2079 {
2080 fputs ("Replacing: ", dump_file);
2081 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2082 }
2083
2084 if (bump == 0)
2085 {
2086 tree lhs = gimple_assign_lhs (c->cand_stmt);
2087 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2088 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2089 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2090 gsi_replace (&gsi, copy_stmt, false);
2091 c->cand_stmt = copy_stmt;
2092 if (dump_file && (dump_flags & TDF_DETAILS))
2093 stmt_to_print = copy_stmt;
2094 }
2095 else
2096 {
2097 tree rhs1, rhs2;
2098 if (cand_code != NEGATE_EXPR) {
2099 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2100 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2101 }
2102 if (cand_code != NEGATE_EXPR
2103 && ((operand_equal_p (rhs1, basis_name, 0)
2104 && operand_equal_p (rhs2, bump_tree, 0))
2105 || (operand_equal_p (rhs1, bump_tree, 0)
2106 && operand_equal_p (rhs2, basis_name, 0))))
2107 {
2108 if (dump_file && (dump_flags & TDF_DETAILS))
2109 {
2110 fputs ("(duplicate, not actually replacing)", dump_file);
2111 stmt_to_print = c->cand_stmt;
2112 }
2113 }
2114 else
2115 {
2116 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2117 gimple_assign_set_rhs_with_ops (&gsi, code,
2118 basis_name, bump_tree);
2119 update_stmt (gsi_stmt (gsi));
2120 c->cand_stmt = gsi_stmt (gsi);
2121 if (dump_file && (dump_flags & TDF_DETAILS))
2122 stmt_to_print = gsi_stmt (gsi);
2123 }
2124 }
2125
2126 if (dump_file && (dump_flags & TDF_DETAILS))
2127 {
2128 fputs ("With: ", dump_file);
2129 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2130 fputs ("\n", dump_file);
2131 }
2132 }
2133 }
2134
2135 /* Replace candidate C with an add or subtract. Note that we only
2136 operate on CAND_MULTs with known strides, so we will never generate
2137 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2138 X = Y + ((i - i') * S), as described in the module commentary. The
2139 folded value ((i - i') * S) is referred to here as the "bump." */
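/* For example (illustrative), if the basis is Y = (B + 2) * S, the
   candidate is X = (B + 7) * S, and S is known to be 4, then the
   bump is (7 - 2) * 4 = 20 and the candidate is rewritten as
   X = Y + 20.  */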
2140
2141 static void
2142 replace_unconditional_candidate (slsr_cand_t c)
2143 {
2144 slsr_cand_t basis;
2145
2146 if (cand_already_replaced (c))
2147 return;
2148
2149 basis = lookup_cand (c->basis);
2150 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2151
2152 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2153 }
2154 \f
2155 /* Return the index in the increment vector of the given INCREMENT,
2156 or -1 if not found. The latter can occur if more than
2157 MAX_INCR_VEC_LEN increments have been found. */
2158
2159 static inline int
2160 incr_vec_index (const widest_int &increment)
2161 {
2162 unsigned i;
2163
2164 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2165 ;
2166
2167 if (i < incr_vec_len)
2168 return i;
2169 else
2170 return -1;
2171 }
2172
2173 /* Create a new statement along edge E to add BASIS_NAME to the product
2174 of INCREMENT and the stride of candidate C. Create and return a new
2175 SSA name to be used as the LHS of the new statement.
2176 KNOWN_STRIDE is true iff C's stride is a constant. */
2177
2178 static tree
2179 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2180 widest_int increment, edge e, location_t loc,
2181 bool known_stride)
2182 {
2183 basic_block insert_bb;
2184 gimple_stmt_iterator gsi;
2185 tree lhs, basis_type;
2186 gassign *new_stmt, *cast_stmt = NULL;
2187
2188 /* If the add candidate along this incoming edge has the same
2189 index as C's hidden basis, the hidden basis represents this
2190 edge correctly. */
2191 if (increment == 0)
2192 return basis_name;
2193
2194 basis_type = TREE_TYPE (basis_name);
2195 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2196
2197 /* Occasionally people convert integers to pointers without a
2198 cast, leading us into trouble if we aren't careful. */
2199 enum tree_code plus_code
2200 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2201
2202 if (known_stride)
2203 {
2204 tree bump_tree;
2205 enum tree_code code = plus_code;
2206 widest_int bump = increment * wi::to_widest (c->stride);
2207 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2208 {
2209 code = MINUS_EXPR;
2210 bump = -bump;
2211 }
2212
2213 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2214 bump_tree = wide_int_to_tree (stride_type, bump);
2215 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2216 }
2217 else
2218 {
2219 int i;
2220 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2221 i = incr_vec_index (negate_incr ? -increment : increment);
2222 gcc_assert (i >= 0);
2223
2224 if (incr_vec[i].initializer)
2225 {
2226 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2227 new_stmt = gimple_build_assign (lhs, code, basis_name,
2228 incr_vec[i].initializer);
2229 }
2230 else {
2231 tree stride;
2232
2233 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2234 {
2235 tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2236 "slsr");
2237 cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2238 c->stride);
2239 stride = cast_stride;
2240 }
2241 else
2242 stride = c->stride;
2243
2244 if (increment == 1)
2245 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2246 else if (increment == -1)
2247 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2248 else
2249 gcc_unreachable ();
2250 }
2251 }
2252
2253 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2254 gsi = gsi_last_bb (insert_bb);
2255
2256 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
2257 {
2258 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2259 if (cast_stmt)
2260 {
2261 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
2262 gimple_set_location (cast_stmt, loc);
2263 }
2264 }
2265 else
2266 {
2267 if (cast_stmt)
2268 {
2269 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
2270 gimple_set_location (cast_stmt, loc);
2271 }
2272 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2273 }
2274
2275 gimple_set_location (new_stmt, loc);
2276
2277 if (dump_file && (dump_flags & TDF_DETAILS))
2278 {
2279 if (cast_stmt)
2280 {
2281 fprintf (dump_file, "Inserting cast in block %d: ",
2282 insert_bb->index);
2283 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
2284 }
2285 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2286 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2287 }
2288
2289 return lhs;
2290 }
2291
2292 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2293 is hidden by the phi node FROM_PHI, create a new phi node in the same
2294 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2295 with its phi arguments representing conditional adjustments to the
2296 hidden basis along conditional incoming paths. Those adjustments are
2297 made by creating add statements (and sometimes recursively creating
2298 phis) along those incoming paths. LOC is the location to attach to
2299 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2300 constant. */
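/* As an illustrative sketch (SSA names are arbitrary): suppose the
   hidden basis is Y = (B + 2) * 4 and the CAND_PHI merges add
   candidates with indices 3 and 5.  Along the first incoming arc we
   insert slsr_1 = Y + 4, along the second slsr_2 = Y + 12, and the
   new basis is slsr_3 = PHI <slsr_1, slsr_2>.  */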
2301
2302 static tree
2303 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2304 location_t loc, bool known_stride)
2305 {
2306 int i;
2307 tree name, phi_arg;
2308 gphi *phi;
2309 slsr_cand_t basis = lookup_cand (c->basis);
2310 int nargs = gimple_phi_num_args (from_phi);
2311 basic_block phi_bb = gimple_bb (from_phi);
2312 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2313 auto_vec<tree> phi_args (nargs);
2314
2315 /* Process each argument of the existing phi that represents
2316 conditionally-executed add candidates. */
2317 for (i = 0; i < nargs; i++)
2318 {
2319 edge e = (*phi_bb->preds)[i];
2320 tree arg = gimple_phi_arg_def (from_phi, i);
2321 tree feeding_def;
2322
2323 /* If the phi argument is the base name of the CAND_PHI, then
2324 this incoming arc should use the hidden basis. */
2325 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2326 if (basis->index == 0)
2327 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2328 else
2329 {
2330 widest_int incr = -basis->index;
2331 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2332 e, loc, known_stride);
2333 }
2334 else
2335 {
2336 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2337
2338 /* If there is another phi along this incoming edge, we must
2339 process it in the same fashion to ensure that all basis
2340 adjustments are made along its incoming edges. */
2341 if (gimple_code (arg_def) == GIMPLE_PHI)
2342 feeding_def = create_phi_basis (c, arg_def, basis_name,
2343 loc, known_stride);
2344 else
2345 {
2346 slsr_cand_t arg_cand = base_cand_from_table (arg);
2347 widest_int diff = arg_cand->index - basis->index;
2348 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2349 e, loc, known_stride);
2350 }
2351 }
2352
2353 /* Because of recursion, we need to save the arguments in a vector
2354 so we can create the PHI statement all at once. Otherwise the
2355 storage for the half-created PHI can be reclaimed. */
2356 phi_args.safe_push (feeding_def);
2357 }
2358
2359 /* Create the new phi basis. */
2360 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2361 phi = create_phi_node (name, phi_bb);
2362 SSA_NAME_DEF_STMT (name) = phi;
2363
2364 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2365 {
2366 edge e = (*phi_bb->preds)[i];
2367 add_phi_arg (phi, phi_arg, e, loc);
2368 }
2369
2370 update_stmt (phi);
2371
2372 if (dump_file && (dump_flags & TDF_DETAILS))
2373 {
2374 fputs ("Introducing new phi basis: ", dump_file);
2375 print_gimple_stmt (dump_file, phi, 0, 0);
2376 }
2377
2378 return name;
2379 }
2380
2381 /* Given a candidate C whose basis is hidden by at least one intervening
2382 phi, introduce a matching number of new phis to represent its basis
2383 adjusted by conditional increments along possible incoming paths. Then
2384 replace C as though it were an unconditional candidate, using the new
2385 basis. */
2386
2387 static void
2388 replace_conditional_candidate (slsr_cand_t c)
2389 {
2390 tree basis_name, name;
2391 slsr_cand_t basis;
2392 location_t loc;
2393
2394 /* Look up the LHS SSA name from C's basis. This will be the
2395 RHS1 of the adds we will introduce to create new phi arguments. */
2396 basis = lookup_cand (c->basis);
2397 basis_name = gimple_assign_lhs (basis->cand_stmt);
2398
2399 /* Create a new phi statement which will represent C's true basis
2400 after the transformation is complete. */
2401 loc = gimple_location (c->cand_stmt);
2402 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2403 basis_name, loc, KNOWN_STRIDE);
2404 /* Replace C with an add of the new basis phi and a constant. */
2405 widest_int bump = c->index * wi::to_widest (c->stride);
2406
2407 replace_mult_candidate (c, name, bump);
2408 }
2409
2410 /* Compute the expected costs of inserting basis adjustments for
2411 candidate C with phi-definition PHI. The cost of inserting
2412 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2413 which are themselves phi results, recursively calculate costs
2414 for those phis as well. */
2415
2416 static int
2417 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2418 {
2419 unsigned i;
2420 int cost = 0;
2421 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2422
2423 /* If we work our way back to a phi that isn't dominated by the hidden
2424 basis, this isn't a candidate for replacement. Indicate this by
2425 returning an unreasonably high cost. It's not easy to detect
2426 these situations when determining the basis, so we defer the
2427 decision until now. */
2428 basic_block phi_bb = gimple_bb (phi);
2429 slsr_cand_t basis = lookup_cand (c->basis);
2430 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2431
2432 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2433 return COST_INFINITE;
2434
2435 for (i = 0; i < gimple_phi_num_args (phi); i++)
2436 {
2437 tree arg = gimple_phi_arg_def (phi, i);
2438
2439 if (arg != phi_cand->base_expr)
2440 {
2441 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2442
2443 if (gimple_code (arg_def) == GIMPLE_PHI)
2444 cost += phi_add_costs (arg_def, c, one_add_cost);
2445 else
2446 {
2447 slsr_cand_t arg_cand = base_cand_from_table (arg);
2448
2449 if (arg_cand->index != c->index)
2450 cost += one_add_cost;
2451 }
2452 }
2453 }
2454
2455 return cost;
2456 }
2457
2458 /* For candidate C, each sibling of candidate C, and each dependent of
2459 candidate C, determine whether the candidate is dependent upon a
2460 phi that hides its basis. If not, replace the candidate unconditionally.
2461 Otherwise, determine whether the cost of introducing compensation code
2462 for the candidate is offset by the gains from strength reduction. If
2463 so, replace the candidate and introduce the compensation code. */
2464
2465 static void
2466 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2467 {
2468 if (phi_dependent_cand_p (c))
2469 {
2470 if (c->kind == CAND_MULT)
2471 {
2472 /* A candidate dependent upon a phi will replace a multiply by
2473 a constant with an add, and will insert at most one add for
2474 each phi argument. Add these costs with the potential dead-code
2475 savings to determine profitability. */
2476 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2477 int mult_savings = stmt_cost (c->cand_stmt, speed);
2478 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2479 tree phi_result = gimple_phi_result (phi);
2480 int one_add_cost = add_cost (speed,
2481 TYPE_MODE (TREE_TYPE (phi_result)));
2482 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2483 int cost = add_costs - mult_savings - c->dead_savings;
2484
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2486 {
2487 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2488 fprintf (dump_file, " add_costs = %d\n", add_costs);
2489 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2490 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2491 fprintf (dump_file, " cost = %d\n", cost);
2492 if (cost <= COST_NEUTRAL)
2493 fputs (" Replacing...\n", dump_file);
2494 else
2495 fputs (" Not replaced.\n", dump_file);
2496 }
2497
2498 if (cost <= COST_NEUTRAL)
2499 replace_conditional_candidate (c);
2500 }
2501 }
2502 else
2503 replace_unconditional_candidate (c);
2504
2505 if (c->sibling)
2506 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2507
2508 if (c->dependent)
2509 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2510 }
2511 \f
2512 /* Count the number of candidates in the tree rooted at C that have
2513 not already been replaced under other interpretations. */
2514
2515 static int
2516 count_candidates (slsr_cand_t c)
2517 {
2518 unsigned count = cand_already_replaced (c) ? 0 : 1;
2519
2520 if (c->sibling)
2521 count += count_candidates (lookup_cand (c->sibling));
2522
2523 if (c->dependent)
2524 count += count_candidates (lookup_cand (c->dependent));
2525
2526 return count;
2527 }
2528
2529 /* Increase the count of INCREMENT by one in the increment vector.
2530 INCREMENT is associated with candidate C. If INCREMENT is to be
2531 conditionally executed as part of a conditional candidate replacement,
2532 IS_PHI_ADJUST is true, otherwise false. If an initializer
2533 T_0 = stride * I is provided by a candidate that dominates all
2534 candidates with the same increment, also record T_0 for subsequent use. */
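/* For example (illustrative), if a CAND_ADD x_5 = y_2 + _4 has been
   interpreted as y_2 + (20 * S) and _4 already holds S * 20, then _4
   is optimistically recorded as an existing initializer for
   increment 20; it is discarded later if its definition turns out
   not to dominate all other candidates with that increment.  */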
2535
2536 static void
2537 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2538 {
2539 bool found = false;
2540 unsigned i;
2541
2542 /* Treat increments that differ only in sign as identical so as to
2543 share initializers, unless we are generating pointer arithmetic. */
2544 if (!address_arithmetic_p && wi::neg_p (increment))
2545 increment = -increment;
2546
2547 for (i = 0; i < incr_vec_len; i++)
2548 {
2549 if (incr_vec[i].incr == increment)
2550 {
2551 incr_vec[i].count++;
2552 found = true;
2553
2554 /* If we previously recorded an initializer that doesn't
2555 dominate this candidate, it's not going to be useful to
2556 us after all. */
2557 if (incr_vec[i].initializer
2558 && !dominated_by_p (CDI_DOMINATORS,
2559 gimple_bb (c->cand_stmt),
2560 incr_vec[i].init_bb))
2561 {
2562 incr_vec[i].initializer = NULL_TREE;
2563 incr_vec[i].init_bb = NULL;
2564 }
2565
2566 break;
2567 }
2568 }
2569
2570 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2571 {
2572 /* The first time we see an increment, create the entry for it.
2573 If this is the root candidate which doesn't have a basis, set
2574 the count to zero. We're only processing it so it can possibly
2575 provide an initializer for other candidates. */
2576 incr_vec[incr_vec_len].incr = increment;
2577 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2578 incr_vec[incr_vec_len].cost = COST_INFINITE;
2579
2580 /* Optimistically record the first occurrence of this increment
2581 as providing an initializer (if it does); we will revise this
2582 opinion later if it doesn't dominate all other occurrences.
2583 Exception: increments of 0, 1 never need initializers;
2584 and phi adjustments don't ever provide initializers. */
2585 if (c->kind == CAND_ADD
2586 && !is_phi_adjust
2587 && c->index == increment
2588 && (increment > 1 || increment < 0)
2589 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2590 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2591 {
2592 tree t0 = NULL_TREE;
2593 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2594 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2595 if (operand_equal_p (rhs1, c->base_expr, 0))
2596 t0 = rhs2;
2597 else if (operand_equal_p (rhs2, c->base_expr, 0))
2598 t0 = rhs1;
2599 if (t0
2600 && SSA_NAME_DEF_STMT (t0)
2601 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2602 {
2603 incr_vec[incr_vec_len].initializer = t0;
2604 incr_vec[incr_vec_len++].init_bb
2605 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2606 }
2607 else
2608 {
2609 incr_vec[incr_vec_len].initializer = NULL_TREE;
2610 incr_vec[incr_vec_len++].init_bb = NULL;
2611 }
2612 }
2613 else
2614 {
2615 incr_vec[incr_vec_len].initializer = NULL_TREE;
2616 incr_vec[incr_vec_len++].init_bb = NULL;
2617 }
2618 }
2619 }
2620
2621 /* Given phi statement PHI that hides a candidate from its BASIS, find
2622 the increments along each incoming arc (recursively handling additional
2623 phis that may be present) and record them. These increments are the
2624 difference in index between the index-adjusting statements and the
2625 index of the basis. */
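/* For example (illustrative), if the basis has index 1 and the phi
   arguments are defined by candidates with indices 4 and 9, the
   increments recorded are 3 and 8.  */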
2626
2627 static void
2628 record_phi_increments (slsr_cand_t basis, gimple *phi)
2629 {
2630 unsigned i;
2631 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2632
2633 for (i = 0; i < gimple_phi_num_args (phi); i++)
2634 {
2635 tree arg = gimple_phi_arg_def (phi, i);
2636
2637 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2638 {
2639 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2640
2641 if (gimple_code (arg_def) == GIMPLE_PHI)
2642 record_phi_increments (basis, arg_def);
2643 else
2644 {
2645 slsr_cand_t arg_cand = base_cand_from_table (arg);
2646 widest_int diff = arg_cand->index - basis->index;
2647 record_increment (arg_cand, diff, PHI_ADJUST);
2648 }
2649 }
2650 }
2651 }
2652
2653 /* Determine how many times each unique increment occurs in the set
2654 of candidates rooted at C's parent, recording the data in the
2655 increment vector. For each unique increment I, if an initializer
2656 T_0 = stride * I is provided by a candidate that dominates all
2657 candidates with the same increment, also record T_0 for subsequent
2658 use. */
2659
2660 static void
2661 record_increments (slsr_cand_t c)
2662 {
2663 if (!cand_already_replaced (c))
2664 {
2665 if (!phi_dependent_cand_p (c))
2666 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2667 else
2668 {
2669 /* A candidate with a basis hidden by a phi will have one
2670 increment for its relationship to the index represented by
2671 the phi, and potentially additional increments along each
2672 incoming edge. For the root of the dependency tree (which
2673 has no basis), process just the initial index in case it has
2674 an initializer that can be used by subsequent candidates. */
2675 record_increment (c, c->index, NOT_PHI_ADJUST);
2676
2677 if (c->basis)
2678 record_phi_increments (lookup_cand (c->basis),
2679 lookup_cand (c->def_phi)->cand_stmt);
2680 }
2681 }
2682
2683 if (c->sibling)
2684 record_increments (lookup_cand (c->sibling));
2685
2686 if (c->dependent)
2687 record_increments (lookup_cand (c->dependent));
2688 }
2689
2690 /* Add up and return the costs of introducing add statements that
2691 require the increment INCR on behalf of candidate C and phi
2692 statement PHI. Accumulate into *SAVINGS the potential savings
2693 from removing existing statements that feed PHI and have no other
2694 uses. */
2695
2696 static int
2697 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2698 int *savings)
2699 {
2700 unsigned i;
2701 int cost = 0;
2702 slsr_cand_t basis = lookup_cand (c->basis);
2703 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2704
2705 for (i = 0; i < gimple_phi_num_args (phi); i++)
2706 {
2707 tree arg = gimple_phi_arg_def (phi, i);
2708
2709 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2710 {
2711 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2712
2713 if (gimple_code (arg_def) == GIMPLE_PHI)
2714 {
2715 int feeding_savings = 0;
2716 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2717 if (has_single_use (gimple_phi_result (arg_def)))
2718 *savings += feeding_savings;
2719 }
2720 else
2721 {
2722 slsr_cand_t arg_cand = base_cand_from_table (arg);
2723 widest_int diff = arg_cand->index - basis->index;
2724
2725 if (incr == diff)
2726 {
2727 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2728 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2729 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2730 if (has_single_use (lhs))
2731 *savings += stmt_cost (arg_cand->cand_stmt, true);
2732 }
2733 }
2734 }
2735 }
2736
2737 return cost;
2738 }
2739
2740 /* Return the first candidate in the tree rooted at C that has not
2741 already been replaced, favoring siblings over dependents. */
2742
2743 static slsr_cand_t
2744 unreplaced_cand_in_tree (slsr_cand_t c)
2745 {
2746 if (!cand_already_replaced (c))
2747 return c;
2748
2749 if (c->sibling)
2750 {
2751 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2752 if (sib)
2753 return sib;
2754 }
2755
2756 if (c->dependent)
2757 {
2758 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2759 if (dep)
2760 return dep;
2761 }
2762
2763 return NULL;
2764 }
2765
2766 /* Return TRUE if the candidates in the tree rooted at C should be
2767 optimized for speed, else FALSE. We estimate this based on the block
2768 containing the most dominant candidate in the tree that has not yet
2769 been replaced. */
2770
2771 static bool
2772 optimize_cands_for_speed_p (slsr_cand_t c)
2773 {
2774 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2775 gcc_assert (c2);
2776 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2777 }
2778
2779 /* Add COST_IN to the lowest cost of any dependent path starting at
2780 candidate C or any of its siblings, counting only candidates along
2781 such paths with increment INCR. Assume that replacing a candidate
2782 reduces cost by REPL_SAVINGS. Also account for savings from any
2783 statements that would go dead. If COUNT_PHIS is true, include
2784 costs of introducing feeding statements for conditional candidates. */
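/* As an illustrative calculation: with COST_IN = 8, REPL_SAVINGS = 4,
   and a candidate whose increment matches INCR and whose dead-code
   savings are 2, the local cost along that path becomes
   8 - 4 - 2 = 2, before recursing into dependents and taking the
   minimum over siblings.  */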
2785
2786 static int
2787 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2788 const widest_int &incr, bool count_phis)
2789 {
2790 int local_cost, sib_cost, savings = 0;
2791 widest_int cand_incr = cand_abs_increment (c);
2792
2793 if (cand_already_replaced (c))
2794 local_cost = cost_in;
2795 else if (incr == cand_incr)
2796 local_cost = cost_in - repl_savings - c->dead_savings;
2797 else
2798 local_cost = cost_in - c->dead_savings;
2799
2800 if (count_phis
2801 && phi_dependent_cand_p (c)
2802 && !cand_already_replaced (c))
2803 {
2804 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2805 local_cost += phi_incr_cost (c, incr, phi, &savings);
2806
2807 if (has_single_use (gimple_phi_result (phi)))
2808 local_cost -= savings;
2809 }
2810
2811 if (c->dependent)
2812 local_cost = lowest_cost_path (local_cost, repl_savings,
2813 lookup_cand (c->dependent), incr,
2814 count_phis);
2815
2816 if (c->sibling)
2817 {
2818 sib_cost = lowest_cost_path (cost_in, repl_savings,
2819 lookup_cand (c->sibling), incr,
2820 count_phis);
2821 local_cost = MIN (local_cost, sib_cost);
2822 }
2823
2824 return local_cost;
2825 }
2826
2827 /* Compute the total savings that would accrue from all replacements
2828 in the candidate tree rooted at C, counting only candidates with
2829 increment INCR. Assume that replacing a candidate reduces cost
2830 by REPL_SAVINGS. Also account for savings from statements that
2831 would go dead. */
2832
2833 static int
2834 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2835 bool count_phis)
2836 {
2837 int savings = 0;
2838 widest_int cand_incr = cand_abs_increment (c);
2839
2840 if (incr == cand_incr && !cand_already_replaced (c))
2841 savings += repl_savings + c->dead_savings;
2842
2843 if (count_phis
2844 && phi_dependent_cand_p (c)
2845 && !cand_already_replaced (c))
2846 {
2847 int phi_savings = 0;
2848 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2849 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2850
2851 if (has_single_use (gimple_phi_result (phi)))
2852 savings += phi_savings;
2853 }
2854
2855 if (c->dependent)
2856 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2857 count_phis);
2858
2859 if (c->sibling)
2860 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2861 count_phis);
2862
2863 return savings;
2864 }
2865
2866 /* Use target-specific costs to determine and record which increments
2867 in the current candidate tree are profitable to replace, assuming
2868 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2869 the candidate tree.
2870
2871 One slight limitation here is that we don't account for the possible
2872 introduction of casts in some cases. See replace_one_candidate for
2873 the cases where these are introduced. This should probably be cleaned
2874 up sometime. */
2875
2876 static void
2877 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2878 {
2879 unsigned i;
2880
2881 for (i = 0; i < incr_vec_len; i++)
2882 {
2883 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2884
2885 /* If somehow this increment is bigger than a HWI, we won't
2886 be optimizing candidates that use it. And if the increment
2887 has a count of zero, nothing will be done with it. */
2888 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2889 incr_vec[i].cost = COST_INFINITE;
2890
2891 /* Increments of 0, 1, and -1 are always profitable to replace,
2892 because they always replace a multiply or add with an add or
2893 copy, and may cause one or more existing instructions to go
2894 dead. Exception: -1 can't be assumed to be profitable for
2895 pointer addition. */
2896 else if (incr == 0
2897 || incr == 1
2898 || (incr == -1
2899 && !POINTER_TYPE_P (first_dep->cand_type)))
2900 incr_vec[i].cost = COST_NEUTRAL;
2901
2902 /* If we need to add an initializer, give up if a cast from the
2903 candidate's type to its stride's type can lose precision.
2904 Note that this already takes into account that the stride may
2905 have been cast to a wider type, in which case this test won't
2906 fire. Example:
2907
2908 short int _1;
2909 _2 = (int) _1;
2910 _3 = _2 * 10;
2911 _4 = x + _3; ADD: x + (10 * (int)_1) : int
2912 _5 = _2 * 15;
2913 _6 = x + _5; ADD: x + (15 * (int)_1) : int
2914
2915 Although the stride was a short int initially, the stride
2916 used in the analysis has been widened to an int, and such
2917 widening will be done in the initializer as well. */
2918 else if (!incr_vec[i].initializer
2919 && TREE_CODE (first_dep->stride) != INTEGER_CST
2920 && !legal_cast_p_1 (first_dep->stride_type,
2921 TREE_TYPE (gimple_assign_lhs
2922 (first_dep->cand_stmt))))
2923 incr_vec[i].cost = COST_INFINITE;
2924
2925 /* If we need to add an initializer, make sure we don't introduce
2926 a multiply by a pointer type, which can happen in certain cast
2927 scenarios. */
2928 else if (!incr_vec[i].initializer
2929 && TREE_CODE (first_dep->stride) != INTEGER_CST
2930 && POINTER_TYPE_P (first_dep->stride_type))
2931 incr_vec[i].cost = COST_INFINITE;
2932
2933 /* For any other increment, if this is a multiply candidate, we
2934 must introduce a temporary T and initialize it with
2935 T_0 = stride * increment. When optimizing for speed, walk the
2936 candidate tree to calculate the best cost reduction along any
2937 path; if it offsets the fixed cost of inserting the initializer,
2938 replacing the increment is profitable. When optimizing for
2939 size, instead calculate the total cost reduction from replacing
2940 all candidates with this increment. */
2941 else if (first_dep->kind == CAND_MULT)
2942 {
2943 int cost = mult_by_coeff_cost (incr, mode, speed);
2944 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2945 if (speed)
2946 cost = lowest_cost_path (cost, repl_savings, first_dep,
2947 incr_vec[i].incr, COUNT_PHIS);
2948 else
2949 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2950 COUNT_PHIS);
2951
2952 incr_vec[i].cost = cost;
2953 }
2954
2955 /* If this is an add candidate, the initializer may already
2956 exist, so only calculate the cost of the initializer if it
2957 doesn't. We are replacing one add with another here, so the
2958 known replacement savings is zero. We will account for removal
2959 of dead instructions in lowest_cost_path or total_savings. */
2960 else
2961 {
2962 int cost = 0;
2963 if (!incr_vec[i].initializer)
2964 cost = mult_by_coeff_cost (incr, mode, speed);
2965
2966 if (speed)
2967 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2968 DONT_COUNT_PHIS);
2969 else
2970 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2971 DONT_COUNT_PHIS);
2972
2973 incr_vec[i].cost = cost;
2974 }
2975 }
2976 }
2977
2978 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2979 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2980 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2981 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2982 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2983
2984 static basic_block
2985 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2986 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2987 {
2988 basic_block ncd;
2989
2990 if (!bb1)
2991 {
2992 *where = c2;
2993 return bb2;
2994 }
2995
2996 if (!bb2)
2997 {
2998 *where = c1;
2999 return bb1;
3000 }
3001
3002 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3003
3004 /* If both candidates are in the same block, the earlier
3005 candidate wins. */
3006 if (bb1 == ncd && bb2 == ncd)
3007 {
3008 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3009 *where = c2;
3010 else
3011 *where = c1;
3012 }
3013
3014 /* Otherwise, if one of them produced a candidate in the
3015 dominator, that one wins. */
3016 else if (bb1 == ncd)
3017 *where = c1;
3018
3019 else if (bb2 == ncd)
3020 *where = c2;
3021
3022 /* If neither matches the dominator, neither wins. */
3023 else
3024 *where = NULL;
3025
3026 return ncd;
3027 }
3028
3029 /* Consider all candidates that feed PHI. Find the nearest common
3030 dominator of those candidates requiring the given increment INCR.
3031 Further find and return the nearest common dominator of this result
3032 with block NCD. If the returned block contains one or more of the
3033 candidates, return the earliest candidate in the block in *WHERE. */
3034
3035 static basic_block
3036 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3037 basic_block ncd, slsr_cand_t *where)
3038 {
3039 unsigned i;
3040 slsr_cand_t basis = lookup_cand (c->basis);
3041 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3042
3043 for (i = 0; i < gimple_phi_num_args (phi); i++)
3044 {
3045 tree arg = gimple_phi_arg_def (phi, i);
3046
3047 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3048 {
3049 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3050
3051 if (gimple_code (arg_def) == GIMPLE_PHI)
3052 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3053 where);
3054 else
3055 {
3056 slsr_cand_t arg_cand = base_cand_from_table (arg);
3057 widest_int diff = arg_cand->index - basis->index;
3058 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3059
3060 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3061 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3062 }
3063 }
3064 }
3065
3066 return ncd;
3067 }
3068
3069 /* Consider the candidate C together with any candidates that feed
3070 C's phi dependence (if any). Find and return the nearest common
3071 dominator of those candidates requiring the given increment INCR.
3072 If the returned block contains one or more of the candidates,
3073 return the earliest candidate in the block in *WHERE. */
3074
3075 static basic_block
3076 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3077 {
3078 basic_block ncd = NULL;
3079
3080 if (cand_abs_increment (c) == incr)
3081 {
3082 ncd = gimple_bb (c->cand_stmt);
3083 *where = c;
3084 }
3085
3086 if (phi_dependent_cand_p (c))
3087 ncd = ncd_with_phi (c, incr,
3088 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3089 ncd, where);
3090
3091 return ncd;
3092 }
3093
3094 /* Consider all candidates in the tree rooted at C for which INCR
3095 represents the required increment of C relative to its basis.
3096 Find and return the basic block that most nearly dominates all
3097 such candidates. If the returned block contains one or more of
3098 the candidates, return the earliest candidate in the block in
3099 *WHERE. */
3100
3101 static basic_block
3102 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3103 slsr_cand_t *where)
3104 {
3105 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3106 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3107
3108 /* First find the NCD of all siblings and dependents. */
3109 if (c->sibling)
3110 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3111 incr, &sib_where);
3112 if (c->dependent)
3113 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3114 incr, &dep_where);
3115 if (!sib_ncd && !dep_ncd)
3116 {
3117 new_where = NULL;
3118 ncd = NULL;
3119 }
3120 else if (sib_ncd && !dep_ncd)
3121 {
3122 new_where = sib_where;
3123 ncd = sib_ncd;
3124 }
3125 else if (dep_ncd && !sib_ncd)
3126 {
3127 new_where = dep_where;
3128 ncd = dep_ncd;
3129 }
3130 else
3131 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3132 dep_where, &new_where);
3133
3134 /* If the candidate's increment doesn't match the one we're interested
3135 in (nor do any increments for the feeding defs of a phi dependence),
3136 then the result depends only on siblings and dependents. */
3137 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3138
3139 if (!this_ncd || cand_already_replaced (c))
3140 {
3141 *where = new_where;
3142 return ncd;
3143 }
3144
3145 /* Otherwise, compare this candidate with the result from all siblings
3146 and dependents. */
3147 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3148
3149 return ncd;
3150 }
3151
3152 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3153
3154 static inline bool
3155 profitable_increment_p (unsigned index)
3156 {
3157 return (incr_vec[index].cost <= COST_NEUTRAL);
3158 }
3159
3160 /* For each profitable increment in the increment vector not equal to
3161 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3162 dominator of all statements in the candidate chain rooted at C
3163 that require that increment, and insert an initializer
3164 T_0 = stride * increment at that location. Record T_0 with the
3165 increment record. */
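/* For example (illustrative), if several candidates share increment 5
   with a common (unknown) stride S_7, a statement slsr_9 = S_7 * 5 is
   inserted in the nearest block dominating all of them, and slsr_9
   then serves as the rhs2 of their replacement adds.  */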
3166
3167 static void
3168 insert_initializers (slsr_cand_t c)
3169 {
3170 unsigned i;
3171
3172 for (i = 0; i < incr_vec_len; i++)
3173 {
3174 basic_block bb;
3175 slsr_cand_t where = NULL;
3176 gassign *init_stmt;
3177 gassign *cast_stmt = NULL;
3178 tree new_name, incr_tree, init_stride;
3179 widest_int incr = incr_vec[i].incr;
3180
3181 if (!profitable_increment_p (i)
3182 || incr == 1
3183 || (incr == -1
3184 && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3185 || incr == 0)
3186 continue;
3187
3188 /* We may have already identified an existing initializer that
3189 will suffice. */
3190 if (incr_vec[i].initializer)
3191 {
3192 if (dump_file && (dump_flags & TDF_DETAILS))
3193 {
3194 fputs ("Using existing initializer: ", dump_file);
3195 print_gimple_stmt (dump_file,
3196 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3197 0, 0);
3198 }
3199 continue;
3200 }
3201
3202 /* Find the block that most closely dominates all candidates
3203 with this increment. If there is at least one candidate in
3204 that block, the earliest one will be returned in WHERE. */
3205 bb = nearest_common_dominator_for_cands (c, incr, &where);
3206
3207 /* If the nominal stride has a different type than the recorded
3208 stride type, build a cast from the nominal stride to that type. */
3209 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3210 {
3211 init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3212 cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3213 }
3214 else
3215 init_stride = c->stride;
3216
3217 /* Create a new SSA name to hold the initializer's value. */
3218 new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3219 incr_vec[i].initializer = new_name;
3220
3221 /* Create the initializer and insert it in the latest possible
3222 dominating position. */
3223 incr_tree = wide_int_to_tree (c->stride_type, incr);
3224 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3225 init_stride, incr_tree);
3226 if (where)
3227 {
3228 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3229 location_t loc = gimple_location (where->cand_stmt);
3230
3231 if (cast_stmt)
3232 {
3233 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3234 gimple_set_location (cast_stmt, loc);
3235 }
3236
3237 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3238 gimple_set_location (init_stmt, loc);
3239 }
3240 else
3241 {
3242 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3243 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3244 location_t loc = gimple_location (basis_stmt);
3245
3246 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3247 {
3248 if (cast_stmt)
3249 {
3250 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3251 gimple_set_location (cast_stmt, loc);
3252 }
3253 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3254 }
3255 else
3256 {
3257 if (cast_stmt)
3258 {
3259 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3260 gimple_set_location (cast_stmt, loc);
3261 }
3262 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3263 }
3264
3265 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3266 }
3267
3268 if (dump_file && (dump_flags & TDF_DETAILS))
3269 {
3270 if (cast_stmt)
3271 {
3272 fputs ("Inserting stride cast: ", dump_file);
3273 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3274 }
3275 fputs ("Inserting initializer: ", dump_file);
3276 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3277 }
3278 }
3279 }
3280
3281 /* Return TRUE iff all required increments for candidates feeding PHI
3282 are profitable to replace on behalf of candidate C. */
3283
3284 static bool
3285 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3286 {
3287 unsigned i;
3288 slsr_cand_t basis = lookup_cand (c->basis);
3289 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3290
3291 for (i = 0; i < gimple_phi_num_args (phi); i++)
3292 {
3293 tree arg = gimple_phi_arg_def (phi, i);
3294
3295 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3296 {
3297 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3298
3299 if (gimple_code (arg_def) == GIMPLE_PHI)
3300 {
3301 if (!all_phi_incrs_profitable (c, arg_def))
3302 return false;
3303 }
3304 else
3305 {
3306 int j;
3307 slsr_cand_t arg_cand = base_cand_from_table (arg);
3308 widest_int increment = arg_cand->index - basis->index;
3309
3310 if (!address_arithmetic_p && wi::neg_p (increment))
3311 increment = -increment;
3312
3313 j = incr_vec_index (increment);
3314
3315 if (dump_file && (dump_flags & TDF_DETAILS))
3316 {
3317 fprintf (dump_file, " Conditional candidate %d, phi: ",
3318 c->cand_num);
3319 print_gimple_stmt (dump_file, phi, 0, 0);
3320 fputs (" increment: ", dump_file);
3321 print_decs (increment, dump_file);
3322 if (j < 0)
3323 fprintf (dump_file,
3324 "\n Not replaced; incr_vec overflow.\n");
3325 else {
3326 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3327 if (profitable_increment_p (j))
3328 fputs (" Replacing...\n", dump_file);
3329 else
3330 fputs (" Not replaced.\n", dump_file);
3331 }
3332 }
3333
3334 if (j < 0 || !profitable_increment_p (j))
3335 return false;
3336 }
3337 }
3338 }
3339
3340 return true;
3341 }
3342
3343 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3344 type TO_TYPE, and insert it in front of the statement represented
3345 by candidate C, at the candidate's source location. Return
3346 the new SSA name. */
3347
3348 static tree
3349 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3350 {
3351 tree cast_lhs;
3352 gassign *cast_stmt;
3353 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3354
3355 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3356 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3357 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3358 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3359
3360 if (dump_file && (dump_flags & TDF_DETAILS))
3361 {
3362 fputs (" Inserting: ", dump_file);
3363 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3364 }
3365
3366 return cast_lhs;
3367 }
3368
3369 /* Replace the RHS of the statement represented by candidate C with
3370 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3371 leave C unchanged or just interchange its operands. The original
3372 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3373 If the replacement was made and we are doing a details dump,
3374 return the revised statement, else NULL. */
3375
3376 static gimple *
3377 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3378 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3379 slsr_cand_t c)
3380 {
3381 if (new_code != old_code
3382 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3383 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3384 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3385 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3386 {
3387 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3388 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3389 update_stmt (gsi_stmt (gsi));
3390 c->cand_stmt = gsi_stmt (gsi);
3391
3392 if (dump_file && (dump_flags & TDF_DETAILS))
3393 return gsi_stmt (gsi);
3394 }
3395
3396 else if (dump_file && (dump_flags & TDF_DETAILS))
3397 fputs (" (duplicate, not actually replacing)\n", dump_file);
3398
3399 return NULL;
3400 }
3401
3402 /* Strength-reduce the statement represented by candidate C by replacing
3403 it with an equivalent addition or subtraction. I is the index into
3404 the increment vector identifying C's increment. A new SSA name is
3405 created if a cast needs to be introduced. BASIS_NAME
3406 is the rhs1 to use in creating the add/subtract. */
3407
3408 static void
3409 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3410 {
3411 gimple *stmt_to_print = NULL;
3412 tree orig_rhs1, orig_rhs2;
3413 tree rhs2;
3414 enum tree_code orig_code, repl_code;
3415 widest_int cand_incr;
3416
3417 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3418 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3419 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3420 cand_incr = cand_increment (c);
3421
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3423 {
3424 fputs ("Replacing: ", dump_file);
3425 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3426 stmt_to_print = c->cand_stmt;
3427 }
3428
3429 if (address_arithmetic_p)
3430 repl_code = POINTER_PLUS_EXPR;
3431 else
3432 repl_code = PLUS_EXPR;
3433
3434 /* If the increment has an initializer T_0, replace the candidate
3435 statement with an add of the basis name and the initializer. */
3436 if (incr_vec[i].initializer)
3437 {
3438 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3439 tree orig_type = TREE_TYPE (orig_rhs2);
3440
3441 if (types_compatible_p (orig_type, init_type))
3442 rhs2 = incr_vec[i].initializer;
3443 else
3444 rhs2 = introduce_cast_before_cand (c, orig_type,
3445 incr_vec[i].initializer);
3446
3447 if (incr_vec[i].incr != cand_incr)
3448 {
3449 gcc_assert (repl_code == PLUS_EXPR);
3450 repl_code = MINUS_EXPR;
3451 }
3452
3453 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3454 orig_code, orig_rhs1, orig_rhs2,
3455 c);
3456 }
3457
3458 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3459 with a subtract of the stride from the basis name, a copy
3460 from the basis name, or an add of the stride to the basis
3461 name, respectively. It may be necessary to introduce a
3462 cast (or reuse an existing cast). */
3463 else if (cand_incr == 1)
3464 {
3465 tree stride_type = TREE_TYPE (c->stride);
3466 tree orig_type = TREE_TYPE (orig_rhs2);
3467
3468 if (types_compatible_p (orig_type, stride_type))
3469 rhs2 = c->stride;
3470 else
3471 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3472
3473 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3474 orig_code, orig_rhs1, orig_rhs2,
3475 c);
3476 }
3477
3478 else if (cand_incr == -1)
3479 {
3480 tree stride_type = TREE_TYPE (c->stride);
3481 tree orig_type = TREE_TYPE (orig_rhs2);
3482 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3483
3484 if (types_compatible_p (orig_type, stride_type))
3485 rhs2 = c->stride;
3486 else
3487 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3488
3489 if (orig_code != MINUS_EXPR
3490 || !operand_equal_p (basis_name, orig_rhs1, 0)
3491 || !operand_equal_p (rhs2, orig_rhs2, 0))
3492 {
3493 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3494 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3495 update_stmt (gsi_stmt (gsi));
3496 c->cand_stmt = gsi_stmt (gsi);
3497
3498 if (dump_file && (dump_flags & TDF_DETAILS))
3499 stmt_to_print = gsi_stmt (gsi);
3500 }
3501 else if (dump_file && (dump_flags & TDF_DETAILS))
3502 fputs (" (duplicate, not actually replacing)\n", dump_file);
3503 }
3504
3505 else if (cand_incr == 0)
3506 {
3507 tree lhs = gimple_assign_lhs (c->cand_stmt);
3508 tree lhs_type = TREE_TYPE (lhs);
3509 tree basis_type = TREE_TYPE (basis_name);
3510
3511 if (types_compatible_p (lhs_type, basis_type))
3512 {
3513 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3514 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3515 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3516 gsi_replace (&gsi, copy_stmt, false);
3517 c->cand_stmt = copy_stmt;
3518
3519 if (dump_file && (dump_flags & TDF_DETAILS))
3520 stmt_to_print = copy_stmt;
3521 }
3522 else
3523 {
3524 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3525 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3526 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3527 gsi_replace (&gsi, cast_stmt, false);
3528 c->cand_stmt = cast_stmt;
3529
3530 if (dump_file && (dump_flags & TDF_DETAILS))
3531 stmt_to_print = cast_stmt;
3532 }
3533 }
3534 else
3535 gcc_unreachable ();
3536
3537 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3538 {
3539 fputs ("With: ", dump_file);
3540 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3541 fputs ("\n", dump_file);
3542 }
3543 }
3544
3545 /* For each candidate in the tree rooted at C, replace its statement with
3546    a cheaper computation from its basis where that has been shown to be profitable.  */
3547
3548 static void
3549 replace_profitable_candidates (slsr_cand_t c)
3550 {
3551 if (!cand_already_replaced (c))
3552 {
3553 widest_int increment = cand_abs_increment (c);
3554 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3555 int i;
3556
3557 i = incr_vec_index (increment);
3558
3559 /* Only process profitable increments. Nothing useful can be done
3560 to a cast or copy. */
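/* (A cast or copy contains no multiply of its own; such candidates
   exist mainly so that statements depending on them can locate their
   bases, so replacing one would save nothing.)  */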
3561 if (i >= 0
3562 && profitable_increment_p (i)
3563 && orig_code != SSA_NAME
3564 && !CONVERT_EXPR_CODE_P (orig_code))
3565 {
3566 if (phi_dependent_cand_p (c))
3567 {
3568 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3569
3570 if (all_phi_incrs_profitable (c, phi))
3571 {
3572 /* Look up the LHS SSA name from C's basis. This will be
3573 the RHS1 of the adds we will introduce to create new
3574 phi arguments. */
3575 slsr_cand_t basis = lookup_cand (c->basis);
3576 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3577
3578 /* Create a new phi statement that will represent C's true
3579 basis after the transformation is complete. */
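/* (Roughly speaking: adds of BASIS_NAME and the adjustment appropriate
   to each incoming path are created where needed, and the new phi
   collects them, so its result can stand in for the basis name in the
   replacement below.  See create_phi_basis.)  */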
3580 location_t loc = gimple_location (c->cand_stmt);
3581 tree name = create_phi_basis (c, phi, basis_name,
3582 loc, UNKNOWN_STRIDE);
3583
3584 /* Replace C with an add of the new basis phi and the
3585 increment. */
3586 replace_one_candidate (c, i, name);
3587 }
3588 }
3589 else
3590 {
3591 slsr_cand_t basis = lookup_cand (c->basis);
3592 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3593 replace_one_candidate (c, i, basis_name);
3594 }
3595 }
3596 }
3597
3598 if (c->sibling)
3599 replace_profitable_candidates (lookup_cand (c->sibling));
3600
3601 if (c->dependent)
3602 replace_profitable_candidates (lookup_cand (c->dependent));
3603 }
3604 \f
3605 /* Analyze costs of related candidates in the candidate vector,
3606 and make beneficial replacements. */
3607
3608 static void
3609 analyze_candidates_and_replace (void)
3610 {
3611 unsigned i;
3612 slsr_cand_t c;
3613
3614 /* Each candidate that has a null basis and a non-null
3615 dependent is the root of a tree of related statements.
3616 Analyze each tree to determine a subset of those
3617 statements that can be replaced with maximum benefit. */
3618 FOR_EACH_VEC_ELT (cand_vec, i, c)
3619 {
3620 slsr_cand_t first_dep;
3621
3622 if (c->basis != 0 || c->dependent == 0)
3623 continue;
3624
3625 if (dump_file && (dump_flags & TDF_DETAILS))
3626 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3627 c->cand_num);
3628
3629 first_dep = lookup_cand (c->dependent);
3630
3631 /* If this is a chain of CAND_REFs, unconditionally replace
3632 each of them with a strength-reduced data reference. */
3633 if (c->kind == CAND_REF)
3634 replace_refs (c);
3635
3636 /* If the common stride of all related candidates is a known
3637 constant, each candidate without a phi-dependence can be
3638 profitably replaced. Each replaces a multiply by a single
3639 add, with the possibility that a feeding add also goes dead.
3640 A candidate with a phi-dependence is replaced only if the
3641 compensation code it requires is offset by the strength
3642 reduction savings. */
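/* For example, with a stride of 4, a basis computing Y = (B + 1) * 4,
   and a dependent candidate X = (B + 3) * 4, the candidate can be
   rewritten as X = Y + 8.  */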
3643 else if (TREE_CODE (c->stride) == INTEGER_CST)
3644 replace_uncond_cands_and_profitable_phis (first_dep);
3645
3646 /* When the stride is an SSA name, it may still be profitable
3647 to replace some or all of the dependent candidates, depending
3648 on whether the introduced increments can be reused, or are
3649 less expensive to calculate than the replaced statements. */
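/* E.g., rewriting X = (B + 2) * S as X = Y + T_0 requires a new
   statement T_0 = S * 2; that pays off only if T_0 is cheap to
   compute or is shared by several replaced candidates.  */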
3650 else
3651 {
3652 machine_mode mode;
3653 bool speed;
3654
3655 /* Determine whether we'll be generating pointer arithmetic
3656 when replacing candidates. */
3657 address_arithmetic_p = (c->kind == CAND_ADD
3658 && POINTER_TYPE_P (c->cand_type));
3659
3660 /* If all candidates have already been replaced under other
3661 interpretations, nothing remains to be done. */
3662 if (!count_candidates (c))
3663 continue;
3664
3665 /* Construct an array of increments for this candidate chain. */
3666 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3667 incr_vec_len = 0;
3668 record_increments (c);
3669
3670 /* Determine which increments are profitable to replace. */
3671 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3672 speed = optimize_cands_for_speed_p (c);
3673 analyze_increments (first_dep, mode, speed);
3674
3675 /* Insert initializers of the form T_0 = stride * increment
3676 for use in profitable replacements. */
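/* Each T_0 is placed at a point dominating every replacement that
   uses it, so its multiply executes once per increment value rather
   than once per replaced statement.  */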
3677 insert_initializers (first_dep);
3678 dump_incr_vec ();
3679
3680 /* Perform the replacements. */
3681 replace_profitable_candidates (first_dep);
3682 free (incr_vec);
3683 }
3684 }
3685 }
3686
3687 namespace {
3688
3689 const pass_data pass_data_strength_reduction =
3690 {
3691 GIMPLE_PASS, /* type */
3692 "slsr", /* name */
3693 OPTGROUP_NONE, /* optinfo_flags */
3694 TV_GIMPLE_SLSR, /* tv_id */
3695 ( PROP_cfg | PROP_ssa ), /* properties_required */
3696 0, /* properties_provided */
3697 0, /* properties_destroyed */
3698 0, /* todo_flags_start */
3699 0, /* todo_flags_finish */
3700 };
3701
3702 class pass_strength_reduction : public gimple_opt_pass
3703 {
3704 public:
3705 pass_strength_reduction (gcc::context *ctxt)
3706 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3707 {}
3708
3709 /* opt_pass methods: */
3710 virtual bool gate (function *) { return flag_tree_slsr; }
3711 virtual unsigned int execute (function *);
3712
3713 }; // class pass_strength_reduction
3714
3715 unsigned
3716 pass_strength_reduction::execute (function *fun)
3717 {
3718 /* Create the obstack where candidates will reside. */
3719 gcc_obstack_init (&cand_obstack);
3720
3721 /* Allocate the candidate vector. */
3722 cand_vec.create (128);
3723
3724 /* Allocate the mapping from statements to candidate indices. */
3725 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3726
3727 /* Create the obstack where candidate chains will reside. */
3728 gcc_obstack_init (&chain_obstack);
3729
3730 /* Allocate the mapping from base expressions to candidate chains. */
3731 base_cand_map = new hash_table<cand_chain_hasher> (500);
3732
3733 /* Allocate the mapping from bases to alternative bases. */
3734 alt_base_map = new hash_map<tree, tree>;
3735
3736 /* Initialize the loop optimizer. We need to detect flow across
3737 back edges, and this gives us dominator information as well. */
3738 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3739
3740 /* Walk the CFG in predominator order looking for strength reduction
3741 candidates. */
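/* (Visiting blocks in dominator order ensures that any statement that
   could serve as a basis is entered in the candidate table before the
   statements that might use it are examined.)  */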
3742 find_candidates_dom_walker (CDI_DOMINATORS)
3743 .walk (fun->cfg->x_entry_block_ptr);
3744
3745 if (dump_file && (dump_flags & TDF_DETAILS))
3746 {
3747 dump_cand_vec ();
3748 dump_cand_chains ();
3749 }
3750
3751 delete alt_base_map;
3752 free_affine_expand_cache (&name_expansions);
3753
3754 /* Analyze costs and make appropriate replacements. */
3755 analyze_candidates_and_replace ();
3756
3757 loop_optimizer_finalize ();
3758 delete base_cand_map;
3759 base_cand_map = NULL;
3760 obstack_free (&chain_obstack, NULL);
3761 delete stmt_cand_map;
3762 cand_vec.release ();
3763 obstack_free (&cand_obstack, NULL);
3764
3765 return 0;
3766 }
3767
3768 } // anon namespace
3769
3770 gimple_opt_pass *
3771 make_pass_strength_reduction (gcc::context *ctxt)
3772 {
3773 return new pass_strength_reduction (ctxt);
3774 }