1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
59 \f
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
64
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
67
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
71
72 Any statement S0 that dominates S1 and is of the form:
73
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
76
77 is called a "basis" for S1. In both cases, S1 may be replaced by
78
79 S1': X = Y + (i - i') * S,
80
81 where (i - i') * S is folded to the extent possible.
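
   For example, with purely illustrative values i' = 2, i = 7, and S = 4,

     S0: Y = (B + 2) * 4
     S1: X = (B + 7) * 4

   allow S1 to be replaced by

     S1': X = Y + 20.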
82
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
91
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
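
   For instance (SSA names chosen for illustration), a statement
   x = y * z is recorded both as CAND_MULT (B: y; i: 0; S: z) and as
   CAND_MULT (B: z; i: 0; S: y), the two interpretations being linked
   through the next_interp field.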
96
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
100
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
104
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
107
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
111
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
120
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
124
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
128
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
131
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
134
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
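
   For example (a 4-byte int and purely illustrative sizes assumed),
   given "int a[100][100];" the two references

     a[i][5] = 1;    MEM_REF (POINTER_PLUS_EXPR (&a, MULT_EXPR (i, 400)), 20)
     a[i][17] = 2;   MEM_REF (POINTER_PLUS_EXPR (&a, MULT_EXPR (i, 400)), 68)

   share T1 = &a, T2 = i, and C3 = 400, differing only in the constant
   parts, so the multiply by 400 need only be computed once.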
142
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
151
152 Conditional candidates
153 ======================
154
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
157
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
165
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
168
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
172 {
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
175 }
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
180
181 where the bracketed instructions may go dead.
182
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
193
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
199
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
209
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
213
214
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
218
219 /* The kind of candidate. */
220 enum cand_kind
221 {
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
226 };
227
228 struct slsr_cand_d
229 {
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
232
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
235
236 /* The stride S. */
237 tree stride;
238
239 /* The index constant i. */
240 widest_int index;
241
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
248
249 /* The kind of candidate (CAND_MULT, etc.). */
250 enum cand_kind kind;
251
252 /* Index of this candidate in the candidate vector. */
253 cand_idx cand_num;
254
255 /* Index of the next candidate record for the same statement.
256 A statement may be useful in more than one way (e.g., due to
257 commutativity). So we can have multiple "interpretations"
258 of a statement. */
259 cand_idx next_interp;
260
261 /* Index of the basis statement S0, if any, in the candidate vector. */
262 cand_idx basis;
263
264 /* First candidate for which this candidate is a basis, if one exists. */
265 cand_idx dependent;
266
267 /* Next candidate having the same basis as this one. */
268 cand_idx sibling;
269
270 /* If this is a conditional candidate, the CAND_PHI candidate
271 that defines the base SSA name B. */
272 cand_idx def_phi;
273
274 /* Savings that can be expected from eliminating dead code if this
275 candidate is replaced. */
276 int dead_savings;
277 };
278
279 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
280 typedef const struct slsr_cand_d *const_slsr_cand_t;
281
282 /* Pointers to candidates are chained together as part of a mapping
283 from base expressions to the candidates that use them. */
284
285 struct cand_chain_d
286 {
287 /* Base expression for the chain of candidates: often, but not
288 always, an SSA name. */
289 tree base_expr;
290
291 /* Pointer to a candidate. */
292 slsr_cand_t cand;
293
294 /* Chain pointer. */
295 struct cand_chain_d *next;
296
297 };
298
299 typedef struct cand_chain_d cand_chain, *cand_chain_t;
300 typedef const struct cand_chain_d *const_cand_chain_t;
301
302 /* Information about a unique "increment" associated with candidates
303 having an SSA name for a stride. An increment is the difference
304 between the index of the candidate and the index of its basis,
305 i.e., (i - i') as discussed in the module commentary.
306
307 When we are not going to generate address arithmetic we treat
308 increments that differ only in sign as the same, allowing sharing
309 of the cost of initializers. The absolute value of the increment
310 is stored in the incr_info. */
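/* For example, with stride S, one candidate whose increment is +3 and
   another whose increment is -3 can then share a single initializer
   T_0 = S * 3, provided address arithmetic is not being generated.  */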
311
312 struct incr_info_d
313 {
314 /* The increment that relates a candidate to its basis. */
315 widest_int incr;
316
317 /* How many times the increment occurs in the candidate tree. */
318 unsigned count;
319
320 /* Cost of replacing candidates using this increment. Negative and
321 zero costs indicate replacement should be performed. */
322 int cost;
323
324 /* If this increment is profitable but is not -1, 0, or 1, it requires
325 an initializer T_0 = stride * incr to be found or introduced in the
326 nearest common dominator of all candidates. This field holds T_0
327 for subsequent use. */
328 tree initializer;
329
330 /* If the initializer was found to already exist, this is the block
331 where it was found. */
332 basic_block init_bb;
333 };
334
335 typedef struct incr_info_d incr_info, *incr_info_t;
336
337 /* Candidates are maintained in a vector. If candidate X dominates
338 candidate Y, then X appears before Y in the vector; but the
339 converse does not necessarily hold. */
340 static vec<slsr_cand_t> cand_vec;
341
342 enum cost_consts
343 {
344 COST_NEUTRAL = 0,
345 COST_INFINITE = 1000
346 };
347
348 enum stride_status
349 {
350 UNKNOWN_STRIDE = 0,
351 KNOWN_STRIDE = 1
352 };
353
354 enum phi_adjust_status
355 {
356 NOT_PHI_ADJUST = 0,
357 PHI_ADJUST = 1
358 };
359
360 enum count_phis_status
361 {
362 DONT_COUNT_PHIS = 0,
363 COUNT_PHIS = 1
364 };
365
366 /* Pointer map embodying a mapping from statements to candidates. */
367 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
368
369 /* Obstack for candidates. */
370 static struct obstack cand_obstack;
371
372 /* Obstack for candidate chains. */
373 static struct obstack chain_obstack;
374
375 /* An array INCR_VEC of incr_infos is used during analysis of related
376 candidates having an SSA name for a stride. INCR_VEC_LEN describes
377 its current length. MAX_INCR_VEC_LEN is used to avoid costly
378 pathological cases. */
379 static incr_info_t incr_vec;
380 static unsigned incr_vec_len;
381 const int MAX_INCR_VEC_LEN = 16;
382
383 /* For a chain of candidates with unknown stride, indicates whether or not
384 we must generate pointer arithmetic when replacing statements. */
385 static bool address_arithmetic_p;
386
387 /* Forward function declarations. */
388 static slsr_cand_t base_cand_from_table (tree);
389 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
390 static bool legal_cast_p_1 (tree, tree);
391 \f
392 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
393
394 static slsr_cand_t
395 lookup_cand (cand_idx idx)
396 {
397 return cand_vec[idx - 1];
398 }
399
400 /* Helper for hashing a candidate chain header. */
401
402 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
403 {
404 static inline hashval_t hash (const cand_chain *);
405 static inline bool equal (const cand_chain *, const cand_chain *);
406 };
407
408 inline hashval_t
409 cand_chain_hasher::hash (const cand_chain *p)
410 {
411 tree base_expr = p->base_expr;
412 return iterative_hash_expr (base_expr, 0);
413 }
414
415 inline bool
416 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
417 {
418 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
419 }
420
421 /* Hash table embodying a mapping from base exprs to chains of candidates. */
422 static hash_table<cand_chain_hasher> *base_cand_map;
423 \f
424 /* Pointer map used by tree_to_aff_combination_expand. */
425 static hash_map<tree, name_expansion *> *name_expansions;
426 /* Pointer map embodying a mapping from bases to alternative bases. */
427 static hash_map<tree, tree> *alt_base_map;
428
429 /* Given BASE, use the tree affine combination facilities to
430 find the underlying tree expression for BASE, with any
431 immediate offset excluded.
432
433 N.B. we should eliminate this backtracking with better forward
434 analysis in a future release. */
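/* For instance (names purely illustrative), if BASE expands to the
   affine combination &a + 4 * i_5 + 16, the tree returned represents
   &a + 4 * i_5, i.e. the same combination with the constant 16 dropped.  */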
435
436 static tree
437 get_alternative_base (tree base)
438 {
439 tree *result = alt_base_map->get (base);
440
441 if (result == NULL)
442 {
443 tree expr;
444 aff_tree aff;
445
446 tree_to_aff_combination_expand (base, TREE_TYPE (base),
447 &aff, &name_expansions);
448 aff.offset = 0;
449 expr = aff_combination_to_tree (&aff);
450
451 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
452
453 return expr == base ? NULL : expr;
454 }
455
456 return *result;
457 }
458
459 /* Look in the candidate table for a CAND_PHI that defines BASE and
460 return its candidate index if found; otherwise return zero. */
461
462 static cand_idx
463 find_phi_def (tree base)
464 {
465 slsr_cand_t c;
466
467 if (TREE_CODE (base) != SSA_NAME)
468 return 0;
469
470 c = base_cand_from_table (base);
471
472 if (!c || c->kind != CAND_PHI)
473 return 0;
474
475 return c->cand_num;
476 }
477
478 /* Helper routine for find_basis_for_candidate. May be called twice:
479 once for the candidate's base expr, and optionally again either for
480 the candidate's phi definition or for a CAND_REF's alternative base
481 expression. */
482
483 static slsr_cand_t
484 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
485 {
486 cand_chain mapping_key;
487 cand_chain_t chain;
488 slsr_cand_t basis = NULL;
489
490 // Limit potential of N^2 behavior for long candidate chains.
491 int iters = 0;
492 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
493
494 mapping_key.base_expr = base_expr;
495 chain = base_cand_map->find (&mapping_key);
496
497 for (; chain && iters < max_iters; chain = chain->next, ++iters)
498 {
499 slsr_cand_t one_basis = chain->cand;
500
501 if (one_basis->kind != c->kind
502 || one_basis->cand_stmt == c->cand_stmt
503 || !operand_equal_p (one_basis->stride, c->stride, 0)
504 || !types_compatible_p (one_basis->cand_type, c->cand_type)
505 || !dominated_by_p (CDI_DOMINATORS,
506 gimple_bb (c->cand_stmt),
507 gimple_bb (one_basis->cand_stmt)))
508 continue;
509
510 if (!basis || basis->cand_num < one_basis->cand_num)
511 basis = one_basis;
512 }
513
514 return basis;
515 }
516
517 /* Use the base expr from candidate C to look for possible candidates
518 that can serve as a basis for C. Each potential basis must also
519 appear in a block that dominates the candidate statement and have
520 the same stride and type. If more than one possible basis exists,
521 the one with highest index in the vector is chosen; this will be
522 the most immediately dominating basis. */
523
524 static int
525 find_basis_for_candidate (slsr_cand_t c)
526 {
527 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
528
529 /* If a candidate doesn't have a basis using its base expression,
530 it may have a basis hidden by one or more intervening phis. */
531 if (!basis && c->def_phi)
532 {
533 basic_block basis_bb, phi_bb;
534 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
535 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
536
537 if (basis)
538 {
539 /* A hidden basis must dominate the phi-definition of the
540 candidate's base name. */
541 phi_bb = gimple_bb (phi_cand->cand_stmt);
542 basis_bb = gimple_bb (basis->cand_stmt);
543
544 if (phi_bb == basis_bb
545 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
546 {
547 basis = NULL;
548 c->basis = 0;
549 }
550
551 /* If we found a hidden basis, estimate additional dead-code
552 savings if the phi and its feeding statements can be removed. */
553 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
554 c->dead_savings += phi_cand->dead_savings;
555 }
556 }
557
558 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
559 {
560 tree alt_base_expr = get_alternative_base (c->base_expr);
561 if (alt_base_expr)
562 basis = find_basis_for_base_expr (c, alt_base_expr);
563 }
564
565 if (basis)
566 {
567 c->sibling = basis->dependent;
568 basis->dependent = c->cand_num;
569 return basis->cand_num;
570 }
571
572 return 0;
573 }
574
575 /* Record a mapping from BASE to C, indicating that C may potentially serve
576 as a basis using that base expression. BASE may be the same as
577 C->BASE_EXPR; alternatively, BASE can be a different tree that shares the
578 underlying expression of C->BASE_EXPR. */
579
580 static void
581 record_potential_basis (slsr_cand_t c, tree base)
582 {
583 cand_chain_t node;
584 cand_chain **slot;
585
586 gcc_assert (base);
587
588 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
589 node->base_expr = base;
590 node->cand = c;
591 node->next = NULL;
592 slot = base_cand_map->find_slot (node, INSERT);
593
594 if (*slot)
595 {
596 cand_chain_t head = (cand_chain_t) (*slot);
597 node->next = head->next;
598 head->next = node;
599 }
600 else
601 *slot = node;
602 }
603
604 /* Allocate storage for a new candidate and initialize its fields.
605 Attempt to find a basis for the candidate.
606
607 For CAND_REF, an alternative base may also be recorded and used
608 to find a basis. This helps cases where the expression hidden
609 behind BASE (which is usually an SSA_NAME) has immediate offset,
610 e.g.
611
612 a2[i][j] = 1;
613 a2[i + 20][j] = 2; */
614
615 static slsr_cand_t
616 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
617 const widest_int &index, tree stride, tree ctype,
618 unsigned savings)
619 {
620 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
621 sizeof (slsr_cand));
622 c->cand_stmt = gs;
623 c->base_expr = base;
624 c->stride = stride;
625 c->index = index;
626 c->cand_type = ctype;
627 c->kind = kind;
628 c->cand_num = cand_vec.length () + 1;
629 c->next_interp = 0;
630 c->dependent = 0;
631 c->sibling = 0;
632 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
633 c->dead_savings = savings;
634
635 cand_vec.safe_push (c);
636
637 if (kind == CAND_PHI)
638 c->basis = 0;
639 else
640 c->basis = find_basis_for_candidate (c);
641
642 record_potential_basis (c, base);
643 if (flag_expensive_optimizations && kind == CAND_REF)
644 {
645 tree alt_base = get_alternative_base (base);
646 if (alt_base)
647 record_potential_basis (c, alt_base);
648 }
649
650 return c;
651 }
652
653 /* Determine the target cost of statement GS when compiling according
654 to SPEED. */
655
656 static int
657 stmt_cost (gimple *gs, bool speed)
658 {
659 tree lhs, rhs1, rhs2;
660 machine_mode lhs_mode;
661
662 gcc_assert (is_gimple_assign (gs));
663 lhs = gimple_assign_lhs (gs);
664 rhs1 = gimple_assign_rhs1 (gs);
665 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
666
667 switch (gimple_assign_rhs_code (gs))
668 {
669 case MULT_EXPR:
670 rhs2 = gimple_assign_rhs2 (gs);
671
672 if (tree_fits_shwi_p (rhs2))
673 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
674
675 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
676 return mul_cost (speed, lhs_mode);
677
678 case PLUS_EXPR:
679 case POINTER_PLUS_EXPR:
680 case MINUS_EXPR:
681 return add_cost (speed, lhs_mode);
682
683 case NEGATE_EXPR:
684 return neg_cost (speed, lhs_mode);
685
686 CASE_CONVERT:
687 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
688
689 /* Note that we don't assign costs to copies that in most cases
690 will go away. */
691 default:
692 ;
693 }
694
695 gcc_unreachable ();
696 return 0;
697 }
698
699 /* Look up the defining statement for BASE_IN and return a pointer
700 to its candidate in the candidate table, if any; otherwise NULL.
701 CAND_REF candidates are not returned. */
702
703 static slsr_cand_t
704 base_cand_from_table (tree base_in)
705 {
706 slsr_cand_t *result;
707
708 gimple *def = SSA_NAME_DEF_STMT (base_in);
709 if (!def)
710 return (slsr_cand_t) NULL;
711
712 result = stmt_cand_map->get (def);
713
714 if (result && (*result)->kind != CAND_REF)
715 return *result;
716
717 return (slsr_cand_t) NULL;
718 }
719
720 /* Add an entry to the statement-to-candidate mapping. */
721
722 static void
723 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
724 {
725 gcc_assert (!stmt_cand_map->put (gs, c));
726 }
727 \f
728 /* Given the phi statement PHI, determine whether it
729 satisfies all the requirements of a phi candidate. If so, create
730 a candidate. Note that a CAND_PHI never has a basis itself, but
731 is used to help find a basis for subsequent candidates. */
732
733 static void
734 slsr_process_phi (gphi *phi, bool speed)
735 {
736 unsigned i;
737 tree arg0_base = NULL_TREE, base_type;
738 slsr_cand_t c;
739 struct loop *cand_loop = gimple_bb (phi)->loop_father;
740 unsigned savings = 0;
741
742 /* A CAND_PHI requires each of its arguments to have the same
743 derived base name. (See the module header commentary for a
744 definition of derived base names.) Furthermore, all feeding
745 definitions must be in the same position in the loop hierarchy
746 as PHI. */
747
748 for (i = 0; i < gimple_phi_num_args (phi); i++)
749 {
750 slsr_cand_t arg_cand;
751 tree arg = gimple_phi_arg_def (phi, i);
752 tree derived_base_name = NULL_TREE;
753 gimple *arg_stmt = NULL;
754 basic_block arg_bb = NULL;
755
756 if (TREE_CODE (arg) != SSA_NAME)
757 return;
758
759 arg_cand = base_cand_from_table (arg);
760
761 if (arg_cand)
762 {
763 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
764 {
765 if (!arg_cand->next_interp)
766 return;
767
768 arg_cand = lookup_cand (arg_cand->next_interp);
769 }
770
771 if (!integer_onep (arg_cand->stride))
772 return;
773
774 derived_base_name = arg_cand->base_expr;
775 arg_stmt = arg_cand->cand_stmt;
776 arg_bb = gimple_bb (arg_stmt);
777
778 /* Gather potential dead code savings if the phi statement
779 can be removed later on. */
780 if (has_single_use (arg))
781 {
782 if (gimple_code (arg_stmt) == GIMPLE_PHI)
783 savings += arg_cand->dead_savings;
784 else
785 savings += stmt_cost (arg_stmt, speed);
786 }
787 }
788 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
789 {
790 derived_base_name = arg;
791 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
792 }
793
794 if (!arg_bb || arg_bb->loop_father != cand_loop)
795 return;
796
797 if (i == 0)
798 arg0_base = derived_base_name;
799 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
800 return;
801 }
802
803 /* Create the candidate. "alloc_cand_and_find_basis" is named
804 misleadingly for this case, as no basis will be sought for a
805 CAND_PHI. */
806 base_type = TREE_TYPE (arg0_base);
807
808 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
809 0, integer_one_node, base_type, savings);
810
811 /* Add the candidate to the statement-candidate mapping. */
812 add_cand_for_stmt (phi, c);
813 }
814
815 /* Given PBASE which is a pointer to tree, look up the defining
816 statement for it and check whether the candidate is in the
817 form of:
818
819 X = B + (1 * S), S is integer constant
820 X = B + (i * S), S is integer one
821
822 If so, set PBASE to the candidate's base_expr and return the
823 widest_int (i * S).
824 Otherwise, just return a zero widest_int. */
825
826 static widest_int
827 backtrace_base_for_ref (tree *pbase)
828 {
829 tree base_in = *pbase;
830 slsr_cand_t base_cand;
831
832 STRIP_NOPS (base_in);
833
834 /* Strip off widening conversion(s) to handle cases where
835 e.g. 'B' is widened from an 'int' in order to calculate
836 a 64-bit address. */
837 if (CONVERT_EXPR_P (base_in)
838 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
839 base_in = get_unwidened (base_in, NULL_TREE);
840
841 if (TREE_CODE (base_in) != SSA_NAME)
842 return 0;
843
844 base_cand = base_cand_from_table (base_in);
845
846 while (base_cand && base_cand->kind != CAND_PHI)
847 {
848 if (base_cand->kind == CAND_ADD
849 && base_cand->index == 1
850 && TREE_CODE (base_cand->stride) == INTEGER_CST)
851 {
852 /* X = B + (1 * S), S is integer constant. */
853 *pbase = base_cand->base_expr;
854 return wi::to_widest (base_cand->stride);
855 }
856 else if (base_cand->kind == CAND_ADD
857 && TREE_CODE (base_cand->stride) == INTEGER_CST
858 && integer_onep (base_cand->stride))
859 {
860 /* X = B + (i * S), S is integer one. */
861 *pbase = base_cand->base_expr;
862 return base_cand->index;
863 }
864
865 if (base_cand->next_interp)
866 base_cand = lookup_cand (base_cand->next_interp);
867 else
868 base_cand = NULL;
869 }
870
871 return 0;
872 }
873
874 /* Look for the following pattern:
875
876 *PBASE: MEM_REF (T1, C1)
877
878 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
879 or
880 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
881 or
882 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
883
884 *PINDEX: C4 * BITS_PER_UNIT
885
886 If not present, leave the input values unchanged and return FALSE.
887 Otherwise, modify the input values as follows and return TRUE:
888
889 *PBASE: T1
890 *POFFSET: MULT_EXPR (T2, C3)
891 *PINDEX: C1 + (C2 * C3) + C4
892
893 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
894 will be further restructured to:
895
896 *PBASE: T1
897 *POFFSET: MULT_EXPR (T2', C3)
898 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
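/* As a worked example (constants purely illustrative, and assuming T2 is
   not itself recorded as a CAND_ADD, so C5 = 0), the inputs

     *PBASE:   MEM_REF (T1, 4)
     *POFFSET: MULT_EXPR (PLUS_EXPR (T2, 3), 8)
     *PINDEX:  16 * BITS_PER_UNIT

   are rewritten as

     *PBASE:   T1
     *POFFSET: MULT_EXPR (T2, 8)
     *PINDEX:  4 + (3 * 8) + 16 = 44.  */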
899
900 static bool
901 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
902 tree *ptype)
903 {
904 tree base = *pbase, offset = *poffset;
905 widest_int index = *pindex;
906 tree mult_op0, t1, t2, type;
907 widest_int c1, c2, c3, c4, c5;
908
909 if (!base
910 || !offset
911 || TREE_CODE (base) != MEM_REF
912 || TREE_CODE (offset) != MULT_EXPR
913 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
914 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
915 return false;
916
917 t1 = TREE_OPERAND (base, 0);
918 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
919 type = TREE_TYPE (TREE_OPERAND (base, 1));
920
921 mult_op0 = TREE_OPERAND (offset, 0);
922 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
923
924 if (TREE_CODE (mult_op0) == PLUS_EXPR)
925
926 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
927 {
928 t2 = TREE_OPERAND (mult_op0, 0);
929 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
930 }
931 else
932 return false;
933
934 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
935
936 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
937 {
938 t2 = TREE_OPERAND (mult_op0, 0);
939 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
940 }
941 else
942 return false;
943
944 else
945 {
946 t2 = mult_op0;
947 c2 = 0;
948 }
949
950 c4 = index >> LOG2_BITS_PER_UNIT;
951 c5 = backtrace_base_for_ref (&t2);
952
953 *pbase = t1;
954 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
955 wide_int_to_tree (sizetype, c3));
956 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
957 *ptype = type;
958
959 return true;
960 }
961
962 /* Given GS which contains a data reference, create a CAND_REF entry in
963 the candidate table and attempt to find a basis. */
964
965 static void
966 slsr_process_ref (gimple *gs)
967 {
968 tree ref_expr, base, offset, type;
969 HOST_WIDE_INT bitsize, bitpos;
970 machine_mode mode;
971 int unsignedp, reversep, volatilep;
972 slsr_cand_t c;
973
974 if (gimple_vdef (gs))
975 ref_expr = gimple_assign_lhs (gs);
976 else
977 ref_expr = gimple_assign_rhs1 (gs);
978
979 if (!handled_component_p (ref_expr)
980 || TREE_CODE (ref_expr) == BIT_FIELD_REF
981 || (TREE_CODE (ref_expr) == COMPONENT_REF
982 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
983 return;
984
985 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
986 &unsignedp, &reversep, &volatilep);
987 if (reversep)
988 return;
989 widest_int index = bitpos;
990
991 if (!restructure_reference (&base, &offset, &index, &type))
992 return;
993
994 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
995 type, 0);
996
997 /* Add the candidate to the statement-candidate mapping. */
998 add_cand_for_stmt (gs, c);
999 }
1000
1001 /* Create a candidate entry for a statement GS, where GS multiplies
1002 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1003 about the two SSA names into the new candidate. Return the new
1004 candidate. */
1005
1006 static slsr_cand_t
1007 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1008 {
1009 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1010 widest_int index;
1011 unsigned savings = 0;
1012 slsr_cand_t c;
1013 slsr_cand_t base_cand = base_cand_from_table (base_in);
1014
1015 /* Look at all interpretations of the base candidate, if necessary,
1016 to find information to propagate into this candidate. */
1017 while (base_cand && !base && base_cand->kind != CAND_PHI)
1018 {
1019
1020 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1021 {
1022 /* Y = (B + i') * 1
1023 X = Y * Z
1024 ================
1025 X = (B + i') * Z */
1026 base = base_cand->base_expr;
1027 index = base_cand->index;
1028 stride = stride_in;
1029 ctype = base_cand->cand_type;
1030 if (has_single_use (base_in))
1031 savings = (base_cand->dead_savings
1032 + stmt_cost (base_cand->cand_stmt, speed));
1033 }
1034 else if (base_cand->kind == CAND_ADD
1035 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1036 {
1037 /* Y = B + (i' * S), S constant
1038 X = Y * Z
1039 ============================
1040 X = B + ((i' * S) * Z) */
1041 base = base_cand->base_expr;
1042 index = base_cand->index * wi::to_widest (base_cand->stride);
1043 stride = stride_in;
1044 ctype = base_cand->cand_type;
1045 if (has_single_use (base_in))
1046 savings = (base_cand->dead_savings
1047 + stmt_cost (base_cand->cand_stmt, speed));
1048 }
1049
1050 if (base_cand->next_interp)
1051 base_cand = lookup_cand (base_cand->next_interp);
1052 else
1053 base_cand = NULL;
1054 }
1055
1056 if (!base)
1057 {
1058 /* No interpretations had anything useful to propagate, so
1059 produce X = (Y + 0) * Z. */
1060 base = base_in;
1061 index = 0;
1062 stride = stride_in;
1063 ctype = TREE_TYPE (base_in);
1064 }
1065
1066 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1067 ctype, savings);
1068 return c;
1069 }
1070
1071 /* Create a candidate entry for a statement GS, where GS multiplies
1072 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1073 information about BASE_IN into the new candidate. Return the new
1074 candidate. */
1075
1076 static slsr_cand_t
1077 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1078 {
1079 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1080 widest_int index, temp;
1081 unsigned savings = 0;
1082 slsr_cand_t c;
1083 slsr_cand_t base_cand = base_cand_from_table (base_in);
1084
1085 /* Look at all interpretations of the base candidate, if necessary,
1086 to find information to propagate into this candidate. */
1087 while (base_cand && !base && base_cand->kind != CAND_PHI)
1088 {
1089 if (base_cand->kind == CAND_MULT
1090 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1091 {
1092 /* Y = (B + i') * S, S constant
1093 X = Y * c
1094 ============================
1095 X = (B + i') * (S * c) */
1096 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1097 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1098 {
1099 base = base_cand->base_expr;
1100 index = base_cand->index;
1101 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1102 ctype = base_cand->cand_type;
1103 if (has_single_use (base_in))
1104 savings = (base_cand->dead_savings
1105 + stmt_cost (base_cand->cand_stmt, speed));
1106 }
1107 }
1108 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1109 {
1110 /* Y = B + (i' * 1)
1111 X = Y * c
1112 ===========================
1113 X = (B + i') * c */
1114 base = base_cand->base_expr;
1115 index = base_cand->index;
1116 stride = stride_in;
1117 ctype = base_cand->cand_type;
1118 if (has_single_use (base_in))
1119 savings = (base_cand->dead_savings
1120 + stmt_cost (base_cand->cand_stmt, speed));
1121 }
1122 else if (base_cand->kind == CAND_ADD
1123 && base_cand->index == 1
1124 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1125 {
1126 /* Y = B + (1 * S), S constant
1127 X = Y * c
1128 ===========================
1129 X = (B + S) * c */
1130 base = base_cand->base_expr;
1131 index = wi::to_widest (base_cand->stride);
1132 stride = stride_in;
1133 ctype = base_cand->cand_type;
1134 if (has_single_use (base_in))
1135 savings = (base_cand->dead_savings
1136 + stmt_cost (base_cand->cand_stmt, speed));
1137 }
1138
1139 if (base_cand->next_interp)
1140 base_cand = lookup_cand (base_cand->next_interp);
1141 else
1142 base_cand = NULL;
1143 }
1144
1145 if (!base)
1146 {
1147 /* No interpretations had anything useful to propagate, so
1148 produce X = (Y + 0) * c. */
1149 base = base_in;
1150 index = 0;
1151 stride = stride_in;
1152 ctype = TREE_TYPE (base_in);
1153 }
1154
1155 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1156 ctype, savings);
1157 return c;
1158 }
1159
1160 /* Given GS which is a multiply of scalar integers, make an appropriate
1161 entry in the candidate table. If this is a multiply of two SSA names,
1162 create two CAND_MULT interpretations and attempt to find a basis for
1163 each of them. Otherwise, create a single CAND_MULT and attempt to
1164 find a basis. */
1165
1166 static void
1167 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1168 {
1169 slsr_cand_t c, c2;
1170
1171 /* If this is a multiply of an SSA name with itself, it is highly
1172 unlikely that we will get a strength reduction opportunity, so
1173 don't record it as a candidate. This simplifies the logic for
1174 finding a basis, so if this is removed that must be considered. */
1175 if (rhs1 == rhs2)
1176 return;
1177
1178 if (TREE_CODE (rhs2) == SSA_NAME)
1179 {
1180 /* Record an interpretation of this statement in the candidate table
1181 assuming RHS1 is the base expression and RHS2 is the stride. */
1182 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1183
1184 /* Add the first interpretation to the statement-candidate mapping. */
1185 add_cand_for_stmt (gs, c);
1186
1187 /* Record another interpretation of this statement assuming RHS1
1188 is the stride and RHS2 is the base expression. */
1189 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1190 c->next_interp = c2->cand_num;
1191 }
1192 else
1193 {
1194 /* Record an interpretation for the multiply-immediate. */
1195 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1196
1197 /* Add the interpretation to the statement-candidate mapping. */
1198 add_cand_for_stmt (gs, c);
1199 }
1200 }
1201
1202 /* Create a candidate entry for a statement GS, where GS adds two
1203 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1204 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1205 information about the two SSA names into the new candidate.
1206 Return the new candidate. */
1207
1208 static slsr_cand_t
1209 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1210 bool subtract_p, bool speed)
1211 {
1212 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1213 widest_int index;
1214 unsigned savings = 0;
1215 slsr_cand_t c;
1216 slsr_cand_t base_cand = base_cand_from_table (base_in);
1217 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1218
1219 /* The most useful transformation is a multiply-immediate feeding
1220 an add or subtract. Look for that first. */
1221 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1222 {
1223 if (addend_cand->kind == CAND_MULT
1224 && addend_cand->index == 0
1225 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1226 {
1227 /* Z = (B + 0) * S, S constant
1228 X = Y +/- Z
1229 ===========================
1230 X = Y + ((+/-1 * S) * B) */
1231 base = base_in;
1232 index = wi::to_widest (addend_cand->stride);
1233 if (subtract_p)
1234 index = -index;
1235 stride = addend_cand->base_expr;
1236 ctype = TREE_TYPE (base_in);
1237 if (has_single_use (addend_in))
1238 savings = (addend_cand->dead_savings
1239 + stmt_cost (addend_cand->cand_stmt, speed));
1240 }
1241
1242 if (addend_cand->next_interp)
1243 addend_cand = lookup_cand (addend_cand->next_interp);
1244 else
1245 addend_cand = NULL;
1246 }
1247
1248 while (base_cand && !base && base_cand->kind != CAND_PHI)
1249 {
1250 if (base_cand->kind == CAND_ADD
1251 && (base_cand->index == 0
1252 || operand_equal_p (base_cand->stride,
1253 integer_zero_node, 0)))
1254 {
1255 /* Y = B + (i' * S), i' * S = 0
1256 X = Y +/- Z
1257 ============================
1258 X = B + (+/-1 * Z) */
1259 base = base_cand->base_expr;
1260 index = subtract_p ? -1 : 1;
1261 stride = addend_in;
1262 ctype = base_cand->cand_type;
1263 if (has_single_use (base_in))
1264 savings = (base_cand->dead_savings
1265 + stmt_cost (base_cand->cand_stmt, speed));
1266 }
1267 else if (subtract_p)
1268 {
1269 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1270
1271 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1272 {
1273 if (subtrahend_cand->kind == CAND_MULT
1274 && subtrahend_cand->index == 0
1275 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1276 {
1277 /* Z = (B + 0) * S, S constant
1278 X = Y - Z
1279 ===========================
1280 Value: X = Y + ((-1 * S) * B) */
1281 base = base_in;
1282 index = wi::to_widest (subtrahend_cand->stride);
1283 index = -index;
1284 stride = subtrahend_cand->base_expr;
1285 ctype = TREE_TYPE (base_in);
1286 if (has_single_use (addend_in))
1287 savings = (subtrahend_cand->dead_savings
1288 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1289 }
1290
1291 if (subtrahend_cand->next_interp)
1292 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1293 else
1294 subtrahend_cand = NULL;
1295 }
1296 }
1297
1298 if (base_cand->next_interp)
1299 base_cand = lookup_cand (base_cand->next_interp);
1300 else
1301 base_cand = NULL;
1302 }
1303
1304 if (!base)
1305 {
1306 /* No interpretations had anything useful to propagate, so
1307 produce X = Y + (1 * Z). */
1308 base = base_in;
1309 index = subtract_p ? -1 : 1;
1310 stride = addend_in;
1311 ctype = TREE_TYPE (base_in);
1312 }
1313
1314 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1315 ctype, savings);
1316 return c;
1317 }
1318
1319 /* Create a candidate entry for a statement GS, where GS adds SSA
1320 name BASE_IN to constant INDEX_IN. Propagate any known information
1321 about BASE_IN into the new candidate. Return the new candidate. */
1322
1323 static slsr_cand_t
1324 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1325 bool speed)
1326 {
1327 enum cand_kind kind = CAND_ADD;
1328 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1329 widest_int index, multiple;
1330 unsigned savings = 0;
1331 slsr_cand_t c;
1332 slsr_cand_t base_cand = base_cand_from_table (base_in);
1333
1334 while (base_cand && !base && base_cand->kind != CAND_PHI)
1335 {
1336 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1337
1338 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1339 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1340 sign, &multiple))
1341 {
1342 /* Y = (B + i') * S, S constant, c = kS for some integer k
1343 X = Y + c
1344 ============================
1345 X = (B + (i'+ k)) * S
1346 OR
1347 Y = B + (i' * S), S constant, c = kS for some integer k
1348 X = Y + c
1349 ============================
1350 X = (B + (i'+ k)) * S */
1351 kind = base_cand->kind;
1352 base = base_cand->base_expr;
1353 index = base_cand->index + multiple;
1354 stride = base_cand->stride;
1355 ctype = base_cand->cand_type;
1356 if (has_single_use (base_in))
1357 savings = (base_cand->dead_savings
1358 + stmt_cost (base_cand->cand_stmt, speed));
1359 }
1360
1361 if (base_cand->next_interp)
1362 base_cand = lookup_cand (base_cand->next_interp);
1363 else
1364 base_cand = NULL;
1365 }
1366
1367 if (!base)
1368 {
1369 /* No interpretations had anything useful to propagate, so
1370 produce X = Y + (c * 1). */
1371 kind = CAND_ADD;
1372 base = base_in;
1373 index = index_in;
1374 stride = integer_one_node;
1375 ctype = TREE_TYPE (base_in);
1376 }
1377
1378 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1379 ctype, savings);
1380 return c;
1381 }
1382
1383 /* Given GS which is an add or subtract of scalar integers or pointers,
1384 make at least one appropriate entry in the candidate table. */
1385
1386 static void
1387 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1388 {
1389 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1390 slsr_cand_t c = NULL, c2;
1391
1392 if (TREE_CODE (rhs2) == SSA_NAME)
1393 {
1394 /* First record an interpretation assuming RHS1 is the base expression
1395 and RHS2 is the stride. But it doesn't make sense for the
1396 stride to be a pointer, so don't record a candidate in that case. */
1397 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1398 {
1399 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1400
1401 /* Add the first interpretation to the statement-candidate
1402 mapping. */
1403 add_cand_for_stmt (gs, c);
1404 }
1405
1406 /* If the two RHS operands are identical, or this is a subtract,
1407 we're done. */
1408 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1409 return;
1410
1411 /* Otherwise, record another interpretation assuming RHS2 is the
1412 base expression and RHS1 is the stride, again provided that the
1413 stride is not a pointer. */
1414 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1415 {
1416 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1417 if (c)
1418 c->next_interp = c2->cand_num;
1419 else
1420 add_cand_for_stmt (gs, c2);
1421 }
1422 }
1423 else
1424 {
1425 /* Record an interpretation for the add-immediate. */
1426 widest_int index = wi::to_widest (rhs2);
1427 if (subtract_p)
1428 index = -index;
1429
1430 c = create_add_imm_cand (gs, rhs1, index, speed);
1431
1432 /* Add the interpretation to the statement-candidate mapping. */
1433 add_cand_for_stmt (gs, c);
1434 }
1435 }
1436
1437 /* Given GS which is a negate of a scalar integer, make an appropriate
1438 entry in the candidate table. A negate is equivalent to a multiply
1439 by -1. */
1440
1441 static void
1442 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1443 {
1444 /* Record a CAND_MULT interpretation for the multiply by -1. */
1445 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1446
1447 /* Add the interpretation to the statement-candidate mapping. */
1448 add_cand_for_stmt (gs, c);
1449 }
1450
1451 /* Helper function for legal_cast_p, operating on two trees. Checks
1452 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1453 for more details. */
1454
1455 static bool
1456 legal_cast_p_1 (tree lhs, tree rhs)
1457 {
1458 tree lhs_type, rhs_type;
1459 unsigned lhs_size, rhs_size;
1460 bool lhs_wraps, rhs_wraps;
1461
1462 lhs_type = TREE_TYPE (lhs);
1463 rhs_type = TREE_TYPE (rhs);
1464 lhs_size = TYPE_PRECISION (lhs_type);
1465 rhs_size = TYPE_PRECISION (rhs_type);
1466 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1467 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1468
1469 if (lhs_size < rhs_size
1470 || (rhs_wraps && !lhs_wraps)
1471 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1472 return false;
1473
1474 return true;
1475 }
1476
1477 /* Return TRUE if GS is a statement that defines an SSA name from
1478 a conversion and is legal for us to combine with an add and multiply
1479 in the candidate table. For example, suppose we have:
1480
1481 A = B + i;
1482 C = (type) A;
1483 D = C * S;
1484
1485 Without the type-cast, we would create a CAND_MULT for D with base B,
1486 index i, and stride S. We want to record this candidate only if it
1487 is equivalent to applying the type cast following the multiply:
1488
1489 A = B + i;
1490 E = A * S;
1491 D = (type) E;
1492
1493 We will record the type with the candidate for D. This allows us
1494 to use a similar previous candidate as a basis. If we have earlier seen
1495
1496 A' = B + i';
1497 C' = (type) A';
1498 D' = C' * S;
1499
1500 we can replace D with
1501
1502 D = D' + (i - i') * S;
1503
1504 But if moving the type-cast would change semantics, we mustn't do this.
1505
1506 This is legitimate for casts from a non-wrapping integral type to
1507 any integral type of the same or larger size. It is not legitimate
1508 to convert a wrapping type to a non-wrapping type, or to a wrapping
1509 type of a different size. I.e., with a wrapping type, we must
1510 assume that the addition B + i could wrap, in which case performing
1511 the multiply before or after one of the "illegal" type casts will
1512 have different semantics. */
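/* A few concrete cases, with typical LP64 type sizes and the default
   signed-overflow rules assumed purely for illustration: casting a 32-bit
   "int" to a 64-bit "long" is legal, since the source type does not wrap;
   casting "unsigned int" to "unsigned long" is not, since the source wraps
   and the sizes differ; and casting "unsigned int" to "int" is not, since
   wrapping semantics would be dropped.  */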
1513
1514 static bool
1515 legal_cast_p (gimple *gs, tree rhs)
1516 {
1517 if (!is_gimple_assign (gs)
1518 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1519 return false;
1520
1521 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1522 }
1523
1524 /* Given GS which is a cast to a scalar integer type, determine whether
1525 the cast is legal for strength reduction. If so, make at least one
1526 appropriate entry in the candidate table. */
1527
1528 static void
1529 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1530 {
1531 tree lhs, ctype;
1532 slsr_cand_t base_cand, c, c2;
1533 unsigned savings = 0;
1534
1535 if (!legal_cast_p (gs, rhs1))
1536 return;
1537
1538 lhs = gimple_assign_lhs (gs);
1539 base_cand = base_cand_from_table (rhs1);
1540 ctype = TREE_TYPE (lhs);
1541
1542 if (base_cand && base_cand->kind != CAND_PHI)
1543 {
1544 while (base_cand)
1545 {
1546 /* Propagate all data from the base candidate except the type,
1547 which comes from the cast, and the base candidate's cast,
1548 which is no longer applicable. */
1549 if (has_single_use (rhs1))
1550 savings = (base_cand->dead_savings
1551 + stmt_cost (base_cand->cand_stmt, speed));
1552
1553 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1554 base_cand->base_expr,
1555 base_cand->index, base_cand->stride,
1556 ctype, savings);
1557 if (base_cand->next_interp)
1558 base_cand = lookup_cand (base_cand->next_interp);
1559 else
1560 base_cand = NULL;
1561 }
1562 }
1563 else
1564 {
1565 /* If nothing is known about the RHS, create fresh CAND_ADD and
1566 CAND_MULT interpretations:
1567
1568 X = Y + (0 * 1)
1569 X = (Y + 0) * 1
1570
1571 The first of these is somewhat arbitrary, but the choice of
1572 1 for the stride simplifies the logic for propagating casts
1573 into their uses. */
1574 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1575 0, integer_one_node, ctype, 0);
1576 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1577 0, integer_one_node, ctype, 0);
1578 c->next_interp = c2->cand_num;
1579 }
1580
1581 /* Add the first (or only) interpretation to the statement-candidate
1582 mapping. */
1583 add_cand_for_stmt (gs, c);
1584 }
1585
1586 /* Given GS which is a copy of a scalar integer type, make at least one
1587 appropriate entry in the candidate table.
1588
1589 This interface is included for completeness, but is unnecessary
1590 if this pass immediately follows a pass that performs copy
1591 propagation, such as DOM. */
1592
1593 static void
1594 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1595 {
1596 slsr_cand_t base_cand, c, c2;
1597 unsigned savings = 0;
1598
1599 base_cand = base_cand_from_table (rhs1);
1600
1601 if (base_cand && base_cand->kind != CAND_PHI)
1602 {
1603 while (base_cand)
1604 {
1605 /* Propagate all data from the base candidate. */
1606 if (has_single_use (rhs1))
1607 savings = (base_cand->dead_savings
1608 + stmt_cost (base_cand->cand_stmt, speed));
1609
1610 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1611 base_cand->base_expr,
1612 base_cand->index, base_cand->stride,
1613 base_cand->cand_type, savings);
1614 if (base_cand->next_interp)
1615 base_cand = lookup_cand (base_cand->next_interp);
1616 else
1617 base_cand = NULL;
1618 }
1619 }
1620 else
1621 {
1622 /* If nothing is known about the RHS, create fresh CAND_ADD and
1623 CAND_MULT interpretations:
1624
1625 X = Y + (0 * 1)
1626 X = (Y + 0) * 1
1627
1628 The first of these is somewhat arbitrary, but the choice of
1629 1 for the stride simplifies the logic for propagating casts
1630 into their uses. */
1631 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1632 0, integer_one_node, TREE_TYPE (rhs1), 0);
1633 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1634 0, integer_one_node, TREE_TYPE (rhs1), 0);
1635 c->next_interp = c2->cand_num;
1636 }
1637
1638 /* Add the first (or only) interpretation to the statement-candidate
1639 mapping. */
1640 add_cand_for_stmt (gs, c);
1641 }
1642 \f
1643 class find_candidates_dom_walker : public dom_walker
1644 {
1645 public:
1646 find_candidates_dom_walker (cdi_direction direction)
1647 : dom_walker (direction) {}
1648 virtual edge before_dom_children (basic_block);
1649 };
1650
1651 /* Find strength-reduction candidates in block BB. */
1652
1653 edge
1654 find_candidates_dom_walker::before_dom_children (basic_block bb)
1655 {
1656 bool speed = optimize_bb_for_speed_p (bb);
1657
1658 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1659 gsi_next (&gsi))
1660 slsr_process_phi (gsi.phi (), speed);
1661
1662 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1663 gsi_next (&gsi))
1664 {
1665 gimple *gs = gsi_stmt (gsi);
1666
1667 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1668 slsr_process_ref (gs);
1669
1670 else if (is_gimple_assign (gs)
1671 && SCALAR_INT_MODE_P
1672 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1673 {
1674 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1675
1676 switch (gimple_assign_rhs_code (gs))
1677 {
1678 case MULT_EXPR:
1679 case PLUS_EXPR:
1680 rhs1 = gimple_assign_rhs1 (gs);
1681 rhs2 = gimple_assign_rhs2 (gs);
1682 /* Should never happen, but currently some buggy situations
1683 in earlier phases put constants in rhs1. */
1684 if (TREE_CODE (rhs1) != SSA_NAME)
1685 continue;
1686 break;
1687
1688 /* Possible future opportunity: rhs1 of a ptr+ can be
1689 an ADDR_EXPR. */
1690 case POINTER_PLUS_EXPR:
1691 case MINUS_EXPR:
1692 rhs2 = gimple_assign_rhs2 (gs);
1693 /* Fall-through. */
1694
1695 CASE_CONVERT:
1696 case MODIFY_EXPR:
1697 case NEGATE_EXPR:
1698 rhs1 = gimple_assign_rhs1 (gs);
1699 if (TREE_CODE (rhs1) != SSA_NAME)
1700 continue;
1701 break;
1702
1703 default:
1704 ;
1705 }
1706
1707 switch (gimple_assign_rhs_code (gs))
1708 {
1709 case MULT_EXPR:
1710 slsr_process_mul (gs, rhs1, rhs2, speed);
1711 break;
1712
1713 case PLUS_EXPR:
1714 case POINTER_PLUS_EXPR:
1715 case MINUS_EXPR:
1716 slsr_process_add (gs, rhs1, rhs2, speed);
1717 break;
1718
1719 case NEGATE_EXPR:
1720 slsr_process_neg (gs, rhs1, speed);
1721 break;
1722
1723 CASE_CONVERT:
1724 slsr_process_cast (gs, rhs1, speed);
1725 break;
1726
1727 case MODIFY_EXPR:
1728 slsr_process_copy (gs, rhs1, speed);
1729 break;
1730
1731 default:
1732 ;
1733 }
1734 }
1735 }
1736 return NULL;
1737 }
1738 \f
1739 /* Dump a candidate for debug. */
1740
1741 static void
1742 dump_candidate (slsr_cand_t c)
1743 {
1744 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1745 gimple_bb (c->cand_stmt)->index);
1746 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1747 switch (c->kind)
1748 {
1749 case CAND_MULT:
1750 fputs (" MULT : (", dump_file);
1751 print_generic_expr (dump_file, c->base_expr, 0);
1752 fputs (" + ", dump_file);
1753 print_decs (c->index, dump_file);
1754 fputs (") * ", dump_file);
1755 print_generic_expr (dump_file, c->stride, 0);
1756 fputs (" : ", dump_file);
1757 break;
1758 case CAND_ADD:
1759 fputs (" ADD : ", dump_file);
1760 print_generic_expr (dump_file, c->base_expr, 0);
1761 fputs (" + (", dump_file);
1762 print_decs (c->index, dump_file);
1763 fputs (" * ", dump_file);
1764 print_generic_expr (dump_file, c->stride, 0);
1765 fputs (") : ", dump_file);
1766 break;
1767 case CAND_REF:
1768 fputs (" REF : ", dump_file);
1769 print_generic_expr (dump_file, c->base_expr, 0);
1770 fputs (" + (", dump_file);
1771 print_generic_expr (dump_file, c->stride, 0);
1772 fputs (") + ", dump_file);
1773 print_decs (c->index, dump_file);
1774 fputs (" : ", dump_file);
1775 break;
1776 case CAND_PHI:
1777 fputs (" PHI : ", dump_file);
1778 print_generic_expr (dump_file, c->base_expr, 0);
1779 fputs (" + (unknown * ", dump_file);
1780 print_generic_expr (dump_file, c->stride, 0);
1781 fputs (") : ", dump_file);
1782 break;
1783 default:
1784 gcc_unreachable ();
1785 }
1786 print_generic_expr (dump_file, c->cand_type, 0);
1787 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1788 c->basis, c->dependent, c->sibling);
1789 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1790 c->next_interp, c->dead_savings);
1791 if (c->def_phi)
1792 fprintf (dump_file, " phi: %d\n", c->def_phi);
1793 fputs ("\n", dump_file);
1794 }
1795
1796 /* Dump the candidate vector for debug. */
1797
1798 static void
1799 dump_cand_vec (void)
1800 {
1801 unsigned i;
1802 slsr_cand_t c;
1803
1804 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1805
1806 FOR_EACH_VEC_ELT (cand_vec, i, c)
1807 dump_candidate (c);
1808 }
1809
1810 /* Callback used to dump the candidate chains hash table. */
1811
1812 int
1813 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1814 {
1815 const_cand_chain_t chain = *slot;
1816 cand_chain_t p;
1817
1818 print_generic_expr (dump_file, chain->base_expr, 0);
1819 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1820
1821 for (p = chain->next; p; p = p->next)
1822 fprintf (dump_file, " -> %d", p->cand->cand_num);
1823
1824 fputs ("\n", dump_file);
1825 return 1;
1826 }
1827
1828 /* Dump the candidate chains. */
1829
1830 static void
1831 dump_cand_chains (void)
1832 {
1833 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1834 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1835 (NULL);
1836 fputs ("\n", dump_file);
1837 }
1838
1839 /* Dump the increment vector for debug. */
1840
1841 static void
1842 dump_incr_vec (void)
1843 {
1844 if (dump_file && (dump_flags & TDF_DETAILS))
1845 {
1846 unsigned i;
1847
1848 fprintf (dump_file, "\nIncrement vector:\n\n");
1849
1850 for (i = 0; i < incr_vec_len; i++)
1851 {
1852 fprintf (dump_file, "%3d increment: ", i);
1853 print_decs (incr_vec[i].incr, dump_file);
1854 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1855 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1856 fputs ("\n initializer: ", dump_file);
1857 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1858 fputs ("\n\n", dump_file);
1859 }
1860 }
1861 }
1862 \f
1863 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1864 data reference. */
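/* A hypothetical illustration (SSA names invented for exposition):
   for a CAND_REF with base_expr p_1, stride t_2, index 16, and an
   int access type, *EXPR would be rewritten into something like

     MEM[(int *) (p_1 p+ t_2) + 16]

   with the pointer addition gimplified into its own statement ahead
   of the candidate.  */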
1865
1866 static void
1867 replace_ref (tree *expr, slsr_cand_t c)
1868 {
1869 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1870 unsigned HOST_WIDE_INT misalign;
1871 unsigned align;
1872
1873 /* Ensure the memory reference carries the minimum alignment
1874 requirement for the data type. See PR58041. */
1875 get_object_alignment_1 (*expr, &align, &misalign);
1876 if (misalign != 0)
1877 align = (misalign & -misalign);
1878 if (align < TYPE_ALIGN (acc_type))
1879 acc_type = build_aligned_type (acc_type, align);
1880
1881 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1882 c->base_expr, c->stride);
1883 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1884 wide_int_to_tree (c->cand_type, c->index));
1885
1886 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1887 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1888 TREE_OPERAND (mem_ref, 0)
1889 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1890 /*simple_p=*/true, NULL,
1891 /*before=*/true, GSI_SAME_STMT);
1892 copy_ref_info (mem_ref, *expr);
1893 *expr = mem_ref;
1894 update_stmt (c->cand_stmt);
1895 }
1896
1897 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1898 dependent of candidate C with an equivalent strength-reduced data
1899 reference. */
1900
1901 static void
1902 replace_refs (slsr_cand_t c)
1903 {
1904 if (dump_file && (dump_flags & TDF_DETAILS))
1905 {
1906 fputs ("Replacing reference: ", dump_file);
1907 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1908 }
1909
1910 if (gimple_vdef (c->cand_stmt))
1911 {
1912 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1913 replace_ref (lhs, c);
1914 }
1915 else
1916 {
1917 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1918 replace_ref (rhs, c);
1919 }
1920
1921 if (dump_file && (dump_flags & TDF_DETAILS))
1922 {
1923 fputs ("With: ", dump_file);
1924 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1925 fputs ("\n", dump_file);
1926 }
1927
1928 if (c->sibling)
1929 replace_refs (lookup_cand (c->sibling));
1930
1931 if (c->dependent)
1932 replace_refs (lookup_cand (c->dependent));
1933 }
1934
1935 /* Return TRUE if candidate C is dependent upon a PHI. */
1936
1937 static bool
1938 phi_dependent_cand_p (slsr_cand_t c)
1939 {
1940 /* A candidate is not necessarily dependent upon a PHI just because
1941 it has a phi definition for its base name. It may have a basis
1942 that relies upon the same phi definition, in which case the PHI
1943 is irrelevant to this candidate. */
1944 return (c->def_phi
1945 && c->basis
1946 && lookup_cand (c->basis)->def_phi != c->def_phi);
1947 }
1948
1949 /* Calculate the increment required for candidate C relative to
1950 its basis. */
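/* For example, if C is x_3 = (b_1 + 7) * s_2 and its basis is
   y_4 = (b_1 + 2) * s_2 (names invented for illustration), the
   increment returned is 7 - 2 = 5.  */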
1951
1952 static widest_int
1953 cand_increment (slsr_cand_t c)
1954 {
1955 slsr_cand_t basis;
1956
1957 /* If the candidate doesn't have a basis, just return its own
1958 index. This is useful in record_increments to help us find
1959 an existing initializer. Also, if the candidate's basis is
1960 hidden by a phi, then its own index will be the increment
1961 from the newly introduced phi basis. */
1962 if (!c->basis || phi_dependent_cand_p (c))
1963 return c->index;
1964
1965 basis = lookup_cand (c->basis);
1966 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1967 return c->index - basis->index;
1968 }
1969
1970 /* Calculate the increment required for candidate C relative to
1971 its basis. If we aren't going to generate pointer arithmetic
1972 for this candidate, return the absolute value of that increment
1973 instead. */
1974
1975 static inline widest_int
1976 cand_abs_increment (slsr_cand_t c)
1977 {
1978 widest_int increment = cand_increment (c);
1979
1980 if (!address_arithmetic_p && wi::neg_p (increment))
1981 increment = -increment;
1982
1983 return increment;
1984 }
1985
1986 /* Return TRUE iff candidate C has already been replaced under
1987 another interpretation. */
1988
1989 static inline bool
1990 cand_already_replaced (slsr_cand_t c)
1991 {
1992 return (gimple_bb (c->cand_stmt) == 0);
1993 }
1994
1995 /* Common logic used by replace_unconditional_candidate and
1996 replace_conditional_candidate. */
1997
1998 static void
1999 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2000 {
2001 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2002 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2003
2004 /* It is highly unlikely, but possible, that the resulting
2005 bump doesn't fit in a HWI. Abandon the replacement
2006 in this case. This does not affect siblings or dependents
2007 of C. Restriction to signed HWI is conservative for unsigned
2008 types but allows for safe negation without twisted logic. */
2009 if (wi::fits_shwi_p (bump)
2010 && bump.to_shwi () != HOST_WIDE_INT_MIN
2011 /* It is not useful to replace casts, copies, or adds of
2012 an SSA name and a constant. */
2013 && cand_code != MODIFY_EXPR
2014 && !CONVERT_EXPR_CODE_P (cand_code)
2015 && cand_code != PLUS_EXPR
2016 && cand_code != POINTER_PLUS_EXPR
2017 && cand_code != MINUS_EXPR)
2018 {
2019 enum tree_code code = PLUS_EXPR;
2020 tree bump_tree;
2021 gimple *stmt_to_print = NULL;
2022
2023 /* If the basis name and the candidate's LHS have incompatible
2024 types, introduce a cast. */
2025 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2026 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2027 if (wi::neg_p (bump))
2028 {
2029 code = MINUS_EXPR;
2030 bump = -bump;
2031 }
2032
2033 bump_tree = wide_int_to_tree (target_type, bump);
2034
2035 if (dump_file && (dump_flags & TDF_DETAILS))
2036 {
2037 fputs ("Replacing: ", dump_file);
2038 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2039 }
2040
2041 if (bump == 0)
2042 {
2043 tree lhs = gimple_assign_lhs (c->cand_stmt);
2044 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2045 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2046 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2047 gsi_replace (&gsi, copy_stmt, false);
2048 c->cand_stmt = copy_stmt;
2049 if (dump_file && (dump_flags & TDF_DETAILS))
2050 stmt_to_print = copy_stmt;
2051 }
2052 else
2053 {
2054 tree rhs1, rhs2;
2055 if (cand_code != NEGATE_EXPR) {
2056 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2057 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2058 }
2059 if (cand_code != NEGATE_EXPR
2060 && ((operand_equal_p (rhs1, basis_name, 0)
2061 && operand_equal_p (rhs2, bump_tree, 0))
2062 || (operand_equal_p (rhs1, bump_tree, 0)
2063 && operand_equal_p (rhs2, basis_name, 0))))
2064 {
2065 if (dump_file && (dump_flags & TDF_DETAILS))
2066 {
2067 fputs ("(duplicate, not actually replacing)", dump_file);
2068 stmt_to_print = c->cand_stmt;
2069 }
2070 }
2071 else
2072 {
2073 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2074 gimple_assign_set_rhs_with_ops (&gsi, code,
2075 basis_name, bump_tree);
2076 update_stmt (gsi_stmt (gsi));
2077 c->cand_stmt = gsi_stmt (gsi);
2078 if (dump_file && (dump_flags & TDF_DETAILS))
2079 stmt_to_print = gsi_stmt (gsi);
2080 }
2081 }
2082
2083 if (dump_file && (dump_flags & TDF_DETAILS))
2084 {
2085 fputs ("With: ", dump_file);
2086 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2087 fputs ("\n", dump_file);
2088 }
2089 }
2090 }
2091
2092 /* Replace candidate C with an add or subtract. Note that we only
2093 operate on CAND_MULTs with known strides, so we will never generate
2094 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2095 X = Y + ((i - i') * S), as described in the module commentary. The
2096 folded value ((i - i') * S) is referred to here as the "bump." */
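/* An illustrative sketch, with names and constants invented: given
   the basis
     y_4 = (b_1 + 2) * 4
   and the candidate
     x_3 = (b_1 + 7) * 4,
   the bump is (7 - 2) * 4 = 20 and the candidate becomes
     x_3 = y_4 + 20;  */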
2097
2098 static void
2099 replace_unconditional_candidate (slsr_cand_t c)
2100 {
2101 slsr_cand_t basis;
2102
2103 if (cand_already_replaced (c))
2104 return;
2105
2106 basis = lookup_cand (c->basis);
2107 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2108
2109 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2110 }
2111 \f
2112 /* Return the index in the increment vector of the given INCREMENT,
2113 or -1 if not found. The latter can occur if more than
2114 MAX_INCR_VEC_LEN increments have been found. */
2115
2116 static inline int
2117 incr_vec_index (const widest_int &increment)
2118 {
2119 unsigned i;
2120
2121 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2122 ;
2123
2124 if (i < incr_vec_len)
2125 return i;
2126 else
2127 return -1;
2128 }
2129
2130 /* Create a new statement along edge E to add BASIS_NAME to the product
2131 of INCREMENT and the stride of candidate C. Create and return a new
2132 SSA name to be used as the LHS of the new statement.
2133 KNOWN_STRIDE is true iff C's stride is a constant. */
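/* For instance (values invented), with a constant stride of 4 and an
   increment of 3 along edge E, this introduces a statement such as
     slsr_7 = y_4 + 12;
   (where y_4 is BASIS_NAME) into E->src, or into a block split from
   E, and returns slsr_7.  */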
2134
2135 static tree
2136 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2137 widest_int increment, edge e, location_t loc,
2138 bool known_stride)
2139 {
2140 basic_block insert_bb;
2141 gimple_stmt_iterator gsi;
2142 tree lhs, basis_type;
2143 gassign *new_stmt;
2144
2145 /* If the add candidate along this incoming edge has the same
2146 index as C's hidden basis, the hidden basis represents this
2147 edge correctly. */
2148 if (increment == 0)
2149 return basis_name;
2150
2151 basis_type = TREE_TYPE (basis_name);
2152 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2153
2154 if (known_stride)
2155 {
2156 tree bump_tree;
2157 enum tree_code code = PLUS_EXPR;
2158 widest_int bump = increment * wi::to_widest (c->stride);
2159 if (wi::neg_p (bump))
2160 {
2161 code = MINUS_EXPR;
2162 bump = -bump;
2163 }
2164
2165 bump_tree = wide_int_to_tree (basis_type, bump);
2166 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2167 }
2168 else
2169 {
2170 int i;
2171 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2172 i = incr_vec_index (negate_incr ? -increment : increment);
2173 gcc_assert (i >= 0);
2174
2175 if (incr_vec[i].initializer)
2176 {
2177 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2178 new_stmt = gimple_build_assign (lhs, code, basis_name,
2179 incr_vec[i].initializer);
2180 }
2181 else if (increment == 1)
2182 new_stmt = gimple_build_assign (lhs, PLUS_EXPR, basis_name, c->stride);
2183 else if (increment == -1)
2184 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2185 c->stride);
2186 else
2187 gcc_unreachable ();
2188 }
2189
2190 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2191 gsi = gsi_last_bb (insert_bb);
2192
2193 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2194 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2195 else
2196 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2197
2198 gimple_set_location (new_stmt, loc);
2199
2200 if (dump_file && (dump_flags & TDF_DETAILS))
2201 {
2202 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2203 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2204 }
2205
2206 return lhs;
2207 }
2208
2209 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2210 is hidden by the phi node FROM_PHI, create a new phi node in the same
2211 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2212 with its phi arguments representing conditional adjustments to the
2213 hidden basis along conditional incoming paths. Those adjustments are
2214 made by creating add statements (and sometimes recursively creating
2215 phis) along those incoming paths. LOC is the location to attach to
2216 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2217 constant. */
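/* A hypothetical sketch (SSA names and constants invented).  Given

       a_6 = x_1 + 2;          a_7 = x_1 + 4;
              \                 /
            a_9 = PHI <a_6, a_7>
            c_8 = a_9 * 4;          <-- candidate C
     hidden basis:  y_5 = x_1 * 4

   this introduces, along the two incoming arcs,

       t_10 = y_5 + 8;         t_11 = y_5 + 16;
              \                 /
            slsr_12 = PHI <t_10, t_11>

   and returns slsr_12, which C's replacement can then use as its
   basis name.  */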
2218
2219 static tree
2220 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2221 location_t loc, bool known_stride)
2222 {
2223 int i;
2224 tree name, phi_arg;
2225 gphi *phi;
2226 slsr_cand_t basis = lookup_cand (c->basis);
2227 int nargs = gimple_phi_num_args (from_phi);
2228 basic_block phi_bb = gimple_bb (from_phi);
2229 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2230 auto_vec<tree> phi_args (nargs);
2231
2232 /* Process each argument of the existing phi that represents
2233 conditionally-executed add candidates. */
2234 for (i = 0; i < nargs; i++)
2235 {
2236 edge e = (*phi_bb->preds)[i];
2237 tree arg = gimple_phi_arg_def (from_phi, i);
2238 tree feeding_def;
2239
2240 /* If the phi argument is the base name of the CAND_PHI, then
2241 this incoming arc should use the hidden basis. */
2242 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2243 if (basis->index == 0)
2244 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2245 else
2246 {
2247 widest_int incr = -basis->index;
2248 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2249 e, loc, known_stride);
2250 }
2251 else
2252 {
2253 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2254
2255 /* If there is another phi along this incoming edge, we must
2256 process it in the same fashion to ensure that all basis
2257 adjustments are made along its incoming edges. */
2258 if (gimple_code (arg_def) == GIMPLE_PHI)
2259 feeding_def = create_phi_basis (c, arg_def, basis_name,
2260 loc, known_stride);
2261 else
2262 {
2263 slsr_cand_t arg_cand = base_cand_from_table (arg);
2264 widest_int diff = arg_cand->index - basis->index;
2265 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2266 e, loc, known_stride);
2267 }
2268 }
2269
2270 /* Because of recursion, we need to save the arguments in a vector
2271 so we can create the PHI statement all at once. Otherwise the
2272 storage for the half-created PHI can be reclaimed. */
2273 phi_args.safe_push (feeding_def);
2274 }
2275
2276 /* Create the new phi basis. */
2277 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2278 phi = create_phi_node (name, phi_bb);
2279 SSA_NAME_DEF_STMT (name) = phi;
2280
2281 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2282 {
2283 edge e = (*phi_bb->preds)[i];
2284 add_phi_arg (phi, phi_arg, e, loc);
2285 }
2286
2287 update_stmt (phi);
2288
2289 if (dump_file && (dump_flags & TDF_DETAILS))
2290 {
2291 fputs ("Introducing new phi basis: ", dump_file);
2292 print_gimple_stmt (dump_file, phi, 0, 0);
2293 }
2294
2295 return name;
2296 }
2297
2298 /* Given a candidate C whose basis is hidden by at least one intervening
2299 phi, introduce a matching number of new phis to represent its basis
2300 adjusted by conditional increments along possible incoming paths. Then
2301 replace C as though it were an unconditional candidate, using the new
2302 basis. */
2303
2304 static void
2305 replace_conditional_candidate (slsr_cand_t c)
2306 {
2307 tree basis_name, name;
2308 slsr_cand_t basis;
2309 location_t loc;
2310
2311 /* Look up the LHS SSA name from C's basis. This will be the
2312 RHS1 of the adds we will introduce to create new phi arguments. */
2313 basis = lookup_cand (c->basis);
2314 basis_name = gimple_assign_lhs (basis->cand_stmt);
2315
2316 /* Create a new phi statement which will represent C's true basis
2317 after the transformation is complete. */
2318 loc = gimple_location (c->cand_stmt);
2319 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2320 basis_name, loc, KNOWN_STRIDE);
2321 /* Replace C with an add of the new basis phi and a constant. */
2322 widest_int bump = c->index * wi::to_widest (c->stride);
2323
2324 replace_mult_candidate (c, name, bump);
2325 }
2326
2327 /* Compute the expected costs of inserting basis adjustments for
2328 candidate C with phi-definition PHI. The cost of inserting
2329 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2330 which are themselves phi results, recursively calculate costs
2331 for those phis as well. */
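/* For example (counts invented): if PHI has two arguments, neither
   equal to the phi's base name, one defined by a candidate whose
   index already equals C's index and one whose index differs, the
   cost returned is ONE_ADD_COST; if both indices differ, it is
   2 * ONE_ADD_COST.  */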
2332
2333 static int
2334 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2335 {
2336 unsigned i;
2337 int cost = 0;
2338 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2339
2340 /* If we work our way back to a phi that isn't dominated by the hidden
2341 basis, this isn't a candidate for replacement. Indicate this by
2342 returning an unreasonably high cost. It's not easy to detect
2343 these situations when determining the basis, so we defer the
2344 decision until now. */
2345 basic_block phi_bb = gimple_bb (phi);
2346 slsr_cand_t basis = lookup_cand (c->basis);
2347 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2348
2349 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2350 return COST_INFINITE;
2351
2352 for (i = 0; i < gimple_phi_num_args (phi); i++)
2353 {
2354 tree arg = gimple_phi_arg_def (phi, i);
2355
2356 if (arg != phi_cand->base_expr)
2357 {
2358 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2359
2360 if (gimple_code (arg_def) == GIMPLE_PHI)
2361 cost += phi_add_costs (arg_def, c, one_add_cost);
2362 else
2363 {
2364 slsr_cand_t arg_cand = base_cand_from_table (arg);
2365
2366 if (arg_cand->index != c->index)
2367 cost += one_add_cost;
2368 }
2369 }
2370 }
2371
2372 return cost;
2373 }
2374
2375 /* For candidate C, each sibling of candidate C, and each dependent of
2376 candidate C, determine whether the candidate is dependent upon a
2377 phi that hides its basis. If not, replace the candidate unconditionally.
2378 Otherwise, determine whether the cost of introducing compensation code
2379 for the candidate is offset by the gains from strength reduction. If
2380 so, replace the candidate and introduce the compensation code. */
2381
2382 static void
2383 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2384 {
2385 if (phi_dependent_cand_p (c))
2386 {
2387 if (c->kind == CAND_MULT)
2388 {
2389 /* A candidate dependent upon a phi will replace a multiply by
2390 a constant with an add, and will insert at most one add for
2391 each phi argument. Add these costs with the potential dead-code
2392 savings to determine profitability. */
2393 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2394 int mult_savings = stmt_cost (c->cand_stmt, speed);
2395 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2396 tree phi_result = gimple_phi_result (phi);
2397 int one_add_cost = add_cost (speed,
2398 TYPE_MODE (TREE_TYPE (phi_result)));
2399 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2400 int cost = add_costs - mult_savings - c->dead_savings;
2401
2402 if (dump_file && (dump_flags & TDF_DETAILS))
2403 {
2404 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2405 fprintf (dump_file, " add_costs = %d\n", add_costs);
2406 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2407 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2408 fprintf (dump_file, " cost = %d\n", cost);
2409 if (cost <= COST_NEUTRAL)
2410 fputs (" Replacing...\n", dump_file);
2411 else
2412 fputs (" Not replaced.\n", dump_file);
2413 }
2414
2415 if (cost <= COST_NEUTRAL)
2416 replace_conditional_candidate (c);
2417 }
2418 }
2419 else
2420 replace_unconditional_candidate (c);
2421
2422 if (c->sibling)
2423 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2424
2425 if (c->dependent)
2426 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2427 }
2428 \f
2429 /* Count the number of candidates in the tree rooted at C that have
2430 not already been replaced under other interpretations. */
2431
2432 static int
2433 count_candidates (slsr_cand_t c)
2434 {
2435 unsigned count = cand_already_replaced (c) ? 0 : 1;
2436
2437 if (c->sibling)
2438 count += count_candidates (lookup_cand (c->sibling));
2439
2440 if (c->dependent)
2441 count += count_candidates (lookup_cand (c->dependent));
2442
2443 return count;
2444 }
2445
2446 /* Increase the count of INCREMENT by one in the increment vector.
2447 INCREMENT is associated with candidate C. If INCREMENT is to be
2448 conditionally executed as part of a conditional candidate replacement,
2449 IS_PHI_ADJUST is true, otherwise false. If an initializer
2450 T_0 = stride * I is provided by a candidate that dominates all
2451 candidates with the same increment, also record T_0 for subsequent use. */
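/* For example (names invented): if the increment is 5 and candidate
   C is a_5 = x_1 + t_4, interpreted as the CAND_ADD x_1 + (5 * s_2)
   with t_4 = s_2 * 5, then t_4 is optimistically recorded as an
   existing initializer for increment 5, to be discarded later if its
   definition turns out not to dominate all other candidates using
   that increment.  */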
2452
2453 static void
2454 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2455 {
2456 bool found = false;
2457 unsigned i;
2458
2459 /* Treat increments that differ only in sign as identical so as to
2460 share initializers, unless we are generating pointer arithmetic. */
2461 if (!address_arithmetic_p && wi::neg_p (increment))
2462 increment = -increment;
2463
2464 for (i = 0; i < incr_vec_len; i++)
2465 {
2466 if (incr_vec[i].incr == increment)
2467 {
2468 incr_vec[i].count++;
2469 found = true;
2470
2471 /* If we previously recorded an initializer that doesn't
2472 dominate this candidate, it's not going to be useful to
2473 us after all. */
2474 if (incr_vec[i].initializer
2475 && !dominated_by_p (CDI_DOMINATORS,
2476 gimple_bb (c->cand_stmt),
2477 incr_vec[i].init_bb))
2478 {
2479 incr_vec[i].initializer = NULL_TREE;
2480 incr_vec[i].init_bb = NULL;
2481 }
2482
2483 break;
2484 }
2485 }
2486
2487 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2488 {
2489 /* The first time we see an increment, create the entry for it.
2490 If this is the root candidate which doesn't have a basis, set
2491 the count to zero. We're only processing it so it can possibly
2492 provide an initializer for other candidates. */
2493 incr_vec[incr_vec_len].incr = increment;
2494 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2495 incr_vec[incr_vec_len].cost = COST_INFINITE;
2496
2497 /* Optimistically record the first occurrence of this increment
2498 as providing an initializer (if it does); we will revise this
2499 opinion later if it doesn't dominate all other occurrences.
2500 Exception: increments of -1, 0, 1 never need initializers;
2501 and phi adjustments don't ever provide initializers. */
2502 if (c->kind == CAND_ADD
2503 && !is_phi_adjust
2504 && c->index == increment
2505 && (increment > 1 || increment < -1)
2506 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2507 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2508 {
2509 tree t0 = NULL_TREE;
2510 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2511 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2512 if (operand_equal_p (rhs1, c->base_expr, 0))
2513 t0 = rhs2;
2514 else if (operand_equal_p (rhs2, c->base_expr, 0))
2515 t0 = rhs1;
2516 if (t0
2517 && SSA_NAME_DEF_STMT (t0)
2518 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2519 {
2520 incr_vec[incr_vec_len].initializer = t0;
2521 incr_vec[incr_vec_len++].init_bb
2522 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2523 }
2524 else
2525 {
2526 incr_vec[incr_vec_len].initializer = NULL_TREE;
2527 incr_vec[incr_vec_len++].init_bb = NULL;
2528 }
2529 }
2530 else
2531 {
2532 incr_vec[incr_vec_len].initializer = NULL_TREE;
2533 incr_vec[incr_vec_len++].init_bb = NULL;
2534 }
2535 }
2536 }
2537
2538 /* Given phi statement PHI that hides a candidate from its BASIS, find
2539 the increments along each incoming arc (recursively handling additional
2540 phis that may be present) and record them. These increments are the
2541 difference in index between the index-adjusting statements and the
2542 index of the basis. */
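/* For example, if the basis has index 2 and the phi's arguments are
   defined by candidates with indices 5 and 9, the increments
   recorded are 3 and 7, one per incoming arc (figures invented for
   illustration).  */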
2543
2544 static void
2545 record_phi_increments (slsr_cand_t basis, gimple *phi)
2546 {
2547 unsigned i;
2548 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2549
2550 for (i = 0; i < gimple_phi_num_args (phi); i++)
2551 {
2552 tree arg = gimple_phi_arg_def (phi, i);
2553
2554 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2555 {
2556 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2557
2558 if (gimple_code (arg_def) == GIMPLE_PHI)
2559 record_phi_increments (basis, arg_def);
2560 else
2561 {
2562 slsr_cand_t arg_cand = base_cand_from_table (arg);
2563 widest_int diff = arg_cand->index - basis->index;
2564 record_increment (arg_cand, diff, PHI_ADJUST);
2565 }
2566 }
2567 }
2568 }
2569
2570 /* Determine how many times each unique increment occurs in the set
2571 of candidates rooted at C's parent, recording the data in the
2572 increment vector. For each unique increment I, if an initializer
2573 T_0 = stride * I is provided by a candidate that dominates all
2574 candidates with the same increment, also record T_0 for subsequent
2575 use. */
2576
2577 static void
2578 record_increments (slsr_cand_t c)
2579 {
2580 if (!cand_already_replaced (c))
2581 {
2582 if (!phi_dependent_cand_p (c))
2583 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2584 else
2585 {
2586 /* A candidate with a basis hidden by a phi will have one
2587 increment for its relationship to the index represented by
2588 the phi, and potentially additional increments along each
2589 incoming edge. For the root of the dependency tree (which
2590 has no basis), process just the initial index in case it has
2591 an initializer that can be used by subsequent candidates. */
2592 record_increment (c, c->index, NOT_PHI_ADJUST);
2593
2594 if (c->basis)
2595 record_phi_increments (lookup_cand (c->basis),
2596 lookup_cand (c->def_phi)->cand_stmt);
2597 }
2598 }
2599
2600 if (c->sibling)
2601 record_increments (lookup_cand (c->sibling));
2602
2603 if (c->dependent)
2604 record_increments (lookup_cand (c->dependent));
2605 }
2606
2607 /* Add up and return the costs of introducing add statements that
2608 require the increment INCR on behalf of candidate C and phi
2609 statement PHI. Accumulate into *SAVINGS the potential savings
2610 from removing existing statements that feed PHI and have no other
2611 uses. */
2612
2613 static int
2614 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2615 int *savings)
2616 {
2617 unsigned i;
2618 int cost = 0;
2619 slsr_cand_t basis = lookup_cand (c->basis);
2620 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2621
2622 for (i = 0; i < gimple_phi_num_args (phi); i++)
2623 {
2624 tree arg = gimple_phi_arg_def (phi, i);
2625
2626 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2627 {
2628 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2629
2630 if (gimple_code (arg_def) == GIMPLE_PHI)
2631 {
2632 int feeding_savings = 0;
2633 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2634 if (has_single_use (gimple_phi_result (arg_def)))
2635 *savings += feeding_savings;
2636 }
2637 else
2638 {
2639 slsr_cand_t arg_cand = base_cand_from_table (arg);
2640 widest_int diff = arg_cand->index - basis->index;
2641
2642 if (incr == diff)
2643 {
2644 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2645 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2646 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2647 if (has_single_use (lhs))
2648 *savings += stmt_cost (arg_cand->cand_stmt, true);
2649 }
2650 }
2651 }
2652 }
2653
2654 return cost;
2655 }
2656
2657 /* Return the first candidate in the tree rooted at C that has not
2658 already been replaced, favoring siblings over dependents. */
2659
2660 static slsr_cand_t
2661 unreplaced_cand_in_tree (slsr_cand_t c)
2662 {
2663 if (!cand_already_replaced (c))
2664 return c;
2665
2666 if (c->sibling)
2667 {
2668 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2669 if (sib)
2670 return sib;
2671 }
2672
2673 if (c->dependent)
2674 {
2675 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2676 if (dep)
2677 return dep;
2678 }
2679
2680 return NULL;
2681 }
2682
2683 /* Return TRUE if the candidates in the tree rooted at C should be
2684 optimized for speed, else FALSE. We estimate this based on the block
2685 containing the most dominant candidate in the tree that has not yet
2686 been replaced. */
2687
2688 static bool
2689 optimize_cands_for_speed_p (slsr_cand_t c)
2690 {
2691 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2692 gcc_assert (c2);
2693 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2694 }
2695
2696 /* Add COST_IN to the lowest cost of any dependent path starting at
2697 candidate C or any of its siblings, counting only candidates along
2698 such paths with increment INCR. Assume that replacing a candidate
2699 reduces cost by REPL_SAVINGS. Also account for savings from any
2700 statements that would go dead. If COUNT_PHIS is true, include
2701 costs of introducing feeding statements for conditional candidates. */
2702
2703 static int
2704 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2705 const widest_int &incr, bool count_phis)
2706 {
2707 int local_cost, sib_cost, savings = 0;
2708 widest_int cand_incr = cand_abs_increment (c);
2709
2710 if (cand_already_replaced (c))
2711 local_cost = cost_in;
2712 else if (incr == cand_incr)
2713 local_cost = cost_in - repl_savings - c->dead_savings;
2714 else
2715 local_cost = cost_in - c->dead_savings;
2716
2717 if (count_phis
2718 && phi_dependent_cand_p (c)
2719 && !cand_already_replaced (c))
2720 {
2721 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2722 local_cost += phi_incr_cost (c, incr, phi, &savings);
2723
2724 if (has_single_use (gimple_phi_result (phi)))
2725 local_cost -= savings;
2726 }
2727
2728 if (c->dependent)
2729 local_cost = lowest_cost_path (local_cost, repl_savings,
2730 lookup_cand (c->dependent), incr,
2731 count_phis);
2732
2733 if (c->sibling)
2734 {
2735 sib_cost = lowest_cost_path (cost_in, repl_savings,
2736 lookup_cand (c->sibling), incr,
2737 count_phis);
2738 local_cost = MIN (local_cost, sib_cost);
2739 }
2740
2741 return local_cost;
2742 }
2743
2744 /* Compute the total savings that would accrue from all replacements
2745 in the candidate tree rooted at C, counting only candidates with
2746 increment INCR. Assume that replacing a candidate reduces cost
2747 by REPL_SAVINGS. Also account for savings from statements that
2748 would go dead. */
2749
2750 static int
2751 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2752 bool count_phis)
2753 {
2754 int savings = 0;
2755 widest_int cand_incr = cand_abs_increment (c);
2756
2757 if (incr == cand_incr && !cand_already_replaced (c))
2758 savings += repl_savings + c->dead_savings;
2759
2760 if (count_phis
2761 && phi_dependent_cand_p (c)
2762 && !cand_already_replaced (c))
2763 {
2764 int phi_savings = 0;
2765 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2766 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2767
2768 if (has_single_use (gimple_phi_result (phi)))
2769 savings += phi_savings;
2770 }
2771
2772 if (c->dependent)
2773 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2774 count_phis);
2775
2776 if (c->sibling)
2777 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2778 count_phis);
2779
2780 return savings;
2781 }
2782
2783 /* Use target-specific costs to determine and record which increments
2784 in the current candidate tree are profitable to replace, assuming
2785 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2786 the candidate tree.
2787
2788 One slight limitation here is that we don't account for the possible
2789 introduction of casts in some cases. See replace_one_candidate for
2790 the cases where these are introduced. This should probably be cleaned
2791 up sometime. */
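/* A rough illustration with invented costs: suppose a multiply costs
   4 and an add costs 1, so repl_savings for a CAND_MULT chain is 3,
   and suppose the initializer T_0 = stride * increment costs 2 to
   compute.  If two candidates along some dependent path use the
   increment, the lowest-cost path is 2 - 3 - 3 = -4, which is at or
   below COST_NEUTRAL, so the increment is deemed profitable.  */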
2792
2793 static void
2794 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2795 {
2796 unsigned i;
2797
2798 for (i = 0; i < incr_vec_len; i++)
2799 {
2800 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2801
2802 /* If somehow this increment is bigger than a HWI, we won't
2803 be optimizing candidates that use it. And if the increment
2804 has a count of zero, nothing will be done with it. */
2805 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2806 incr_vec[i].cost = COST_INFINITE;
2807
2808 /* Increments of 0, 1, and -1 are always profitable to replace,
2809 because they always replace a multiply or add with an add or
2810 copy, and may cause one or more existing instructions to go
2811 dead. Exception: -1 can't be assumed to be profitable for
2812 pointer addition. */
2813 else if (incr == 0
2814 || incr == 1
2815 || (incr == -1
2816 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2817 != POINTER_PLUS_EXPR)))
2818 incr_vec[i].cost = COST_NEUTRAL;
2819
2820 /* FORNOW: If we need to add an initializer, give up if a cast from
2821 the candidate's type to its stride's type can lose precision.
2822 This could eventually be handled better by expressly retaining the
2823 result of a cast to a wider type in the stride. Example:
2824
2825 short int _1;
2826 _2 = (int) _1;
2827 _3 = _2 * 10;
2828 _4 = x + _3; ADD: x + (10 * _1) : int
2829 _5 = _2 * 15;
2830 _6 = x + _5; ADD: x + (15 * _1) : int
2831
2832 Right now replacing _6 would cause insertion of an initializer
2833 of the form "short int T = _1 * 5;" followed by a cast to
2834 int, which could overflow incorrectly. Had we recorded _2 or
2835 (int)_1 as the stride, this wouldn't happen. However, doing
2836 this breaks other opportunities, so this will require some
2837 care. */
2838 else if (!incr_vec[i].initializer
2839 && TREE_CODE (first_dep->stride) != INTEGER_CST
2840 && !legal_cast_p_1 (first_dep->stride,
2841 gimple_assign_lhs (first_dep->cand_stmt)))
2842
2843 incr_vec[i].cost = COST_INFINITE;
2844
2845 /* If we need to add an initializer, make sure we don't introduce
2846 a multiply by a pointer type, which can happen in certain cast
2847 scenarios. FIXME: When cleaning up these cast issues, we can
2848 afford to introduce the multiply provided we cast out to an
2849 unsigned int of appropriate size. */
2850 else if (!incr_vec[i].initializer
2851 && TREE_CODE (first_dep->stride) != INTEGER_CST
2852 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2853
2854 incr_vec[i].cost = COST_INFINITE;
2855
2856 /* For any other increment, if this is a multiply candidate, we
2857 must introduce a temporary T and initialize it with
2858 T_0 = stride * increment. When optimizing for speed, walk the
2859 candidate tree to calculate the best cost reduction along any
2860 path; if it offsets the fixed cost of inserting the initializer,
2861 replacing the increment is profitable. When optimizing for
2862 size, instead calculate the total cost reduction from replacing
2863 all candidates with this increment. */
2864 else if (first_dep->kind == CAND_MULT)
2865 {
2866 int cost = mult_by_coeff_cost (incr, mode, speed);
2867 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2868 if (speed)
2869 cost = lowest_cost_path (cost, repl_savings, first_dep,
2870 incr_vec[i].incr, COUNT_PHIS);
2871 else
2872 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2873 COUNT_PHIS);
2874
2875 incr_vec[i].cost = cost;
2876 }
2877
2878 /* If this is an add candidate, the initializer may already
2879 exist, so only calculate the cost of the initializer if it
2880 doesn't. We are replacing one add with another here, so the
2881 known replacement savings is zero. We will account for removal
2882 of dead instructions in lowest_cost_path or total_savings. */
2883 else
2884 {
2885 int cost = 0;
2886 if (!incr_vec[i].initializer)
2887 cost = mult_by_coeff_cost (incr, mode, speed);
2888
2889 if (speed)
2890 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2891 DONT_COUNT_PHIS);
2892 else
2893 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2894 DONT_COUNT_PHIS);
2895
2896 incr_vec[i].cost = cost;
2897 }
2898 }
2899 }
2900
2901 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2902 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2903 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2904 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2905 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2906
2907 static basic_block
2908 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2909 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2910 {
2911 basic_block ncd;
2912
2913 if (!bb1)
2914 {
2915 *where = c2;
2916 return bb2;
2917 }
2918
2919 if (!bb2)
2920 {
2921 *where = c1;
2922 return bb1;
2923 }
2924
2925 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2926
2927 /* If both candidates are in the same block, the earlier
2928 candidate wins. */
2929 if (bb1 == ncd && bb2 == ncd)
2930 {
2931 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2932 *where = c2;
2933 else
2934 *where = c1;
2935 }
2936
2937 /* Otherwise, if one of them produced a candidate in the
2938 dominator, that one wins. */
2939 else if (bb1 == ncd)
2940 *where = c1;
2941
2942 else if (bb2 == ncd)
2943 *where = c2;
2944
2945 /* If neither matches the dominator, neither wins. */
2946 else
2947 *where = NULL;
2948
2949 return ncd;
2950 }
2951
2952 /* Consider all candidates that feed PHI. Find the nearest common
2953 dominator of those candidates requiring the given increment INCR.
2954 Further find and return the nearest common dominator of this result
2955 with block NCD. If the returned block contains one or more of the
2956 candidates, return the earliest candidate in the block in *WHERE. */
2957
2958 static basic_block
2959 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2960 basic_block ncd, slsr_cand_t *where)
2961 {
2962 unsigned i;
2963 slsr_cand_t basis = lookup_cand (c->basis);
2964 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2965
2966 for (i = 0; i < gimple_phi_num_args (phi); i++)
2967 {
2968 tree arg = gimple_phi_arg_def (phi, i);
2969
2970 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2971 {
2972 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2973
2974 if (gimple_code (arg_def) == GIMPLE_PHI)
2975 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
2976 where);
2977 else
2978 {
2979 slsr_cand_t arg_cand = base_cand_from_table (arg);
2980 widest_int diff = arg_cand->index - basis->index;
2981 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2982
2983 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2984 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
2985 }
2986 }
2987 }
2988
2989 return ncd;
2990 }
2991
2992 /* Consider the candidate C together with any candidates that feed
2993 C's phi dependence (if any). Find and return the nearest common
2994 dominator of those candidates requiring the given increment INCR.
2995 If the returned block contains one or more of the candidates,
2996 return the earliest candidate in the block in *WHERE. */
2997
2998 static basic_block
2999 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3000 {
3001 basic_block ncd = NULL;
3002
3003 if (cand_abs_increment (c) == incr)
3004 {
3005 ncd = gimple_bb (c->cand_stmt);
3006 *where = c;
3007 }
3008
3009 if (phi_dependent_cand_p (c))
3010 ncd = ncd_with_phi (c, incr,
3011 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3012 ncd, where);
3013
3014 return ncd;
3015 }
3016
3017 /* Consider all candidates in the tree rooted at C for which INCR
3018 represents the required increment of C relative to its basis.
3019 Find and return the basic block that most nearly dominates all
3020 such candidates. If the returned block contains one or more of
3021 the candidates, return the earliest candidate in the block in
3022 *WHERE. */
3023
3024 static basic_block
3025 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3026 slsr_cand_t *where)
3027 {
3028 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3029 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3030
3031 /* First find the NCD of all siblings and dependents. */
3032 if (c->sibling)
3033 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3034 incr, &sib_where);
3035 if (c->dependent)
3036 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3037 incr, &dep_where);
3038 if (!sib_ncd && !dep_ncd)
3039 {
3040 new_where = NULL;
3041 ncd = NULL;
3042 }
3043 else if (sib_ncd && !dep_ncd)
3044 {
3045 new_where = sib_where;
3046 ncd = sib_ncd;
3047 }
3048 else if (dep_ncd && !sib_ncd)
3049 {
3050 new_where = dep_where;
3051 ncd = dep_ncd;
3052 }
3053 else
3054 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3055 dep_where, &new_where);
3056
3057 /* If the candidate's increment doesn't match the one we're interested
3058 in (nor do any increments for feeding defs of a phi-dependence),
3059 then the result depends only on siblings and dependents. */
3060 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3061
3062 if (!this_ncd || cand_already_replaced (c))
3063 {
3064 *where = new_where;
3065 return ncd;
3066 }
3067
3068 /* Otherwise, compare this candidate with the result from all siblings
3069 and dependents. */
3070 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3071
3072 return ncd;
3073 }
3074
3075 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3076
3077 static inline bool
3078 profitable_increment_p (unsigned index)
3079 {
3080 return (incr_vec[index].cost <= COST_NEUTRAL);
3081 }
3082
3083 /* For each profitable increment in the increment vector not equal to
3084 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3085 dominator of all statements in the candidate chain rooted at C
3086 that require that increment, and insert an initializer
3087 T_0 = stride * increment at that location. Record T_0 with the
3088 increment record. */
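/* For example (names invented): for a profitable increment of 5 with
   stride s_2 and no pre-existing initializer, a statement such as
     slsr_8 = s_2 * 5;
   is inserted in the nearest common dominator of the statements that
   need it, immediately before the earliest such candidate when one
   resides in that block.  */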
3089
3090 static void
3091 insert_initializers (slsr_cand_t c)
3092 {
3093 unsigned i;
3094
3095 for (i = 0; i < incr_vec_len; i++)
3096 {
3097 basic_block bb;
3098 slsr_cand_t where = NULL;
3099 gassign *init_stmt;
3100 tree stride_type, new_name, incr_tree;
3101 widest_int incr = incr_vec[i].incr;
3102
3103 if (!profitable_increment_p (i)
3104 || incr == 1
3105 || (incr == -1
3106 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3107 || incr == 0)
3108 continue;
3109
3110 /* We may have already identified an existing initializer that
3111 will suffice. */
3112 if (incr_vec[i].initializer)
3113 {
3114 if (dump_file && (dump_flags & TDF_DETAILS))
3115 {
3116 fputs ("Using existing initializer: ", dump_file);
3117 print_gimple_stmt (dump_file,
3118 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3119 0, 0);
3120 }
3121 continue;
3122 }
3123
3124 /* Find the block that most closely dominates all candidates
3125 with this increment. If there is at least one candidate in
3126 that block, the earliest one will be returned in WHERE. */
3127 bb = nearest_common_dominator_for_cands (c, incr, &where);
3128
3129 /* Create a new SSA name to hold the initializer's value. */
3130 stride_type = TREE_TYPE (c->stride);
3131 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3132 incr_vec[i].initializer = new_name;
3133
3134 /* Create the initializer and insert it in the latest possible
3135 dominating position. */
3136 incr_tree = wide_int_to_tree (stride_type, incr);
3137 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3138 c->stride, incr_tree);
3139 if (where)
3140 {
3141 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3142 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3143 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3144 }
3145 else
3146 {
3147 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3148 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3149
3150 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3151 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3152 else
3153 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3154
3155 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3156 }
3157
3158 if (dump_file && (dump_flags & TDF_DETAILS))
3159 {
3160 fputs ("Inserting initializer: ", dump_file);
3161 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3162 }
3163 }
3164 }
3165
3166 /* Return TRUE iff all required increments for candidates feeding PHI
3167 are profitable to replace on behalf of candidate C. */
3168
3169 static bool
3170 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3171 {
3172 unsigned i;
3173 slsr_cand_t basis = lookup_cand (c->basis);
3174 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3175
3176 for (i = 0; i < gimple_phi_num_args (phi); i++)
3177 {
3178 tree arg = gimple_phi_arg_def (phi, i);
3179
3180 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3181 {
3182 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3183
3184 if (gimple_code (arg_def) == GIMPLE_PHI)
3185 {
3186 if (!all_phi_incrs_profitable (c, arg_def))
3187 return false;
3188 }
3189 else
3190 {
3191 int j;
3192 slsr_cand_t arg_cand = base_cand_from_table (arg);
3193 widest_int increment = arg_cand->index - basis->index;
3194
3195 if (!address_arithmetic_p && wi::neg_p (increment))
3196 increment = -increment;
3197
3198 j = incr_vec_index (increment);
3199
3200 if (dump_file && (dump_flags & TDF_DETAILS))
3201 {
3202 fprintf (dump_file, " Conditional candidate %d, phi: ",
3203 c->cand_num);
3204 print_gimple_stmt (dump_file, phi, 0, 0);
3205 fputs (" increment: ", dump_file);
3206 print_decs (increment, dump_file);
3207 if (j < 0)
3208 fprintf (dump_file,
3209 "\n Not replaced; incr_vec overflow.\n");
3210 else {
3211 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3212 if (profitable_increment_p (j))
3213 fputs (" Replacing...\n", dump_file);
3214 else
3215 fputs (" Not replaced.\n", dump_file);
3216 }
3217 }
3218
3219 if (j < 0 || !profitable_increment_p (j))
3220 return false;
3221 }
3222 }
3223 }
3224
3225 return true;
3226 }
3227
3228 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3229 type TO_TYPE, and insert it in front of the statement represented
3230 by candidate C. Create a new SSA name to hold the result and return
3231 that new SSA name. */
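/* For example, this might insert
     slsr_9 = (int) t_4;
   immediately before C's statement and return slsr_9 (names invented
   for illustration).  */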
3232
3233 static tree
3234 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3235 {
3236 tree cast_lhs;
3237 gassign *cast_stmt;
3238 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3239
3240 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3241 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3242 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3243 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3244
3245 if (dump_file && (dump_flags & TDF_DETAILS))
3246 {
3247 fputs (" Inserting: ", dump_file);
3248 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3249 }
3250
3251 return cast_lhs;
3252 }
3253
3254 /* Replace the RHS of the statement represented by candidate C with
3255 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3256 leave C unchanged or just interchange its operands. The original
3257 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3258 If the replacement was made and we are doing a details dump,
3259 return the revised statement, else NULL. */
3260
3261 static gimple *
3262 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3263 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3264 slsr_cand_t c)
3265 {
3266 if (new_code != old_code
3267 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3268 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3269 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3270 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3271 {
3272 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3273 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3274 update_stmt (gsi_stmt (gsi));
3275 c->cand_stmt = gsi_stmt (gsi);
3276
3277 if (dump_file && (dump_flags & TDF_DETAILS))
3278 return gsi_stmt (gsi);
3279 }
3280
3281 else if (dump_file && (dump_flags & TDF_DETAILS))
3282 fputs (" (duplicate, not actually replacing)\n", dump_file);
3283
3284 return NULL;
3285 }
3286
3287 /* Strength-reduce the statement represented by candidate C by replacing
3288 it with an equivalent addition or subtraction. I is the index into
3289 the increment vector identifying C's increment. A new SSA name is
3290 created if a cast needs to be introduced. BASIS_NAME is the rhs1 to
3291 use in creating the add/subtract. */
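/* For example (names invented): if C's increment is 5, its stride is
   s_2, and an initializer t_0 = s_2 * 5 exists, C's statement is
   rewritten as
     x_3 = y_4 + t_0;
   (y_4 being BASIS_NAME), whereas with an increment of 1 and no
   initializer it becomes
     x_3 = y_4 + s_2;  */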
3292
3293 static void
3294 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3295 {
3296 gimple *stmt_to_print = NULL;
3297 tree orig_rhs1, orig_rhs2;
3298 tree rhs2;
3299 enum tree_code orig_code, repl_code;
3300 widest_int cand_incr;
3301
3302 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3303 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3304 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3305 cand_incr = cand_increment (c);
3306
3307 if (dump_file && (dump_flags & TDF_DETAILS))
3308 {
3309 fputs ("Replacing: ", dump_file);
3310 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3311 stmt_to_print = c->cand_stmt;
3312 }
3313
3314 if (address_arithmetic_p)
3315 repl_code = POINTER_PLUS_EXPR;
3316 else
3317 repl_code = PLUS_EXPR;
3318
3319 /* If the increment has an initializer T_0, replace the candidate
3320 statement with an add of the basis name and the initializer. */
3321 if (incr_vec[i].initializer)
3322 {
3323 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3324 tree orig_type = TREE_TYPE (orig_rhs2);
3325
3326 if (types_compatible_p (orig_type, init_type))
3327 rhs2 = incr_vec[i].initializer;
3328 else
3329 rhs2 = introduce_cast_before_cand (c, orig_type,
3330 incr_vec[i].initializer);
3331
3332 if (incr_vec[i].incr != cand_incr)
3333 {
3334 gcc_assert (repl_code == PLUS_EXPR);
3335 repl_code = MINUS_EXPR;
3336 }
3337
3338 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3339 orig_code, orig_rhs1, orig_rhs2,
3340 c);
3341 }
3342
3343 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3344 with a subtract of the stride from the basis name, a copy
3345 from the basis name, or an add of the stride to the basis
3346 name, respectively. It may be necessary to introduce a
3347 cast (or reuse an existing cast). */
3348 else if (cand_incr == 1)
3349 {
3350 tree stride_type = TREE_TYPE (c->stride);
3351 tree orig_type = TREE_TYPE (orig_rhs2);
3352
3353 if (types_compatible_p (orig_type, stride_type))
3354 rhs2 = c->stride;
3355 else
3356 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3357
3358 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3359 orig_code, orig_rhs1, orig_rhs2,
3360 c);
3361 }
3362
3363 else if (cand_incr == -1)
3364 {
3365 tree stride_type = TREE_TYPE (c->stride);
3366 tree orig_type = TREE_TYPE (orig_rhs2);
3367 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3368
3369 if (types_compatible_p (orig_type, stride_type))
3370 rhs2 = c->stride;
3371 else
3372 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3373
3374 if (orig_code != MINUS_EXPR
3375 || !operand_equal_p (basis_name, orig_rhs1, 0)
3376 || !operand_equal_p (rhs2, orig_rhs2, 0))
3377 {
3378 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3379 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3380 update_stmt (gsi_stmt (gsi));
3381 c->cand_stmt = gsi_stmt (gsi);
3382
3383 if (dump_file && (dump_flags & TDF_DETAILS))
3384 stmt_to_print = gsi_stmt (gsi);
3385 }
3386 else if (dump_file && (dump_flags & TDF_DETAILS))
3387 fputs (" (duplicate, not actually replacing)\n", dump_file);
3388 }
3389
3390 else if (cand_incr == 0)
3391 {
3392 tree lhs = gimple_assign_lhs (c->cand_stmt);
3393 tree lhs_type = TREE_TYPE (lhs);
3394 tree basis_type = TREE_TYPE (basis_name);
3395
3396 if (types_compatible_p (lhs_type, basis_type))
3397 {
3398 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3399 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3400 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3401 gsi_replace (&gsi, copy_stmt, false);
3402 c->cand_stmt = copy_stmt;
3403
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3405 stmt_to_print = copy_stmt;
3406 }
3407 else
3408 {
3409 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3410 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3411 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3412 gsi_replace (&gsi, cast_stmt, false);
3413 c->cand_stmt = cast_stmt;
3414
3415 if (dump_file && (dump_flags & TDF_DETAILS))
3416 stmt_to_print = cast_stmt;
3417 }
3418 }
3419 else
3420 gcc_unreachable ();
3421
3422 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3423 {
3424 fputs ("With: ", dump_file);
3425 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3426 fputs ("\n", dump_file);
3427 }
3428 }
3429
3430 /* For each candidate in the tree rooted at C, replace it with an add
3431 or subtract if its increment has been shown to be profitable. */
3432
3433 static void
3434 replace_profitable_candidates (slsr_cand_t c)
3435 {
3436 if (!cand_already_replaced (c))
3437 {
3438 widest_int increment = cand_abs_increment (c);
3439 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3440 int i;
3441
3442 i = incr_vec_index (increment);
3443
3444 /* Only process profitable increments. Nothing useful can be done
3445 to a cast or copy. */
3446 if (i >= 0
3447 && profitable_increment_p (i)
3448 && orig_code != MODIFY_EXPR
3449 && !CONVERT_EXPR_CODE_P (orig_code))
3450 {
3451 if (phi_dependent_cand_p (c))
3452 {
3453 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3454
3455 if (all_phi_incrs_profitable (c, phi))
3456 {
3457 /* Look up the LHS SSA name from C's basis. This will be
3458 the RHS1 of the adds we will introduce to create new
3459 phi arguments. */
3460 slsr_cand_t basis = lookup_cand (c->basis);
3461 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3462
3463 /* Create a new phi statement that will represent C's true
3464 basis after the transformation is complete. */
3465 location_t loc = gimple_location (c->cand_stmt);
3466 tree name = create_phi_basis (c, phi, basis_name,
3467 loc, UNKNOWN_STRIDE);
3468
3469 /* Replace C with an add of the new basis phi and the
3470 increment. */
3471 replace_one_candidate (c, i, name);
3472 }
3473 }
3474 else
3475 {
3476 slsr_cand_t basis = lookup_cand (c->basis);
3477 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3478 replace_one_candidate (c, i, basis_name);
3479 }
3480 }
3481 }
3482
  if (c->sibling)
    replace_profitable_candidates (lookup_cand (c->sibling));

  if (c->dependent)
    replace_profitable_candidates (lookup_cand (c->dependent));
}
\f
/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);
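	  /* Dump the increment vector for debugging.  */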
	  dump_incr_vec ();

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);
	  free (incr_vec);
	}
    }
}

namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
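  /* Run the pass only when strength reduction is enabled (-ftree-slsr).  */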
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

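  /* The alternative-base map and the affine expansion cache are needed
     only while collecting candidates, so release them before making
     replacements.  */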
  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

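/* External entry point: create a fresh instance of the pass.  */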
gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}