/* Commit: re PR tree-optimization/83293 (ICE: in gsi_insert_seq_nodes_after,
   at gimple-iterator...)
   File: gcc/gimple-ssa-strength-reduction.c [gcc.git]  */
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2017 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
59 \f
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
64
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
67
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
71
72 Any statement S0 that dominates S1 and is of the form:
73
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
76
77 is called a "basis" for S1. In both cases, S1 may be replaced by
78
79 S1': X = Y + (i - i') * S,
80
81 where (i - i') * S is folded to the extent possible.
82
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
91
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
96
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
100
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
104
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
107
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
111
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
120
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
124
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
128
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
131
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
134
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
142
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
151
152 Conditional candidates
153 ======================
154
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
157
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
165
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
168
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
172 {
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
175 }
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
180
181 where the bracketed instructions may go dead.
182
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
193
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
199
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
209
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
213
214
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
218
/* The kind of candidate.  */
enum cand_kind
{
  CAND_MULT,	/* X = (B + i) * S  */
  CAND_ADD,	/* X = B + (i * S)  */
  CAND_REF,	/* Memory reference with strength-reducible addressing.  */
  CAND_PHI	/* Bookkeeping record for a phi; never itself replaced.  */
};
227
/* One entry in the candidate table; see the module commentary for the
   meaning of base B, index i, and stride S.  */
struct slsr_cand_d
{
  /* The candidate statement S1.  */
  gimple *cand_stmt;

  /* The base expression B:  often an SSA name, but not always.  */
  tree base_expr;

  /* The stride S.  */
  tree stride;

  /* The index constant i.  */
  widest_int index;

  /* The type of the candidate.  This is normally the type of base_expr,
     but casts may have occurred when combining feeding instructions.
     A candidate can only be a basis for candidates of the same final type.
     (For CAND_REFs, this is the type to be used for operand 1 of the
      replacement MEM_REF.)  */
  tree cand_type;

  /* The type to be used to interpret the stride field when the stride
     is not a constant.  Normally the same as the type of the recorded
     stride, but when the stride has been cast we need to maintain that
     knowledge in order to make legal substitutions without losing
     precision.  When the stride is a constant, this will be sizetype.  */
  tree stride_type;

  /* The kind of candidate (CAND_MULT, etc.).  */
  enum cand_kind kind;

  /* Index of this candidate in the candidate vector.  */
  cand_idx cand_num;

  /* Index of the next candidate record for the same statement.
     A statement may be useful in more than one way (e.g., due to
     commutativity).  So we can have multiple "interpretations"
     of a statement.  Zero terminates the chain.  */
  cand_idx next_interp;

  /* Index of the basis statement S0, if any, in the candidate vector.
     Zero means no basis has been found.  */
  cand_idx basis;

  /* First candidate for which this candidate is a basis, if one exists.  */
  cand_idx dependent;

  /* Next candidate having the same basis as this one.  */
  cand_idx sibling;

  /* If this is a conditional candidate, the CAND_PHI candidate
     that defines the base SSA name B.  */
  cand_idx def_phi;

  /* Savings that can be expected from eliminating dead code if this
     candidate is replaced.  */
  int dead_savings;

  /* For PHI candidates, use a visited flag to keep from processing the
     same PHI twice from multiple paths.  */
  int visited;

  /* We sometimes have to cache a phi basis with a phi candidate to
     avoid processing it twice.  Valid only if visited==1.  */
  tree cached_basis;
};

typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
typedef const struct slsr_cand_d *const_slsr_cand_t;
296
/* Pointers to candidates are chained together as part of a mapping
   from base expressions to the candidates that use them.  */

struct cand_chain_d
{
  /* Base expression for the chain of candidates:  often, but not
     always, an SSA name.  */
  tree base_expr;

  /* Pointer to a candidate.  */
  slsr_cand_t cand;

  /* Chain pointer.  */
  struct cand_chain_d *next;

};

typedef struct cand_chain_d cand_chain, *cand_chain_t;
typedef const struct cand_chain_d *const_cand_chain_t;
316
/* Information about a unique "increment" associated with candidates
   having an SSA name for a stride.  An increment is the difference
   between the index of the candidate and the index of its basis,
   i.e., (i - i') as discussed in the module commentary.

   When we are not going to generate address arithmetic we treat
   increments that differ only in sign as the same, allowing sharing
   of the cost of initializers.  The absolute value of the increment
   is stored in the incr_info.  */

struct incr_info_d
{
  /* The increment that relates a candidate to its basis.  */
  widest_int incr;

  /* How many times the increment occurs in the candidate tree.  */
  unsigned count;

  /* Cost of replacing candidates using this increment.  Negative and
     zero costs indicate replacement should be performed.  */
  int cost;

  /* If this increment is profitable but is not -1, 0, or 1, it requires
     an initializer T_0 = stride * incr to be found or introduced in the
     nearest common dominator of all candidates.  This field holds T_0
     for subsequent use.  */
  tree initializer;

  /* If the initializer was found to already exist, this is the block
     where it was found.  */
  basic_block init_bb;
};

typedef struct incr_info_d incr_info, *incr_info_t;
351
/* Candidates are maintained in a vector.  If candidate X dominates
   candidate Y, then X appears before Y in the vector; but the
   converse does not necessarily hold.  */
static vec<slsr_cand_t> cand_vec;

/* Symbolic cost values; COST_INFINITE effectively forbids
   a replacement in cost comparisons.  */
enum cost_consts
{
  COST_NEUTRAL = 0,
  COST_INFINITE = 1000
};

/* Flag passed to analysis routines: whether the chain's stride is a
   known constant (see uses later in this file).  */
enum stride_status
{
  UNKNOWN_STRIDE = 0,
  KNOWN_STRIDE = 1
};

/* Flag passed to analysis routines: whether a phi adjustment is being
   considered (see uses later in this file).  */
enum phi_adjust_status
{
  NOT_PHI_ADJUST = 0,
  PHI_ADJUST = 1
};

/* Flag passed to analysis routines: whether phis should be included
   when counting increments (see uses later in this file).  */
enum count_phis_status
{
  DONT_COUNT_PHIS = 0,
  COUNT_PHIS = 1
};

/* Constrain how many PHI nodes we will visit for a conditional
   candidate (depth and breadth).  */
const int MAX_SPREAD = 16;

/* Pointer map embodying a mapping from statements to candidates.  */
static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;

/* Obstack for candidates.  */
static struct obstack cand_obstack;

/* Obstack for candidate chains.  */
static struct obstack chain_obstack;

/* An array INCR_VEC of incr_infos is used during analysis of related
   candidates having an SSA name for a stride.  INCR_VEC_LEN describes
   its current length.  MAX_INCR_VEC_LEN is used to avoid costly
   pathological cases.  */
static incr_info_t incr_vec;
static unsigned incr_vec_len;
const int MAX_INCR_VEC_LEN = 16;

/* For a chain of candidates with unknown stride, indicates whether or not
   we must generate pointer arithmetic when replacing statements.  */
static bool address_arithmetic_p;

/* Forward function declarations.  */
static slsr_cand_t base_cand_from_table (tree);
static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
static bool legal_cast_p_1 (tree, tree);
410 \f
/* Produce a pointer to the IDX'th candidate in the candidate vector.
   IDX is 1-based (zero meaning "no candidate") while the vector is
   0-based, hence the adjustment; callers must pass a nonzero IDX.  */

static slsr_cand_t
lookup_cand (cand_idx idx)
{
  return cand_vec[idx - 1];
}
418
/* Helper for hashing a candidate chain header.  Chains are keyed on
   the base expression; equality is structural (operand_equal_p).  */

struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
{
  static inline hashval_t hash (const cand_chain *);
  static inline bool equal (const cand_chain *, const cand_chain *);
};
426
427 inline hashval_t
428 cand_chain_hasher::hash (const cand_chain *p)
429 {
430 tree base_expr = p->base_expr;
431 return iterative_hash_expr (base_expr, 0);
432 }
433
434 inline bool
435 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
436 {
437 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
438 }
439
/* Hash table embodying a mapping from base exprs to chains of candidates
   that share that base expression (see record_potential_basis).  */
static hash_table<cand_chain_hasher> *base_cand_map;
442 \f
/* Pointer map used by tree_to_aff_combination_expand.  */
static hash_map<tree, name_expansion *> *name_expansions;
/* Pointer map embodying a mapping from bases to alternative bases;
   populated lazily by get_alternative_base.  A NULL value records
   that a base has no distinct alternative.  */
static hash_map<tree, tree> *alt_base_map;
447
448 /* Given BASE, use the tree affine combiniation facilities to
449 find the underlying tree expression for BASE, with any
450 immediate offset excluded.
451
452 N.B. we should eliminate this backtracking with better forward
453 analysis in a future release. */
454
455 static tree
456 get_alternative_base (tree base)
457 {
458 tree *result = alt_base_map->get (base);
459
460 if (result == NULL)
461 {
462 tree expr;
463 aff_tree aff;
464
465 tree_to_aff_combination_expand (base, TREE_TYPE (base),
466 &aff, &name_expansions);
467 aff.offset = 0;
468 expr = aff_combination_to_tree (&aff);
469
470 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
471
472 return expr == base ? NULL : expr;
473 }
474
475 return *result;
476 }
477
478 /* Look in the candidate table for a CAND_PHI that defines BASE and
479 return it if found; otherwise return NULL. */
480
481 static cand_idx
482 find_phi_def (tree base)
483 {
484 slsr_cand_t c;
485
486 if (TREE_CODE (base) != SSA_NAME)
487 return 0;
488
489 c = base_cand_from_table (base);
490
491 if (!c || c->kind != CAND_PHI
492 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c->cand_stmt)))
493 return 0;
494
495 return c->cand_num;
496 }
497
/* Determine whether all uses of NAME are directly or indirectly
   used by STMT.  That is, we want to know whether if STMT goes
   dead, the definition of NAME also goes dead.  RECURSE bounds the
   depth of the walk through chained definitions.  */
static bool
uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
{
  gimple *use_stmt;
  imm_use_iterator iter;
  bool retval = true;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      /* Uses in STMT itself and in debug statements don't keep
	 NAME's definition alive.  */
      if (use_stmt == stmt || is_gimple_debug (use_stmt))
	continue;

      /* Any other use must be a pure SSA assignment whose own result
	 is in turn wholly consumed by STMT; recursion is capped at
	 depth 10 to bound compile time on deep use chains.  */
      if (!is_gimple_assign (use_stmt)
	  || !gimple_get_lhs (use_stmt)
	  || !is_gimple_reg (gimple_get_lhs (use_stmt))
	  || recurse >= 10
	  || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
				     recurse + 1))
	{
	  retval = false;
	  BREAK_FROM_IMM_USE_STMT (iter);
	}
    }

  return retval;
}
527
/* Helper routine for find_basis_for_candidate.  May be called twice:
   once for the candidate's base expr, and optionally again either for
   the candidate's phi definition or for a CAND_REF's alternative base
   expression.  Returns the best eligible basis found for C under
   BASE_EXPR, or NULL if there is none.  */

static slsr_cand_t
find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
{
  cand_chain mapping_key;
  cand_chain_t chain;
  slsr_cand_t basis = NULL;

  // Limit potential of N^2 behavior for long candidate chains.
  int iters = 0;
  int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);

  mapping_key.base_expr = base_expr;
  chain = base_cand_map->find (&mapping_key);

  for (; chain && iters < max_iters; chain = chain->next, ++iters)
    {
      slsr_cand_t one_basis = chain->cand;

      /* An eligible basis has the same kind, an identical stride,
	 compatible candidate and stride types, and its statement
	 must dominate the candidate's statement.  */
      if (one_basis->kind != c->kind
	  || one_basis->cand_stmt == c->cand_stmt
	  || !operand_equal_p (one_basis->stride, c->stride, 0)
	  || !types_compatible_p (one_basis->cand_type, c->cand_type)
	  || !types_compatible_p (one_basis->stride_type, c->stride_type)
	  || !dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (c->cand_stmt),
			      gimple_bb (one_basis->cand_stmt)))
	continue;

      /* Skip a basis whose result is live across abnormal edges.  */
      tree lhs = gimple_assign_lhs (one_basis->cand_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	continue;

      /* Among eligible bases, prefer the one with the highest
	 candidate number: the most immediately dominating one.  */
      if (!basis || basis->cand_num < one_basis->cand_num)
	basis = one_basis;
    }

  return basis;
}
572
/* Use the base expr from candidate C to look for possible candidates
   that can serve as a basis for C.  Each potential basis must also
   appear in a block that dominates the candidate statement and have
   the same stride and type.  If more than one possible basis exists,
   the one with highest index in the vector is chosen; this will be
   the most immediately dominating basis.  Returns the basis's
   candidate number, or 0 if none is found.  */

static int
find_basis_for_candidate (slsr_cand_t c)
{
  slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);

  /* If a candidate doesn't have a basis using its base expression,
     it may have a basis hidden by one or more intervening phis.  */
  if (!basis && c->def_phi)
    {
      basic_block basis_bb, phi_bb;
      slsr_cand_t phi_cand = lookup_cand (c->def_phi);
      basis = find_basis_for_base_expr (c, phi_cand->base_expr);

      if (basis)
	{
	  /* A hidden basis must dominate the phi-definition of the
	     candidate's base name.  */
	  phi_bb = gimple_bb (phi_cand->cand_stmt);
	  basis_bb = gimple_bb (basis->cand_stmt);

	  if (phi_bb == basis_bb
	      || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
	    {
	      basis = NULL;
	      c->basis = 0;
	    }

	  /* If we found a hidden basis, estimate additional dead-code
	     savings if the phi and its feeding statements can be removed.  */
	  tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
	  if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
	    c->dead_savings += phi_cand->dead_savings;
	}
    }

  /* For references, also try the alternative (offset-stripped) base,
     but only when expensive optimizations are enabled.  */
  if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
    {
      tree alt_base_expr = get_alternative_base (c->base_expr);
      if (alt_base_expr)
	basis = find_basis_for_base_expr (c, alt_base_expr);
    }

  if (basis)
    {
      /* Link C into the basis's chain of dependents.  */
      c->sibling = basis->dependent;
      basis->dependent = c->cand_num;
      return basis->cand_num;
    }

  return 0;
}
631
632 /* Record a mapping from BASE to C, indicating that C may potentially serve
633 as a basis using that base expression. BASE may be the same as
634 C->BASE_EXPR; alternatively BASE can be a different tree that share the
635 underlining expression of C->BASE_EXPR. */
636
637 static void
638 record_potential_basis (slsr_cand_t c, tree base)
639 {
640 cand_chain_t node;
641 cand_chain **slot;
642
643 gcc_assert (base);
644
645 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
646 node->base_expr = base;
647 node->cand = c;
648 node->next = NULL;
649 slot = base_cand_map->find_slot (node, INSERT);
650
651 if (*slot)
652 {
653 cand_chain_t head = (cand_chain_t) (*slot);
654 node->next = head->next;
655 head->next = node;
656 }
657 else
658 *slot = node;
659 }
660
/* Allocate storage for a new candidate and initialize its fields.
   Attempt to find a basis for the candidate.

   For CAND_REF, an alternative base may also be recorded and used
   to find a basis.  This helps cases where the expression hidden
   behind BASE (which is usually an SSA_NAME) has immediate offset,
   e.g.

     a2[i][j] = 1;
     a2[i + 20][j] = 2;  */

static slsr_cand_t
alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
			   const widest_int &index, tree stride, tree ctype,
			   tree stype, unsigned savings)
{
  slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
					       sizeof (slsr_cand));
  c->cand_stmt = gs;
  c->base_expr = base;
  c->stride = stride;
  c->index = index;
  c->cand_type = ctype;
  c->stride_type = stype;
  c->kind = kind;
  /* Candidate numbers are 1-based; see lookup_cand.  */
  c->cand_num = cand_vec.length () + 1;
  c->next_interp = 0;
  c->dependent = 0;
  c->sibling = 0;
  /* Only multiplies can have a conditional (phi-hidden) basis.  */
  c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
  c->dead_savings = savings;
  c->visited = 0;
  c->cached_basis = NULL_TREE;

  cand_vec.safe_push (c);

  /* CAND_PHIs are bookkeeping records and never get a basis.  */
  if (kind == CAND_PHI)
    c->basis = 0;
  else
    c->basis = find_basis_for_candidate (c);

  record_potential_basis (c, base);
  if (flag_expensive_optimizations && kind == CAND_REF)
    {
      tree alt_base = get_alternative_base (base);
      if (alt_base)
	record_potential_basis (c, alt_base);
    }

  return c;
}
712
/* Determine the target cost of statement GS when compiling according
   to SPEED.  GS must be an assignment with a cost-modeled rhs code;
   anything else trips the gcc_unreachable at the end.  */

static int
stmt_cost (gimple *gs, bool speed)
{
  tree lhs, rhs1, rhs2;
  machine_mode lhs_mode;

  gcc_assert (is_gimple_assign (gs));
  lhs = gimple_assign_lhs (gs);
  rhs1 = gimple_assign_rhs1 (gs);
  lhs_mode = TYPE_MODE (TREE_TYPE (lhs));

  switch (gimple_assign_rhs_code (gs))
    {
    case MULT_EXPR:
      rhs2 = gimple_assign_rhs2 (gs);

      /* A multiply by a host-representable constant may be cheaper
	 than a general multiply (e.g., shift/add sequences).  */
      if (tree_fits_shwi_p (rhs2))
	return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);

      gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
      return mul_cost (speed, lhs_mode);

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return add_cost (speed, lhs_mode);

    case NEGATE_EXPR:
      return neg_cost (speed, lhs_mode);

    CASE_CONVERT:
      return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);

    /* Note that we don't assign costs to copies that in most cases
       will go away.  */
    case SSA_NAME:
      return 0;

    default:
      ;
    }

  gcc_unreachable ();
  return 0;
}
761
762 /* Look up the defining statement for BASE_IN and return a pointer
763 to its candidate in the candidate table, if any; otherwise NULL.
764 Only CAND_ADD and CAND_MULT candidates are returned. */
765
766 static slsr_cand_t
767 base_cand_from_table (tree base_in)
768 {
769 slsr_cand_t *result;
770
771 gimple *def = SSA_NAME_DEF_STMT (base_in);
772 if (!def)
773 return (slsr_cand_t) NULL;
774
775 result = stmt_cand_map->get (def);
776
777 if (result && (*result)->kind != CAND_REF)
778 return *result;
779
780 return (slsr_cand_t) NULL;
781 }
782
/* Add an entry to the statement-to-candidate mapping.  Each statement
   may be entered at most once; the assertion catches duplicates.
   NOTE(review): the map insertion happens inside gcc_assert, so this
   relies on gcc_assert always evaluating its argument — confirm that
   holds for release-checking builds.  */

static void
add_cand_for_stmt (gimple *gs, slsr_cand_t c)
{
  gcc_assert (!stmt_cand_map->put (gs, c));
}
790 \f
/* Given PHI which contains a phi statement, determine whether it
   satisfies all the requirements of a phi candidate.  If so, create
   a candidate.  Note that a CAND_PHI never has a basis itself, but
   is used to help find a basis for subsequent candidates.  SPEED
   selects speed vs. size costing for dead-code savings estimates.  */

static void
slsr_process_phi (gphi *phi, bool speed)
{
  unsigned i;
  tree arg0_base = NULL_TREE, base_type;
  slsr_cand_t c;
  struct loop *cand_loop = gimple_bb (phi)->loop_father;
  unsigned savings = 0;

  /* A CAND_PHI requires each of its arguments to have the same
     derived base name.  (See the module header commentary for a
     definition of derived base names.)  Furthermore, all feeding
     definitions must be in the same position in the loop hierarchy
     as PHI.  */

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      slsr_cand_t arg_cand;
      tree arg = gimple_phi_arg_def (phi, i);
      tree derived_base_name = NULL_TREE;
      gimple *arg_stmt = NULL;
      basic_block arg_bb = NULL;

      if (TREE_CODE (arg) != SSA_NAME)
	return;

      arg_cand = base_cand_from_table (arg);

      if (arg_cand)
	{
	  /* Walk the argument's interpretations looking for a
	     CAND_ADD or CAND_PHI; bail if there is none.  */
	  while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
	    {
	      if (!arg_cand->next_interp)
		return;

	      arg_cand = lookup_cand (arg_cand->next_interp);
	    }

	  /* Only a stride of 1 yields a derived base name.  */
	  if (!integer_onep (arg_cand->stride))
	    return;

	  derived_base_name = arg_cand->base_expr;
	  arg_stmt = arg_cand->cand_stmt;
	  arg_bb = gimple_bb (arg_stmt);

	  /* Gather potential dead code savings if the phi statement
	     can be removed later on.  */
	  if (uses_consumed_by_stmt (arg, phi))
	    {
	      if (gimple_code (arg_stmt) == GIMPLE_PHI)
		savings += arg_cand->dead_savings;
	      else
		savings += stmt_cost (arg_stmt, speed);
	    }
	}
      else if (SSA_NAME_IS_DEFAULT_DEF (arg))
	{
	  /* A default definition is its own derived base name and is
	     treated as residing in the entry block's successor.  */
	  derived_base_name = arg;
	  arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	}

      if (!arg_bb || arg_bb->loop_father != cand_loop)
	return;

      /* All arguments must share the first argument's derived base.  */
      if (i == 0)
	arg0_base = derived_base_name;
      else if (!operand_equal_p (derived_base_name, arg0_base, 0))
	return;
    }

  /* Create the candidate.  "alloc_cand_and_find_basis" is named
     misleadingly for this case, as no basis will be sought for a
     CAND_PHI.  */
  base_type = TREE_TYPE (arg0_base);

  c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
				 0, integer_one_node, base_type,
				 sizetype, savings);

  /* Add the candidate to the statement-candidate mapping.  */
  add_cand_for_stmt (phi, c);
}
878
/* Given PBASE which is a pointer to tree, look up the defining
   statement for it and check whether the candidate is in the
   form of:

     X = B + (1 * S), S is integer constant
     X = B + (i * S), S is integer one

   If so, set PBASE to the candidate's base_expr and return the
   widest_int (i * S).
   Otherwise, just return widest_int zero.  */

static widest_int
backtrace_base_for_ref (tree *pbase)
{
  tree base_in = *pbase;
  slsr_cand_t base_cand;

  STRIP_NOPS (base_in);

  /* Strip off widening conversion(s) to handle cases where
     e.g. 'B' is widened from an 'int' in order to calculate
     a 64-bit address.  */
  if (CONVERT_EXPR_P (base_in)
      && legal_cast_p_1 (TREE_TYPE (base_in),
			 TREE_TYPE (TREE_OPERAND (base_in, 0))))
    base_in = get_unwidened (base_in, NULL_TREE);

  if (TREE_CODE (base_in) != SSA_NAME)
    return 0;

  base_cand = base_cand_from_table (base_in);

  /* Examine each interpretation of the defining statement until a
     matching CAND_ADD form is found or the chain is exhausted.  */
  while (base_cand && base_cand->kind != CAND_PHI)
    {
      if (base_cand->kind == CAND_ADD
	  && base_cand->index == 1
	  && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* X = B + (1 * S), S is integer constant.  */
	  *pbase = base_cand->base_expr;
	  return wi::to_widest (base_cand->stride);
	}
      else if (base_cand->kind == CAND_ADD
	       && TREE_CODE (base_cand->stride) == INTEGER_CST
	       && integer_onep (base_cand->stride))
	{
	  /* X = B + (i * S), S is integer one.  */
	  *pbase = base_cand->base_expr;
	  return base_cand->index;
	}

      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  return 0;
}
938
/* Look for the following pattern:

    *PBASE:    MEM_REF (T1, C1)

    *POFFSET:  MULT_EXPR (T2, C3)        [C2 is zero]
                     or
	       MULT_EXPR (PLUS_EXPR (T2, C2), C3)
                     or
	       MULT_EXPR (MINUS_EXPR (T2, -C2), C3)

    *PINDEX:   C4 * BITS_PER_UNIT

   If not present, leave the input values unchanged and return FALSE.
   Otherwise, modify the input values as follows and return TRUE:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2, C3)
    *PINDEX:   C1 + (C2 * C3) + C4

   When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
   will be further restructured to:

    *PBASE:    T1
    *POFFSET:  MULT_EXPR (T2', C3)
    *PINDEX:   C1 + (C2 * C3) + C4 + (C5 * C3)  */

static bool
restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
		       tree *ptype)
{
  tree base = *pbase, offset = *poffset;
  widest_int index = *pindex;
  tree mult_op0, t1, t2, type;
  widest_int c1, c2, c3, c4, c5;

  /* Reject anything but a MEM_REF base with a constant-factor MULT_EXPR
     offset and a byte-aligned bit index; otherwise the reference does
     not match the pattern documented above.  */
  if (!base
      || !offset
      || TREE_CODE (base) != MEM_REF
      || TREE_CODE (offset) != MULT_EXPR
      || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
      || wi::umod_floor (index, BITS_PER_UNIT) != 0)
    return false;

  /* T1 and C1 are the base pointer and constant byte offset of the
     MEM_REF; TYPE is the pointer type recorded in its second operand.  */
  t1 = TREE_OPERAND (base, 0);
  c1 = widest_int::from (mem_ref_offset (base), SIGNED);
  type = TREE_TYPE (TREE_OPERAND (base, 1));

  mult_op0 = TREE_OPERAND (offset, 0);
  c3 = wi::to_widest (TREE_OPERAND (offset, 1));

  /* Decompose the multiplied operand into T2 + C2, where C2 is zero
     unless the operand is itself a PLUS_EXPR or MINUS_EXPR with a
     constant second operand.  Note the unbraced if/else nesting:  each
     inner if/else is the single statement controlled by the outer test.  */
  if (TREE_CODE (mult_op0) == PLUS_EXPR)

    if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
      {
	t2 = TREE_OPERAND (mult_op0, 0);
	c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
      }
    else
      return false;

  else if (TREE_CODE (mult_op0) == MINUS_EXPR)

    if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
      {
	t2 = TREE_OPERAND (mult_op0, 0);
	/* Negate to fold the subtraction into the index.  */
	c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
      }
    else
      return false;

  else
    {
      t2 = mult_op0;
      c2 = 0;
    }

  /* Convert the bit index to bytes, and fold in any additional constant
     C5 recorded for T2 by a known CAND_ADD (which may replace T2).  */
  c4 = index >> LOG2_BITS_PER_UNIT;
  c5 = backtrace_base_for_ref (&t2);

  *pbase = t1;
  *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
			  wide_int_to_tree (sizetype, c3));
  *pindex = c1 + c2 * c3 + c4 + c5 * c3;
  *ptype = type;

  return true;
}
1026
/* Given GS which contains a data reference, create a CAND_REF entry in
   the candidate table and attempt to find a basis.  */

static void
slsr_process_ref (gimple *gs)
{
  tree ref_expr, base, offset, type;
  HOST_WIDE_INT bitsize, bitpos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  slsr_cand_t c;

  /* For a store (statement with a virtual definition) the reference of
     interest is the LHS; otherwise it is the RHS.  */
  if (gimple_vdef (gs))
    ref_expr = gimple_assign_lhs (gs);
  else
    ref_expr = gimple_assign_rhs1 (gs);

  /* Only handled component references are candidates; bit-field
     accesses are excluded because their addresses cannot be safely
     strength-reduced.  */
  if (!handled_component_p (ref_expr)
      || TREE_CODE (ref_expr) == BIT_FIELD_REF
      || (TREE_CODE (ref_expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
    return;

  /* Decompose the reference into base, variable offset, and constant
     bit position.  Reverse storage order references are not handled.  */
  base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);
  if (reversep)
    return;
  widest_int index = bitpos;

  /* Try to put the decomposed reference into the canonical CAND_REF
     shape; give up if it does not match the expected pattern.  */
  if (!restructure_reference (&base, &offset, &index, &type))
    return;

  c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
				 type, sizetype, 0);

  /* Add the candidate to the statement-candidate mapping.  */
  add_cand_for_stmt (gs, c);
}
1065
/* Create a candidate entry for a statement GS, where GS multiplies
   two SSA names BASE_IN and STRIDE_IN.  Propagate any known information
   about the two SSA names into the new candidate.  Return the new
   candidate.  */

static slsr_cand_t
create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  tree stype = NULL_TREE;
  widest_int index;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Look at all interpretations of the base candidate, if necessary,
     to find information to propagate into this candidate.  The loop
     stops as soon as one interpretation yields a usable BASE.  */
  while (base_cand && !base && base_cand->kind != CAND_PHI)
    {

      if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
	{
	  /* Y = (B + i') * 1
	     X = Y * Z
	     ================
	     X = (B + i') * Z  */
	  base = base_cand->base_expr;
	  index = base_cand->index;
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  stype = TREE_TYPE (stride_in);
	  /* If BASE_IN has no other uses, its defining statement can
	     eventually die, so count its cost as a saving.  */
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (base_cand->kind == CAND_ADD
	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = B + (i' * S), S constant
	     X = Y * Z
	     ============================
	     X = B + ((i' * S) * Z)  */
	  base = base_cand->base_expr;
	  index = base_cand->index * wi::to_widest (base_cand->stride);
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  stype = TREE_TYPE (stride_in);
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      /* Advance to the next interpretation of this candidate, if any.  */
      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = (Y + 0) * Z.  */
      base = base_in;
      index = 0;
      stride = stride_in;
      ctype = TREE_TYPE (base_in);
      stype = TREE_TYPE (stride_in);
    }

  c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
				 ctype, stype, savings);
  return c;
}
1139
/* Create a candidate entry for a statement GS, where GS multiplies
   SSA name BASE_IN by constant STRIDE_IN.  Propagate any known
   information about BASE_IN into the new candidate.  Return the new
   candidate.  */

static slsr_cand_t
create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  widest_int index, temp;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Look at all interpretations of the base candidate, if necessary,
     to find information to propagate into this candidate.  The loop
     stops as soon as one interpretation yields a usable BASE.  */
  while (base_cand && !base && base_cand->kind != CAND_PHI)
    {
      if (base_cand->kind == CAND_MULT
	  && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = (B + i') * S, S constant
	     X = Y * c
	     ============================
	     X = (B + i') * (S * c)  */
	  temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
	  /* Only fold the strides when their product still fits the
	     stride type; otherwise skip this interpretation.  */
	  if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
	    {
	      base = base_cand->base_expr;
	      index = base_cand->index;
	      stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
	      ctype = base_cand->cand_type;
	      /* If BASE_IN has no other uses, its defining statement can
		 eventually die, so count its cost as a saving.  */
	      if (has_single_use (base_in))
		savings = (base_cand->dead_savings
			   + stmt_cost (base_cand->cand_stmt, speed));
	    }
	}
      else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
	{
	  /* Y = B + (i' * 1)
	     X = Y * c
	     ===========================
	     X = (B + i') * c  */
	  base = base_cand->base_expr;
	  index = base_cand->index;
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (base_cand->kind == CAND_ADD
	       && base_cand->index == 1
	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = B + (1 * S), S constant
	     X = Y * c
	     ===========================
	     X = (B + S) * c  */
	  base = base_cand->base_expr;
	  index = wi::to_widest (base_cand->stride);
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      /* Advance to the next interpretation of this candidate, if any.  */
      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = (Y + 0) * c.  */
      base = base_in;
      index = 0;
      stride = stride_in;
      ctype = TREE_TYPE (base_in);
    }

  c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
				 ctype, sizetype, savings);
  return c;
}
1228
1229 /* Given GS which is a multiply of scalar integers, make an appropriate
1230 entry in the candidate table. If this is a multiply of two SSA names,
1231 create two CAND_MULT interpretations and attempt to find a basis for
1232 each of them. Otherwise, create a single CAND_MULT and attempt to
1233 find a basis. */
1234
1235 static void
1236 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1237 {
1238 slsr_cand_t c, c2;
1239
1240 /* If this is a multiply of an SSA name with itself, it is highly
1241 unlikely that we will get a strength reduction opportunity, so
1242 don't record it as a candidate. This simplifies the logic for
1243 finding a basis, so if this is removed that must be considered. */
1244 if (rhs1 == rhs2)
1245 return;
1246
1247 if (TREE_CODE (rhs2) == SSA_NAME)
1248 {
1249 /* Record an interpretation of this statement in the candidate table
1250 assuming RHS1 is the base expression and RHS2 is the stride. */
1251 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1252
1253 /* Add the first interpretation to the statement-candidate mapping. */
1254 add_cand_for_stmt (gs, c);
1255
1256 /* Record another interpretation of this statement assuming RHS1
1257 is the stride and RHS2 is the base expression. */
1258 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1259 c->next_interp = c2->cand_num;
1260 }
1261 else
1262 {
1263 /* Record an interpretation for the multiply-immediate. */
1264 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1265
1266 /* Add the interpretation to the statement-candidate mapping. */
1267 add_cand_for_stmt (gs, c);
1268 }
1269 }
1270
/* Create a candidate entry for a statement GS, where GS adds two
   SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
   subtracts ADDEND_IN from BASE_IN otherwise.  Propagate any known
   information about the two SSA names into the new candidate.
   Return the new candidate.  */

static slsr_cand_t
create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
		     bool subtract_p, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  tree stype = NULL_TREE;
  widest_int index;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);
  slsr_cand_t addend_cand = base_cand_from_table (addend_in);

  /* The most useful transformation is a multiply-immediate feeding
     an add or subtract.  Look for that first.  */
  while (addend_cand && !base && addend_cand->kind != CAND_PHI)
    {
      if (addend_cand->kind == CAND_MULT
	  && addend_cand->index == 0
	  && TREE_CODE (addend_cand->stride) == INTEGER_CST)
	{
	  /* Z = (B + 0) * S, S constant
	     X = Y +/- Z
	     ===========================
	     X = Y + ((+/-1 * S) * B)  */
	  base = base_in;
	  index = wi::to_widest (addend_cand->stride);
	  /* A subtraction negates the multiplier.  */
	  if (subtract_p)
	    index = -index;
	  stride = addend_cand->base_expr;
	  ctype = TREE_TYPE (base_in);
	  stype = addend_cand->cand_type;
	  /* If ADDEND_IN has no other uses, its defining statement can
	     eventually die, so count its cost as a saving.  */
	  if (has_single_use (addend_in))
	    savings = (addend_cand->dead_savings
		       + stmt_cost (addend_cand->cand_stmt, speed));
	}

      /* Advance to the next interpretation of the addend, if any.  */
      if (addend_cand->next_interp)
	addend_cand = lookup_cand (addend_cand->next_interp);
      else
	addend_cand = NULL;
    }

  /* Next, look through the interpretations of the base operand.  */
  while (base_cand && !base && base_cand->kind != CAND_PHI)
    {
      if (base_cand->kind == CAND_ADD
	  && (base_cand->index == 0
	      || operand_equal_p (base_cand->stride,
				  integer_zero_node, 0)))
	{
	  /* Y = B + (i' * S), i' * S = 0
	     X = Y +/- Z
	     ============================
	     X = B + (+/-1 * Z)  */
	  base = base_cand->base_expr;
	  index = subtract_p ? -1 : 1;
	  stride = addend_in;
	  ctype = base_cand->cand_type;
	  /* A constant addend takes sizetype as its stride type;
	     otherwise use the addend's own type.  */
	  stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
		   : TREE_TYPE (addend_in));
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (subtract_p)
	{
	  /* For a subtraction, also check whether the subtrahend is a
	     known multiply-by-constant of some base.  */
	  slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);

	  while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
	    {
	      if (subtrahend_cand->kind == CAND_MULT
		  && subtrahend_cand->index == 0
		  && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
		{
		  /* Z = (B + 0) * S, S constant
		     X = Y - Z
		     ===========================
		     Value:  X = Y + ((-1 * S) * B)  */
		  base = base_in;
		  index = wi::to_widest (subtrahend_cand->stride);
		  index = -index;
		  stride = subtrahend_cand->base_expr;
		  ctype = TREE_TYPE (base_in);
		  stype = subtrahend_cand->cand_type;
		  if (has_single_use (addend_in))
		    savings = (subtrahend_cand->dead_savings
			       + stmt_cost (subtrahend_cand->cand_stmt, speed));
		}

	      /* Advance to the next interpretation of the subtrahend.  */
	      if (subtrahend_cand->next_interp)
		subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
	      else
		subtrahend_cand = NULL;
	    }
	}

      /* Advance to the next interpretation of the base, if any.  */
      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = Y + (1 * Z).  */
      base = base_in;
      index = subtract_p ? -1 : 1;
      stride = addend_in;
      ctype = TREE_TYPE (base_in);
      stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
	       : TREE_TYPE (addend_in));
    }

  c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
				 ctype, stype, savings);
  return c;
}
1394
/* Create a candidate entry for a statement GS, where GS adds SSA
   name BASE_IN to constant INDEX_IN.  Propagate any known information
   about BASE_IN into the new candidate.  Return the new candidate.  */

static slsr_cand_t
create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
		     bool speed)
{
  enum cand_kind kind = CAND_ADD;
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  tree stype = NULL_TREE;
  widest_int index, multiple;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Look at all interpretations of the base candidate; the first one
     whose constant stride evenly divides INDEX_IN wins.  */
  while (base_cand && !base && base_cand->kind != CAND_PHI)
    {
      signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));

      /* MULTIPLE receives INDEX_IN / stride when the division is exact.  */
      if (TREE_CODE (base_cand->stride) == INTEGER_CST
	  && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
				sign, &multiple))
	{
	  /* Y = (B + i') * S, S constant, c = kS for some integer k
	     X = Y + c
	     ============================
	     X = (B + (i'+ k)) * S
	  OR
	     Y = B + (i' * S), S constant, c = kS for some integer k
	     X = Y + c
	     ============================
	     X = (B + (i'+ k)) * S  */
	  /* The propagated interpretation keeps the base candidate's
	     kind (CAND_MULT or CAND_ADD), per the two patterns above.  */
	  kind = base_cand->kind;
	  base = base_cand->base_expr;
	  index = base_cand->index + multiple;
	  stride = base_cand->stride;
	  ctype = base_cand->cand_type;
	  stype = base_cand->stride_type;
	  /* If BASE_IN has no other uses, its defining statement can
	     eventually die, so count its cost as a saving.  */
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      /* Advance to the next interpretation of this candidate, if any.  */
      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = Y + (c * 1).  */
      kind = CAND_ADD;
      base = base_in;
      index = index_in;
      stride = integer_one_node;
      ctype = TREE_TYPE (base_in);
      stype = sizetype;
    }

  c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
				 ctype, stype, savings);
  return c;
}
1461
/* Given GS which is an add or subtract of scalar integers or pointers,
   make at least one appropriate entry in the candidate table.  */

static void
slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
{
  bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
  slsr_cand_t c = NULL, c2;

  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      /* First record an interpretation assuming RHS1 is the base expression
	 and RHS2 is the stride.  But it doesn't make sense for the
	 stride to be a pointer, so don't record a candidate in that case.  */
      if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
	{
	  c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);

	  /* Add the first interpretation to the statement-candidate
	     mapping.  */
	  add_cand_for_stmt (gs, c);
	}

      /* If the two RHS operands are identical, or this is a subtract,
	 we're done.  (Subtraction is not commutative, so swapping the
	 operands would change the value.)  */
      if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
	return;

      /* Otherwise, record another interpretation assuming RHS2 is the
	 base expression and RHS1 is the stride, again provided that the
	 stride is not a pointer.  */
      if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
	{
	  c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
	  /* Chain onto the first interpretation if there was one;
	     otherwise this becomes the statement's only mapping.  */
	  if (c)
	    c->next_interp = c2->cand_num;
	  else
	    add_cand_for_stmt (gs, c2);
	}
    }
  else
    {
      /* Record an interpretation for the add-immediate.  A subtract of
	 a constant is treated as an add of its negation.  */
      widest_int index = wi::to_widest (rhs2);
      if (subtract_p)
	index = -index;

      c = create_add_imm_cand (gs, rhs1, index, speed);

      /* Add the interpretation to the statement-candidate mapping.  */
      add_cand_for_stmt (gs, c);
    }
}
1515
1516 /* Given GS which is a negate of a scalar integer, make an appropriate
1517 entry in the candidate table. A negate is equivalent to a multiply
1518 by -1. */
1519
1520 static void
1521 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1522 {
1523 /* Record a CAND_MULT interpretation for the multiply by -1. */
1524 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1525
1526 /* Add the interpretation to the statement-candidate mapping. */
1527 add_cand_for_stmt (gs, c);
1528 }
1529
1530 /* Help function for legal_cast_p, operating on two trees. Checks
1531 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1532 for more details. */
1533
1534 static bool
1535 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1536 {
1537 unsigned lhs_size, rhs_size;
1538 bool lhs_wraps, rhs_wraps;
1539
1540 lhs_size = TYPE_PRECISION (lhs_type);
1541 rhs_size = TYPE_PRECISION (rhs_type);
1542 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1543 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1544
1545 if (lhs_size < rhs_size
1546 || (rhs_wraps && !lhs_wraps)
1547 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1548 return false;
1549
1550 return true;
1551 }
1552
1553 /* Return TRUE if GS is a statement that defines an SSA name from
1554 a conversion and is legal for us to combine with an add and multiply
1555 in the candidate table. For example, suppose we have:
1556
1557 A = B + i;
1558 C = (type) A;
1559 D = C * S;
1560
1561 Without the type-cast, we would create a CAND_MULT for D with base B,
1562 index i, and stride S. We want to record this candidate only if it
1563 is equivalent to apply the type cast following the multiply:
1564
1565 A = B + i;
1566 E = A * S;
1567 D = (type) E;
1568
1569 We will record the type with the candidate for D. This allows us
1570 to use a similar previous candidate as a basis. If we have earlier seen
1571
1572 A' = B + i';
1573 C' = (type) A';
1574 D' = C' * S;
1575
1576 we can replace D with
1577
1578 D = D' + (i - i') * S;
1579
1580 But if moving the type-cast would change semantics, we mustn't do this.
1581
1582 This is legitimate for casts from a non-wrapping integral type to
1583 any integral type of the same or larger size. It is not legitimate
1584 to convert a wrapping type to a non-wrapping type, or to a wrapping
1585 type of a different size. I.e., with a wrapping type, we must
1586 assume that the addition B + i could wrap, in which case performing
1587 the multiply before or after one of the "illegal" type casts will
1588 have different semantics. */
1589
1590 static bool
1591 legal_cast_p (gimple *gs, tree rhs)
1592 {
1593 if (!is_gimple_assign (gs)
1594 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1595 return false;
1596
1597 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
1598 }
1599
/* Given GS which is a cast to a scalar integer type, determine whether
   the cast is legal for strength reduction.  If so, make at least one
   appropriate entry in the candidate table.  */

static void
slsr_process_cast (gimple *gs, tree rhs1, bool speed)
{
  tree lhs, ctype;
  slsr_cand_t base_cand, c = NULL, c2;
  unsigned savings = 0;

  /* Bail out if hoisting the cast past the arithmetic could change
     semantics (see legal_cast_p).  */
  if (!legal_cast_p (gs, rhs1))
    return;

  lhs = gimple_assign_lhs (gs);
  base_cand = base_cand_from_table (rhs1);
  ctype = TREE_TYPE (lhs);

  if (base_cand && base_cand->kind != CAND_PHI)
    {
      /* Create one new interpretation per interpretation of RHS1.  */
      while (base_cand)
	{
	  /* Propagate all data from the base candidate except the type,
	     which comes from the cast, and the base candidate's cast,
	     which is no longer applicable.  */
	  if (has_single_use (rhs1))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));

	  c = alloc_cand_and_find_basis (base_cand->kind, gs,
					 base_cand->base_expr,
					 base_cand->index, base_cand->stride,
					 ctype, base_cand->stride_type,
					 savings);
	  if (base_cand->next_interp)
	    base_cand = lookup_cand (base_cand->next_interp);
	  else
	    base_cand = NULL;
	}
    }
  else
    {
      /* If nothing is known about the RHS, create fresh CAND_ADD and
	 CAND_MULT interpretations:

	 X = Y + (0 * 1)
	 X = (Y + 0) * 1

	 The first of these is somewhat arbitrary, but the choice of
	 1 for the stride simplifies the logic for propagating casts
	 into their uses.  */
      c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
				     integer_one_node, ctype, sizetype, 0);
      c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
				      integer_one_node, ctype, sizetype, 0);
      c->next_interp = c2->cand_num;
    }

  /* Add the first (or only) interpretation to the statement-candidate
     mapping.  */
  add_cand_for_stmt (gs, c);
}
1662
/* Given GS which is a copy of a scalar integer type, make at least one
   appropriate entry in the candidate table.

   This interface is included for completeness, but is unnecessary
   if this pass immediately follows a pass that performs copy
   propagation, such as DOM.  */

static void
slsr_process_copy (gimple *gs, tree rhs1, bool speed)
{
  slsr_cand_t base_cand, c = NULL, c2;
  unsigned savings = 0;

  base_cand = base_cand_from_table (rhs1);

  if (base_cand && base_cand->kind != CAND_PHI)
    {
      /* Create one new interpretation per interpretation of RHS1.  */
      while (base_cand)
	{
	  /* Propagate all data from the base candidate.  */
	  if (has_single_use (rhs1))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));

	  c = alloc_cand_and_find_basis (base_cand->kind, gs,
					 base_cand->base_expr,
					 base_cand->index, base_cand->stride,
					 base_cand->cand_type,
					 base_cand->stride_type, savings);
	  if (base_cand->next_interp)
	    base_cand = lookup_cand (base_cand->next_interp);
	  else
	    base_cand = NULL;
	}
    }
  else
    {
      /* If nothing is known about the RHS, create fresh CAND_ADD and
	 CAND_MULT interpretations:

	 X = Y + (0 * 1)
	 X = (Y + 0) * 1

	 The first of these is somewhat arbitrary, but the choice of
	 1 for the stride simplifies the logic for propagating casts
	 into their uses.  */
      c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
				     integer_one_node, TREE_TYPE (rhs1),
				     sizetype, 0);
      c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
				      integer_one_node, TREE_TYPE (rhs1),
				      sizetype, 0);
      c->next_interp = c2->cand_num;
    }

  /* Add the first (or only) interpretation to the statement-candidate
     mapping.  */
  add_cand_for_stmt (gs, c);
}
1722 \f
/* Dominator walker that visits each basic block once, in dominator
   order, recording strength-reduction candidates as it goes.  */

class find_candidates_dom_walker : public dom_walker
{
 public:
  find_candidates_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}
  virtual edge before_dom_children (basic_block);
};
1730
/* Find strength-reduction candidates in block BB.  */

edge
find_candidates_dom_walker::before_dom_children (basic_block bb)
{
  bool speed = optimize_bb_for_speed_p (bb);

  /* PHIs are processed first, so their candidates are available to the
     statements below them.  */
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    slsr_process_phi (gsi.phi (), speed);

  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *gs = gsi_stmt (gsi);

      /* A single assignment with a virtual use is a memory reference.  */
      if (gimple_vuse (gs) && gimple_assign_single_p (gs))
	slsr_process_ref (gs);

      else if (is_gimple_assign (gs)
	       && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
		   || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
	{
	  tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;

	  /* First switch:  extract and validate the operands.  An
	     unexpected non-SSA operand skips the statement entirely.  */
	  switch (gimple_assign_rhs_code (gs))
	    {
	    case MULT_EXPR:
	    case PLUS_EXPR:
	      rhs1 = gimple_assign_rhs1 (gs);
	      rhs2 = gimple_assign_rhs2 (gs);
	      /* Should never happen, but currently some buggy situations
		 in earlier phases put constants in rhs1.  */
	      if (TREE_CODE (rhs1) != SSA_NAME)
		continue;
	      break;

	    /* Possible future opportunity: rhs1 of a ptr+ can be
	       an ADDR_EXPR.  */
	    case POINTER_PLUS_EXPR:
	    case MINUS_EXPR:
	      rhs2 = gimple_assign_rhs2 (gs);
	      gcc_fallthrough ();

	    CASE_CONVERT:
	    case SSA_NAME:
	    case NEGATE_EXPR:
	      rhs1 = gimple_assign_rhs1 (gs);
	      if (TREE_CODE (rhs1) != SSA_NAME)
		continue;
	      break;

	    default:
	      ;
	    }

	  /* Second switch:  dispatch to the per-operation handler now
	     that the operands are known to be in order.  */
	  switch (gimple_assign_rhs_code (gs))
	    {
	    case MULT_EXPR:
	      slsr_process_mul (gs, rhs1, rhs2, speed);
	      break;

	    case PLUS_EXPR:
	    case POINTER_PLUS_EXPR:
	    case MINUS_EXPR:
	      slsr_process_add (gs, rhs1, rhs2, speed);
	      break;

	    case NEGATE_EXPR:
	      slsr_process_neg (gs, rhs1, speed);
	      break;

	    CASE_CONVERT:
	      slsr_process_cast (gs, rhs1, speed);
	      break;

	    case SSA_NAME:
	      slsr_process_copy (gs, rhs1, speed);
	      break;

	    default:
	      ;
	    }
	}
    }
  return NULL;
}
1818 \f
/* Dump a candidate for debug.  */

static void
dump_candidate (slsr_cand_t c)
{
  fprintf (dump_file, "%3d [%d] ", c->cand_num,
	   gimple_bb (c->cand_stmt)->index);
  print_gimple_stmt (dump_file, c->cand_stmt, 0);
  /* Each candidate kind has its own canonical display format.  */
  switch (c->kind)
    {
    case CAND_MULT:
      fputs (" MULT : (", dump_file);
      print_generic_expr (dump_file, c->base_expr);
      fputs (" + ", dump_file);
      print_decs (c->index, dump_file);
      fputs (") * ", dump_file);
      /* Show an explicit cast when the stride is a name whose recorded
	 stride type differs from its own type.  */
      if (TREE_CODE (c->stride) != INTEGER_CST
	  && c->stride_type != TREE_TYPE (c->stride))
	{
	  fputs ("(", dump_file);
	  print_generic_expr (dump_file, c->stride_type);
	  fputs (")", dump_file);
	}
      print_generic_expr (dump_file, c->stride);
      fputs (" : ", dump_file);
      break;
    case CAND_ADD:
      fputs (" ADD : ", dump_file);
      print_generic_expr (dump_file, c->base_expr);
      fputs (" + (", dump_file);
      print_decs (c->index, dump_file);
      fputs (" * ", dump_file);
      if (TREE_CODE (c->stride) != INTEGER_CST
	  && c->stride_type != TREE_TYPE (c->stride))
	{
	  fputs ("(", dump_file);
	  print_generic_expr (dump_file, c->stride_type);
	  fputs (")", dump_file);
	}
      print_generic_expr (dump_file, c->stride);
      fputs (") : ", dump_file);
      break;
    case CAND_REF:
      fputs (" REF : ", dump_file);
      print_generic_expr (dump_file, c->base_expr);
      fputs (" + (", dump_file);
      print_generic_expr (dump_file, c->stride);
      fputs (") + ", dump_file);
      print_decs (c->index, dump_file);
      fputs (" : ", dump_file);
      break;
    case CAND_PHI:
      fputs (" PHI : ", dump_file);
      print_generic_expr (dump_file, c->base_expr);
      fputs (" + (unknown * ", dump_file);
      print_generic_expr (dump_file, c->stride);
      fputs (") : ", dump_file);
      break;
    default:
      gcc_unreachable ();
    }
  /* Common trailer: candidate type plus the bookkeeping fields.  */
  print_generic_expr (dump_file, c->cand_type);
  fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
	   c->basis, c->dependent, c->sibling);
  fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
	   c->next_interp, c->dead_savings);
  if (c->def_phi)
    fprintf (dump_file, " phi: %d\n", c->def_phi);
  fputs ("\n", dump_file);
}
1889
1890 /* Dump the candidate vector for debug. */
1891
1892 static void
1893 dump_cand_vec (void)
1894 {
1895 unsigned i;
1896 slsr_cand_t c;
1897
1898 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1899
1900 FOR_EACH_VEC_ELT (cand_vec, i, c)
1901 dump_candidate (c);
1902 }
1903
1904 /* Callback used to dump the candidate chains hash table. */
1905
1906 int
1907 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1908 {
1909 const_cand_chain_t chain = *slot;
1910 cand_chain_t p;
1911
1912 print_generic_expr (dump_file, chain->base_expr);
1913 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1914
1915 for (p = chain->next; p; p = p->next)
1916 fprintf (dump_file, " -> %d", p->cand->cand_num);
1917
1918 fputs ("\n", dump_file);
1919 return 1;
1920 }
1921
1922 /* Dump the candidate chains. */
1923
1924 static void
1925 dump_cand_chains (void)
1926 {
1927 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1928 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1929 (NULL);
1930 fputs ("\n", dump_file);
1931 }
1932
1933 /* Dump the increment vector for debug. */
1934
1935 static void
1936 dump_incr_vec (void)
1937 {
1938 if (dump_file && (dump_flags & TDF_DETAILS))
1939 {
1940 unsigned i;
1941
1942 fprintf (dump_file, "\nIncrement vector:\n\n");
1943
1944 for (i = 0; i < incr_vec_len; i++)
1945 {
1946 fprintf (dump_file, "%3d increment: ", i);
1947 print_decs (incr_vec[i].incr, dump_file);
1948 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1949 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1950 fputs ("\n initializer: ", dump_file);
1951 print_generic_expr (dump_file, incr_vec[i].initializer);
1952 fputs ("\n\n", dump_file);
1953 }
1954 }
1955 }
1956 \f
/* Replace *EXPR in candidate C with an equivalent strength-reduced
   data reference.  */

static void
replace_ref (tree *expr, slsr_cand_t c)
{
  tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
  unsigned HOST_WIDE_INT misalign;
  unsigned align;

  /* Ensure the memory reference carries the minimum alignment
     requirement for the data type.  See PR58041.  */
  get_object_alignment_1 (*expr, &align, &misalign);
  if (misalign != 0)
    align = least_bit_hwi (misalign);
  if (align < TYPE_ALIGN (acc_type))
    acc_type = build_aligned_type (acc_type, align);

  /* Build BASE + STRIDE as the new address, then wrap it in a MEM_REF
     carrying the candidate's constant index.  */
  add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
			  c->base_expr, c->stride);
  mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
			 wide_int_to_tree (c->cand_type, c->index));

  /* Gimplify the base addressing expression for the new MEM_REF tree.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
  TREE_OPERAND (mem_ref, 0)
    = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
				/*simple_p=*/true, NULL,
				/*before=*/true, GSI_SAME_STMT);
  /* Preserve points-to and alias information from the original
     reference, then splice the new one into the statement.  */
  copy_ref_info (mem_ref, *expr);
  *expr = mem_ref;
  update_stmt (c->cand_stmt);
}
1990
1991 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1992 dependent of candidate C with an equivalent strength-reduced data
1993 reference. */
1994
1995 static void
1996 replace_refs (slsr_cand_t c)
1997 {
1998 if (dump_file && (dump_flags & TDF_DETAILS))
1999 {
2000 fputs ("Replacing reference: ", dump_file);
2001 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2002 }
2003
2004 if (gimple_vdef (c->cand_stmt))
2005 {
2006 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2007 replace_ref (lhs, c);
2008 }
2009 else
2010 {
2011 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2012 replace_ref (rhs, c);
2013 }
2014
2015 if (dump_file && (dump_flags & TDF_DETAILS))
2016 {
2017 fputs ("With: ", dump_file);
2018 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2019 fputs ("\n", dump_file);
2020 }
2021
2022 if (c->sibling)
2023 replace_refs (lookup_cand (c->sibling));
2024
2025 if (c->dependent)
2026 replace_refs (lookup_cand (c->dependent));
2027 }
2028
2029 /* Return TRUE if candidate C is dependent upon a PHI. */
2030
2031 static bool
2032 phi_dependent_cand_p (slsr_cand_t c)
2033 {
2034 /* A candidate is not necessarily dependent upon a PHI just because
2035 it has a phi definition for its base name. It may have a basis
2036 that relies upon the same phi definition, in which case the PHI
2037 is irrelevant to this candidate. */
2038 return (c->def_phi
2039 && c->basis
2040 && lookup_cand (c->basis)->def_phi != c->def_phi);
2041 }
2042
2043 /* Calculate the increment required for candidate C relative to
2044 its basis. */
2045
2046 static widest_int
2047 cand_increment (slsr_cand_t c)
2048 {
2049 slsr_cand_t basis;
2050
2051 /* If the candidate doesn't have a basis, just return its own
2052 index. This is useful in record_increments to help us find
2053 an existing initializer. Also, if the candidate's basis is
2054 hidden by a phi, then its own index will be the increment
2055 from the newly introduced phi basis. */
2056 if (!c->basis || phi_dependent_cand_p (c))
2057 return c->index;
2058
2059 basis = lookup_cand (c->basis);
2060 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2061 return c->index - basis->index;
2062 }
2063
2064 /* Calculate the increment required for candidate C relative to
2065 its basis. If we aren't going to generate pointer arithmetic
2066 for this candidate, return the absolute value of that increment
2067 instead. */
2068
2069 static inline widest_int
2070 cand_abs_increment (slsr_cand_t c)
2071 {
2072 widest_int increment = cand_increment (c);
2073
2074 if (!address_arithmetic_p && wi::neg_p (increment))
2075 increment = -increment;
2076
2077 return increment;
2078 }
2079
2080 /* Return TRUE iff candidate C has already been replaced under
2081 another interpretation. */
2082
2083 static inline bool
2084 cand_already_replaced (slsr_cand_t c)
2085 {
2086 return (gimple_bb (c->cand_stmt) == 0);
2087 }
2088
/* Common logic used by replace_unconditional_candidate and
   replace_conditional_candidate.  Rewrite candidate C's statement
   as BASIS_NAME plus/minus the constant BUMP, when profitable and
   when BUMP fits in the statement's type.  */

static void
replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
{
  tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
  enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);

  /* It is not useful to replace casts, copies, negates, or adds of
     an SSA name and a constant.  */
  if (cand_code == SSA_NAME
      || CONVERT_EXPR_CODE_P (cand_code)
      || cand_code == PLUS_EXPR
      || cand_code == POINTER_PLUS_EXPR
      || cand_code == MINUS_EXPR
      || cand_code == NEGATE_EXPR)
    return;

  enum tree_code code = PLUS_EXPR;
  tree bump_tree;
  gimple *stmt_to_print = NULL;

  /* A negative bump becomes a subtraction of its absolute value.  */
  if (wi::neg_p (bump))
    {
      code = MINUS_EXPR;
      bump = -bump;
    }

  /* It is possible that the resulting bump doesn't fit in target_type.
     Abandon the replacement in this case.  This does not affect
     siblings or dependents of C.  */
  if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
		       TYPE_SIGN (target_type)))
    return;

  bump_tree = wide_int_to_tree (target_type, bump);

  /* If the basis name and the candidate's LHS have incompatible types,
     introduce a cast.  */
  if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
    basis_name = introduce_cast_before_cand (c, target_type, basis_name);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Replacing: ", dump_file);
      print_gimple_stmt (dump_file, c->cand_stmt, 0);
    }

  if (bump == 0)
    {
      /* A zero bump means the candidate equals its basis; replace the
	 statement with a plain copy from the basis.  */
      tree lhs = gimple_assign_lhs (c->cand_stmt);
      gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
      gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
      slsr_cand_t cc = c;
      gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
      gsi_replace (&gsi, copy_stmt, false);
      c->cand_stmt = copy_stmt;
      /* Keep every other interpretation of this statement pointing at
	 the replacement as well.  */
      while (cc->next_interp)
	{
	  cc = lookup_cand (cc->next_interp);
	  cc->cand_stmt = copy_stmt;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	stmt_to_print = copy_stmt;
    }
  else
    {
      tree rhs1, rhs2;
      /* NEGATE_EXPR was filtered out by the early return above, so
	 this guard always holds here; rhs1/rhs2 are only read under
	 the same guard below.  */
      if (cand_code != NEGATE_EXPR) {
	rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	rhs2 = gimple_assign_rhs2 (c->cand_stmt);
      }
      /* If the statement already has exactly the desired operands,
	 there is nothing to do.  */
      if (cand_code != NEGATE_EXPR
	  && ((operand_equal_p (rhs1, basis_name, 0)
	       && operand_equal_p (rhs2, bump_tree, 0))
	      || (operand_equal_p (rhs1, bump_tree, 0)
		  && operand_equal_p (rhs2, basis_name, 0))))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fputs ("(duplicate, not actually replacing)", dump_file);
	      stmt_to_print = c->cand_stmt;
	    }
	}
      else
	{
	  /* Rewrite the statement in place as BASIS_NAME +/- bump.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
	  slsr_cand_t cc = c;
	  gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
	  update_stmt (gsi_stmt (gsi));
	  c->cand_stmt = gsi_stmt (gsi);
	  /* Keep every other interpretation of this statement in sync.  */
	  while (cc->next_interp)
	    {
	      cc = lookup_cand (cc->next_interp);
	      cc->cand_stmt = gsi_stmt (gsi);
	    }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    stmt_to_print = gsi_stmt (gsi);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("With: ", dump_file);
      print_gimple_stmt (dump_file, stmt_to_print, 0);
      fputs ("\n", dump_file);
    }
}
2198
2199 /* Replace candidate C with an add or subtract. Note that we only
2200 operate on CAND_MULTs with known strides, so we will never generate
2201 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2202 X = Y + ((i - i') * S), as described in the module commentary. The
2203 folded value ((i - i') * S) is referred to here as the "bump." */
2204
2205 static void
2206 replace_unconditional_candidate (slsr_cand_t c)
2207 {
2208 slsr_cand_t basis;
2209
2210 if (cand_already_replaced (c))
2211 return;
2212
2213 basis = lookup_cand (c->basis);
2214 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2215
2216 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2217 }
2218 \f
2219 /* Return the index in the increment vector of the given INCREMENT,
2220 or -1 if not found. The latter can occur if more than
2221 MAX_INCR_VEC_LEN increments have been found. */
2222
2223 static inline int
2224 incr_vec_index (const widest_int &increment)
2225 {
2226 unsigned i;
2227
2228 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2229 ;
2230
2231 if (i < incr_vec_len)
2232 return i;
2233 else
2234 return -1;
2235 }
2236
/* Create a new statement along edge E to add BASIS_NAME to the product
   of INCREMENT and the stride of candidate C.  Create and return a new
   SSA name from *VAR to be used as the LHS of the new statement.
   KNOWN_STRIDE is true iff C's stride is a constant.  */

static tree
create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
			     widest_int increment, edge e, location_t loc,
			     bool known_stride)
{
  tree lhs, basis_type;
  gassign *new_stmt, *cast_stmt = NULL;

  /* If the add candidate along this incoming edge has the same
     index as C's hidden basis, the hidden basis represents this
     edge correctly.  */
  if (increment == 0)
    return basis_name;

  basis_type = TREE_TYPE (basis_name);
  lhs = make_temp_ssa_name (basis_type, NULL, "slsr");

  /* Occasionally people convert integers to pointers without a
     cast, leading us into trouble if we aren't careful.  */
  enum tree_code plus_code
    = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;

  if (known_stride)
    {
      tree bump_tree;
      enum tree_code code = plus_code;
      widest_int bump = increment * wi::to_widest (c->stride);
      /* Fold a negative bump into a subtract, but only for non-pointer
	 types; pointer additions keep the (sizetype) offset as-is.  */
      if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
	{
	  code = MINUS_EXPR;
	  bump = -bump;
	}

      tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
      bump_tree = wide_int_to_tree (stride_type, bump);
      new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
    }
  else
    {
      int i;
      bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
      i = incr_vec_index (negate_incr ? -increment : increment);
      gcc_assert (i >= 0);

      if (incr_vec[i].initializer)
	{
	  /* An initializer T_0 = stride * increment is available;
	     add or subtract it directly.  */
	  enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
	  new_stmt = gimple_build_assign (lhs, code, basis_name,
					  incr_vec[i].initializer);
	}
      else {
	tree stride;

	/* If the stride's SSA name doesn't match the recorded stride
	   type, introduce a cast first.  */
	if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	  {
	    tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
						   "slsr");
	    cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
					     c->stride);
	    stride = cast_stride;
	  }
	else
	  stride = c->stride;

	/* Without an initializer only unit increments are handled;
	   the stride itself then serves as the bump.  */
	if (increment == 1)
	  new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
	else if (increment == -1)
	  new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
	else
	  gcc_unreachable ();
      }
    }

  /* Queue the new statement(s) for insertion on the edge; commit
     happens later via gsi_commit_edge_inserts or similar.  */
  if (cast_stmt)
    {
      gimple_set_location (cast_stmt, loc);
      gsi_insert_on_edge (e, cast_stmt);
    }

  gimple_set_location (new_stmt, loc);
  gsi_insert_on_edge (e, new_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (cast_stmt)
	{
	  fprintf (dump_file, "Inserting cast on edge %d->%d: ",
		   e->src->index, e->dest->index);
	  print_gimple_stmt (dump_file, cast_stmt, 0);
	}
      fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
	       e->dest->index);
      print_gimple_stmt (dump_file, new_stmt, 0);
    }

  return lhs;
}
2339
2340 /* Clear the visited field for a tree of PHI candidates. */
2341
2342 static void
2343 clear_visited (gphi *phi)
2344 {
2345 unsigned i;
2346 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2347
2348 if (phi_cand->visited)
2349 {
2350 phi_cand->visited = 0;
2351
2352 for (i = 0; i < gimple_phi_num_args (phi); i++)
2353 {
2354 tree arg = gimple_phi_arg_def (phi, i);
2355 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2356 if (gimple_code (arg_def) == GIMPLE_PHI)
2357 clear_visited (as_a <gphi *> (arg_def));
2358 }
2359 }
2360 }
2361
/* Recursive helper function for create_phi_basis.  Walk phi FROM_PHI
   (and recursively any feeding phis) and build a new phi in the same
   block whose arguments adjust BASIS_NAME along each incoming edge.
   Return the new phi result, or the cached result if FROM_PHI was
   already visited during this walk.  */

static tree
create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
		    location_t loc, bool known_stride)
{
  int i;
  tree name, phi_arg;
  gphi *phi;
  slsr_cand_t basis = lookup_cand (c->basis);
  int nargs = gimple_phi_num_args (from_phi);
  basic_block phi_bb = gimple_bb (from_phi);
  slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
  auto_vec<tree> phi_args (nargs);

  /* A phi reached a second time reuses the basis built for it the
     first time around.  */
  if (phi_cand->visited)
    return phi_cand->cached_basis;
  phi_cand->visited = 1;

  /* Process each argument of the existing phi that represents
     conditionally-executed add candidates.  */
  for (i = 0; i < nargs; i++)
    {
      edge e = (*phi_bb->preds)[i];
      tree arg = gimple_phi_arg_def (from_phi, i);
      tree feeding_def;

      /* If the phi argument is the base name of the CAND_PHI, then
	 this incoming arc should use the hidden basis.  */
      if (operand_equal_p (arg, phi_cand->base_expr, 0))
	if (basis->index == 0)
	  feeding_def = gimple_assign_lhs (basis->cand_stmt);
	else
	  {
	    /* Compensate for the basis's nonzero index by subtracting
	       it along this edge.  */
	    widest_int incr = -basis->index;
	    feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
						       e, loc, known_stride);
	  }
      else
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  /* If there is another phi along this incoming edge, we must
	     process it in the same fashion to ensure that all basis
	     adjustments are made along its incoming edges.  */
	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
					      loc, known_stride);
	  else
	    {
	      /* A non-phi argument needs an add of the difference
		 between its index and the basis's index.  */
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;
	      feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
							 e, loc, known_stride);
	    }
	}

      /* Because of recursion, we need to save the arguments in a vector
	 so we can create the PHI statement all at once.  Otherwise the
	 storage for the half-created PHI can be reclaimed.  */
      phi_args.safe_push (feeding_def);
    }

  /* Create the new phi basis.  */
  name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
  phi = create_phi_node (name, phi_bb);
  SSA_NAME_DEF_STMT (name) = phi;

  FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
    {
      edge e = (*phi_bb->preds)[i];
      add_phi_arg (phi, phi_arg, e, loc);
    }

  update_stmt (phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("Introducing new phi basis: ", dump_file);
      print_gimple_stmt (dump_file, phi, 0);
    }

  /* Cache the result so that later visits of this phi (via another
     path in the walk) return it directly.  */
  phi_cand->cached_basis = name;
  return name;
}
2447
2448 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2449 is hidden by the phi node FROM_PHI, create a new phi node in the same
2450 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2451 with its phi arguments representing conditional adjustments to the
2452 hidden basis along conditional incoming paths. Those adjustments are
2453 made by creating add statements (and sometimes recursively creating
2454 phis) along those incoming paths. LOC is the location to attach to
2455 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2456 constant. */
2457
2458 static tree
2459 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2460 location_t loc, bool known_stride)
2461 {
2462 tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2463 known_stride);
2464 gcc_assert (retval);
2465 clear_visited (as_a <gphi *> (from_phi));
2466 return retval;
2467 }
2468
2469 /* Given a candidate C whose basis is hidden by at least one intervening
2470 phi, introduce a matching number of new phis to represent its basis
2471 adjusted by conditional increments along possible incoming paths. Then
2472 replace C as though it were an unconditional candidate, using the new
2473 basis. */
2474
2475 static void
2476 replace_conditional_candidate (slsr_cand_t c)
2477 {
2478 tree basis_name, name;
2479 slsr_cand_t basis;
2480 location_t loc;
2481
2482 /* Look up the LHS SSA name from C's basis. This will be the
2483 RHS1 of the adds we will introduce to create new phi arguments. */
2484 basis = lookup_cand (c->basis);
2485 basis_name = gimple_assign_lhs (basis->cand_stmt);
2486
2487 /* Create a new phi statement which will represent C's true basis
2488 after the transformation is complete. */
2489 loc = gimple_location (c->cand_stmt);
2490 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2491 basis_name, loc, KNOWN_STRIDE);
2492
2493 /* Replace C with an add of the new basis phi and a constant. */
2494 widest_int bump = c->index * wi::to_widest (c->stride);
2495
2496 replace_mult_candidate (c, name, bump);
2497 }
2498
/* Recursive helper function for phi_add_costs.  SPREAD is a measure of
   how many PHI nodes we have visited at this point in the tree walk.  */

static int
phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  /* A phi visited earlier in this walk contributes no further cost.  */
  if (phi_cand->visited)
    return 0;

  phi_cand->visited = 1;
  (*spread)++;

  /* If we work our way back to a phi that isn't dominated by the hidden
     basis, this isn't a candidate for replacement.  Indicate this by
     returning an unreasonably high cost.  It's not easy to detect
     these situations when determining the basis, so we defer the
     decision until now.  */
  basic_block phi_bb = gimple_bb (phi);
  slsr_cand_t basis = lookup_cand (c->basis);
  basic_block basis_bb = gimple_bb (basis->cand_stmt);

  if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return COST_INFINITE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      /* Arguments equal to the phi candidate's base expression need
	 no compensating add and cost nothing.  */
      if (arg != phi_cand->base_expr)
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      /* Recurse into feeding phis; bail out once the cost is
		 prohibitive or too many phis are involved.  */
	      cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);

	      if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
		return COST_INFINITE;
	    }
	  else
	    {
	      /* An argument whose index differs from C's requires one
		 compensating add on its incoming edge.  */
	      slsr_cand_t arg_cand = base_cand_from_table (arg);

	      if (arg_cand->index != c->index)
		cost += one_add_cost;
	    }
	}
    }

  return cost;
}
2554
2555 /* Compute the expected costs of inserting basis adjustments for
2556 candidate C with phi-definition PHI. The cost of inserting
2557 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2558 which are themselves phi results, recursively calculate costs
2559 for those phis as well. */
2560
2561 static int
2562 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2563 {
2564 int spread = 0;
2565 int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2566 clear_visited (as_a <gphi *> (phi));
2567 return retval;
2568 }
/* For candidate C, each sibling of candidate C, and each dependent of
   candidate C, determine whether the candidate is dependent upon a
   phi that hides its basis.  If not, replace the candidate unconditionally.
   Otherwise, determine whether the cost of introducing compensation code
   for the candidate is offset by the gains from strength reduction.  If
   so, replace the candidate and introduce the compensation code.  */

static void
replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
{
  if (phi_dependent_cand_p (c))
    {
      /* A multiply candidate with a stride of 1 is just an artifice
	 of a copy or cast; there is no value in replacing it.  */
      if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
	{
	  /* A candidate dependent upon a phi will replace a multiply by 
	     a constant with an add, and will insert at most one add for
	     each phi argument.  Add these costs with the potential dead-code
	     savings to determine profitability.  */
	  bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
	  int mult_savings = stmt_cost (c->cand_stmt, speed);
	  gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
	  tree phi_result = gimple_phi_result (phi);
	  int one_add_cost = add_cost (speed, 
				       TYPE_MODE (TREE_TYPE (phi_result)));
	  int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
	  int cost = add_costs - mult_savings - c->dead_savings;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Conditional candidate %d:\n", c->cand_num);
	      fprintf (dump_file, "    add_costs = %d\n", add_costs);
	      fprintf (dump_file, "    mult_savings = %d\n", mult_savings);
	      fprintf (dump_file, "    dead_savings = %d\n", c->dead_savings);
	      fprintf (dump_file, "    cost = %d\n", cost);
	      if (cost <= COST_NEUTRAL)
		fputs ("  Replacing...\n", dump_file);
	      else
		fputs ("  Not replaced.\n", dump_file);
	    }

	  /* Replace only when the compensation cost does not exceed
	     the expected gains.  */
	  if (cost <= COST_NEUTRAL)
	    replace_conditional_candidate (c);
	}
    }
  else
    replace_unconditional_candidate (c);

  /* Recurse over the rest of the candidate tree.  */
  if (c->sibling)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));

  if (c->dependent)
    replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
}
2624 \f
2625 /* Count the number of candidates in the tree rooted at C that have
2626 not already been replaced under other interpretations. */
2627
2628 static int
2629 count_candidates (slsr_cand_t c)
2630 {
2631 unsigned count = cand_already_replaced (c) ? 0 : 1;
2632
2633 if (c->sibling)
2634 count += count_candidates (lookup_cand (c->sibling));
2635
2636 if (c->dependent)
2637 count += count_candidates (lookup_cand (c->dependent));
2638
2639 return count;
2640 }
2641
/* Increase the count of INCREMENT by one in the increment vector.
   INCREMENT is associated with candidate C.  If INCREMENT is to be
   conditionally executed as part of a conditional candidate replacement,
   IS_PHI_ADJUST is true, otherwise false.  If an initializer
   T_0 = stride * I is provided by a candidate that dominates all
   candidates with the same increment, also record T_0 for subsequent use.  */

static void
record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
{
  bool found = false;
  unsigned i;

  /* Treat increments that differ only in sign as identical so as to
     share initializers, unless we are generating pointer arithmetic.  */
  if (!address_arithmetic_p && wi::neg_p (increment))
    increment = -increment;

  for (i = 0; i < incr_vec_len; i++)
    {
      if (incr_vec[i].incr == increment)
	{
	  incr_vec[i].count++;
	  found = true;

	  /* If we previously recorded an initializer that doesn't
	     dominate this candidate, it's not going to be useful to
	     us after all.  */
	  if (incr_vec[i].initializer
	      && !dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (c->cand_stmt),
				  incr_vec[i].init_bb))
	    {
	      incr_vec[i].initializer = NULL_TREE;
	      incr_vec[i].init_bb = NULL;
	    }

	  break;
	}
    }

  if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
    {
      /* The first time we see an increment, create the entry for it.
	 If this is the root candidate which doesn't have a basis, set
	 the count to zero.  We're only processing it so it can possibly
	 provide an initializer for other candidates.  */
      incr_vec[incr_vec_len].incr = increment;
      incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
      incr_vec[incr_vec_len].cost = COST_INFINITE;

      /* Optimistically record the first occurrence of this increment
	 as providing an initializer (if it does); we will revise this
	 opinion later if it doesn't dominate all other occurrences.
	 Exception: increments of 0, 1 never need initializers;
	 and phi adjustments don't ever provide initializers.  */
      if (c->kind == CAND_ADD
	  && !is_phi_adjust
	  && c->index == increment
	  && (increment > 1 || increment < 0)
	  && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
	      || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
	{
	  /* The non-base operand of the add is the potential
	     initializer T_0.  */
	  tree t0 = NULL_TREE;
	  tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
	  tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
	  if (operand_equal_p (rhs1, c->base_expr, 0))
	    t0 = rhs2;
	  else if (operand_equal_p (rhs2, c->base_expr, 0))
	    t0 = rhs1;
	  /* Only record T_0 when it has a defining statement in a real
	     basic block (a default definition has none).  */
	  if (t0
	      && SSA_NAME_DEF_STMT (t0)
	      && gimple_bb (SSA_NAME_DEF_STMT (t0)))
	    {
	      incr_vec[incr_vec_len].initializer = t0;
	      incr_vec[incr_vec_len++].init_bb
		= gimple_bb (SSA_NAME_DEF_STMT (t0));
	    }
	  else
	    {
	      incr_vec[incr_vec_len].initializer = NULL_TREE;
	      incr_vec[incr_vec_len++].init_bb = NULL;
	    }
	}
      else
	{
	  incr_vec[incr_vec_len].initializer = NULL_TREE;
	  incr_vec[incr_vec_len++].init_bb = NULL;
	}
    }
}
2733
2734 /* Recursive helper function for record_phi_increments. */
2735
2736 static void
2737 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2738 {
2739 unsigned i;
2740 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2741
2742 if (phi_cand->visited)
2743 return;
2744 phi_cand->visited = 1;
2745
2746 for (i = 0; i < gimple_phi_num_args (phi); i++)
2747 {
2748 tree arg = gimple_phi_arg_def (phi, i);
2749
2750 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2751 {
2752 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2753
2754 if (gimple_code (arg_def) == GIMPLE_PHI)
2755 record_phi_increments_1 (basis, arg_def);
2756 else
2757 {
2758 slsr_cand_t arg_cand = base_cand_from_table (arg);
2759 widest_int diff = arg_cand->index - basis->index;
2760 record_increment (arg_cand, diff, PHI_ADJUST);
2761 }
2762 }
2763 }
2764 }
2765
2766 /* Given phi statement PHI that hides a candidate from its BASIS, find
2767 the increments along each incoming arc (recursively handling additional
2768 phis that may be present) and record them. These increments are the
2769 difference in index between the index-adjusting statements and the
2770 index of the basis. */
2771
2772 static void
2773 record_phi_increments (slsr_cand_t basis, gimple *phi)
2774 {
2775 record_phi_increments_1 (basis, phi);
2776 clear_visited (as_a <gphi *> (phi));
2777 }
2778
2779 /* Determine how many times each unique increment occurs in the set
2780 of candidates rooted at C's parent, recording the data in the
2781 increment vector. For each unique increment I, if an initializer
2782 T_0 = stride * I is provided by a candidate that dominates all
2783 candidates with the same increment, also record T_0 for subsequent
2784 use. */
2785
2786 static void
2787 record_increments (slsr_cand_t c)
2788 {
2789 if (!cand_already_replaced (c))
2790 {
2791 if (!phi_dependent_cand_p (c))
2792 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2793 else
2794 {
2795 /* A candidate with a basis hidden by a phi will have one
2796 increment for its relationship to the index represented by
2797 the phi, and potentially additional increments along each
2798 incoming edge. For the root of the dependency tree (which
2799 has no basis), process just the initial index in case it has
2800 an initializer that can be used by subsequent candidates. */
2801 record_increment (c, c->index, NOT_PHI_ADJUST);
2802
2803 if (c->basis)
2804 record_phi_increments (lookup_cand (c->basis),
2805 lookup_cand (c->def_phi)->cand_stmt);
2806 }
2807 }
2808
2809 if (c->sibling)
2810 record_increments (lookup_cand (c->sibling));
2811
2812 if (c->dependent)
2813 record_increments (lookup_cand (c->dependent));
2814 }
2815
/* Recursive helper function for phi_incr_cost.  */

static int
phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
		 int *savings)
{
  unsigned i;
  int cost = 0;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  /* Each phi is costed at most once per walk.  */
  if (phi_cand->visited)
    return 0;
  phi_cand->visited = 1;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      /* Arguments equal to the phi candidate's base expression need
	 no compensating add and so contribute nothing.  */
      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      /* Savings from a feeding phi only count if its result is
		 consumed entirely by this phi.  */
	      int feeding_savings = 0;
	      tree feeding_var = gimple_phi_result (arg_def);
	      cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
	      if (uses_consumed_by_stmt (feeding_var, phi))
		*savings += feeding_savings;
	    }
	  else
	    {
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int diff = arg_cand->index - basis->index;

	      /* Only arguments whose basis adjustment equals INCR need
		 a new add; the old feeding statement may then go dead
		 if the phi is its sole consumer.  */
	      if (incr == diff)
		{
		  tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
		  tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
		  cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
		  if (uses_consumed_by_stmt (lhs, phi))
		    *savings += stmt_cost (arg_cand->cand_stmt, true);
		}
	    }
	}
    }

  return cost;
}
2866
2867 /* Add up and return the costs of introducing add statements that
2868 require the increment INCR on behalf of candidate C and phi
2869 statement PHI. Accumulate into *SAVINGS the potential savings
2870 from removing existing statements that feed PHI and have no other
2871 uses. */
2872
2873 static int
2874 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2875 int *savings)
2876 {
2877 int retval = phi_incr_cost_1 (c, incr, phi, savings);
2878 clear_visited (as_a <gphi *> (phi));
2879 return retval;
2880 }
2881
2882 /* Return the first candidate in the tree rooted at C that has not
2883 already been replaced, favoring siblings over dependents. */
2884
2885 static slsr_cand_t
2886 unreplaced_cand_in_tree (slsr_cand_t c)
2887 {
2888 if (!cand_already_replaced (c))
2889 return c;
2890
2891 if (c->sibling)
2892 {
2893 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2894 if (sib)
2895 return sib;
2896 }
2897
2898 if (c->dependent)
2899 {
2900 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2901 if (dep)
2902 return dep;
2903 }
2904
2905 return NULL;
2906 }
2907
2908 /* Return TRUE if the candidates in the tree rooted at C should be
2909 optimized for speed, else FALSE. We estimate this based on the block
2910 containing the most dominant candidate in the tree that has not yet
2911 been replaced. */
2912
2913 static bool
2914 optimize_cands_for_speed_p (slsr_cand_t c)
2915 {
2916 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2917 gcc_assert (c2);
2918 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2919 }
2920
2921 /* Add COST_IN to the lowest cost of any dependent path starting at
2922 candidate C or any of its siblings, counting only candidates along
2923 such paths with increment INCR. Assume that replacing a candidate
2924 reduces cost by REPL_SAVINGS. Also account for savings from any
2925 statements that would go dead. If COUNT_PHIS is true, include
2926 costs of introducing feeding statements for conditional candidates. */
2927
2928 static int
2929 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2930 const widest_int &incr, bool count_phis)
2931 {
2932 int local_cost, sib_cost, savings = 0;
2933 widest_int cand_incr = cand_abs_increment (c);
2934
2935 if (cand_already_replaced (c))
2936 local_cost = cost_in;
2937 else if (incr == cand_incr)
2938 local_cost = cost_in - repl_savings - c->dead_savings;
2939 else
2940 local_cost = cost_in - c->dead_savings;
2941
2942 if (count_phis
2943 && phi_dependent_cand_p (c)
2944 && !cand_already_replaced (c))
2945 {
2946 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2947 local_cost += phi_incr_cost (c, incr, phi, &savings);
2948
2949 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2950 local_cost -= savings;
2951 }
2952
2953 if (c->dependent)
2954 local_cost = lowest_cost_path (local_cost, repl_savings,
2955 lookup_cand (c->dependent), incr,
2956 count_phis);
2957
2958 if (c->sibling)
2959 {
2960 sib_cost = lowest_cost_path (cost_in, repl_savings,
2961 lookup_cand (c->sibling), incr,
2962 count_phis);
2963 local_cost = MIN (local_cost, sib_cost);
2964 }
2965
2966 return local_cost;
2967 }
2968
2969 /* Compute the total savings that would accrue from all replacements
2970 in the candidate tree rooted at C, counting only candidates with
2971 increment INCR. Assume that replacing a candidate reduces cost
2972 by REPL_SAVINGS. Also account for savings from statements that
2973 would go dead. */
2974
2975 static int
2976 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2977 bool count_phis)
2978 {
2979 int savings = 0;
2980 widest_int cand_incr = cand_abs_increment (c);
2981
2982 if (incr == cand_incr && !cand_already_replaced (c))
2983 savings += repl_savings + c->dead_savings;
2984
2985 if (count_phis
2986 && phi_dependent_cand_p (c)
2987 && !cand_already_replaced (c))
2988 {
2989 int phi_savings = 0;
2990 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2991 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2992
2993 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2994 savings += phi_savings;
2995 }
2996
2997 if (c->dependent)
2998 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2999 count_phis);
3000
3001 if (c->sibling)
3002 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
3003 count_phis);
3004
3005 return savings;
3006 }
3007
/* Use target-specific costs to determine and record which increments
   in the current candidate tree are profitable to replace, assuming
   MODE and SPEED.  FIRST_DEP is the first dependent of the root of
   the candidate tree.

   One slight limitation here is that we don't account for the possible
   introduction of casts in some cases.  See replace_one_candidate for
   the cases where these are introduced.  This should probably be cleaned
   up sometime.  */

static void
analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      /* to_shwi truncates if the value doesn't fit in a HWI; that is
	 harmless here because the fits_shwi_p test below immediately
	 marks such increments COST_INFINITE, so the truncated value
	 is never used.  */
      HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();

      /* If somehow this increment is bigger than a HWI, we won't
	 be optimizing candidates that use it.  And if the increment
	 has a count of zero, nothing will be done with it.  */
      if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
	incr_vec[i].cost = COST_INFINITE;

      /* Increments of 0, 1, and -1 are always profitable to replace,
	 because they always replace a multiply or add with an add or
	 copy, and may cause one or more existing instructions to go
	 dead.  Exception: -1 can't be assumed to be profitable for
	 pointer addition.  */
      else if (incr == 0
	       || incr == 1
	       || (incr == -1
		   && !POINTER_TYPE_P (first_dep->cand_type)))
	incr_vec[i].cost = COST_NEUTRAL;

      /* If we need to add an initializer, give up if a cast from the
	 candidate's type to its stride's type can lose precision.
	 Note that this already takes into account that the stride may
	 have been cast to a wider type, in which case this test won't
	 fire.  Example:

	   short int _1;
	   _2 = (int) _1;
	   _3 = _2 * 10;
	   _4 = x + _3;    ADD: x + (10 * (int)_1) : int
	   _5 = _2 * 15;
	   _6 = x + _5;    ADD: x + (15 * (int)_1) : int

	 Although the stride was a short int initially, the stride
	 used in the analysis has been widened to an int, and such
	 widening will be done in the initializer as well.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && !legal_cast_p_1 (first_dep->stride_type,
				   TREE_TYPE (gimple_assign_lhs
					      (first_dep->cand_stmt))))
	incr_vec[i].cost = COST_INFINITE;

      /* If we need to add an initializer, make sure we don't introduce
	 a multiply by a pointer type, which can happen in certain cast
	 scenarios.  */
      else if (!incr_vec[i].initializer
	       && TREE_CODE (first_dep->stride) != INTEGER_CST
	       && POINTER_TYPE_P (first_dep->stride_type))
	incr_vec[i].cost = COST_INFINITE;

      /* For any other increment, if this is a multiply candidate, we
	 must introduce a temporary T and initialize it with
	 T_0 = stride * increment.  When optimizing for speed, walk the
	 candidate tree to calculate the best cost reduction along any
	 path; if it offsets the fixed cost of inserting the initializer,
	 replacing the increment is profitable.  When optimizing for
	 size, instead calculate the total cost reduction from replacing
	 all candidates with this increment.  */
      else if (first_dep->kind == CAND_MULT)
	{
	  /* Fixed cost of the initializer: a multiply by a constant.  */
	  int cost = mult_by_coeff_cost (incr, mode, speed);
	  /* Each replacement converts a multiply into an add.  */
	  int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
	  if (speed)
	    cost = lowest_cost_path (cost, repl_savings, first_dep,
				     incr_vec[i].incr, COUNT_PHIS);
	  else
	    cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
				   COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}

      /* If this is an add candidate, the initializer may already
	 exist, so only calculate the cost of the initializer if it
	 doesn't.  We are replacing one add with another here, so the
	 known replacement savings is zero.  We will account for removal
	 of dead instructions in lowest_cost_path or total_savings.  */
      else
	{
	  int cost = 0;
	  if (!incr_vec[i].initializer)
	    cost = mult_by_coeff_cost (incr, mode, speed);

	  if (speed)
	    cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
				     DONT_COUNT_PHIS);
	  else
	    cost -= total_savings (0, first_dep, incr_vec[i].incr,
				   DONT_COUNT_PHIS);

	  incr_vec[i].cost = cost;
	}
    }
}
3119
3120 /* Return the nearest common dominator of BB1 and BB2. If the blocks
3121 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
3122 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3123 return C2 in *WHERE; and if the NCD matches neither, return NULL in
3124 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
3125
3126 static basic_block
3127 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3128 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3129 {
3130 basic_block ncd;
3131
3132 if (!bb1)
3133 {
3134 *where = c2;
3135 return bb2;
3136 }
3137
3138 if (!bb2)
3139 {
3140 *where = c1;
3141 return bb1;
3142 }
3143
3144 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3145
3146 /* If both candidates are in the same block, the earlier
3147 candidate wins. */
3148 if (bb1 == ncd && bb2 == ncd)
3149 {
3150 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3151 *where = c2;
3152 else
3153 *where = c1;
3154 }
3155
3156 /* Otherwise, if one of them produced a candidate in the
3157 dominator, that one wins. */
3158 else if (bb1 == ncd)
3159 *where = c1;
3160
3161 else if (bb2 == ncd)
3162 *where = c2;
3163
3164 /* If neither matches the dominator, neither wins. */
3165 else
3166 *where = NULL;
3167
3168 return ncd;
3169 }
3170
3171 /* Consider all candidates that feed PHI. Find the nearest common
3172 dominator of those candidates requiring the given increment INCR.
3173 Further find and return the nearest common dominator of this result
3174 with block NCD. If the returned block contains one or more of the
3175 candidates, return the earliest candidate in the block in *WHERE. */
3176
3177 static basic_block
3178 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3179 basic_block ncd, slsr_cand_t *where)
3180 {
3181 unsigned i;
3182 slsr_cand_t basis = lookup_cand (c->basis);
3183 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3184
3185 for (i = 0; i < gimple_phi_num_args (phi); i++)
3186 {
3187 tree arg = gimple_phi_arg_def (phi, i);
3188
3189 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3190 {
3191 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3192
3193 if (gimple_code (arg_def) == GIMPLE_PHI)
3194 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3195 where);
3196 else
3197 {
3198 slsr_cand_t arg_cand = base_cand_from_table (arg);
3199 widest_int diff = arg_cand->index - basis->index;
3200 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3201
3202 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3203 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3204 }
3205 }
3206 }
3207
3208 return ncd;
3209 }
3210
3211 /* Consider the candidate C together with any candidates that feed
3212 C's phi dependence (if any). Find and return the nearest common
3213 dominator of those candidates requiring the given increment INCR.
3214 If the returned block contains one or more of the candidates,
3215 return the earliest candidate in the block in *WHERE. */
3216
3217 static basic_block
3218 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3219 {
3220 basic_block ncd = NULL;
3221
3222 if (cand_abs_increment (c) == incr)
3223 {
3224 ncd = gimple_bb (c->cand_stmt);
3225 *where = c;
3226 }
3227
3228 if (phi_dependent_cand_p (c))
3229 ncd = ncd_with_phi (c, incr,
3230 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3231 ncd, where);
3232
3233 return ncd;
3234 }
3235
3236 /* Consider all candidates in the tree rooted at C for which INCR
3237 represents the required increment of C relative to its basis.
3238 Find and return the basic block that most nearly dominates all
3239 such candidates. If the returned block contains one or more of
3240 the candidates, return the earliest candidate in the block in
3241 *WHERE. */
3242
3243 static basic_block
3244 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3245 slsr_cand_t *where)
3246 {
3247 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3248 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3249
3250 /* First find the NCD of all siblings and dependents. */
3251 if (c->sibling)
3252 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3253 incr, &sib_where);
3254 if (c->dependent)
3255 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3256 incr, &dep_where);
3257 if (!sib_ncd && !dep_ncd)
3258 {
3259 new_where = NULL;
3260 ncd = NULL;
3261 }
3262 else if (sib_ncd && !dep_ncd)
3263 {
3264 new_where = sib_where;
3265 ncd = sib_ncd;
3266 }
3267 else if (dep_ncd && !sib_ncd)
3268 {
3269 new_where = dep_where;
3270 ncd = dep_ncd;
3271 }
3272 else
3273 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3274 dep_where, &new_where);
3275
3276 /* If the candidate's increment doesn't match the one we're interested
3277 in (and nor do any increments for feeding defs of a phi-dependence),
3278 then the result depends only on siblings and dependents. */
3279 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3280
3281 if (!this_ncd || cand_already_replaced (c))
3282 {
3283 *where = new_where;
3284 return ncd;
3285 }
3286
3287 /* Otherwise, compare this candidate with the result from all siblings
3288 and dependents. */
3289 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3290
3291 return ncd;
3292 }
3293
3294 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3295
3296 static inline bool
3297 profitable_increment_p (unsigned index)
3298 {
3299 return (incr_vec[index].cost <= COST_NEUTRAL);
3300 }
3301
/* For each profitable increment in the increment vector not equal to
   0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
   dominator of all statements in the candidate chain rooted at C
   that require that increment, and insert an initializer
   T_0 = stride * increment at that location.  Record T_0 with the
   increment record.  */

static void
insert_initializers (slsr_cand_t c)
{
  unsigned i;

  for (i = 0; i < incr_vec_len; i++)
    {
      basic_block bb;
      slsr_cand_t where = NULL;
      gassign *init_stmt;
      gassign *cast_stmt = NULL;
      tree new_name, incr_tree, init_stride;
      widest_int incr = incr_vec[i].incr;

      /* Increments of -1, 0, and 1 need no initializer; they are
	 handled with a subtract, copy, or add of the stride.  */
      if (!profitable_increment_p (i)
	  || incr == 1
	  || (incr == -1
	      && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
	  || incr == 0)
	continue;

      /* We may have already identified an existing initializer that
	 will suffice.  */
      if (incr_vec[i].initializer)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fputs ("Using existing initializer: ", dump_file);
	      print_gimple_stmt (dump_file,
				 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
				 0, 0);
	    }
	  continue;
	}

      /* Find the block that most closely dominates all candidates
	 with this increment.  If there is at least one candidate in
	 that block, the earliest one will be returned in WHERE.  */
      bb = nearest_common_dominator_for_cands (c, incr, &where);

      /* If the NCD is not dominated by the block containing the
	 definition of the stride, we can't legally insert a
	 single initializer.  Mark the increment as unprofitable
	 so we don't make any replacements.  FIXME: Multiple
	 initializers could be placed with more analysis.  */
      gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
      basic_block stride_bb = gimple_bb (stride_def);

      if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Initializer #%d cannot be legally placed\n", i);
	  incr_vec[i].cost = COST_INFINITE;
	  continue;
	}

      /* If the nominal stride has a different type than the recorded
	 stride type, build a cast from the nominal stride to that type.  */
      if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
	{
	  init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
	  cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
	}
      else
	init_stride = c->stride;

      /* Create a new SSA name to hold the initializer's value.  */
      new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
      incr_vec[i].initializer = new_name;

      /* Create the initializer and insert it in the latest possible
	 dominating position.  */
      incr_tree = wide_int_to_tree (c->stride_type, incr);
      init_stmt = gimple_build_assign (new_name, MULT_EXPR,
				       init_stride, incr_tree);
      if (where)
	{
	  /* A candidate lives in the NCD block itself; insert the
	     initializer (and the cast feeding it, if any) immediately
	     ahead of the earliest such candidate.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
	  location_t loc = gimple_location (where->cand_stmt);

	  if (cast_stmt)
	    {
	      gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
	      gimple_set_location (cast_stmt, loc);
	    }

	  gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	  gimple_set_location (init_stmt, loc);
	}
      else
	{
	  /* No candidate in the NCD block: append at the block's end,
	     but if the last statement must remain last in the block
	     (stmt_ends_bb_p), insert before it rather than after it.  */
	  gimple_stmt_iterator gsi = gsi_last_bb (bb);
	  gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
	  location_t loc = gimple_location (basis_stmt);

	  if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
	    {
	      if (cast_stmt)
		{
		  gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
	    }
	  else
	    {
	      if (cast_stmt)
		{
		  gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
		  gimple_set_location (cast_stmt, loc);
		}
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	    }

	  gimple_set_location (init_stmt, gimple_location (basis_stmt));
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (cast_stmt)
	    {
	      fputs ("Inserting stride cast: ", dump_file);
	      print_gimple_stmt (dump_file, cast_stmt, 0);
	    }
	  fputs ("Inserting initializer: ", dump_file);
	  print_gimple_stmt (dump_file, init_stmt, 0);
	}
    }
}
3439
/* Recursive helper function for all_phi_incrs_profitable.  SPREAD
   counts the phi candidates visited across the whole recursion; the
   walk fails once it exceeds MAX_SPREAD.  */

static bool
all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
{
  unsigned i;
  slsr_cand_t basis = lookup_cand (c->basis);
  slsr_cand_t phi_cand = *stmt_cand_map->get (phi);

  /* Don't examine the same phi twice; the visited flags are reset by
     the caller (all_phi_incrs_profitable) via clear_visited.  */
  if (phi_cand->visited)
    return true;

  phi_cand->visited = 1;
  (*spread)++;

  /* If the basis doesn't dominate the PHI (including when the PHI is
     in the same block as the basis), we won't be able to create a PHI
     using the basis here.  */
  basic_block basis_bb = gimple_bb (basis->cand_stmt);
  basic_block phi_bb = gimple_bb (phi);

  if (phi_bb == basis_bb
      || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
    return false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* If the PHI arg resides in a block not dominated by the basis,
	 we won't be able to create a PHI using the basis here.  */
      basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;

      if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
	return false;

      tree arg = gimple_phi_arg_def (phi, i);

      /* An argument equal to the phi candidate's base expression
	 needs no compensation code, so it is always acceptable.  */
      if (!operand_equal_p (arg, phi_cand->base_expr, 0))
	{
	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);

	  if (gimple_code (arg_def) == GIMPLE_PHI)
	    {
	      /* A feeding phi: recurse, giving up if the bounded
		 number of phis has been exceeded.  */
	      if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def),
					       spread)
		  || *spread > MAX_SPREAD)
		return false;
	    }
	  else
	    {
	      int j;
	      slsr_cand_t arg_cand = base_cand_from_table (arg);
	      widest_int increment = arg_cand->index - basis->index;

	      /* For non-address arithmetic, increments are recorded
		 by absolute value.  */
	      if (!address_arithmetic_p && wi::neg_p (increment))
		increment = -increment;

	      j = incr_vec_index (increment);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " Conditional candidate %d, phi: ",
			   c->cand_num);
		  print_gimple_stmt (dump_file, phi, 0);
		  fputs (" increment: ", dump_file);
		  print_decs (increment, dump_file);
		  if (j < 0)
		    fprintf (dump_file,
			     "\n Not replaced; incr_vec overflow.\n");
		  else {
		    fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
		    if (profitable_increment_p (j))
		      fputs (" Replacing...\n", dump_file);
		    else
		      fputs (" Not replaced.\n", dump_file);
		  }
		}

	      /* Every feeding increment must itself be profitable.  */
	      if (j < 0 || !profitable_increment_p (j))
		return false;
	    }
	}
    }

  return true;
}
3525
3526 /* Return TRUE iff all required increments for candidates feeding PHI
3527 are profitable (and legal!) to replace on behalf of candidate C. */
3528
3529 static bool
3530 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3531 {
3532 int spread = 0;
3533 bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3534 clear_visited (phi);
3535 return retval;
3536 }
3537
3538 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3539 type TO_TYPE, and insert it in front of the statement represented
3540 by candidate C. Use *NEW_VAR to create the new SSA name. Return
3541 the new SSA name. */
3542
3543 static tree
3544 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3545 {
3546 tree cast_lhs;
3547 gassign *cast_stmt;
3548 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3549
3550 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3551 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3552 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3553 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3554
3555 if (dump_file && (dump_flags & TDF_DETAILS))
3556 {
3557 fputs (" Inserting: ", dump_file);
3558 print_gimple_stmt (dump_file, cast_stmt, 0);
3559 }
3560
3561 return cast_lhs;
3562 }
3563
3564 /* Replace the RHS of the statement represented by candidate C with
3565 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3566 leave C unchanged or just interchange its operands. The original
3567 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3568 If the replacement was made and we are doing a details dump,
3569 return the revised statement, else NULL. */
3570
3571 static gimple *
3572 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3573 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3574 slsr_cand_t c)
3575 {
3576 if (new_code != old_code
3577 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3578 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3579 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3580 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3581 {
3582 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3583 slsr_cand_t cc = c;
3584 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3585 update_stmt (gsi_stmt (gsi));
3586 c->cand_stmt = gsi_stmt (gsi);
3587 while (cc->next_interp)
3588 {
3589 cc = lookup_cand (cc->next_interp);
3590 cc->cand_stmt = gsi_stmt (gsi);
3591 }
3592
3593 if (dump_file && (dump_flags & TDF_DETAILS))
3594 return gsi_stmt (gsi);
3595 }
3596
3597 else if (dump_file && (dump_flags & TDF_DETAILS))
3598 fputs (" (duplicate, not actually replacing)\n", dump_file);
3599
3600 return NULL;
3601 }
3602
3603 /* Strength-reduce the statement represented by candidate C by replacing
3604 it with an equivalent addition or subtraction. I is the index into
3605 the increment vector identifying C's increment. NEW_VAR is used to
3606 create a new SSA name if a cast needs to be introduced. BASIS_NAME
3607 is the rhs1 to use in creating the add/subtract. */
3608
3609 static void
3610 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3611 {
3612 gimple *stmt_to_print = NULL;
3613 tree orig_rhs1, orig_rhs2;
3614 tree rhs2;
3615 enum tree_code orig_code, repl_code;
3616 widest_int cand_incr;
3617
3618 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3619 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3620 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3621 cand_incr = cand_increment (c);
3622
3623 if (dump_file && (dump_flags & TDF_DETAILS))
3624 {
3625 fputs ("Replacing: ", dump_file);
3626 print_gimple_stmt (dump_file, c->cand_stmt, 0);
3627 stmt_to_print = c->cand_stmt;
3628 }
3629
3630 if (address_arithmetic_p)
3631 repl_code = POINTER_PLUS_EXPR;
3632 else
3633 repl_code = PLUS_EXPR;
3634
3635 /* If the increment has an initializer T_0, replace the candidate
3636 statement with an add of the basis name and the initializer. */
3637 if (incr_vec[i].initializer)
3638 {
3639 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3640 tree orig_type = TREE_TYPE (orig_rhs2);
3641
3642 if (types_compatible_p (orig_type, init_type))
3643 rhs2 = incr_vec[i].initializer;
3644 else
3645 rhs2 = introduce_cast_before_cand (c, orig_type,
3646 incr_vec[i].initializer);
3647
3648 if (incr_vec[i].incr != cand_incr)
3649 {
3650 gcc_assert (repl_code == PLUS_EXPR);
3651 repl_code = MINUS_EXPR;
3652 }
3653
3654 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3655 orig_code, orig_rhs1, orig_rhs2,
3656 c);
3657 }
3658
3659 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3660 with a subtract of the stride from the basis name, a copy
3661 from the basis name, or an add of the stride to the basis
3662 name, respectively. It may be necessary to introduce a
3663 cast (or reuse an existing cast). */
3664 else if (cand_incr == 1)
3665 {
3666 tree stride_type = TREE_TYPE (c->stride);
3667 tree orig_type = TREE_TYPE (orig_rhs2);
3668
3669 if (types_compatible_p (orig_type, stride_type))
3670 rhs2 = c->stride;
3671 else
3672 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3673
3674 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3675 orig_code, orig_rhs1, orig_rhs2,
3676 c);
3677 }
3678
3679 else if (cand_incr == -1)
3680 {
3681 tree stride_type = TREE_TYPE (c->stride);
3682 tree orig_type = TREE_TYPE (orig_rhs2);
3683 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3684
3685 if (types_compatible_p (orig_type, stride_type))
3686 rhs2 = c->stride;
3687 else
3688 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3689
3690 if (orig_code != MINUS_EXPR
3691 || !operand_equal_p (basis_name, orig_rhs1, 0)
3692 || !operand_equal_p (rhs2, orig_rhs2, 0))
3693 {
3694 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3695 slsr_cand_t cc = c;
3696 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3697 update_stmt (gsi_stmt (gsi));
3698 c->cand_stmt = gsi_stmt (gsi);
3699 while (cc->next_interp)
3700 {
3701 cc = lookup_cand (cc->next_interp);
3702 cc->cand_stmt = gsi_stmt (gsi);
3703 }
3704
3705 if (dump_file && (dump_flags & TDF_DETAILS))
3706 stmt_to_print = gsi_stmt (gsi);
3707 }
3708 else if (dump_file && (dump_flags & TDF_DETAILS))
3709 fputs (" (duplicate, not actually replacing)\n", dump_file);
3710 }
3711
3712 else if (cand_incr == 0)
3713 {
3714 tree lhs = gimple_assign_lhs (c->cand_stmt);
3715 tree lhs_type = TREE_TYPE (lhs);
3716 tree basis_type = TREE_TYPE (basis_name);
3717
3718 if (types_compatible_p (lhs_type, basis_type))
3719 {
3720 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3721 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3722 slsr_cand_t cc = c;
3723 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3724 gsi_replace (&gsi, copy_stmt, false);
3725 c->cand_stmt = copy_stmt;
3726 while (cc->next_interp)
3727 {
3728 cc = lookup_cand (cc->next_interp);
3729 cc->cand_stmt = copy_stmt;
3730 }
3731
3732 if (dump_file && (dump_flags & TDF_DETAILS))
3733 stmt_to_print = copy_stmt;
3734 }
3735 else
3736 {
3737 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3738 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3739 slsr_cand_t cc = c;
3740 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3741 gsi_replace (&gsi, cast_stmt, false);
3742 c->cand_stmt = cast_stmt;
3743 while (cc->next_interp)
3744 {
3745 cc = lookup_cand (cc->next_interp);
3746 cc->cand_stmt = cast_stmt;
3747 }
3748
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 stmt_to_print = cast_stmt;
3751 }
3752 }
3753 else
3754 gcc_unreachable ();
3755
3756 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3757 {
3758 fputs ("With: ", dump_file);
3759 print_gimple_stmt (dump_file, stmt_to_print, 0);
3760 fputs ("\n", dump_file);
3761 }
3762 }
3763
3764 /* For each candidate in the tree rooted at C, replace it with
3765 an increment if such has been shown to be profitable. */
3766
3767 static void
3768 replace_profitable_candidates (slsr_cand_t c)
3769 {
3770 if (!cand_already_replaced (c))
3771 {
3772 widest_int increment = cand_abs_increment (c);
3773 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3774 int i;
3775
3776 i = incr_vec_index (increment);
3777
3778 /* Only process profitable increments. Nothing useful can be done
3779 to a cast or copy. */
3780 if (i >= 0
3781 && profitable_increment_p (i)
3782 && orig_code != SSA_NAME
3783 && !CONVERT_EXPR_CODE_P (orig_code))
3784 {
3785 if (phi_dependent_cand_p (c))
3786 {
3787 gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);
3788
3789 if (all_phi_incrs_profitable (c, phi))
3790 {
3791 /* Look up the LHS SSA name from C's basis. This will be
3792 the RHS1 of the adds we will introduce to create new
3793 phi arguments. */
3794 slsr_cand_t basis = lookup_cand (c->basis);
3795 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3796
3797 /* Create a new phi statement that will represent C's true
3798 basis after the transformation is complete. */
3799 location_t loc = gimple_location (c->cand_stmt);
3800 tree name = create_phi_basis (c, phi, basis_name,
3801 loc, UNKNOWN_STRIDE);
3802
3803 /* Replace C with an add of the new basis phi and the
3804 increment. */
3805 replace_one_candidate (c, i, name);
3806 }
3807 }
3808 else
3809 {
3810 slsr_cand_t basis = lookup_cand (c->basis);
3811 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3812 replace_one_candidate (c, i, basis_name);
3813 }
3814 }
3815 }
3816
3817 if (c->sibling)
3818 replace_profitable_candidates (lookup_cand (c->sibling));
3819
3820 if (c->dependent)
3821 replace_profitable_candidates (lookup_cand (c->dependent));
3822 }
3823 \f
/* Analyze costs of related candidates in the candidate vector,
   and make beneficial replacements.  */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
		 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
	 each of them with a strength-reduced data reference.  */
      if (c->kind == CAND_REF)
	replace_refs (c);

      /* If the common stride of all related candidates is a known
	 constant, each candidate without a phi-dependence can be
	 profitably replaced.  Each replaces a multiply by a single
	 add, with the possibility that a feeding add also goes dead.
	 A candidate with a phi-dependence is replaced only if the
	 compensation code it requires is offset by the strength
	 reduction savings.  */
      else if (TREE_CODE (c->stride) == INTEGER_CST)
	replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
	 to replace some or all of the dependent candidates, depending
	 on whether the introduced increments can be reused, or are
	 less expensive to calculate than the replaced statements.  */
      else
	{
	  machine_mode mode;
	  bool speed;

	  /* Determine whether we'll be generating pointer arithmetic
	     when replacing candidates.  */
	  address_arithmetic_p = (c->kind == CAND_ADD
				  && POINTER_TYPE_P (c->cand_type));

	  /* If all candidates have already been replaced under other
	     interpretations, nothing remains to be done.  */
	  if (!count_candidates (c))
	    continue;

	  /* Construct an array of increments for this candidate chain.
	     The file-scope incr_vec is allocated afresh for each
	     dependency tree and freed below.  */
	  incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
	  incr_vec_len = 0;
	  record_increments (c);

	  /* Determine which increments are profitable to replace.  */
	  mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
	  speed = optimize_cands_for_speed_p (c);
	  analyze_increments (first_dep, mode, speed);

	  /* Insert initializers of the form T_0 = stride * increment
	     for use in profitable replacements.  */
	  insert_initializers (first_dep);
	  dump_incr_vec ();

	  /* Perform the replacements.  */
	  replace_profitable_candidates (first_dep);
	  free (incr_vec);
	}
    }

  /* For conditional candidates, we may have uncommitted insertions
     on edges to clean up.  */
  gsi_commit_edge_inserts ();
}
3909
namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper for straight-line strength reduction, gated on
   -ftree-slsr (flag_tree_slsr).  */

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_slsr; }
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction

/* Main entry point: set up per-invocation storage, find candidates by
   walking FUN's CFG in predominator order, analyze and replace them,
   then release all storage.  Returns 0 (no additional TODO flags).  */

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  /* The alternative-base map is only needed during candidate
     discovery; release it before the analysis/replacement phase.  */
  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace
3992
3993 gimple_opt_pass *
3994 make_pass_strength_reduction (gcc::context *ctxt)
3995 {
3996 return new pass_strength_reduction (ctxt);
3997 }