gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimplify-me.h"
43 #include "stor-layout.h"
44 #include "expr.h"
45 #include "basic-block.h"
46 #include "tree-pass.h"
47 #include "cfgloop.h"
48 #include "gimple-pretty-print.h"
49 #include "gimple-ssa.h"
50 #include "tree-cfg.h"
51 #include "tree-phinodes.h"
52 #include "ssa-iterators.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "domwalk.h"
56 #include "pointer-set.h"
57 #include "expmed.h"
58 #include "params.h"
59 #include "hash-table.h"
60 #include "tree-ssa-address.h"
61 \f
62 /* Information about a strength reduction candidate. Each statement
63 in the candidate table represents an expression of one of the
64 following forms (the special case of CAND_REF will be described
65 later):
66
67 (CAND_MULT) S1: X = (B + i) * S
68 (CAND_ADD) S1: X = B + (i * S)
69
70 Here X and B are SSA names, i is an integer constant, and S is
71 either an SSA name or a constant. We call B the "base," i the
72 "index", and S the "stride."
73
74 Any statement S0 that dominates S1 and is of the form:
75
76 (CAND_MULT) S0: Y = (B + i') * S
77 (CAND_ADD) S0: Y = B + (i' * S)
78
79 is called a "basis" for S1. In both cases, S1 may be replaced by
80
81 S1': X = Y + (i - i') * S,
82
83 where (i - i') * S is folded to the extent possible.
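
   As a purely illustrative example (not taken from the sources), given

   (CAND_MULT) S0: Y = (B + 2) * S
   (CAND_MULT) S1: X = (B + 5) * S

   we have i' = 2 and i = 5, so S1 may be replaced by

   S1': X = Y + 3 * S

   trading the multiply in S1 for an add once 3 * S is available.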
84
85 All gimple statements are visited in dominator order, and each
86 statement that may contribute to one of the forms of S1 above is
87 given at least one entry in the candidate table. Such statements
88 include addition, pointer addition, subtraction, multiplication,
89 negation, copies, and nontrivial type casts. If a statement may
90 represent more than one expression of the forms of S1 above,
91 multiple "interpretations" are stored in the table and chained
92 together. Examples:
93
94 * An add of two SSA names may treat either operand as the base.
95 * A multiply of two SSA names, likewise.
96 * A copy or cast may be thought of as either a CAND_MULT with
97 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
98
99 Candidate records are allocated from an obstack. They are addressed
100 both from a hash table keyed on S1, and from a vector of candidate
101 pointers arranged in predominator order.
102
103 Opportunity note
104 ----------------
105 Currently we don't recognize:
106
107 S0: Y = (S * i') - B
108 S1: X = (S * i) - B
109
110 as a strength reduction opportunity, even though this S1 would
111 also be replaceable by the S1' above. This can be added if it
112 comes up in practice.
113
114 Strength reduction in addressing
115 --------------------------------
116 There is another kind of candidate known as CAND_REF. A CAND_REF
117 describes a statement containing a memory reference having
118 complex addressing that might benefit from strength reduction.
119 Specifically, we are interested in references for which
120 get_inner_reference returns a base address, offset, and bitpos as
121 follows:
122
123 base: MEM_REF (T1, C1)
124 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
125 bitpos: C4 * BITS_PER_UNIT
126
127 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
128 arbitrary integer constants. Note that C2 may be zero, in which
129 case the offset will be MULT_EXPR (T2, C3).
130
131 When this pattern is recognized, the original memory reference
132 can be replaced with:
133
134 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
135 C1 + (C2 * C3) + C4)
136
137 which distributes the multiply to allow constant folding. When
138 two or more addressing expressions can be represented by MEM_REFs
139 of this form, differing only in the constants C1, C2, and C4,
140 making this substitution produces more efficient addressing during
141 the RTL phases. When there are not at least two expressions with
142 the same values of T1, T2, and C3, there is nothing to be gained
143 by the replacement.
144
145 Strength reduction of CAND_REFs uses the same infrastructure as
146 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
147 field, MULT_EXPR (T2, C3) in the stride (S) field, and
148 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
149 is thus another CAND_REF with the same B and S values. When at
150 least two CAND_REFs are chained together using the basis relation,
151 each of them is replaced as above, resulting in improved code
152 generation for addressing.
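
   As a hypothetical source-level illustration (assuming 4-byte ints),
   consider:

     struct s { int a[16]; int b[16]; };

     void
     f (struct s *p, int n)
     {
       p->a[n] = 1;        T1: p   T2: n   C3: 4   C4: 0
       p->b[n] = 2;        T1: p   T2: n   C3: 4   C4: 64
     }

   Both references share T1, T2, and C3 and differ only in the constant
   C4, so one CAND_REF can serve as a basis for the other and the
   common address arithmetic MULT_EXPR (n, 4) is exposed for reuse.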
153
154 Conditional candidates
155 ======================
156
157 Conditional candidates are best illustrated with an example.
158 Consider the code sequence:
159
160 (1) x_0 = ...;
161 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
162 if (...)
163 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
164 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
165 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
166 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
167
168 Here strength reduction is complicated by the uncertain value of x_2.
169 A legitimate transformation is:
170
171 (1) x_0 = ...;
172 (2) a_0 = x_0 * 5;
173 if (...)
174 {
175 (3) [x_1 = x_0 + 1;]
176 (3a) t_1 = a_0 + 5;
177 }
178 (4) [x_2 = PHI <x_0, x_1>;]
179 (4a) t_2 = PHI <a_0, t_1>;
180 (5) [x_3 = x_2 + 1;]
181 (6r) a_1 = t_2 + 5;
182
183 where the bracketed instructions may go dead.
184
185 To recognize this opportunity, we have to observe that statement (6)
186 has a "hidden basis" (2). The hidden basis is unlike a normal basis
187 in that the statement and the hidden basis have different base SSA
188 names (x_2 and x_0, respectively). The relationship is established
189 when a statement's base name (x_2) is defined by a phi statement (4),
190 each argument of which (x_0, x_1) has an identical "derived base name."
191 If the argument is defined by a candidate (as x_1 is by (3)) that is a
192 CAND_ADD having a stride of 1, the derived base name of the argument is
193 the base name of the candidate (x_0). Otherwise, the argument itself
194 is its derived base name (as is the case with argument x_0).
195
196 The hidden basis for statement (6) is the nearest dominating candidate
197 whose base name is the derived base name (x_0) of the feeding phi (4),
198 and whose stride is identical to that of the statement. We can then
199 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
200 allowing the final replacement of (6) by the strength-reduced (6r).
201
202 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
203 A CAND_PHI is not a candidate for replacement, but is maintained in the
204 candidate table to ease discovery of hidden bases. Any phi statement
205 whose arguments share a common derived base name is entered into the
206 table with the derived base name, an (arbitrary) index of zero, and a
207 stride of 1. A statement with a hidden basis can then be detected by
208 simply looking up its feeding phi definition in the candidate table,
209 extracting the derived base name, and searching for a basis in the
210 usual manner after substituting the derived base name.
211
212 Note that the transformation is only valid when the original phi and
213 the statements that define the phi's arguments are all at the same
214 position in the loop hierarchy. */
215
216
217 /* Index into the candidate vector, offset by 1. VECs are zero-based,
218 while cand_idx's are one-based, with zero indicating null. */
219 typedef unsigned cand_idx;
220
221 /* The kind of candidate. */
222 enum cand_kind
223 {
224 CAND_MULT,
225 CAND_ADD,
226 CAND_REF,
227 CAND_PHI
228 };
229
230 struct slsr_cand_d
231 {
232 /* The candidate statement S1. */
233 gimple cand_stmt;
234
235 /* The base expression B: often an SSA name, but not always. */
236 tree base_expr;
237
238 /* The stride S. */
239 tree stride;
240
241 /* The index constant i. */
242 double_int index;
243
244 /* The type of the candidate. This is normally the type of base_expr,
245 but casts may have occurred when combining feeding instructions.
246 A candidate can only be a basis for candidates of the same final type.
247 (For CAND_REFs, this is the type to be used for operand 1 of the
248 replacement MEM_REF.) */
249 tree cand_type;
250
251 /* The kind of candidate (CAND_MULT, etc.). */
252 enum cand_kind kind;
253
254 /* Index of this candidate in the candidate vector. */
255 cand_idx cand_num;
256
257 /* Index of the next candidate record for the same statement.
258 A statement may be useful in more than one way (e.g., due to
259 commutativity). So we can have multiple "interpretations"
260 of a statement. */
261 cand_idx next_interp;
262
263 /* Index of the basis statement S0, if any, in the candidate vector. */
264 cand_idx basis;
265
266 /* First candidate for which this candidate is a basis, if one exists. */
267 cand_idx dependent;
268
269 /* Next candidate having the same basis as this one. */
270 cand_idx sibling;
271
272 /* If this is a conditional candidate, the CAND_PHI candidate
273 that defines the base SSA name B. */
274 cand_idx def_phi;
275
276 /* Savings that can be expected from eliminating dead code if this
277 candidate is replaced. */
278 int dead_savings;
279 };
280
281 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
282 typedef const struct slsr_cand_d *const_slsr_cand_t;
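
/* As a purely illustrative example of how these fields are used: for
   a statement X = Y * 5 where Y is itself recorded as the CAND_ADD
   Y = B + 3 with a stride of 1, the interpretation recorded for X is
   a CAND_MULT with base_expr B, index 3, stride 5, and cand_type
   normally the type of B; if Y has no other uses, dead_savings also
   accounts for the cost of the feeding add.  */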
283
284 /* Pointers to candidates are chained together as part of a mapping
285 from base expressions to the candidates that use them. */
286
287 struct cand_chain_d
288 {
289 /* Base expression for the chain of candidates: often, but not
290 always, an SSA name. */
291 tree base_expr;
292
293 /* Pointer to a candidate. */
294 slsr_cand_t cand;
295
296 /* Chain pointer. */
297 struct cand_chain_d *next;
298
299 };
300
301 typedef struct cand_chain_d cand_chain, *cand_chain_t;
302 typedef const struct cand_chain_d *const_cand_chain_t;
303
304 /* Information about a unique "increment" associated with candidates
305 having an SSA name for a stride. An increment is the difference
306 between the index of the candidate and the index of its basis,
307 i.e., (i - i') as discussed in the module commentary.
308
309 When we are not going to generate address arithmetic we treat
310 increments that differ only in sign as the same, allowing sharing
311 of the cost of initializers. The absolute value of the increment
312 is stored in the incr_info. */
313
314 struct incr_info_d
315 {
316 /* The increment that relates a candidate to its basis. */
317 double_int incr;
318
319 /* How many times the increment occurs in the candidate tree. */
320 unsigned count;
321
322 /* Cost of replacing candidates using this increment. Negative and
323 zero costs indicate replacement should be performed. */
324 int cost;
325
326 /* If this increment is profitable but is not -1, 0, or 1, it requires
327 an initializer T_0 = stride * incr to be found or introduced in the
328 nearest common dominator of all candidates. This field holds T_0
329 for subsequent use. */
330 tree initializer;
331
332 /* If the initializer was found to already exist, this is the block
333 where it was found. */
334 basic_block init_bb;
335 };
336
337 typedef struct incr_info_d incr_info, *incr_info_t;
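
/* For example (illustrative only): if a candidate has index 7 and its
   basis has index 3, the increment is 4.  When the stride is an SSA
   name S, replacing the candidate with this increment requires an
   initializer T_0 = 4 * S to be found or introduced, since only
   increments of -1, 0, and 1 can be applied without one.  */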
338
339 /* Candidates are maintained in a vector. If candidate X dominates
340 candidate Y, then X appears before Y in the vector; but the
341 converse does not necessarily hold. */
342 static vec<slsr_cand_t> cand_vec;
343
344 enum cost_consts
345 {
346 COST_NEUTRAL = 0,
347 COST_INFINITE = 1000
348 };
349
350 enum stride_status
351 {
352 UNKNOWN_STRIDE = 0,
353 KNOWN_STRIDE = 1
354 };
355
356 enum phi_adjust_status
357 {
358 NOT_PHI_ADJUST = 0,
359 PHI_ADJUST = 1
360 };
361
362 enum count_phis_status
363 {
364 DONT_COUNT_PHIS = 0,
365 COUNT_PHIS = 1
366 };
367
368 /* Pointer map embodying a mapping from statements to candidates. */
369 static struct pointer_map_t *stmt_cand_map;
370
371 /* Obstack for candidates. */
372 static struct obstack cand_obstack;
373
374 /* Obstack for candidate chains. */
375 static struct obstack chain_obstack;
376
377 /* An array INCR_VEC of incr_infos is used during analysis of related
378 candidates having an SSA name for a stride. INCR_VEC_LEN describes
379 its current length. MAX_INCR_VEC_LEN is used to avoid costly
380 pathological cases. */
381 static incr_info_t incr_vec;
382 static unsigned incr_vec_len;
383 const int MAX_INCR_VEC_LEN = 16;
384
385 /* For a chain of candidates with unknown stride, indicates whether or not
386 we must generate pointer arithmetic when replacing statements. */
387 static bool address_arithmetic_p;
388
389 /* Forward function declarations. */
390 static slsr_cand_t base_cand_from_table (tree);
391 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
392 static bool legal_cast_p_1 (tree, tree);
393 \f
394 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
395
396 static slsr_cand_t
397 lookup_cand (cand_idx idx)
398 {
399 return cand_vec[idx - 1];
400 }
401
402 /* Helper for hashing a candidate chain header. */
403
404 struct cand_chain_hasher : typed_noop_remove <cand_chain>
405 {
406 typedef cand_chain value_type;
407 typedef cand_chain compare_type;
408 static inline hashval_t hash (const value_type *);
409 static inline bool equal (const value_type *, const compare_type *);
410 };
411
412 inline hashval_t
413 cand_chain_hasher::hash (const value_type *p)
414 {
415 tree base_expr = p->base_expr;
416 return iterative_hash_expr (base_expr, 0);
417 }
418
419 inline bool
420 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
421 {
422 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
423 }
424
425 /* Hash table embodying a mapping from base exprs to chains of candidates. */
426 static hash_table <cand_chain_hasher> base_cand_map;
427 \f
428 /* Look in the candidate table for a CAND_PHI that defines BASE and
429 return it if found; otherwise return NULL. */
430
431 static cand_idx
432 find_phi_def (tree base)
433 {
434 slsr_cand_t c;
435
436 if (TREE_CODE (base) != SSA_NAME)
437 return 0;
438
439 c = base_cand_from_table (base);
440
441 if (!c || c->kind != CAND_PHI)
442 return 0;
443
444 return c->cand_num;
445 }
446
447 /* Helper routine for find_basis_for_candidate. May be called twice:
448 once for the candidate's base expr, and optionally again for the
449 candidate's phi definition. */
450
451 static slsr_cand_t
452 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
453 {
454 cand_chain mapping_key;
455 cand_chain_t chain;
456 slsr_cand_t basis = NULL;
457
458 // Limit potential of N^2 behavior for long candidate chains.
459 int iters = 0;
460 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
461
462 mapping_key.base_expr = base_expr;
463 chain = base_cand_map.find (&mapping_key);
464
465 for (; chain && iters < max_iters; chain = chain->next, ++iters)
466 {
467 slsr_cand_t one_basis = chain->cand;
468
469 if (one_basis->kind != c->kind
470 || one_basis->cand_stmt == c->cand_stmt
471 || !operand_equal_p (one_basis->stride, c->stride, 0)
472 || !types_compatible_p (one_basis->cand_type, c->cand_type)
473 || !dominated_by_p (CDI_DOMINATORS,
474 gimple_bb (c->cand_stmt),
475 gimple_bb (one_basis->cand_stmt)))
476 continue;
477
478 if (!basis || basis->cand_num < one_basis->cand_num)
479 basis = one_basis;
480 }
481
482 return basis;
483 }
484
485 /* Use the base expr from candidate C to look for possible candidates
486 that can serve as a basis for C. Each potential basis must also
487 appear in a block that dominates the candidate statement and have
488 the same stride and type. If more than one possible basis exists,
489 the one with highest index in the vector is chosen; this will be
490 the most immediately dominating basis. */
491
492 static int
493 find_basis_for_candidate (slsr_cand_t c)
494 {
495 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
496
497 /* If a candidate doesn't have a basis using its base expression,
498 it may have a basis hidden by one or more intervening phis. */
499 if (!basis && c->def_phi)
500 {
501 basic_block basis_bb, phi_bb;
502 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
503 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
504
505 if (basis)
506 {
507 /* A hidden basis must dominate the phi-definition of the
508 candidate's base name. */
509 phi_bb = gimple_bb (phi_cand->cand_stmt);
510 basis_bb = gimple_bb (basis->cand_stmt);
511
512 if (phi_bb == basis_bb
513 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
514 {
515 basis = NULL;
516 c->basis = 0;
517 }
518
519 /* If we found a hidden basis, estimate additional dead-code
520 savings if the phi and its feeding statements can be removed. */
521 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
522 c->dead_savings += phi_cand->dead_savings;
523 }
524 }
525
526 if (basis)
527 {
528 c->sibling = basis->dependent;
529 basis->dependent = c->cand_num;
530 return basis->cand_num;
531 }
532
533 return 0;
534 }
535
536 /* Record a mapping from the base expression of C to C itself, indicating that
537 C may potentially serve as a basis using that base expression. */
538
539 static void
540 record_potential_basis (slsr_cand_t c)
541 {
542 cand_chain_t node;
543 cand_chain **slot;
544
545 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
546 node->base_expr = c->base_expr;
547 node->cand = c;
548 node->next = NULL;
549 slot = base_cand_map.find_slot (node, INSERT);
550
551 if (*slot)
552 {
553 cand_chain_t head = (cand_chain_t) (*slot);
554 node->next = head->next;
555 head->next = node;
556 }
557 else
558 *slot = node;
559 }
560
561 /* Allocate storage for a new candidate and initialize its fields.
562 Attempt to find a basis for the candidate. */
563
564 static slsr_cand_t
565 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
566 double_int index, tree stride, tree ctype,
567 unsigned savings)
568 {
569 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
570 sizeof (slsr_cand));
571 c->cand_stmt = gs;
572 c->base_expr = base;
573 c->stride = stride;
574 c->index = index;
575 c->cand_type = ctype;
576 c->kind = kind;
577 c->cand_num = cand_vec.length () + 1;
578 c->next_interp = 0;
579 c->dependent = 0;
580 c->sibling = 0;
581 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
582 c->dead_savings = savings;
583
584 cand_vec.safe_push (c);
585
586 if (kind == CAND_PHI)
587 c->basis = 0;
588 else
589 c->basis = find_basis_for_candidate (c);
590
591 record_potential_basis (c);
592
593 return c;
594 }
595
596 /* Determine the target cost of statement GS when compiling according
597 to SPEED. */
598
599 static int
600 stmt_cost (gimple gs, bool speed)
601 {
602 tree lhs, rhs1, rhs2;
603 enum machine_mode lhs_mode;
604
605 gcc_assert (is_gimple_assign (gs));
606 lhs = gimple_assign_lhs (gs);
607 rhs1 = gimple_assign_rhs1 (gs);
608 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
609
610 switch (gimple_assign_rhs_code (gs))
611 {
612 case MULT_EXPR:
613 rhs2 = gimple_assign_rhs2 (gs);
614
615 if (tree_fits_shwi_p (rhs2))
616 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
617
618 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
619 return mul_cost (speed, lhs_mode);
620
621 case PLUS_EXPR:
622 case POINTER_PLUS_EXPR:
623 case MINUS_EXPR:
624 return add_cost (speed, lhs_mode);
625
626 case NEGATE_EXPR:
627 return neg_cost (speed, lhs_mode);
628
629 case NOP_EXPR:
630 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
631
632 /* Note that we don't assign costs to copies that in most cases
633 will go away. */
634 default:
635 ;
636 }
637
638 gcc_unreachable ();
639 return 0;
640 }
641
642 /* Look up the defining statement for BASE_IN and return a pointer
643 to its candidate in the candidate table, if any; otherwise NULL.
644 Only CAND_ADD and CAND_MULT candidates are returned. */
645
646 static slsr_cand_t
647 base_cand_from_table (tree base_in)
648 {
649 slsr_cand_t *result;
650
651 gimple def = SSA_NAME_DEF_STMT (base_in);
652 if (!def)
653 return (slsr_cand_t) NULL;
654
655 result = (slsr_cand_t *) pointer_map_contains (stmt_cand_map, def);
656
657 if (result && (*result)->kind != CAND_REF)
658 return *result;
659
660 return (slsr_cand_t) NULL;
661 }
662
663 /* Add an entry to the statement-to-candidate mapping. */
664
665 static void
666 add_cand_for_stmt (gimple gs, slsr_cand_t c)
667 {
668 void **slot = pointer_map_insert (stmt_cand_map, gs);
669 gcc_assert (!*slot);
670 *slot = c;
671 }
672 \f
673 /* Given PHI which contains a phi statement, determine whether it
674 satisfies all the requirements of a phi candidate. If so, create
675 a candidate. Note that a CAND_PHI never has a basis itself, but
676 is used to help find a basis for subsequent candidates. */
677
678 static void
679 slsr_process_phi (gimple phi, bool speed)
680 {
681 unsigned i;
682 tree arg0_base = NULL_TREE, base_type;
683 slsr_cand_t c;
684 struct loop *cand_loop = gimple_bb (phi)->loop_father;
685 unsigned savings = 0;
686
687 /* A CAND_PHI requires each of its arguments to have the same
688 derived base name. (See the module header commentary for a
689 definition of derived base names.) Furthermore, all feeding
690 definitions must be in the same position in the loop hierarchy
691 as PHI. */
692
693 for (i = 0; i < gimple_phi_num_args (phi); i++)
694 {
695 slsr_cand_t arg_cand;
696 tree arg = gimple_phi_arg_def (phi, i);
697 tree derived_base_name = NULL_TREE;
698 gimple arg_stmt = NULL;
699 basic_block arg_bb = NULL;
700
701 if (TREE_CODE (arg) != SSA_NAME)
702 return;
703
704 arg_cand = base_cand_from_table (arg);
705
706 if (arg_cand)
707 {
708 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
709 {
710 if (!arg_cand->next_interp)
711 return;
712
713 arg_cand = lookup_cand (arg_cand->next_interp);
714 }
715
716 if (!integer_onep (arg_cand->stride))
717 return;
718
719 derived_base_name = arg_cand->base_expr;
720 arg_stmt = arg_cand->cand_stmt;
721 arg_bb = gimple_bb (arg_stmt);
722
723 /* Gather potential dead code savings if the phi statement
724 can be removed later on. */
725 if (has_single_use (arg))
726 {
727 if (gimple_code (arg_stmt) == GIMPLE_PHI)
728 savings += arg_cand->dead_savings;
729 else
730 savings += stmt_cost (arg_stmt, speed);
731 }
732 }
733 else
734 {
735 derived_base_name = arg;
736
737 if (SSA_NAME_IS_DEFAULT_DEF (arg))
738 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
739 else
740 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
741 }
742
743 if (!arg_bb || arg_bb->loop_father != cand_loop)
744 return;
745
746 if (i == 0)
747 arg0_base = derived_base_name;
748 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
749 return;
750 }
751
752 /* Create the candidate. "alloc_cand_and_find_basis" is named
753 misleadingly for this case, as no basis will be sought for a
754 CAND_PHI. */
755 base_type = TREE_TYPE (arg0_base);
756
757 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base, double_int_zero,
758 integer_one_node, base_type, savings);
759
760 /* Add the candidate to the statement-candidate mapping. */
761 add_cand_for_stmt (phi, c);
762 }
763
764 /* Given PBASE which is a pointer to tree, look up the defining
765 statement for it and check whether the candidate is in the
766 form of:
767
768 X = B + (1 * S), S is integer constant
769 X = B + (i * S), S is integer one
770
771 If so, set PBASE to the candidate's base_expr and return double
772 int (i * S).
773 Otherwise, just return double int zero. */
774
775 static double_int
776 backtrace_base_for_ref (tree *pbase)
777 {
778 tree base_in = *pbase;
779 slsr_cand_t base_cand;
780
781 STRIP_NOPS (base_in);
782
783 /* Strip off widening conversion(s) to handle cases where
784 e.g. 'B' is widened from an 'int' in order to calculate
785 a 64-bit address. */
786 if (CONVERT_EXPR_P (base_in)
787 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
788 base_in = get_unwidened (base_in, NULL_TREE);
789
790 if (TREE_CODE (base_in) != SSA_NAME)
791 return tree_to_double_int (integer_zero_node);
792
793 base_cand = base_cand_from_table (base_in);
794
795 while (base_cand && base_cand->kind != CAND_PHI)
796 {
797 if (base_cand->kind == CAND_ADD
798 && base_cand->index.is_one ()
799 && TREE_CODE (base_cand->stride) == INTEGER_CST)
800 {
801 /* X = B + (1 * S), S is integer constant. */
802 *pbase = base_cand->base_expr;
803 return tree_to_double_int (base_cand->stride);
804 }
805 else if (base_cand->kind == CAND_ADD
806 && TREE_CODE (base_cand->stride) == INTEGER_CST
807 && integer_onep (base_cand->stride))
808 {
809 /* X = B + (i * S), S is integer one. */
810 *pbase = base_cand->base_expr;
811 return base_cand->index;
812 }
813
814 if (base_cand->next_interp)
815 base_cand = lookup_cand (base_cand->next_interp);
816 else
817 base_cand = NULL;
818 }
819
820 return tree_to_double_int (integer_zero_node);
821 }
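
/* An illustrative example with hypothetical SSA names: if *PBASE is
   t_2 and t_2 is recorded as the CAND_ADD t_2 = t_1 + 5 with stride 1,
   then *PBASE is rewritten to t_1 and 5 is returned, allowing the
   caller (restructure_reference) to fold 5 * C3 into the constant
   index of the reference.  */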
822
823 /* Look for the following pattern:
824
825 *PBASE: MEM_REF (T1, C1)
826
827 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
828 or
829 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
830 or
831 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
832
833 *PINDEX: C4 * BITS_PER_UNIT
834
835 If not present, leave the input values unchanged and return FALSE.
836 Otherwise, modify the input values as follows and return TRUE:
837
838 *PBASE: T1
839 *POFFSET: MULT_EXPR (T2, C3)
840 *PINDEX: C1 + (C2 * C3) + C4
841
842 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
843 will be further restructured to:
844
845 *PBASE: T1
846 *POFFSET: MULT_EXPR (T2', C3)
847 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
848
849 static bool
850 restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
851 tree *ptype)
852 {
853 tree base = *pbase, offset = *poffset;
854 double_int index = *pindex;
855 double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
856 tree mult_op0, mult_op1, t1, t2, type;
857 double_int c1, c2, c3, c4, c5;
858
859 if (!base
860 || !offset
861 || TREE_CODE (base) != MEM_REF
862 || TREE_CODE (offset) != MULT_EXPR
863 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
864 || !index.umod (bpu, FLOOR_MOD_EXPR).is_zero ())
865 return false;
866
867 t1 = TREE_OPERAND (base, 0);
868 c1 = mem_ref_offset (base);
869 type = TREE_TYPE (TREE_OPERAND (base, 1));
870
871 mult_op0 = TREE_OPERAND (offset, 0);
872 mult_op1 = TREE_OPERAND (offset, 1);
873
874 c3 = tree_to_double_int (mult_op1);
875
876 if (TREE_CODE (mult_op0) == PLUS_EXPR)
877
878 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
879 {
880 t2 = TREE_OPERAND (mult_op0, 0);
881 c2 = tree_to_double_int (TREE_OPERAND (mult_op0, 1));
882 }
883 else
884 return false;
885
886 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
887
888 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
889 {
890 t2 = TREE_OPERAND (mult_op0, 0);
891 c2 = -tree_to_double_int (TREE_OPERAND (mult_op0, 1));
892 }
893 else
894 return false;
895
896 else
897 {
898 t2 = mult_op0;
899 c2 = double_int_zero;
900 }
901
902 c4 = index.udiv (bpu, FLOOR_DIV_EXPR);
903 c5 = backtrace_base_for_ref (&t2);
904
905 *pbase = t1;
906 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
907 double_int_to_tree (sizetype, c3));
908 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
909 *ptype = type;
910
911 return true;
912 }
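
/* A worked example with values chosen purely for illustration:

     *PBASE:   MEM_REF (p_1, 8)
     *POFFSET: MULT_EXPR (PLUS_EXPR (n_2, 3), 4)
     *PINDEX:  32   (i.e., C4 = 4 bytes, expressed in bits)

   gives C1 = 8, C2 = 3, C3 = 4, and C4 = 4, so on success:

     *PBASE:   p_1
     *POFFSET: MULT_EXPR (n_2, 4)
     *PINDEX:  8 + (3 * 4) + 4 = 24

   assuming backtrace_base_for_ref finds nothing for n_2 (C5 = 0).  */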
913
914 /* Given GS which contains a data reference, create a CAND_REF entry in
915 the candidate table and attempt to find a basis. */
916
917 static void
918 slsr_process_ref (gimple gs)
919 {
920 tree ref_expr, base, offset, type;
921 HOST_WIDE_INT bitsize, bitpos;
922 enum machine_mode mode;
923 int unsignedp, volatilep;
924 double_int index;
925 slsr_cand_t c;
926
927 if (gimple_vdef (gs))
928 ref_expr = gimple_assign_lhs (gs);
929 else
930 ref_expr = gimple_assign_rhs1 (gs);
931
932 if (!handled_component_p (ref_expr)
933 || TREE_CODE (ref_expr) == BIT_FIELD_REF
934 || (TREE_CODE (ref_expr) == COMPONENT_REF
935 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
936 return;
937
938 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
939 &unsignedp, &volatilep, false);
940 index = double_int::from_uhwi (bitpos);
941
942 if (!restructure_reference (&base, &offset, &index, &type))
943 return;
944
945 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
946 type, 0);
947
948 /* Add the candidate to the statement-candidate mapping. */
949 add_cand_for_stmt (gs, c);
950 }
951
952 /* Create a candidate entry for a statement GS, where GS multiplies
953 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
954 about the two SSA names into the new candidate. Return the new
955 candidate. */
956
957 static slsr_cand_t
958 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
959 {
960 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
961 double_int index;
962 unsigned savings = 0;
963 slsr_cand_t c;
964 slsr_cand_t base_cand = base_cand_from_table (base_in);
965
966 /* Look at all interpretations of the base candidate, if necessary,
967 to find information to propagate into this candidate. */
968 while (base_cand && !base && base_cand->kind != CAND_PHI)
969 {
970
971 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
972 {
973 /* Y = (B + i') * 1
974 X = Y * Z
975 ================
976 X = (B + i') * Z */
977 base = base_cand->base_expr;
978 index = base_cand->index;
979 stride = stride_in;
980 ctype = base_cand->cand_type;
981 if (has_single_use (base_in))
982 savings = (base_cand->dead_savings
983 + stmt_cost (base_cand->cand_stmt, speed));
984 }
985 else if (base_cand->kind == CAND_ADD
986 && TREE_CODE (base_cand->stride) == INTEGER_CST)
987 {
988 /* Y = B + (i' * S), S constant
989 X = Y * Z
990 ============================
991 X = B + ((i' * S) * Z) */
992 base = base_cand->base_expr;
993 index = base_cand->index * tree_to_double_int (base_cand->stride);
994 stride = stride_in;
995 ctype = base_cand->cand_type;
996 if (has_single_use (base_in))
997 savings = (base_cand->dead_savings
998 + stmt_cost (base_cand->cand_stmt, speed));
999 }
1000
1001 if (base_cand->next_interp)
1002 base_cand = lookup_cand (base_cand->next_interp);
1003 else
1004 base_cand = NULL;
1005 }
1006
1007 if (!base)
1008 {
1009 /* No interpretations had anything useful to propagate, so
1010 produce X = (Y + 0) * Z. */
1011 base = base_in;
1012 index = double_int_zero;
1013 stride = stride_in;
1014 ctype = TREE_TYPE (base_in);
1015 }
1016
1017 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1018 ctype, savings);
1019 return c;
1020 }
1021
1022 /* Create a candidate entry for a statement GS, where GS multiplies
1023 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1024 information about BASE_IN into the new candidate. Return the new
1025 candidate. */
1026
1027 static slsr_cand_t
1028 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1029 {
1030 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1031 double_int index, temp;
1032 unsigned savings = 0;
1033 slsr_cand_t c;
1034 slsr_cand_t base_cand = base_cand_from_table (base_in);
1035
1036 /* Look at all interpretations of the base candidate, if necessary,
1037 to find information to propagate into this candidate. */
1038 while (base_cand && !base && base_cand->kind != CAND_PHI)
1039 {
1040 if (base_cand->kind == CAND_MULT
1041 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1042 {
1043 /* Y = (B + i') * S, S constant
1044 X = Y * c
1045 ============================
1046 X = (B + i') * (S * c) */
1047 base = base_cand->base_expr;
1048 index = base_cand->index;
1049 temp = tree_to_double_int (base_cand->stride)
1050 * tree_to_double_int (stride_in);
1051 stride = double_int_to_tree (TREE_TYPE (stride_in), temp);
1052 ctype = base_cand->cand_type;
1053 if (has_single_use (base_in))
1054 savings = (base_cand->dead_savings
1055 + stmt_cost (base_cand->cand_stmt, speed));
1056 }
1057 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1058 {
1059 /* Y = B + (i' * 1)
1060 X = Y * c
1061 ===========================
1062 X = (B + i') * c */
1063 base = base_cand->base_expr;
1064 index = base_cand->index;
1065 stride = stride_in;
1066 ctype = base_cand->cand_type;
1067 if (has_single_use (base_in))
1068 savings = (base_cand->dead_savings
1069 + stmt_cost (base_cand->cand_stmt, speed));
1070 }
1071 else if (base_cand->kind == CAND_ADD
1072 && base_cand->index.is_one ()
1073 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1074 {
1075 /* Y = B + (1 * S), S constant
1076 X = Y * c
1077 ===========================
1078 X = (B + S) * c */
1079 base = base_cand->base_expr;
1080 index = tree_to_double_int (base_cand->stride);
1081 stride = stride_in;
1082 ctype = base_cand->cand_type;
1083 if (has_single_use (base_in))
1084 savings = (base_cand->dead_savings
1085 + stmt_cost (base_cand->cand_stmt, speed));
1086 }
1087
1088 if (base_cand->next_interp)
1089 base_cand = lookup_cand (base_cand->next_interp);
1090 else
1091 base_cand = NULL;
1092 }
1093
1094 if (!base)
1095 {
1096 /* No interpretations had anything useful to propagate, so
1097 produce X = (Y + 0) * c. */
1098 base = base_in;
1099 index = double_int_zero;
1100 stride = stride_in;
1101 ctype = TREE_TYPE (base_in);
1102 }
1103
1104 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1105 ctype, savings);
1106 return c;
1107 }
1108
1109 /* Given GS which is a multiply of scalar integers, make an appropriate
1110 entry in the candidate table. If this is a multiply of two SSA names,
1111 create two CAND_MULT interpretations and attempt to find a basis for
1112 each of them. Otherwise, create a single CAND_MULT and attempt to
1113 find a basis. */
1114
1115 static void
1116 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1117 {
1118 slsr_cand_t c, c2;
1119
1120 /* If this is a multiply of an SSA name with itself, it is highly
1121 unlikely that we will get a strength reduction opportunity, so
1122 don't record it as a candidate. This simplifies the logic for
1123 finding a basis, so if this is removed that must be considered. */
1124 if (rhs1 == rhs2)
1125 return;
1126
1127 if (TREE_CODE (rhs2) == SSA_NAME)
1128 {
1129 /* Record an interpretation of this statement in the candidate table
1130 assuming RHS1 is the base expression and RHS2 is the stride. */
1131 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1132
1133 /* Add the first interpretation to the statement-candidate mapping. */
1134 add_cand_for_stmt (gs, c);
1135
1136 /* Record another interpretation of this statement assuming RHS1
1137 is the stride and RHS2 is the base expression. */
1138 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1139 c->next_interp = c2->cand_num;
1140 }
1141 else
1142 {
1143 /* Record an interpretation for the multiply-immediate. */
1144 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1145
1146 /* Add the interpretation to the statement-candidate mapping. */
1147 add_cand_for_stmt (gs, c);
1148 }
1149 }
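
/* For instance (illustrative only), x_3 = a_1 * b_2 with no useful
   information known about a_1 or b_2 is recorded twice: once as
   MULT (B: a_1; i: 0; S: b_2) and once as MULT (B: b_2; i: 0; S: a_1),
   chained through next_interp, so that either operand may later match
   a dominating candidate with the same stride.  */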
1150
1151 /* Create a candidate entry for a statement GS, where GS adds two
1152 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1153 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1154 information about the two SSA names into the new candidate.
1155 Return the new candidate. */
1156
1157 static slsr_cand_t
1158 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1159 bool subtract_p, bool speed)
1160 {
1161 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1162 double_int index;
1163 unsigned savings = 0;
1164 slsr_cand_t c;
1165 slsr_cand_t base_cand = base_cand_from_table (base_in);
1166 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1167
1168 /* The most useful transformation is a multiply-immediate feeding
1169 an add or subtract. Look for that first. */
1170 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1171 {
1172 if (addend_cand->kind == CAND_MULT
1173 && addend_cand->index.is_zero ()
1174 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1175 {
1176 /* Z = (B + 0) * S, S constant
1177 X = Y +/- Z
1178 ===========================
1179 X = Y + ((+/-1 * S) * B) */
1180 base = base_in;
1181 index = tree_to_double_int (addend_cand->stride);
1182 if (subtract_p)
1183 index = -index;
1184 stride = addend_cand->base_expr;
1185 ctype = TREE_TYPE (base_in);
1186 if (has_single_use (addend_in))
1187 savings = (addend_cand->dead_savings
1188 + stmt_cost (addend_cand->cand_stmt, speed));
1189 }
1190
1191 if (addend_cand->next_interp)
1192 addend_cand = lookup_cand (addend_cand->next_interp);
1193 else
1194 addend_cand = NULL;
1195 }
1196
1197 while (base_cand && !base && base_cand->kind != CAND_PHI)
1198 {
1199 if (base_cand->kind == CAND_ADD
1200 && (base_cand->index.is_zero ()
1201 || operand_equal_p (base_cand->stride,
1202 integer_zero_node, 0)))
1203 {
1204 /* Y = B + (i' * S), i' * S = 0
1205 X = Y +/- Z
1206 ============================
1207 X = B + (+/-1 * Z) */
1208 base = base_cand->base_expr;
1209 index = subtract_p ? double_int_minus_one : double_int_one;
1210 stride = addend_in;
1211 ctype = base_cand->cand_type;
1212 if (has_single_use (base_in))
1213 savings = (base_cand->dead_savings
1214 + stmt_cost (base_cand->cand_stmt, speed));
1215 }
1216 else if (subtract_p)
1217 {
1218 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1219
1220 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1221 {
1222 if (subtrahend_cand->kind == CAND_MULT
1223 && subtrahend_cand->index.is_zero ()
1224 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1225 {
1226 /* Z = (B + 0) * S, S constant
1227 X = Y - Z
1228 ===========================
1229 Value: X = Y + ((-1 * S) * B) */
1230 base = base_in;
1231 index = tree_to_double_int (subtrahend_cand->stride);
1232 index = -index;
1233 stride = subtrahend_cand->base_expr;
1234 ctype = TREE_TYPE (base_in);
1235 if (has_single_use (addend_in))
1236 savings = (subtrahend_cand->dead_savings
1237 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1238 }
1239
1240 if (subtrahend_cand->next_interp)
1241 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1242 else
1243 subtrahend_cand = NULL;
1244 }
1245 }
1246
1247 if (base_cand->next_interp)
1248 base_cand = lookup_cand (base_cand->next_interp);
1249 else
1250 base_cand = NULL;
1251 }
1252
1253 if (!base)
1254 {
1255 /* No interpretations had anything useful to propagate, so
1256 produce X = Y + (1 * Z). */
1257 base = base_in;
1258 index = subtract_p ? double_int_minus_one : double_int_one;
1259 stride = addend_in;
1260 ctype = TREE_TYPE (base_in);
1261 }
1262
1263 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1264 ctype, savings);
1265 return c;
1266 }
1267
1268 /* Create a candidate entry for a statement GS, where GS adds SSA
1269 name BASE_IN to constant INDEX_IN. Propagate any known information
1270 about BASE_IN into the new candidate. Return the new candidate. */
1271
1272 static slsr_cand_t
1273 create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed)
1274 {
1275 enum cand_kind kind = CAND_ADD;
1276 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1277 double_int index, multiple;
1278 unsigned savings = 0;
1279 slsr_cand_t c;
1280 slsr_cand_t base_cand = base_cand_from_table (base_in);
1281
1282 while (base_cand && !base && base_cand->kind != CAND_PHI)
1283 {
1284 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride));
1285
1286 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1287 && index_in.multiple_of (tree_to_double_int (base_cand->stride),
1288 unsigned_p, &multiple))
1289 {
1290 /* Y = (B + i') * S, S constant, c = kS for some integer k
1291 X = Y + c
1292 ============================
1293 X = (B + (i'+ k)) * S
1294 OR
1295 Y = B + (i' * S), S constant, c = kS for some integer k
1296 X = Y + c
1297 ============================
1298 X = (B + (i'+ k)) * S */
1299 kind = base_cand->kind;
1300 base = base_cand->base_expr;
1301 index = base_cand->index + multiple;
1302 stride = base_cand->stride;
1303 ctype = base_cand->cand_type;
1304 if (has_single_use (base_in))
1305 savings = (base_cand->dead_savings
1306 + stmt_cost (base_cand->cand_stmt, speed));
1307 }
1308
1309 if (base_cand->next_interp)
1310 base_cand = lookup_cand (base_cand->next_interp);
1311 else
1312 base_cand = NULL;
1313 }
1314
1315 if (!base)
1316 {
1317 /* No interpretations had anything useful to propagate, so
1318 produce X = Y + (c * 1). */
1319 kind = CAND_ADD;
1320 base = base_in;
1321 index = index_in;
1322 stride = integer_one_node;
1323 ctype = TREE_TYPE (base_in);
1324 }
1325
1326 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1327 ctype, savings);
1328 return c;
1329 }
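
/* For instance (illustrative only): if Y is recorded as the CAND_MULT
   Y = (B + 2) * 4 and we see X = Y + 8, then 8 is a multiple of the
   stride (k = 2), so X is recorded as the CAND_MULT X = (B + 4) * 4;
   the constant is folded into the index rather than kept as a
   separate addition.  */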
1330
1331 /* Given GS which is an add or subtract of scalar integers or pointers,
1332 make at least one appropriate entry in the candidate table. */
1333
1334 static void
1335 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1336 {
1337 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1338 slsr_cand_t c = NULL, c2;
1339
1340 if (TREE_CODE (rhs2) == SSA_NAME)
1341 {
1342 /* First record an interpretation assuming RHS1 is the base expression
1343 and RHS2 is the stride. But it doesn't make sense for the
1344 stride to be a pointer, so don't record a candidate in that case. */
1345 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1346 {
1347 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1348
1349 /* Add the first interpretation to the statement-candidate
1350 mapping. */
1351 add_cand_for_stmt (gs, c);
1352 }
1353
1354 /* If the two RHS operands are identical, or this is a subtract,
1355 we're done. */
1356 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1357 return;
1358
1359 /* Otherwise, record another interpretation assuming RHS2 is the
1360 base expression and RHS1 is the stride, again provided that the
1361 stride is not a pointer. */
1362 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1363 {
1364 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1365 if (c)
1366 c->next_interp = c2->cand_num;
1367 else
1368 add_cand_for_stmt (gs, c2);
1369 }
1370 }
1371 else
1372 {
1373 double_int index;
1374
1375 /* Record an interpretation for the add-immediate. */
1376 index = tree_to_double_int (rhs2);
1377 if (subtract_p)
1378 index = -index;
1379
1380 c = create_add_imm_cand (gs, rhs1, index, speed);
1381
1382 /* Add the interpretation to the statement-candidate mapping. */
1383 add_cand_for_stmt (gs, c);
1384 }
1385 }
1386
1387 /* Given GS which is a negate of a scalar integer, make an appropriate
1388 entry in the candidate table. A negate is equivalent to a multiply
1389 by -1. */
1390
1391 static void
1392 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1393 {
1394 /* Record a CAND_MULT interpretation for the multiply by -1. */
1395 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1396
1397 /* Add the interpretation to the statement-candidate mapping. */
1398 add_cand_for_stmt (gs, c);
1399 }
1400
1401 /* Help function for legal_cast_p, operating on two trees. Checks
1402 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1403 for more details. */
1404
1405 static bool
1406 legal_cast_p_1 (tree lhs, tree rhs)
1407 {
1408 tree lhs_type, rhs_type;
1409 unsigned lhs_size, rhs_size;
1410 bool lhs_wraps, rhs_wraps;
1411
1412 lhs_type = TREE_TYPE (lhs);
1413 rhs_type = TREE_TYPE (rhs);
1414 lhs_size = TYPE_PRECISION (lhs_type);
1415 rhs_size = TYPE_PRECISION (rhs_type);
1416 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1417 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1418
1419 if (lhs_size < rhs_size
1420 || (rhs_wraps && !lhs_wraps)
1421 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1422 return false;
1423
1424 return true;
1425 }
1426
1427 /* Return TRUE if GS is a statement that defines an SSA name from
1428 a conversion and is legal for us to combine with an add and multiply
1429 in the candidate table. For example, suppose we have:
1430
1431 A = B + i;
1432 C = (type) A;
1433 D = C * S;
1434
1435 Without the type-cast, we would create a CAND_MULT for D with base B,
1436 index i, and stride S. We want to record this candidate only if
1437 applying the type cast after the multiply would be equivalent:
1438
1439 A = B + i;
1440 E = A * S;
1441 D = (type) E;
1442
1443 We will record the type with the candidate for D. This allows us
1444 to use a similar previous candidate as a basis. If we have earlier seen
1445
1446 A' = B + i';
1447 C' = (type) A';
1448 D' = C' * S;
1449
1450 we can replace D with
1451
1452 D = D' + (i - i') * S;
1453
1454 But if moving the type-cast would change semantics, we mustn't do this.
1455
1456 This is legitimate for casts from a non-wrapping integral type to
1457 any integral type of the same or larger size. It is not legitimate
1458 to convert a wrapping type to a non-wrapping type, or to a wrapping
1459 type of a different size. I.e., with a wrapping type, we must
1460 assume that the addition B + i could wrap, in which case performing
1461 the multiply before or after one of the "illegal" type casts will
1462 have different semantics. */
1463
1464 static bool
1465 legal_cast_p (gimple gs, tree rhs)
1466 {
1467 if (!is_gimple_assign (gs)
1468 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1469 return false;
1470
1471 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1472 }
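
/* Illustrative examples of the rules above, assuming 32-bit int,
   64-bit long, and the default overflow semantics in which signed
   types do not wrap and unsigned types do: casting int to long is
   legal (non-wrapping source, same or larger size); casting unsigned
   int to unsigned long is not (wrapping source, different size); and
   casting unsigned int to int is not (wrapping to non-wrapping).  */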
1473
1474 /* Given GS which is a cast to a scalar integer type, determine whether
1475 the cast is legal for strength reduction. If so, make at least one
1476 appropriate entry in the candidate table. */
1477
1478 static void
1479 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1480 {
1481 tree lhs, ctype;
1482 slsr_cand_t base_cand, c, c2;
1483 unsigned savings = 0;
1484
1485 if (!legal_cast_p (gs, rhs1))
1486 return;
1487
1488 lhs = gimple_assign_lhs (gs);
1489 base_cand = base_cand_from_table (rhs1);
1490 ctype = TREE_TYPE (lhs);
1491
1492 if (base_cand && base_cand->kind != CAND_PHI)
1493 {
1494 while (base_cand)
1495 {
1496 /* Propagate all data from the base candidate except the type,
1497 which comes from the cast, and the base candidate's cast,
1498 which is no longer applicable. */
1499 if (has_single_use (rhs1))
1500 savings = (base_cand->dead_savings
1501 + stmt_cost (base_cand->cand_stmt, speed));
1502
1503 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1504 base_cand->base_expr,
1505 base_cand->index, base_cand->stride,
1506 ctype, savings);
1507 if (base_cand->next_interp)
1508 base_cand = lookup_cand (base_cand->next_interp);
1509 else
1510 base_cand = NULL;
1511 }
1512 }
1513 else
1514 {
1515 /* If nothing is known about the RHS, create fresh CAND_ADD and
1516 CAND_MULT interpretations:
1517
1518 X = Y + (0 * 1)
1519 X = (Y + 0) * 1
1520
1521 The first of these is somewhat arbitrary, but the choice of
1522 1 for the stride simplifies the logic for propagating casts
1523 into their uses. */
1524 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1525 integer_one_node, ctype, 0);
1526 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1527 integer_one_node, ctype, 0);
1528 c->next_interp = c2->cand_num;
1529 }
1530
1531 /* Add the first (or only) interpretation to the statement-candidate
1532 mapping. */
1533 add_cand_for_stmt (gs, c);
1534 }
1535
1536 /* Given GS which is a copy of a scalar integer type, make at least one
1537 appropriate entry in the candidate table.
1538
1539 This interface is included for completeness, but is unnecessary
1540 if this pass immediately follows a pass that performs copy
1541 propagation, such as DOM. */
1542
1543 static void
1544 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1545 {
1546 slsr_cand_t base_cand, c, c2;
1547 unsigned savings = 0;
1548
1549 base_cand = base_cand_from_table (rhs1);
1550
1551 if (base_cand && base_cand->kind != CAND_PHI)
1552 {
1553 while (base_cand)
1554 {
1555 /* Propagate all data from the base candidate. */
1556 if (has_single_use (rhs1))
1557 savings = (base_cand->dead_savings
1558 + stmt_cost (base_cand->cand_stmt, speed));
1559
1560 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1561 base_cand->base_expr,
1562 base_cand->index, base_cand->stride,
1563 base_cand->cand_type, savings);
1564 if (base_cand->next_interp)
1565 base_cand = lookup_cand (base_cand->next_interp);
1566 else
1567 base_cand = NULL;
1568 }
1569 }
1570 else
1571 {
1572 /* If nothing is known about the RHS, create fresh CAND_ADD and
1573 CAND_MULT interpretations:
1574
1575 X = Y + (0 * 1)
1576 X = (Y + 0) * 1
1577
1578 The first of these is somewhat arbitrary, but the choice of
1579 1 for the stride simplifies the logic for propagating casts
1580 into their uses. */
1581 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1582 integer_one_node, TREE_TYPE (rhs1), 0);
1583 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1584 integer_one_node, TREE_TYPE (rhs1), 0);
1585 c->next_interp = c2->cand_num;
1586 }
1587
1588 /* Add the first (or only) interpretation to the statement-candidate
1589 mapping. */
1590 add_cand_for_stmt (gs, c);
1591 }
1592 \f
1593 class find_candidates_dom_walker : public dom_walker
1594 {
1595 public:
1596 find_candidates_dom_walker (cdi_direction direction)
1597 : dom_walker (direction) {}
1598 virtual void before_dom_children (basic_block);
1599 };
1600
1601 /* Find strength-reduction candidates in block BB. */
1602
1603 void
1604 find_candidates_dom_walker::before_dom_children (basic_block bb)
1605 {
1606 bool speed = optimize_bb_for_speed_p (bb);
1607 gimple_stmt_iterator gsi;
1608
1609 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1610 slsr_process_phi (gsi_stmt (gsi), speed);
1611
1612 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1613 {
1614 gimple gs = gsi_stmt (gsi);
1615
1616 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1617 slsr_process_ref (gs);
1618
1619 else if (is_gimple_assign (gs)
1620 && SCALAR_INT_MODE_P
1621 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1622 {
1623 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1624
1625 switch (gimple_assign_rhs_code (gs))
1626 {
1627 case MULT_EXPR:
1628 case PLUS_EXPR:
1629 rhs1 = gimple_assign_rhs1 (gs);
1630 rhs2 = gimple_assign_rhs2 (gs);
1631 /* Should never happen, but currently some buggy situations
1632 in earlier phases put constants in rhs1. */
1633 if (TREE_CODE (rhs1) != SSA_NAME)
1634 continue;
1635 break;
1636
1637 /* Possible future opportunity: rhs1 of a ptr+ can be
1638 an ADDR_EXPR. */
1639 case POINTER_PLUS_EXPR:
1640 case MINUS_EXPR:
1641 rhs2 = gimple_assign_rhs2 (gs);
1642 /* Fall-through. */
1643
1644 case NOP_EXPR:
1645 case MODIFY_EXPR:
1646 case NEGATE_EXPR:
1647 rhs1 = gimple_assign_rhs1 (gs);
1648 if (TREE_CODE (rhs1) != SSA_NAME)
1649 continue;
1650 break;
1651
1652 default:
1653 ;
1654 }
1655
1656 switch (gimple_assign_rhs_code (gs))
1657 {
1658 case MULT_EXPR:
1659 slsr_process_mul (gs, rhs1, rhs2, speed);
1660 break;
1661
1662 case PLUS_EXPR:
1663 case POINTER_PLUS_EXPR:
1664 case MINUS_EXPR:
1665 slsr_process_add (gs, rhs1, rhs2, speed);
1666 break;
1667
1668 case NEGATE_EXPR:
1669 slsr_process_neg (gs, rhs1, speed);
1670 break;
1671
1672 case NOP_EXPR:
1673 slsr_process_cast (gs, rhs1, speed);
1674 break;
1675
1676 case MODIFY_EXPR:
1677 slsr_process_copy (gs, rhs1, speed);
1678 break;
1679
1680 default:
1681 ;
1682 }
1683 }
1684 }
1685 }
1686 \f
1687 /* Dump a candidate for debug. */
1688
1689 static void
1690 dump_candidate (slsr_cand_t c)
1691 {
1692 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1693 gimple_bb (c->cand_stmt)->index);
1694 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1695 switch (c->kind)
1696 {
1697 case CAND_MULT:
1698 fputs (" MULT : (", dump_file);
1699 print_generic_expr (dump_file, c->base_expr, 0);
1700 fputs (" + ", dump_file);
1701 dump_double_int (dump_file, c->index, false);
1702 fputs (") * ", dump_file);
1703 print_generic_expr (dump_file, c->stride, 0);
1704 fputs (" : ", dump_file);
1705 break;
1706 case CAND_ADD:
1707 fputs (" ADD : ", dump_file);
1708 print_generic_expr (dump_file, c->base_expr, 0);
1709 fputs (" + (", dump_file);
1710 dump_double_int (dump_file, c->index, false);
1711 fputs (" * ", dump_file);
1712 print_generic_expr (dump_file, c->stride, 0);
1713 fputs (") : ", dump_file);
1714 break;
1715 case CAND_REF:
1716 fputs (" REF : ", dump_file);
1717 print_generic_expr (dump_file, c->base_expr, 0);
1718 fputs (" + (", dump_file);
1719 print_generic_expr (dump_file, c->stride, 0);
1720 fputs (") + ", dump_file);
1721 dump_double_int (dump_file, c->index, false);
1722 fputs (" : ", dump_file);
1723 break;
1724 case CAND_PHI:
1725 fputs (" PHI : ", dump_file);
1726 print_generic_expr (dump_file, c->base_expr, 0);
1727 fputs (" + (unknown * ", dump_file);
1728 print_generic_expr (dump_file, c->stride, 0);
1729 fputs (") : ", dump_file);
1730 break;
1731 default:
1732 gcc_unreachable ();
1733 }
1734 print_generic_expr (dump_file, c->cand_type, 0);
1735 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1736 c->basis, c->dependent, c->sibling);
1737 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1738 c->next_interp, c->dead_savings);
1739 if (c->def_phi)
1740 fprintf (dump_file, " phi: %d\n", c->def_phi);
1741 fputs ("\n", dump_file);
1742 }
1743
1744 /* Dump the candidate vector for debug. */
1745
1746 static void
1747 dump_cand_vec (void)
1748 {
1749 unsigned i;
1750 slsr_cand_t c;
1751
1752 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1753
1754 FOR_EACH_VEC_ELT (cand_vec, i, c)
1755 dump_candidate (c);
1756 }
1757
1758 /* Callback used to dump the candidate chains hash table. */
1759
1760 int
1761 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1762 {
1763 const_cand_chain_t chain = *slot;
1764 cand_chain_t p;
1765
1766 print_generic_expr (dump_file, chain->base_expr, 0);
1767 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1768
1769 for (p = chain->next; p; p = p->next)
1770 fprintf (dump_file, " -> %d", p->cand->cand_num);
1771
1772 fputs ("\n", dump_file);
1773 return 1;
1774 }
1775
1776 /* Dump the candidate chains. */
1777
1778 static void
1779 dump_cand_chains (void)
1780 {
1781 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1782 base_cand_map.traverse_noresize <void *, ssa_base_cand_dump_callback> (NULL);
1783 fputs ("\n", dump_file);
1784 }
1785
1786 /* Dump the increment vector for debug. */
1787
1788 static void
1789 dump_incr_vec (void)
1790 {
1791 if (dump_file && (dump_flags & TDF_DETAILS))
1792 {
1793 unsigned i;
1794
1795 fprintf (dump_file, "\nIncrement vector:\n\n");
1796
1797 for (i = 0; i < incr_vec_len; i++)
1798 {
1799 fprintf (dump_file, "%3d increment: ", i);
1800 dump_double_int (dump_file, incr_vec[i].incr, false);
1801 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1802 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1803 fputs ("\n initializer: ", dump_file);
1804 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1805 fputs ("\n\n", dump_file);
1806 }
1807 }
1808 }
1809 \f
1810 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1811 data reference. */
1812
1813 static void
1814 replace_ref (tree *expr, slsr_cand_t c)
1815 {
1816 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1817 unsigned HOST_WIDE_INT misalign;
1818 unsigned align;
1819
1820 /* Ensure the memory reference carries the minimum alignment
1821 requirement for the data type. See PR58041. */
1822 get_object_alignment_1 (*expr, &align, &misalign);
1823 if (misalign != 0)
1824 align = (misalign & -misalign);
1825 if (align < TYPE_ALIGN (acc_type))
1826 acc_type = build_aligned_type (acc_type, align);
1827
1828 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1829 c->base_expr, c->stride);
1830 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1831 double_int_to_tree (c->cand_type, c->index));
1832
1833 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1834 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1835 TREE_OPERAND (mem_ref, 0)
1836 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1837 /*simple_p=*/true, NULL,
1838 /*before=*/true, GSI_SAME_STMT);
1839 copy_ref_info (mem_ref, *expr);
1840 *expr = mem_ref;
1841 update_stmt (c->cand_stmt);
1842 }
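/* Illustration only (SSA names invented for this sketch): for a CAND_REF
   with base_expr p_1, stride t_2, and index 16, the reference is rewritten
   roughly as

       slsr_3 = p_1 + t_2;
       ... MEM[(type *)slsr_3 + 16B] ...

   i.e. a MEM_REF whose address is the gimplified POINTER_PLUS_EXPR of the
   base and stride, with the index carried as the constant offset.  */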
1843
1844 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1845 dependent of candidate C with an equivalent strength-reduced data
1846 reference. */
1847
1848 static void
1849 replace_refs (slsr_cand_t c)
1850 {
1851 if (gimple_vdef (c->cand_stmt))
1852 {
1853 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1854 replace_ref (lhs, c);
1855 }
1856 else
1857 {
1858 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1859 replace_ref (rhs, c);
1860 }
1861
1862 if (c->sibling)
1863 replace_refs (lookup_cand (c->sibling));
1864
1865 if (c->dependent)
1866 replace_refs (lookup_cand (c->dependent));
1867 }
1868
1869 /* Return TRUE if candidate C is dependent upon a PHI. */
1870
1871 static bool
1872 phi_dependent_cand_p (slsr_cand_t c)
1873 {
1874 /* A candidate is not necessarily dependent upon a PHI just because
1875 it has a phi definition for its base name. It may have a basis
1876 that relies upon the same phi definition, in which case the PHI
1877 is irrelevant to this candidate. */
1878 return (c->def_phi
1879 && c->basis
1880 && lookup_cand (c->basis)->def_phi != c->def_phi);
1881 }
1882
1883 /* Calculate the increment required for candidate C relative to
1884 its basis. */
1885
1886 static double_int
1887 cand_increment (slsr_cand_t c)
1888 {
1889 slsr_cand_t basis;
1890
1891 /* If the candidate doesn't have a basis, just return its own
1892 index. This is useful in record_increments to help us find
1893 an existing initializer. Also, if the candidate's basis is
1894 hidden by a phi, then its own index will be the increment
1895 from the newly introduced phi basis. */
1896 if (!c->basis || phi_dependent_cand_p (c))
1897 return c->index;
1898
1899 basis = lookup_cand (c->basis);
1900 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1901 return c->index - basis->index;
1902 }
1903
1904 /* Calculate the increment required for candidate C relative to
1905 its basis. If we aren't going to generate pointer arithmetic
1906 for this candidate, return the absolute value of that increment
1907 instead. */
1908
1909 static inline double_int
1910 cand_abs_increment (slsr_cand_t c)
1911 {
1912 double_int increment = cand_increment (c);
1913
1914 if (!address_arithmetic_p && increment.is_negative ())
1915 increment = -increment;
1916
1917 return increment;
1918 }
1919
1920 /* Return TRUE iff candidate C has already been replaced under
1921 another interpretation. */
1922
1923 static inline bool
1924 cand_already_replaced (slsr_cand_t c)
1925 {
1926 return (gimple_bb (c->cand_stmt) == 0);
1927 }
1928
1929 /* Common logic used by replace_unconditional_candidate and
1930 replace_conditional_candidate. */
1931
1932 static void
1933 replace_mult_candidate (slsr_cand_t c, tree basis_name, double_int bump)
1934 {
1935 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
1936 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
1937
1938 /* It is highly unlikely, but possible, that the resulting
1939 bump doesn't fit in a HWI. Abandon the replacement
1940 in this case. This does not affect siblings or dependents
1941 of C. Restriction to signed HWI is conservative for unsigned
1942 types but allows for safe negation without twisted logic. */
1943 if (bump.fits_shwi ()
1944 && bump.to_shwi () != HOST_WIDE_INT_MIN
1945 /* It is not useful to replace casts, copies, or adds of
1946 an SSA name and a constant. */
1947 && cand_code != MODIFY_EXPR
1948 && cand_code != NOP_EXPR
1949 && cand_code != PLUS_EXPR
1950 && cand_code != POINTER_PLUS_EXPR
1951 && cand_code != MINUS_EXPR)
1952 {
1953 enum tree_code code = PLUS_EXPR;
1954 tree bump_tree;
1955 gimple stmt_to_print = NULL;
1956
1957 /* If the basis name and the candidate's LHS have incompatible
1958 types, introduce a cast. */
1959 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
1960 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
1961 if (bump.is_negative ())
1962 {
1963 code = MINUS_EXPR;
1964 bump = -bump;
1965 }
1966
1967 bump_tree = double_int_to_tree (target_type, bump);
1968
1969 if (dump_file && (dump_flags & TDF_DETAILS))
1970 {
1971 fputs ("Replacing: ", dump_file);
1972 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1973 }
1974
1975 if (bump.is_zero ())
1976 {
1977 tree lhs = gimple_assign_lhs (c->cand_stmt);
1978 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
1979 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1980 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
1981 gsi_replace (&gsi, copy_stmt, false);
1982 c->cand_stmt = copy_stmt;
1983 if (dump_file && (dump_flags & TDF_DETAILS))
1984 stmt_to_print = copy_stmt;
1985 }
1986 else
1987 {
1988 tree rhs1, rhs2;
1989 if (cand_code != NEGATE_EXPR) {
1990 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
1991 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
1992 }
1993 if (cand_code != NEGATE_EXPR
1994 && ((operand_equal_p (rhs1, basis_name, 0)
1995 && operand_equal_p (rhs2, bump_tree, 0))
1996 || (operand_equal_p (rhs1, bump_tree, 0)
1997 && operand_equal_p (rhs2, basis_name, 0))))
1998 {
1999 if (dump_file && (dump_flags & TDF_DETAILS))
2000 {
2001 fputs ("(duplicate, not actually replacing)", dump_file);
2002 stmt_to_print = c->cand_stmt;
2003 }
2004 }
2005 else
2006 {
2007 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2008 gimple_assign_set_rhs_with_ops (&gsi, code,
2009 basis_name, bump_tree);
2010 update_stmt (gsi_stmt (gsi));
2011 c->cand_stmt = gsi_stmt (gsi);
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2013 stmt_to_print = gsi_stmt (gsi);
2014 }
2015 }
2016
2017 if (dump_file && (dump_flags & TDF_DETAILS))
2018 {
2019 fputs ("With: ", dump_file);
2020 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2021 fputs ("\n", dump_file);
2022 }
2023 }
2024 }
2025
2026 /* Replace candidate C with an add or subtract. Note that we only
2027 operate on CAND_MULTs with known strides, so we will never generate
2028 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2029 X = Y + ((i - i') * S), as described in the module commentary. The
2030 folded value ((i - i') * S) is referred to here as the "bump." */
2031
2032 static void
2033 replace_unconditional_candidate (slsr_cand_t c)
2034 {
2035 slsr_cand_t basis;
2036 double_int stride, bump;
2037
2038 if (cand_already_replaced (c))
2039 return;
2040
2041 basis = lookup_cand (c->basis);
2042 stride = tree_to_double_int (c->stride);
2043 bump = cand_increment (c) * stride;
2044
2045 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2046 }
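/* Worked example (values chosen for illustration): if the basis is
   y = (b + 2) * 4 and the candidate is x = (b + 7) * 4, the increment is
   7 - 2 = 5 and the bump is 5 * 4 = 20, so the candidate's multiply is
   rewritten as x = y + 20.  A negative bump is emitted as a subtract of
   its absolute value by replace_mult_candidate.  */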
2047 \f
2048 /* Return the index in the increment vector of the given INCREMENT,
2049 or -1 if not found. The latter can occur if more than
2050 MAX_INCR_VEC_LEN increments have been found. */
2051
2052 static inline int
2053 incr_vec_index (double_int increment)
2054 {
2055 unsigned i;
2056
2057 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2058 ;
2059
2060 if (i < incr_vec_len)
2061 return i;
2062 else
2063 return -1;
2064 }
2065
2066 /* Create a new statement along edge E to add BASIS_NAME to the product
2067 of INCREMENT and the stride of candidate C. Create and return a new
2068 SSA name to be used as the LHS of the new statement.
2069 KNOWN_STRIDE is true iff C's stride is a constant. */
2070
2071 static tree
2072 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2073 double_int increment, edge e, location_t loc,
2074 bool known_stride)
2075 {
2076 basic_block insert_bb;
2077 gimple_stmt_iterator gsi;
2078 tree lhs, basis_type;
2079 gimple new_stmt;
2080
2081 /* If the add candidate along this incoming edge has the same
2082 index as C's hidden basis, the hidden basis represents this
2083 edge correctly. */
2084 if (increment.is_zero ())
2085 return basis_name;
2086
2087 basis_type = TREE_TYPE (basis_name);
2088 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2089
2090 if (known_stride)
2091 {
2092 tree bump_tree;
2093 enum tree_code code = PLUS_EXPR;
2094 double_int bump = increment * tree_to_double_int (c->stride);
2095 if (bump.is_negative ())
2096 {
2097 code = MINUS_EXPR;
2098 bump = -bump;
2099 }
2100
2101 bump_tree = double_int_to_tree (basis_type, bump);
2102 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2103 bump_tree);
2104 }
2105 else
2106 {
2107 int i;
2108 bool negate_incr = (!address_arithmetic_p && increment.is_negative ());
2109 i = incr_vec_index (negate_incr ? -increment : increment);
2110 gcc_assert (i >= 0);
2111
2112 if (incr_vec[i].initializer)
2113 {
2114 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2115 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2116 incr_vec[i].initializer);
2117 }
2118 else if (increment.is_one ())
2119 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2120 c->stride);
2121 else if (increment.is_minus_one ())
2122 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2123 c->stride);
2124 else
2125 gcc_unreachable ();
2126 }
2127
2128 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2129 gsi = gsi_last_bb (insert_bb);
2130
2131 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2132 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2133 else
2134 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2135
2136 gimple_set_location (new_stmt, loc);
2137
2138 if (dump_file && (dump_flags & TDF_DETAILS))
2139 {
2140 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2141 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2142 }
2143
2144 return lhs;
2145 }
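/* Sketch of the known-stride case (names invented): for increment 3,
   stride 8, and basis name basis_5, the statement

       slsr_9 = basis_5 + 24;

   is placed at the end of E->src (or in a block created by splitting E
   when E->src has other successors), and slsr_9 is returned for use as
   the corresponding phi argument.  */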
2146
2147 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2148 is hidden by the phi node FROM_PHI, create a new phi node in the same
2149 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2150 with its phi arguments representing conditional adjustments to the
2151 hidden basis along conditional incoming paths. Those adjustments are
2152 made by creating add statements (and sometimes recursively creating
2153 phis) along those incoming paths. LOC is the location to attach to
2154 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2155 constant. */
2156
2157 static tree
2158 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2159 location_t loc, bool known_stride)
2160 {
2161 int i;
2162 tree name, phi_arg;
2163 gimple phi;
2164 vec<tree> phi_args;
2165 slsr_cand_t basis = lookup_cand (c->basis);
2166 int nargs = gimple_phi_num_args (from_phi);
2167 basic_block phi_bb = gimple_bb (from_phi);
2168 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2169 phi_args.create (nargs);
2170
2171 /* Process each argument of the existing phi that represents
2172 conditionally-executed add candidates. */
2173 for (i = 0; i < nargs; i++)
2174 {
2175 edge e = (*phi_bb->preds)[i];
2176 tree arg = gimple_phi_arg_def (from_phi, i);
2177 tree feeding_def;
2178
2179 /* If the phi argument is the base name of the CAND_PHI, then
2180 this incoming arc should use the hidden basis. */
2181 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2182 if (basis->index.is_zero ())
2183 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2184 else
2185 {
2186 double_int incr = -basis->index;
2187 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2188 e, loc, known_stride);
2189 }
2190 else
2191 {
2192 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2193
2194 /* If there is another phi along this incoming edge, we must
2195 process it in the same fashion to ensure that all basis
2196 adjustments are made along its incoming edges. */
2197 if (gimple_code (arg_def) == GIMPLE_PHI)
2198 feeding_def = create_phi_basis (c, arg_def, basis_name,
2199 loc, known_stride);
2200 else
2201 {
2202 slsr_cand_t arg_cand = base_cand_from_table (arg);
2203 double_int diff = arg_cand->index - basis->index;
2204 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2205 e, loc, known_stride);
2206 }
2207 }
2208
2209 /* Because of recursion, we need to save the arguments in a vector
2210 so we can create the PHI statement all at once. Otherwise the
2211 storage for the half-created PHI can be reclaimed. */
2212 phi_args.safe_push (feeding_def);
2213 }
2214
2215 /* Create the new phi basis. */
2216 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2217 phi = create_phi_node (name, phi_bb);
2218 SSA_NAME_DEF_STMT (name) = phi;
2219
2220 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2221 {
2222 edge e = (*phi_bb->preds)[i];
2223 add_phi_arg (phi, phi_arg, e, loc);
2224 }
2225
2226 update_stmt (phi);
2227
2228 if (dump_file && (dump_flags & TDF_DETAILS))
2229 {
2230 fputs ("Introducing new phi basis: ", dump_file);
2231 print_gimple_stmt (dump_file, phi, 0, 0);
2232 }
2233
2234 return name;
2235 }
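/* Hypothetical shape of the result (names invented; assumes the hidden
   basis has index 0 and a known stride of 4): given

       x_4 = PHI <b_1(2), a_7(3)>

   where b_1 is the phi candidate's base name and a_7 is an add candidate
   whose index exceeds the basis index by 3, this routine emits

       slsr_10 = basis_8 + 12;        <-- along the edge from block 3
       slsr_11 = PHI <basis_8(2), slsr_10(3)>

   and returns slsr_11 to serve as C's new basis.  */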
2236
2237 /* Given a candidate C whose basis is hidden by at least one intervening
2238 phi, introduce a matching number of new phis to represent its basis
2239 adjusted by conditional increments along possible incoming paths. Then
2240 replace C as though it were an unconditional candidate, using the new
2241 basis. */
2242
2243 static void
2244 replace_conditional_candidate (slsr_cand_t c)
2245 {
2246 tree basis_name, name;
2247 slsr_cand_t basis;
2248 location_t loc;
2249 double_int stride, bump;
2250
2251 /* Look up the LHS SSA name from C's basis. This will be the
2252 RHS1 of the adds we will introduce to create new phi arguments. */
2253 basis = lookup_cand (c->basis);
2254 basis_name = gimple_assign_lhs (basis->cand_stmt);
2255
2256 /* Create a new phi statement which will represent C's true basis
2257 after the transformation is complete. */
2258 loc = gimple_location (c->cand_stmt);
2259 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2260 basis_name, loc, KNOWN_STRIDE);
2261 /* Replace C with an add of the new basis phi and a constant. */
2262 stride = tree_to_double_int (c->stride);
2263 bump = c->index * stride;
2264
2265 replace_mult_candidate (c, name, bump);
2266 }
2267
2268 /* Compute the expected costs of inserting basis adjustments for
2269 candidate C with phi-definition PHI. The cost of inserting
2270 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2271 which are themselves phi results, recursively calculate costs
2272 for those phis as well. */
2273
2274 static int
2275 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2276 {
2277 unsigned i;
2278 int cost = 0;
2279 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2280
2281 /* If we work our way back to a phi that isn't dominated by the hidden
2282 basis, this isn't a candidate for replacement. Indicate this by
2283 returning an unreasonably high cost. It's not easy to detect
2284 these situations when determining the basis, so we defer the
2285 decision until now. */
2286 basic_block phi_bb = gimple_bb (phi);
2287 slsr_cand_t basis = lookup_cand (c->basis);
2288 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2289
2290 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2291 return COST_INFINITE;
2292
2293 for (i = 0; i < gimple_phi_num_args (phi); i++)
2294 {
2295 tree arg = gimple_phi_arg_def (phi, i);
2296
2297 if (arg != phi_cand->base_expr)
2298 {
2299 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2300
2301 if (gimple_code (arg_def) == GIMPLE_PHI)
2302 cost += phi_add_costs (arg_def, c, one_add_cost);
2303 else
2304 {
2305 slsr_cand_t arg_cand = base_cand_from_table (arg);
2306
2307 if (arg_cand->index != c->index)
2308 cost += one_add_cost;
2309 }
2310 }
2311 }
2312
2313 return cost;
2314 }
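/* For instance, with ONE_ADD_COST of 4, a two-argument phi whose arguments
   both name add candidates with indexes different from C's accumulates a
   cost of 4 + 4 = 8; an argument that is the phi's base name, or whose
   candidate already has C's index, contributes nothing.  A phi in the
   basis block or not dominated by it short-circuits to COST_INFINITE.  */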
2315
2316 /* For candidate C, each sibling of candidate C, and each dependent of
2317 candidate C, determine whether the candidate is dependent upon a
2318 phi that hides its basis. If not, replace the candidate unconditionally.
2319 Otherwise, determine whether the cost of introducing compensation code
2320 for the candidate is offset by the gains from strength reduction. If
2321 so, replace the candidate and introduce the compensation code. */
2322
2323 static void
2324 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2325 {
2326 if (phi_dependent_cand_p (c))
2327 {
2328 if (c->kind == CAND_MULT)
2329 {
2330 /* A candidate dependent upon a phi will replace a multiply by
2331 a constant with an add, and will insert at most one add for
2332 each phi argument. Add these costs with the potential dead-code
2333 savings to determine profitability. */
2334 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2335 int mult_savings = stmt_cost (c->cand_stmt, speed);
2336 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2337 tree phi_result = gimple_phi_result (phi);
2338 int one_add_cost = add_cost (speed,
2339 TYPE_MODE (TREE_TYPE (phi_result)));
2340 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2341 int cost = add_costs - mult_savings - c->dead_savings;
2342
2343 if (dump_file && (dump_flags & TDF_DETAILS))
2344 {
2345 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2346 fprintf (dump_file, " add_costs = %d\n", add_costs);
2347 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2348 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2349 fprintf (dump_file, " cost = %d\n", cost);
2350 if (cost <= COST_NEUTRAL)
2351 fputs (" Replacing...\n", dump_file);
2352 else
2353 fputs (" Not replaced.\n", dump_file);
2354 }
2355
2356 if (cost <= COST_NEUTRAL)
2357 replace_conditional_candidate (c);
2358 }
2359 }
2360 else
2361 replace_unconditional_candidate (c);
2362
2363 if (c->sibling)
2364 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2365
2366 if (c->dependent)
2367 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2368 }
2369 \f
2370 /* Count the number of candidates in the tree rooted at C that have
2371 not already been replaced under other interpretations. */
2372
2373 static int
2374 count_candidates (slsr_cand_t c)
2375 {
2376 unsigned count = cand_already_replaced (c) ? 0 : 1;
2377
2378 if (c->sibling)
2379 count += count_candidates (lookup_cand (c->sibling));
2380
2381 if (c->dependent)
2382 count += count_candidates (lookup_cand (c->dependent));
2383
2384 return count;
2385 }
2386
2387 /* Increase the count of INCREMENT by one in the increment vector.
2388 INCREMENT is associated with candidate C. If INCREMENT is to be
2389 conditionally executed as part of a conditional candidate replacement,
2390 IS_PHI_ADJUST is true, otherwise false. If an initializer
2391 T_0 = stride * I is provided by a candidate that dominates all
2392 candidates with the same increment, also record T_0 for subsequent use. */
2393
2394 static void
2395 record_increment (slsr_cand_t c, double_int increment, bool is_phi_adjust)
2396 {
2397 bool found = false;
2398 unsigned i;
2399
2400 /* Treat increments that differ only in sign as identical so as to
2401 share initializers, unless we are generating pointer arithmetic. */
2402 if (!address_arithmetic_p && increment.is_negative ())
2403 increment = -increment;
2404
2405 for (i = 0; i < incr_vec_len; i++)
2406 {
2407 if (incr_vec[i].incr == increment)
2408 {
2409 incr_vec[i].count++;
2410 found = true;
2411
2412 /* If we previously recorded an initializer that doesn't
2413 dominate this candidate, it's not going to be useful to
2414 us after all. */
2415 if (incr_vec[i].initializer
2416 && !dominated_by_p (CDI_DOMINATORS,
2417 gimple_bb (c->cand_stmt),
2418 incr_vec[i].init_bb))
2419 {
2420 incr_vec[i].initializer = NULL_TREE;
2421 incr_vec[i].init_bb = NULL;
2422 }
2423
2424 break;
2425 }
2426 }
2427
2428 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2429 {
2430 /* The first time we see an increment, create the entry for it.
2431 If this is the root candidate which doesn't have a basis, set
2432 the count to zero. We're only processing it so it can possibly
2433 provide an initializer for other candidates. */
2434 incr_vec[incr_vec_len].incr = increment;
2435 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2436 incr_vec[incr_vec_len].cost = COST_INFINITE;
2437
2438 /* Optimistically record the first occurrence of this increment
2439 as providing an initializer (if it does); we will revise this
2440 opinion later if it doesn't dominate all other occurrences.
2441 Exception: increments of -1, 0, 1 never need initializers;
2442 and phi adjustments don't ever provide initializers. */
2443 if (c->kind == CAND_ADD
2444 && !is_phi_adjust
2445 && c->index == increment
2446 && (increment.sgt (double_int_one)
2447 || increment.slt (double_int_minus_one))
2448 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2449 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2450 {
2451 tree t0 = NULL_TREE;
2452 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2453 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2454 if (operand_equal_p (rhs1, c->base_expr, 0))
2455 t0 = rhs2;
2456 else if (operand_equal_p (rhs2, c->base_expr, 0))
2457 t0 = rhs1;
2458 if (t0
2459 && SSA_NAME_DEF_STMT (t0)
2460 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2461 {
2462 incr_vec[incr_vec_len].initializer = t0;
2463 incr_vec[incr_vec_len++].init_bb
2464 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2465 }
2466 else
2467 {
2468 incr_vec[incr_vec_len].initializer = NULL_TREE;
2469 incr_vec[incr_vec_len++].init_bb = NULL;
2470 }
2471 }
2472 else
2473 {
2474 incr_vec[incr_vec_len].initializer = NULL_TREE;
2475 incr_vec[incr_vec_len++].init_bb = NULL;
2476 }
2477 }
2478 }
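/* Example of the initializer bookkeeping (names invented): for a CAND_ADD
   recorded from

       t_6 = x_3 + s_5;    ADD: x_3 + (10 * _2)

   whose index (10) matches the increment being recorded, s_5 is noted as
   a possible initializer T_0 = stride * 10, provided s_5 has a defining
   statement in the CFG.  If a later candidate with the same increment is
   not dominated by that definition, the initializer is discarded again.  */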
2479
2480 /* Given phi statement PHI that hides a candidate from its BASIS, find
2481 the increments along each incoming arc (recursively handling additional
2482 phis that may be present) and record them. These increments are the
2483 difference in index between the index-adjusting statements and the
2484 index of the basis. */
2485
2486 static void
2487 record_phi_increments (slsr_cand_t basis, gimple phi)
2488 {
2489 unsigned i;
2490 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2491
2492 for (i = 0; i < gimple_phi_num_args (phi); i++)
2493 {
2494 tree arg = gimple_phi_arg_def (phi, i);
2495
2496 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2497 {
2498 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2499
2500 if (gimple_code (arg_def) == GIMPLE_PHI)
2501 record_phi_increments (basis, arg_def);
2502 else
2503 {
2504 slsr_cand_t arg_cand = base_cand_from_table (arg);
2505 double_int diff = arg_cand->index - basis->index;
2506 record_increment (arg_cand, diff, PHI_ADJUST);
2507 }
2508 }
2509 }
2510 }
2511
2512 /* Determine how many times each unique increment occurs in the set
2513 of candidates rooted at C's parent, recording the data in the
2514 increment vector. For each unique increment I, if an initializer
2515 T_0 = stride * I is provided by a candidate that dominates all
2516 candidates with the same increment, also record T_0 for subsequent
2517 use. */
2518
2519 static void
2520 record_increments (slsr_cand_t c)
2521 {
2522 if (!cand_already_replaced (c))
2523 {
2524 if (!phi_dependent_cand_p (c))
2525 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2526 else
2527 {
2528 /* A candidate with a basis hidden by a phi will have one
2529 increment for its relationship to the index represented by
2530 the phi, and potentially additional increments along each
2531 incoming edge. For the root of the dependency tree (which
2532 has no basis), process just the initial index in case it has
2533 an initializer that can be used by subsequent candidates. */
2534 record_increment (c, c->index, NOT_PHI_ADJUST);
2535
2536 if (c->basis)
2537 record_phi_increments (lookup_cand (c->basis),
2538 lookup_cand (c->def_phi)->cand_stmt);
2539 }
2540 }
2541
2542 if (c->sibling)
2543 record_increments (lookup_cand (c->sibling));
2544
2545 if (c->dependent)
2546 record_increments (lookup_cand (c->dependent));
2547 }
2548
2549 /* Add up and return the costs of introducing add statements that
2550 require the increment INCR on behalf of candidate C and phi
2551 statement PHI. Accumulate into *SAVINGS the potential savings
2552 from removing existing statements that feed PHI and have no other
2553 uses. */
2554
2555 static int
2556 phi_incr_cost (slsr_cand_t c, double_int incr, gimple phi, int *savings)
2557 {
2558 unsigned i;
2559 int cost = 0;
2560 slsr_cand_t basis = lookup_cand (c->basis);
2561 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2562
2563 for (i = 0; i < gimple_phi_num_args (phi); i++)
2564 {
2565 tree arg = gimple_phi_arg_def (phi, i);
2566
2567 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2568 {
2569 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2570
2571 if (gimple_code (arg_def) == GIMPLE_PHI)
2572 {
2573 int feeding_savings = 0;
2574 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2575 if (has_single_use (gimple_phi_result (arg_def)))
2576 *savings += feeding_savings;
2577 }
2578 else
2579 {
2580 slsr_cand_t arg_cand = base_cand_from_table (arg);
2581 double_int diff = arg_cand->index - basis->index;
2582
2583 if (incr == diff)
2584 {
2585 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2586 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2587 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2588 if (has_single_use (lhs))
2589 *savings += stmt_cost (arg_cand->cand_stmt, true);
2590 }
2591 }
2592 }
2593 }
2594
2595 return cost;
2596 }
2597
2598 /* Return the first candidate in the tree rooted at C that has not
2599 already been replaced, favoring siblings over dependents. */
2600
2601 static slsr_cand_t
2602 unreplaced_cand_in_tree (slsr_cand_t c)
2603 {
2604 if (!cand_already_replaced (c))
2605 return c;
2606
2607 if (c->sibling)
2608 {
2609 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2610 if (sib)
2611 return sib;
2612 }
2613
2614 if (c->dependent)
2615 {
2616 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2617 if (dep)
2618 return dep;
2619 }
2620
2621 return NULL;
2622 }
2623
2624 /* Return TRUE if the candidates in the tree rooted at C should be
2625 optimized for speed, else FALSE. We estimate this based on the block
2626 containing the most dominant candidate in the tree that has not yet
2627 been replaced. */
2628
2629 static bool
2630 optimize_cands_for_speed_p (slsr_cand_t c)
2631 {
2632 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2633 gcc_assert (c2);
2634 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2635 }
2636
2637 /* Add COST_IN to the lowest cost of any dependent path starting at
2638 candidate C or any of its siblings, counting only candidates along
2639 such paths with increment INCR. Assume that replacing a candidate
2640 reduces cost by REPL_SAVINGS. Also account for savings from any
2641 statements that would go dead. If COUNT_PHIS is true, include
2642 costs of introducing feeding statements for conditional candidates. */
2643
2644 static int
2645 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2646 double_int incr, bool count_phis)
2647 {
2648 int local_cost, sib_cost, savings = 0;
2649 double_int cand_incr = cand_abs_increment (c);
2650
2651 if (cand_already_replaced (c))
2652 local_cost = cost_in;
2653 else if (incr == cand_incr)
2654 local_cost = cost_in - repl_savings - c->dead_savings;
2655 else
2656 local_cost = cost_in - c->dead_savings;
2657
2658 if (count_phis
2659 && phi_dependent_cand_p (c)
2660 && !cand_already_replaced (c))
2661 {
2662 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2663 local_cost += phi_incr_cost (c, incr, phi, &savings);
2664
2665 if (has_single_use (gimple_phi_result (phi)))
2666 local_cost -= savings;
2667 }
2668
2669 if (c->dependent)
2670 local_cost = lowest_cost_path (local_cost, repl_savings,
2671 lookup_cand (c->dependent), incr,
2672 count_phis);
2673
2674 if (c->sibling)
2675 {
2676 sib_cost = lowest_cost_path (cost_in, repl_savings,
2677 lookup_cand (c->sibling), incr,
2678 count_phis);
2679 local_cost = MIN (local_cost, sib_cost);
2680 }
2681
2682 return local_cost;
2683 }
2684
2685 /* Compute the total savings that would accrue from all replacements
2686 in the candidate tree rooted at C, counting only candidates with
2687 increment INCR. Assume that replacing a candidate reduces cost
2688 by REPL_SAVINGS. Also account for savings from statements that
2689 would go dead. */
2690
2691 static int
2692 total_savings (int repl_savings, slsr_cand_t c, double_int incr,
2693 bool count_phis)
2694 {
2695 int savings = 0;
2696 double_int cand_incr = cand_abs_increment (c);
2697
2698 if (incr == cand_incr && !cand_already_replaced (c))
2699 savings += repl_savings + c->dead_savings;
2700
2701 if (count_phis
2702 && phi_dependent_cand_p (c)
2703 && !cand_already_replaced (c))
2704 {
2705 int phi_savings = 0;
2706 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2707 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2708
2709 if (has_single_use (gimple_phi_result (phi)))
2710 savings += phi_savings;
2711 }
2712
2713 if (c->dependent)
2714 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2715 count_phis);
2716
2717 if (c->sibling)
2718 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2719 count_phis);
2720
2721 return savings;
2722 }
2723
2724 /* Use target-specific costs to determine and record which increments
2725 in the current candidate tree are profitable to replace, assuming
2726 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2727 the candidate tree.
2728
2729 One slight limitation here is that we don't account for the possible
2730 introduction of casts in some cases. See replace_one_candidate for
2731 the cases where these are introduced. This should probably be cleaned
2732 up sometime. */
2733
2734 static void
2735 analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
2736 {
2737 unsigned i;
2738
2739 for (i = 0; i < incr_vec_len; i++)
2740 {
2741 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2742
2743 /* If somehow this increment is bigger than a HWI, we won't
2744 be optimizing candidates that use it. And if the increment
2745 has a count of zero, nothing will be done with it. */
2746 if (!incr_vec[i].incr.fits_shwi () || !incr_vec[i].count)
2747 incr_vec[i].cost = COST_INFINITE;
2748
2749 /* Increments of 0, 1, and -1 are always profitable to replace,
2750 because they always replace a multiply or add with an add or
2751 copy, and may cause one or more existing instructions to go
2752 dead. Exception: -1 can't be assumed to be profitable for
2753 pointer addition. */
2754 else if (incr == 0
2755 || incr == 1
2756 || (incr == -1
2757 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2758 != POINTER_PLUS_EXPR)))
2759 incr_vec[i].cost = COST_NEUTRAL;
2760
2761 /* FORNOW: If we need to add an initializer, give up if a cast from
2762 the candidate's type to its stride's type can lose precision.
2763 This could eventually be handled better by expressly retaining the
2764 result of a cast to a wider type in the stride. Example:
2765
2766 short int _1;
2767 _2 = (int) _1;
2768 _3 = _2 * 10;
2769 _4 = x + _3; ADD: x + (10 * _1) : int
2770 _5 = _2 * 15;
2771 _6 = x + _5; ADD: x + (15 * _1) : int
2772
2773 Right now replacing _6 would cause insertion of an initializer
2774 of the form "short int T = _1 * 5;" followed by a cast to
2775 int, which could overflow incorrectly. Had we recorded _2 or
2776 (int)_1 as the stride, this wouldn't happen. However, doing
2777 this breaks other opportunities, so this will require some
2778 care. */
2779 else if (!incr_vec[i].initializer
2780 && TREE_CODE (first_dep->stride) != INTEGER_CST
2781 && !legal_cast_p_1 (first_dep->stride,
2782 gimple_assign_lhs (first_dep->cand_stmt)))
2783
2784 incr_vec[i].cost = COST_INFINITE;
2785
2786 /* If we need to add an initializer, make sure we don't introduce
2787 a multiply by a pointer type, which can happen in certain cast
2788 scenarios. FIXME: When cleaning up these cast issues, we can
2789 afford to introduce the multiply provided we cast out to an
2790 unsigned int of appropriate size. */
2791 else if (!incr_vec[i].initializer
2792 && TREE_CODE (first_dep->stride) != INTEGER_CST
2793 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2794
2795 incr_vec[i].cost = COST_INFINITE;
2796
2797 /* For any other increment, if this is a multiply candidate, we
2798 must introduce a temporary T and initialize it with
2799 T_0 = stride * increment. When optimizing for speed, walk the
2800 candidate tree to calculate the best cost reduction along any
2801 path; if it offsets the fixed cost of inserting the initializer,
2802 replacing the increment is profitable. When optimizing for
2803 size, instead calculate the total cost reduction from replacing
2804 all candidates with this increment. */
2805 else if (first_dep->kind == CAND_MULT)
2806 {
2807 int cost = mult_by_coeff_cost (incr, mode, speed);
2808 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2809 if (speed)
2810 cost = lowest_cost_path (cost, repl_savings, first_dep,
2811 incr_vec[i].incr, COUNT_PHIS);
2812 else
2813 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2814 COUNT_PHIS);
2815
2816 incr_vec[i].cost = cost;
2817 }
2818
2819 /* If this is an add candidate, the initializer may already
2820 exist, so only calculate the cost of the initializer if it
2821 doesn't. We are replacing one add with another here, so the
2822 known replacement savings is zero. We will account for removal
2823 of dead instructions in lowest_cost_path or total_savings. */
2824 else
2825 {
2826 int cost = 0;
2827 if (!incr_vec[i].initializer)
2828 cost = mult_by_coeff_cost (incr, mode, speed);
2829
2830 if (speed)
2831 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2832 DONT_COUNT_PHIS);
2833 else
2834 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2835 DONT_COUNT_PHIS);
2836
2837 incr_vec[i].cost = cost;
2838 }
2839 }
2840 }
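/* Worked example for the CAND_MULT branch (cost numbers invented for
   illustration): with incr = 5, mult_by_coeff_cost = 4, mul_cost = 16,
   and add_cost = 4, the initializer costs 4 and each replacement saves
   repl_savings = 16 - 4 = 12.  Optimizing for speed, a dependent path
   containing two candidates with this increment yields 4 - 12 - 12 = -20,
   well under COST_NEUTRAL, so the increment is marked profitable.  */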
2841
2842 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2843 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2844 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2845 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2846 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2847
2848 static basic_block
2849 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2850 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2851 {
2852 basic_block ncd;
2853
2854 if (!bb1)
2855 {
2856 *where = c2;
2857 return bb2;
2858 }
2859
2860 if (!bb2)
2861 {
2862 *where = c1;
2863 return bb1;
2864 }
2865
2866 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2867
2868 /* If both candidates are in the same block, the earlier
2869 candidate wins. */
2870 if (bb1 == ncd && bb2 == ncd)
2871 {
2872 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2873 *where = c2;
2874 else
2875 *where = c1;
2876 }
2877
2878 /* Otherwise, if one of them produced a candidate in the
2879 dominator, that one wins. */
2880 else if (bb1 == ncd)
2881 *where = c1;
2882
2883 else if (bb2 == ncd)
2884 *where = c2;
2885
2886 /* If neither matches the dominator, neither wins. */
2887 else
2888 *where = NULL;
2889
2890 return ncd;
2891 }
2892
2893 /* Consider all candidates that feed PHI. Find the nearest common
2894 dominator of those candidates requiring the given increment INCR.
2895 Further find and return the nearest common dominator of this result
2896 with block NCD. If the returned block contains one or more of the
2897 candidates, return the earliest candidate in the block in *WHERE. */
2898
2899 static basic_block
2900 ncd_with_phi (slsr_cand_t c, double_int incr, gimple phi,
2901 basic_block ncd, slsr_cand_t *where)
2902 {
2903 unsigned i;
2904 slsr_cand_t basis = lookup_cand (c->basis);
2905 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2906
2907 for (i = 0; i < gimple_phi_num_args (phi); i++)
2908 {
2909 tree arg = gimple_phi_arg_def (phi, i);
2910
2911 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2912 {
2913 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2914
2915 if (gimple_code (arg_def) == GIMPLE_PHI)
2916 ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
2917 else
2918 {
2919 slsr_cand_t arg_cand = base_cand_from_table (arg);
2920 double_int diff = arg_cand->index - basis->index;
2921
2922 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2923 ncd = ncd_for_two_cands (ncd, gimple_bb (arg_cand->cand_stmt),
2924 *where, arg_cand, where);
2925 }
2926 }
2927 }
2928
2929 return ncd;
2930 }
2931
2932 /* Consider the candidate C together with any candidates that feed
2933 C's phi dependence (if any). Find and return the nearest common
2934 dominator of those candidates requiring the given increment INCR.
2935 If the returned block contains one or more of the candidates,
2936 return the earliest candidate in the block in *WHERE. */
2937
2938 static basic_block
2939 ncd_of_cand_and_phis (slsr_cand_t c, double_int incr, slsr_cand_t *where)
2940 {
2941 basic_block ncd = NULL;
2942
2943 if (cand_abs_increment (c) == incr)
2944 {
2945 ncd = gimple_bb (c->cand_stmt);
2946 *where = c;
2947 }
2948
2949 if (phi_dependent_cand_p (c))
2950 ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
2951 ncd, where);
2952
2953 return ncd;
2954 }
2955
2956 /* Consider all candidates in the tree rooted at C for which INCR
2957 represents the required increment of C relative to its basis.
2958 Find and return the basic block that most nearly dominates all
2959 such candidates. If the returned block contains one or more of
2960 the candidates, return the earliest candidate in the block in
2961 *WHERE. */
2962
2963 static basic_block
2964 nearest_common_dominator_for_cands (slsr_cand_t c, double_int incr,
2965 slsr_cand_t *where)
2966 {
2967 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
2968 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
2969
2970 /* First find the NCD of all siblings and dependents. */
2971 if (c->sibling)
2972 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
2973 incr, &sib_where);
2974 if (c->dependent)
2975 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
2976 incr, &dep_where);
2977 if (!sib_ncd && !dep_ncd)
2978 {
2979 new_where = NULL;
2980 ncd = NULL;
2981 }
2982 else if (sib_ncd && !dep_ncd)
2983 {
2984 new_where = sib_where;
2985 ncd = sib_ncd;
2986 }
2987 else if (dep_ncd && !sib_ncd)
2988 {
2989 new_where = dep_where;
2990 ncd = dep_ncd;
2991 }
2992 else
2993 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
2994 dep_where, &new_where);
2995
2996 /* If the candidate's increment doesn't match the one we're interested
2997 in (and neither do any increments for feeding defs of a phi-dependence),
2998 then the result depends only on siblings and dependents. */
2999 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3000
3001 if (!this_ncd || cand_already_replaced (c))
3002 {
3003 *where = new_where;
3004 return ncd;
3005 }
3006
3007 /* Otherwise, compare this candidate with the result from all siblings
3008 and dependents. */
3009 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3010
3011 return ncd;
3012 }
3013
3014 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3015
3016 static inline bool
3017 profitable_increment_p (unsigned index)
3018 {
3019 return (incr_vec[index].cost <= COST_NEUTRAL);
3020 }
3021
3022 /* For each profitable increment in the increment vector not equal to
3023 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3024 dominator of all statements in the candidate chain rooted at C
3025 that require that increment, and insert an initializer
3026 T_0 = stride * increment at that location. Record T_0 with the
3027 increment record. */
3028
3029 static void
3030 insert_initializers (slsr_cand_t c)
3031 {
3032 unsigned i;
3033
3034 for (i = 0; i < incr_vec_len; i++)
3035 {
3036 basic_block bb;
3037 slsr_cand_t where = NULL;
3038 gimple init_stmt;
3039 tree stride_type, new_name, incr_tree;
3040 double_int incr = incr_vec[i].incr;
3041
3042 if (!profitable_increment_p (i)
3043 || incr.is_one ()
3044 || (incr.is_minus_one ()
3045 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3046 || incr.is_zero ())
3047 continue;
3048
3049 /* We may have already identified an existing initializer that
3050 will suffice. */
3051 if (incr_vec[i].initializer)
3052 {
3053 if (dump_file && (dump_flags & TDF_DETAILS))
3054 {
3055 fputs ("Using existing initializer: ", dump_file);
3056 print_gimple_stmt (dump_file,
3057 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3058 0, 0);
3059 }
3060 continue;
3061 }
3062
3063 /* Find the block that most closely dominates all candidates
3064 with this increment. If there is at least one candidate in
3065 that block, the earliest one will be returned in WHERE. */
3066 bb = nearest_common_dominator_for_cands (c, incr, &where);
3067
3068 /* Create a new SSA name to hold the initializer's value. */
3069 stride_type = TREE_TYPE (c->stride);
3070 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3071 incr_vec[i].initializer = new_name;
3072
3073 /* Create the initializer and insert it in the latest possible
3074 dominating position. */
3075 incr_tree = double_int_to_tree (stride_type, incr);
3076 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3077 c->stride, incr_tree);
3078 if (where)
3079 {
3080 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3081 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3082 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3083 }
3084 else
3085 {
3086 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3087 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3088
3089 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3090 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3091 else
3092 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3093
3094 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3095 }
3096
3097 if (dump_file && (dump_flags & TDF_DETAILS))
3098 {
3099 fputs ("Inserting initializer: ", dump_file);
3100 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3101 }
3102 }
3103 }
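/* Typical inserted form (names invented): for a profitable increment of 5
   with SSA stride s_7, an initializer

       slsr_12 = s_7 * 5;

   is placed immediately before the earliest candidate in the nearest
   common dominator, or at the end of that block if it contains no
   candidate, and slsr_12 is recorded for use by replace_one_candidate.  */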
3104
3105 /* Return TRUE iff all required increments for candidates feeding PHI
3106 are profitable to replace on behalf of candidate C. */
3107
3108 static bool
3109 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3110 {
3111 unsigned i;
3112 slsr_cand_t basis = lookup_cand (c->basis);
3113 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3114
3115 for (i = 0; i < gimple_phi_num_args (phi); i++)
3116 {
3117 tree arg = gimple_phi_arg_def (phi, i);
3118
3119 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3120 {
3121 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3122
3123 if (gimple_code (arg_def) == GIMPLE_PHI)
3124 {
3125 if (!all_phi_incrs_profitable (c, arg_def))
3126 return false;
3127 }
3128 else
3129 {
3130 int j;
3131 slsr_cand_t arg_cand = base_cand_from_table (arg);
3132 double_int increment = arg_cand->index - basis->index;
3133
3134 if (!address_arithmetic_p && increment.is_negative ())
3135 increment = -increment;
3136
3137 j = incr_vec_index (increment);
3138
3139 if (dump_file && (dump_flags & TDF_DETAILS))
3140 {
3141 fprintf (dump_file, " Conditional candidate %d, phi: ",
3142 c->cand_num);
3143 print_gimple_stmt (dump_file, phi, 0, 0);
3144 fputs (" increment: ", dump_file);
3145 dump_double_int (dump_file, increment, false);
3146 if (j < 0)
3147 fprintf (dump_file,
3148 "\n Not replaced; incr_vec overflow.\n");
3149 else {
3150 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3151 if (profitable_increment_p (j))
3152 fputs (" Replacing...\n", dump_file);
3153 else
3154 fputs (" Not replaced.\n", dump_file);
3155 }
3156 }
3157
3158 if (j < 0 || !profitable_increment_p (j))
3159 return false;
3160 }
3161 }
3162 }
3163
3164 return true;
3165 }
3166
3167 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3168 type TO_TYPE, and insert it in front of the statement represented
3169 by candidate C. Return
3170 the new SSA name. */
3171
3172 static tree
3173 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3174 {
3175 tree cast_lhs;
3176 gimple cast_stmt;
3177 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3178
3179 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3180 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
3181 from_expr, NULL_TREE);
3182 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3183 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3184
3185 if (dump_file && (dump_flags & TDF_DETAILS))
3186 {
3187 fputs (" Inserting: ", dump_file);
3188 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3189 }
3190
3191 return cast_lhs;
3192 }
3193
3194 /* Replace the RHS of the statement represented by candidate C with
3195 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3196 leave C unchanged or just interchange its operands. The original
3197 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3198 If the replacement was made and we are doing a details dump,
3199 return the revised statement, else NULL. */
3200
3201 static gimple
3202 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3203 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3204 slsr_cand_t c)
3205 {
3206 if (new_code != old_code
3207 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3208 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3209 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3210 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3211 {
3212 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3213 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3214 update_stmt (gsi_stmt (gsi));
3215 c->cand_stmt = gsi_stmt (gsi);
3216
3217 if (dump_file && (dump_flags & TDF_DETAILS))
3218 return gsi_stmt (gsi);
3219 }
3220
3221 else if (dump_file && (dump_flags & TDF_DETAILS))
3222 fputs (" (duplicate, not actually replacing)\n", dump_file);
3223
3224 return NULL;
3225 }
3226
3227 /* Strength-reduce the statement represented by candidate C by replacing
3228 it with an equivalent addition or subtraction. I is the index into
3229 the increment vector identifying C's increment. NEW_VAR is used to
3230 create a new SSA name if a cast needs to be introduced. BASIS_NAME
3231 is the rhs1 to use in creating the add/subtract. */
3232
3233 static void
3234 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3235 {
3236 gimple stmt_to_print = NULL;
3237 tree orig_rhs1, orig_rhs2;
3238 tree rhs2;
3239 enum tree_code orig_code, repl_code;
3240 double_int cand_incr;
3241
3242 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3243 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3244 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3245 cand_incr = cand_increment (c);
3246
3247 if (dump_file && (dump_flags & TDF_DETAILS))
3248 {
3249 fputs ("Replacing: ", dump_file);
3250 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3251 stmt_to_print = c->cand_stmt;
3252 }
3253
3254 if (address_arithmetic_p)
3255 repl_code = POINTER_PLUS_EXPR;
3256 else
3257 repl_code = PLUS_EXPR;
3258
3259 /* If the increment has an initializer T_0, replace the candidate
3260 statement with an add of the basis name and the initializer. */
3261 if (incr_vec[i].initializer)
3262 {
3263 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3264 tree orig_type = TREE_TYPE (orig_rhs2);
3265
3266 if (types_compatible_p (orig_type, init_type))
3267 rhs2 = incr_vec[i].initializer;
3268 else
3269 rhs2 = introduce_cast_before_cand (c, orig_type,
3270 incr_vec[i].initializer);
3271
3272 if (incr_vec[i].incr != cand_incr)
3273 {
3274 gcc_assert (repl_code == PLUS_EXPR);
3275 repl_code = MINUS_EXPR;
3276 }
3277
3278 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3279 orig_code, orig_rhs1, orig_rhs2,
3280 c);
3281 }
3282
3283 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3284 with a subtract of the stride from the basis name, a copy
3285 from the basis name, or an add of the stride to the basis
3286 name, respectively. It may be necessary to introduce a
3287 cast (or reuse an existing cast). */
3288 else if (cand_incr.is_one ())
3289 {
3290 tree stride_type = TREE_TYPE (c->stride);
3291 tree orig_type = TREE_TYPE (orig_rhs2);
3292
3293 if (types_compatible_p (orig_type, stride_type))
3294 rhs2 = c->stride;
3295 else
3296 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3297
3298 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3299 orig_code, orig_rhs1, orig_rhs2,
3300 c);
3301 }
3302
3303 else if (cand_incr.is_minus_one ())
3304 {
3305 tree stride_type = TREE_TYPE (c->stride);
3306 tree orig_type = TREE_TYPE (orig_rhs2);
3307 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3308
3309 if (types_compatible_p (orig_type, stride_type))
3310 rhs2 = c->stride;
3311 else
3312 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3313
3314 if (orig_code != MINUS_EXPR
3315 || !operand_equal_p (basis_name, orig_rhs1, 0)
3316 || !operand_equal_p (rhs2, orig_rhs2, 0))
3317 {
3318 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3319 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3320 update_stmt (gsi_stmt (gsi));
3321 c->cand_stmt = gsi_stmt (gsi);
3322
3323 if (dump_file && (dump_flags & TDF_DETAILS))
3324 stmt_to_print = gsi_stmt (gsi);
3325 }
3326 else if (dump_file && (dump_flags & TDF_DETAILS))
3327 fputs (" (duplicate, not actually replacing)\n", dump_file);
3328 }
3329
3330 else if (cand_incr.is_zero ())
3331 {
3332 tree lhs = gimple_assign_lhs (c->cand_stmt);
3333 tree lhs_type = TREE_TYPE (lhs);
3334 tree basis_type = TREE_TYPE (basis_name);
3335
3336 if (types_compatible_p (lhs_type, basis_type))
3337 {
3338 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
3339 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3340 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3341 gsi_replace (&gsi, copy_stmt, false);
3342 c->cand_stmt = copy_stmt;
3343
3344 if (dump_file && (dump_flags & TDF_DETAILS))
3345 stmt_to_print = copy_stmt;
3346 }
3347 else
3348 {
3349 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3350 gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
3351 basis_name,
3352 NULL_TREE);
3353 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3354 gsi_replace (&gsi, cast_stmt, false);
3355 c->cand_stmt = cast_stmt;
3356
3357 if (dump_file && (dump_flags & TDF_DETAILS))
3358 stmt_to_print = cast_stmt;
3359 }
3360 }
3361 else
3362 gcc_unreachable ();
3363
3364 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3365 {
3366 fputs ("With: ", dump_file);
3367 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3368 fputs ("\n", dump_file);
3369 }
3370 }
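/* Summary by example (names invented): with basis name y_2 and stride s_3,
   an increment of 1 yields x_5 = y_2 + s_3, an increment of -1 yields
   x_5 = y_2 - s_3, an increment of 0 yields x_5 = y_2 (or a cast of y_2
   when the types differ), and any other increment uses the recorded
   initializer, e.g. x_5 = y_2 + slsr_12, with a subtract when the recorded
   increment is the negation of C's.  A cast of the stride or initializer
   is introduced first when operand types are incompatible.  */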
3371
3372 /* For each candidate in the tree rooted at C, replace it with
3373 an increment if such has been shown to be profitable. */
3374
3375 static void
3376 replace_profitable_candidates (slsr_cand_t c)
3377 {
3378 if (!cand_already_replaced (c))
3379 {
3380 double_int increment = cand_abs_increment (c);
3381 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3382 int i;
3383
3384 i = incr_vec_index (increment);
3385
3386 /* Only process profitable increments. Nothing useful can be done
3387 to a cast or copy. */
3388 if (i >= 0
3389 && profitable_increment_p (i)
3390 && orig_code != MODIFY_EXPR
3391 && orig_code != NOP_EXPR)
3392 {
3393 if (phi_dependent_cand_p (c))
3394 {
3395 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3396
3397 if (all_phi_incrs_profitable (c, phi))
3398 {
3399 /* Look up the LHS SSA name from C's basis. This will be
3400 the RHS1 of the adds we will introduce to create new
3401 phi arguments. */
3402 slsr_cand_t basis = lookup_cand (c->basis);
3403 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3404
3405 /* Create a new phi statement that will represent C's true
3406 basis after the transformation is complete. */
3407 location_t loc = gimple_location (c->cand_stmt);
3408 tree name = create_phi_basis (c, phi, basis_name,
3409 loc, UNKNOWN_STRIDE);
3410
3411 /* Replace C with an add of the new basis phi and the
3412 increment. */
3413 replace_one_candidate (c, i, name);
3414 }
3415 }
3416 else
3417 {
3418 slsr_cand_t basis = lookup_cand (c->basis);
3419 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3420 replace_one_candidate (c, i, basis_name);
3421 }
3422 }
3423 }
3424
3425 if (c->sibling)
3426 replace_profitable_candidates (lookup_cand (c->sibling));
3427
3428 if (c->dependent)
3429 replace_profitable_candidates (lookup_cand (c->dependent));
3430 }
3431 \f
3432 /* Analyze costs of related candidates in the candidate vector,
3433 and make beneficial replacements. */
3434
3435 static void
3436 analyze_candidates_and_replace (void)
3437 {
3438 unsigned i;
3439 slsr_cand_t c;
3440
3441 /* Each candidate that has a null basis and a non-null
3442 dependent is the root of a tree of related statements.
3443 Analyze each tree to determine a subset of those
3444 statements that can be replaced with maximum benefit. */
3445 FOR_EACH_VEC_ELT (cand_vec, i, c)
3446 {
3447 slsr_cand_t first_dep;
3448
3449 if (c->basis != 0 || c->dependent == 0)
3450 continue;
3451
3452 if (dump_file && (dump_flags & TDF_DETAILS))
3453 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3454 c->cand_num);
3455
3456 first_dep = lookup_cand (c->dependent);
3457
3458 /* If this is a chain of CAND_REFs, unconditionally replace
3459 each of them with a strength-reduced data reference. */
3460 if (c->kind == CAND_REF)
3461 replace_refs (c);
3462
3463 /* If the common stride of all related candidates is a known
3464 constant, each candidate without a phi-dependence can be
3465 profitably replaced. Each replaces a multiply by a single
3466 add, with the possibility that a feeding add also goes dead.
3467 A candidate with a phi-dependence is replaced only if the
3468 compensation code it requires is offset by the strength
3469 reduction savings. */
3470 else if (TREE_CODE (c->stride) == INTEGER_CST)
3471 replace_uncond_cands_and_profitable_phis (first_dep);
3472
3473 /* When the stride is an SSA name, it may still be profitable
3474 to replace some or all of the dependent candidates, depending
3475 on whether the introduced increments can be reused, or are
3476 less expensive to calculate than the replaced statements. */
3477 else
3478 {
3479 enum machine_mode mode;
3480 bool speed;
3481
3482 /* Determine whether we'll be generating pointer arithmetic
3483 when replacing candidates. */
3484 address_arithmetic_p = (c->kind == CAND_ADD
3485 && POINTER_TYPE_P (c->cand_type));
3486
3487 /* If all candidates have already been replaced under other
3488 interpretations, nothing remains to be done. */
3489 if (!count_candidates (c))
3490 continue;
3491
3492 /* Construct an array of increments for this candidate chain. */
3493 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3494 incr_vec_len = 0;
3495 record_increments (c);
3496
3497 /* Determine which increments are profitable to replace. */
3498 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3499 speed = optimize_cands_for_speed_p (c);
3500 analyze_increments (first_dep, mode, speed);
3501
3502 /* Insert initializers of the form T_0 = stride * increment
3503 for use in profitable replacements. */
3504 insert_initializers (first_dep);
3505 dump_incr_vec ();
3506
3507 /* Perform the replacements. */
3508 replace_profitable_candidates (first_dep);
3509 free (incr_vec);
3510 }
3511 }
3512 }
3513
3514 static unsigned
3515 execute_strength_reduction (void)
3516 {
3517 /* Create the obstack where candidates will reside. */
3518 gcc_obstack_init (&cand_obstack);
3519
3520 /* Allocate the candidate vector. */
3521 cand_vec.create (128);
3522
3523 /* Allocate the mapping from statements to candidate indices. */
3524 stmt_cand_map = pointer_map_create ();
3525
3526 /* Create the obstack where candidate chains will reside. */
3527 gcc_obstack_init (&chain_obstack);
3528
3529 /* Allocate the mapping from base expressions to candidate chains. */
3530 base_cand_map.create (500);
3531
3532 /* Initialize the loop optimizer. We need to detect flow across
3533 back edges, and this gives us dominator information as well. */
3534 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3535
3536 /* Walk the CFG in predominator order looking for strength reduction
3537 candidates. */
3538 find_candidates_dom_walker (CDI_DOMINATORS)
3539 .walk (cfun->cfg->x_entry_block_ptr);
3540
3541 if (dump_file && (dump_flags & TDF_DETAILS))
3542 {
3543 dump_cand_vec ();
3544 dump_cand_chains ();
3545 }
3546
3547 /* Analyze costs and make appropriate replacements. */
3548 analyze_candidates_and_replace ();
3549
3550 loop_optimizer_finalize ();
3551 base_cand_map.dispose ();
3552 obstack_free (&chain_obstack, NULL);
3553 pointer_map_destroy (stmt_cand_map);
3554 cand_vec.release ();
3555 obstack_free (&cand_obstack, NULL);
3556
3557 return 0;
3558 }
3559
3560 static bool
3561 gate_strength_reduction (void)
3562 {
3563 return flag_tree_slsr;
3564 }
3565
3566 namespace {
3567
3568 const pass_data pass_data_strength_reduction =
3569 {
3570 GIMPLE_PASS, /* type */
3571 "slsr", /* name */
3572 OPTGROUP_NONE, /* optinfo_flags */
3573 true, /* has_gate */
3574 true, /* has_execute */
3575 TV_GIMPLE_SLSR, /* tv_id */
3576 ( PROP_cfg | PROP_ssa ), /* properties_required */
3577 0, /* properties_provided */
3578 0, /* properties_destroyed */
3579 0, /* todo_flags_start */
3580 TODO_verify_ssa, /* todo_flags_finish */
3581 };
3582
3583 class pass_strength_reduction : public gimple_opt_pass
3584 {
3585 public:
3586 pass_strength_reduction (gcc::context *ctxt)
3587 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3588 {}
3589
3590 /* opt_pass methods: */
3591 bool gate () { return gate_strength_reduction (); }
3592 unsigned int execute () { return execute_strength_reduction (); }
3593
3594 }; // class pass_strength_reduction
3595
3596 } // anon namespace
3597
3598 gimple_opt_pass *
3599 make_pass_strength_reduction (gcc::context *ctxt)
3600 {
3601 return new pass_strength_reduction (ctxt);
3602 }