gimple-ssa-strength-reduction.c (struct slsr_cand_d): Change base_name to base_expr.
[gcc.git] / gcc / gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction will be implemented in four stages, gradually
28 adding more complex candidates:
29
30 1) Explicit multiplies, known constant multipliers, no
31 conditional increments. (complete)
32 2) Explicit multiplies, unknown constant multipliers,
33 no conditional increments. (data gathering complete,
34 replacements pending)
35 3) Implicit multiplies in addressing expressions. (complete)
36 4) Explicit multiplies, conditional increments. (pending)
37
38 It would also be possible to apply strength reduction to divisions
39 and modulos, but such opportunities are relatively uncommon.
40
41 Strength reduction is also currently restricted to integer operations.
42 If desired, it could be extended to floating-point operations under
43 control of something like -funsafe-math-optimizations. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tree.h"
49 #include "gimple.h"
50 #include "basic-block.h"
51 #include "tree-pass.h"
52 #include "cfgloop.h"
53 #include "gimple-pretty-print.h"
54 #include "tree-flow.h"
55 #include "domwalk.h"
56 #include "pointer-set.h"
57 #include "expmed.h"
58 \f
59 /* Information about a strength reduction candidate. Each statement
60 in the candidate table represents an expression of one of the
61 following forms (the special case of CAND_REF will be described
62 later):
63
64 (CAND_MULT) S1: X = (B + i) * S
65 (CAND_ADD) S1: X = B + (i * S)
66
67 Here X and B are SSA names, i is an integer constant, and S is
68 either an SSA name or a constant. We call B the "base," i the
69 "index", and S the "stride."
70
71 Any statement S0 that dominates S1 and is of the form:
72
73 (CAND_MULT) S0: Y = (B + i') * S
74 (CAND_ADD) S0: Y = B + (i' * S)
75
76 is called a "basis" for S1. In both cases, S1 may be replaced by
77
78 S1': X = Y + (i - i') * S,
79
80 where (i - i') * S is folded to the extent possible.
81
82 All gimple statements are visited in dominator order, and each
83 statement that may contribute to one of the forms of S1 above is
84 given at least one entry in the candidate table. Such statements
85 include addition, pointer addition, subtraction, multiplication,
86 negation, copies, and nontrivial type casts. If a statement may
87 represent more than one expression of the forms of S1 above,
88 multiple "interpretations" are stored in the table and chained
89 together. Examples:
90
91 * An add of two SSA names may treat either operand as the base.
92 * A multiply of two SSA names, likewise.
93 * A copy or cast may be thought of as either a CAND_MULT with
94 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
95
96 Candidate records are allocated from an obstack. They are addressed
97 both from a hash table keyed on S1, and from a vector of candidate
98 pointers arranged in predominator order.
99
100 Opportunity note
101 ----------------
102 Currently we don't recognize:
103
104 S0: Y = (S * i') - B
105 S1: X = (S * i) - B
106
107 as a strength reduction opportunity, even though this S1 would
108 also be replaceable by the S1' above. This can be added if it
109 comes up in practice.
110
111 Strength reduction in addressing
112 --------------------------------
113 There is another kind of candidate known as CAND_REF. A CAND_REF
114 describes a statement containing a memory reference having
115 complex addressing that might benefit from strength reduction.
116 Specifically, we are interested in references for which
117 get_inner_reference returns a base address, offset, and bitpos as
118 follows:
119
120 base: MEM_REF (T1, C1)
121 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
122 bitpos: C4 * BITS_PER_UNIT
123
124 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
125 arbitrary integer constants. Note that C2 may be zero, in which
126 case the offset will be MULT_EXPR (T2, C3).
127
128 When this pattern is recognized, the original memory reference
129 can be replaced with:
130
131 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
132 C1 + (C2 * C3) + C4)
133
134 which distributes the multiply to allow constant folding. When
135 two or more addressing expressions can be represented by MEM_REFs
136 of this form, differing only in the constants C1, C2, and C4,
137 making this substitution produces more efficient addressing during
138 the RTL phases. When there are not at least two expressions with
139 the same values of T1, T2, and C3, there is nothing to be gained
140 by the replacement.
141
142 Strength reduction of CAND_REFs uses the same infrastructure as
143 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
144 field, MULT_EXPR (T2, C3) in the stride (S) field, and
145 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
146 is thus another CAND_REF with the same B and S values. When at
147 least two CAND_REFs are chained together using the basis relation,
148 each of them is replaced as above, resulting in improved code
149 generation for addressing. */
150
151
/* Index into the candidate vector, offset by 1.  VECs are zero-based,
   while cand_idx's are one-based, with zero indicating null.  */
typedef unsigned cand_idx;

/* The kind of candidate.  */
enum cand_kind
{
  CAND_MULT,   /* X = (B + i) * S  */
  CAND_ADD,    /* X = B + (i * S)  */
  CAND_REF     /* Memory reference with strength-reducible addressing.  */
};
163
/* A strength-reduction candidate:  one interpretation of a statement
   as X = (B + i) * S (CAND_MULT), X = B + (i * S) (CAND_ADD), or a
   strength-reducible memory reference (CAND_REF).  See the commentary
   at the top of the file for the full description.  */

struct slsr_cand_d
{
  /* The candidate statement S1.  */
  gimple cand_stmt;

  /* The base expression B:  often an SSA name, but not always.  */
  tree base_expr;

  /* The stride S.  */
  tree stride;

  /* The index constant i.  */
  double_int index;

  /* The type of the candidate.  This is normally the type of base_expr,
     but casts may have occurred when combining feeding instructions.
     A candidate can only be a basis for candidates of the same final type.
     (For CAND_REFs, this is the type to be used for operand 1 of the
     replacement MEM_REF.)  */
  tree cand_type;

  /* The kind of candidate (CAND_MULT, etc.).  */
  enum cand_kind kind;

  /* Index of this candidate in the candidate vector (one-based;
     zero means "no candidate").  */
  cand_idx cand_num;

  /* Index of the next candidate record for the same statement.
     A statement may be useful in more than one way (e.g., due to
     commutativity).  So we can have multiple "interpretations"
     of a statement.  */
  cand_idx next_interp;

  /* Index of the basis statement S0, if any, in the candidate vector.  */
  cand_idx basis;

  /* First candidate for which this candidate is a basis, if one exists.  */
  cand_idx dependent;

  /* Next candidate having the same basis as this one.  */
  cand_idx sibling;

  /* If this is a conditional candidate, the defining PHI statement
     for the base SSA name B.  For future use; always NULL for now.  */
  gimple def_phi;

  /* Savings that can be expected from eliminating dead code if this
     candidate is replaced.  */
  int dead_savings;
};

typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
typedef const struct slsr_cand_d *const_slsr_cand_t;
217
/* Pointers to candidates are chained together as part of a mapping
   from base expressions to the candidates that use them.  Each chain
   lives in base_cand_map, keyed on base_expr.  */

struct cand_chain_d
{
  /* Base expression for the chain of candidates:  often, but not
     always, an SSA name.  */
  tree base_expr;

  /* Pointer to a candidate.  */
  slsr_cand_t cand;

  /* Chain pointer:  next candidate sharing this base expression,
     or NULL at the end of the chain.  */
  struct cand_chain_d *next;

};

typedef struct cand_chain_d cand_chain, *cand_chain_t;
typedef const struct cand_chain_d *const_cand_chain_t;
237
/* Candidates are maintained in a vector.  If candidate X dominates
   candidate Y, then X appears before Y in the vector; but the
   converse does not necessarily hold.  */
DEF_VEC_P (slsr_cand_t);
DEF_VEC_ALLOC_P (slsr_cand_t, heap);
static VEC (slsr_cand_t, heap) *cand_vec;

/* Cost constants used when weighing replacements.  */
enum cost_consts
{
  COST_NEUTRAL = 0,
  COST_INFINITE = 1000
};

/* Pointer map embodying a mapping from statements to candidates.  */
static struct pointer_map_t *stmt_cand_map;

/* Obstack for candidates.  */
static struct obstack cand_obstack;

/* Hash table embodying a mapping from base exprs to chains of candidates.  */
static htab_t base_cand_map;

/* Obstack for candidate chains.  */
static struct obstack chain_obstack;
262 \f
263 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
264
265 static slsr_cand_t
266 lookup_cand (cand_idx idx)
267 {
268 return VEC_index (slsr_cand_t, cand_vec, idx - 1);
269 }
270
271 /* Callback to produce a hash value for a candidate chain header. */
272
273 static hashval_t
274 base_cand_hash (const void *p)
275 {
276 tree base_expr = ((const_cand_chain_t) p)->base_expr;
277 return iterative_hash_expr (base_expr, 0);
278 }
279
/* Callback when an element is removed from the hash table.
   We never remove entries until the entire table is released,
   so there is deliberately nothing to do here; chain nodes are
   reclaimed all at once when chain_obstack is released.  */

static void
base_cand_free (void *p ATTRIBUTE_UNUSED)
{
}
287
288 /* Callback to return true if two candidate chain headers are equal. */
289
290 static int
291 base_cand_eq (const void *p1, const void *p2)
292 {
293 const_cand_chain_t const chain1 = (const_cand_chain_t) p1;
294 const_cand_chain_t const chain2 = (const_cand_chain_t) p2;
295 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
296 }
297 \f
298 /* Use the base expr from candidate C to look for possible candidates
299 that can serve as a basis for C. Each potential basis must also
300 appear in a block that dominates the candidate statement and have
301 the same stride and type. If more than one possible basis exists,
302 the one with highest index in the vector is chosen; this will be
303 the most immediately dominating basis. */
304
305 static int
306 find_basis_for_candidate (slsr_cand_t c)
307 {
308 cand_chain mapping_key;
309 cand_chain_t chain;
310 slsr_cand_t basis = NULL;
311
312 mapping_key.base_expr = c->base_expr;
313 chain = (cand_chain_t) htab_find (base_cand_map, &mapping_key);
314
315 for (; chain; chain = chain->next)
316 {
317 slsr_cand_t one_basis = chain->cand;
318
319 if (one_basis->kind != c->kind
320 || !operand_equal_p (one_basis->stride, c->stride, 0)
321 || !types_compatible_p (one_basis->cand_type, c->cand_type)
322 || !dominated_by_p (CDI_DOMINATORS,
323 gimple_bb (c->cand_stmt),
324 gimple_bb (one_basis->cand_stmt)))
325 continue;
326
327 if (!basis || basis->cand_num < one_basis->cand_num)
328 basis = one_basis;
329 }
330
331 if (basis)
332 {
333 c->sibling = basis->dependent;
334 basis->dependent = c->cand_num;
335 return basis->cand_num;
336 }
337
338 return 0;
339 }
340
341 /* Record a mapping from the base expression of C to C itself, indicating that
342 C may potentially serve as a basis using that base expression. */
343
344 static void
345 record_potential_basis (slsr_cand_t c)
346 {
347 cand_chain_t node;
348 void **slot;
349
350 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
351 node->base_expr = c->base_expr;
352 node->cand = c;
353 node->next = NULL;
354 slot = htab_find_slot (base_cand_map, node, INSERT);
355
356 if (*slot)
357 {
358 cand_chain_t head = (cand_chain_t) (*slot);
359 node->next = head->next;
360 head->next = node;
361 }
362 else
363 *slot = node;
364 }
365
/* Allocate storage for a new candidate and initialize its fields.
   Attempt to find a basis for the candidate.

   KIND is the kind of candidate; GS is the candidate statement;
   BASE, INDEX, and STRIDE are the components of the candidate
   expression; CTYPE is the candidate type; SAVINGS is the dead-code
   savings accumulated from feeding statements.  */

static slsr_cand_t
alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
			   double_int index, tree stride, tree ctype,
			   unsigned savings)
{
  slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
					       sizeof (slsr_cand));
  c->cand_stmt = gs;
  c->base_expr = base;
  c->stride = stride;
  c->index = index;
  c->cand_type = ctype;
  c->kind = kind;
  /* Candidate numbers are one-based; zero means "no candidate".  */
  c->cand_num = VEC_length (slsr_cand_t, cand_vec) + 1;
  c->next_interp = 0;
  c->dependent = 0;
  c->sibling = 0;
  c->def_phi = NULL;
  c->dead_savings = savings;

  /* Push before searching for a basis:  the basis records C as a
     dependent via C's candidate number, which must be valid.  */
  VEC_safe_push (slsr_cand_t, heap, cand_vec, c);
  c->basis = find_basis_for_candidate (c);
  record_potential_basis (c);

  return c;
}
395
396 /* Determine the target cost of statement GS when compiling according
397 to SPEED. */
398
399 static int
400 stmt_cost (gimple gs, bool speed)
401 {
402 tree lhs, rhs1, rhs2;
403 enum machine_mode lhs_mode;
404
405 gcc_assert (is_gimple_assign (gs));
406 lhs = gimple_assign_lhs (gs);
407 rhs1 = gimple_assign_rhs1 (gs);
408 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
409
410 switch (gimple_assign_rhs_code (gs))
411 {
412 case MULT_EXPR:
413 rhs2 = gimple_assign_rhs2 (gs);
414
415 if (host_integerp (rhs2, 0))
416 return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2), lhs_mode, speed);
417
418 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
419 return mul_cost (speed, lhs_mode);
420
421 case PLUS_EXPR:
422 case POINTER_PLUS_EXPR:
423 case MINUS_EXPR:
424 rhs2 = gimple_assign_rhs2 (gs);
425 return add_cost (speed, lhs_mode);
426
427 case NEGATE_EXPR:
428 return neg_cost (speed, lhs_mode);
429
430 case NOP_EXPR:
431 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
432
433 /* Note that we don't assign costs to copies that in most cases
434 will go away. */
435 default:
436 ;
437 }
438
439 gcc_unreachable ();
440 return 0;
441 }
442
443 /* Look up the defining statement for BASE_IN and return a pointer
444 to its candidate in the candidate table, if any; otherwise NULL.
445 Only CAND_ADD and CAND_MULT candidates are returned. */
446
447 static slsr_cand_t
448 base_cand_from_table (tree base_in)
449 {
450 slsr_cand_t *result;
451
452 gimple def = SSA_NAME_DEF_STMT (base_in);
453 if (!def)
454 return (slsr_cand_t) NULL;
455
456 result = (slsr_cand_t *) pointer_map_contains (stmt_cand_map, def);
457
458 if (result && (*result)->kind != CAND_REF)
459 return *result;
460
461 return (slsr_cand_t) NULL;
462 }
463
464 /* Add an entry to the statement-to-candidate mapping. */
465
466 static void
467 add_cand_for_stmt (gimple gs, slsr_cand_t c)
468 {
469 void **slot = pointer_map_insert (stmt_cand_map, gs);
470 gcc_assert (!*slot);
471 *slot = c;
472 }
473 \f
474 /* Look for the following pattern:
475
476 *PBASE: MEM_REF (T1, C1)
477
478 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
479 or
480 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
481 or
482 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
483
484 *PINDEX: C4 * BITS_PER_UNIT
485
486 If not present, leave the input values unchanged and return FALSE.
487 Otherwise, modify the input values as follows and return TRUE:
488
489 *PBASE: T1
490 *POFFSET: MULT_EXPR (T2, C3)
491 *PINDEX: C1 + (C2 * C3) + C4 */
492
493 static bool
494 restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
495 tree *ptype)
496 {
497 tree base = *pbase, offset = *poffset;
498 double_int index = *pindex;
499 double_int bpu = uhwi_to_double_int (BITS_PER_UNIT);
500 tree mult_op0, mult_op1, t1, t2, type;
501 double_int c1, c2, c3, c4;
502
503 if (!base
504 || !offset
505 || TREE_CODE (base) != MEM_REF
506 || TREE_CODE (offset) != MULT_EXPR
507 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
508 || !double_int_zero_p (double_int_umod (index, bpu, FLOOR_MOD_EXPR)))
509 return false;
510
511 t1 = TREE_OPERAND (base, 0);
512 c1 = mem_ref_offset (base);
513 type = TREE_TYPE (TREE_OPERAND (base, 1));
514
515 mult_op0 = TREE_OPERAND (offset, 0);
516 mult_op1 = TREE_OPERAND (offset, 1);
517
518 c3 = tree_to_double_int (mult_op1);
519
520 if (TREE_CODE (mult_op0) == PLUS_EXPR)
521
522 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
523 {
524 t2 = TREE_OPERAND (mult_op0, 0);
525 c2 = tree_to_double_int (TREE_OPERAND (mult_op0, 1));
526 }
527 else
528 return false;
529
530 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
531
532 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
533 {
534 t2 = TREE_OPERAND (mult_op0, 0);
535 c2 = double_int_neg (tree_to_double_int (TREE_OPERAND (mult_op0, 1)));
536 }
537 else
538 return false;
539
540 else
541 {
542 t2 = mult_op0;
543 c2 = double_int_zero;
544 }
545
546 c4 = double_int_udiv (index, bpu, FLOOR_DIV_EXPR);
547
548 *pbase = t1;
549 *poffset = fold_build2 (MULT_EXPR, sizetype, t2,
550 double_int_to_tree (sizetype, c3));
551 *pindex = double_int_add (double_int_add (c1, double_int_mul (c2, c3)), c4);
552 *ptype = type;
553
554 return true;
555 }
556
/* Given GS which contains a data reference, create a CAND_REF entry in
   the candidate table and attempt to find a basis.  */

static void
slsr_process_ref (gimple gs)
{
  tree ref_expr, base, offset, type;
  HOST_WIDE_INT bitsize, bitpos;
  enum machine_mode mode;
  int unsignedp, volatilep;
  double_int index;
  slsr_cand_t c;

  /* The reference of interest is the store destination if GS writes
     memory (has a virtual definition), else the load source.  */
  if (gimple_vdef (gs))
    ref_expr = gimple_assign_lhs (gs);
  else
    ref_expr = gimple_assign_rhs1 (gs);

  /* Only handled component references qualify; bit-field references
     are excluded.  */
  if (!handled_component_p (ref_expr)
      || TREE_CODE (ref_expr) == BIT_FIELD_REF
      || (TREE_CODE (ref_expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
    return;

  base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &volatilep, false);
  index = uhwi_to_double_int (bitpos);

  /* Try to match the MEM_REF / MULT_EXPR pattern documented above
     restructure_reference; bail out if it is not present.  */
  if (!restructure_reference (&base, &offset, &index, &type))
    return;

  /* For a CAND_REF, the restructured offset expression plays the
     role of the stride.  */
  c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
				 type, 0);

  /* Add the candidate to the statement-candidate mapping.  */
  add_cand_for_stmt (gs, c);
}
594
/* Create a candidate entry for a statement GS, where GS multiplies
   two SSA names BASE_IN and STRIDE_IN.  Propagate any known information
   about the two SSA names into the new candidate.  Return the new
   candidate.  */

static slsr_cand_t
create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  double_int index;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Look at all interpretations of the base candidate, if necessary,
     to find information to propagate into this candidate.  The loop
     stops as soon as one interpretation supplies a base.  */
  while (base_cand && !base)
    {

      if (base_cand->kind == CAND_MULT
	  && operand_equal_p (base_cand->stride, integer_one_node, 0))
	{
	  /* Y = (B + i') * 1
	     X = Y * Z
	     ================
	     X = (B + i') * Z  */
	  base = base_cand->base_expr;
	  index = base_cand->index;
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  /* If GS is the only use, the feeding statement becomes dead
	     after replacement; credit its cost as a savings.  */
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (base_cand->kind == CAND_ADD
	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = B + (i' * S), S constant
	     X = Y * Z
	     ============================
	     X = B + ((i' * S) * Z)  */
	  base = base_cand->base_expr;
	  index = double_int_mul (base_cand->index,
				  tree_to_double_int (base_cand->stride));
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = (Y + 0) * Z.  */
      base = base_in;
      index = double_int_zero;
      stride = stride_in;
      ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
    }

  c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
				 ctype, savings);
  return c;
}
666
/* Create a candidate entry for a statement GS, where GS multiplies
   SSA name BASE_IN by constant STRIDE_IN.  Propagate any known
   information about BASE_IN into the new candidate.  Return the new
   candidate.  */

static slsr_cand_t
create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  double_int index, temp;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Look at all interpretations of the base candidate, if necessary,
     to find information to propagate into this candidate.  The loop
     stops as soon as one interpretation supplies a base.  */
  while (base_cand && !base)
    {
      if (base_cand->kind == CAND_MULT
	  && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = (B + i') * S, S constant
	     X = Y * c
	     ============================
	     X = (B + i') * (S * c)  */
	  base = base_cand->base_expr;
	  index = base_cand->index;
	  /* Fold the two constant strides into one.  */
	  temp = double_int_mul (tree_to_double_int (base_cand->stride),
				 tree_to_double_int (stride_in));
	  stride = double_int_to_tree (TREE_TYPE (stride_in), temp);
	  ctype = base_cand->cand_type;
	  /* If GS is the only use, the feeding statement becomes dead
	     after replacement; credit its cost as a savings.  */
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (base_cand->kind == CAND_ADD
	       && operand_equal_p (base_cand->stride, integer_one_node, 0))
	{
	  /* Y = B + (i' * 1)
	     X = Y * c
	     ===========================
	     X = (B + i') * c  */
	  base = base_cand->base_expr;
	  index = base_cand->index;
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (base_cand->kind == CAND_ADD
	       && double_int_one_p (base_cand->index)
	       && TREE_CODE (base_cand->stride) == INTEGER_CST)
	{
	  /* Y = B + (1 * S), S constant
	     X = Y * c
	     ===========================
	     X = (B + S) * c  */
	  base = base_cand->base_expr;
	  index = tree_to_double_int (base_cand->stride);
	  stride = stride_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = (Y + 0) * c.  */
      base = base_in;
      index = double_int_zero;
      stride = stride_in;
      ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
    }

  c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
				 ctype, savings);
  return c;
}
754
755 /* Given GS which is a multiply of scalar integers, make an appropriate
756 entry in the candidate table. If this is a multiply of two SSA names,
757 create two CAND_MULT interpretations and attempt to find a basis for
758 each of them. Otherwise, create a single CAND_MULT and attempt to
759 find a basis. */
760
761 static void
762 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
763 {
764 slsr_cand_t c, c2;
765
766 /* If this is a multiply of an SSA name with itself, it is highly
767 unlikely that we will get a strength reduction opportunity, so
768 don't record it as a candidate. This simplifies the logic for
769 finding a basis, so if this is removed that must be considered. */
770 if (rhs1 == rhs2)
771 return;
772
773 if (TREE_CODE (rhs2) == SSA_NAME)
774 {
775 /* Record an interpretation of this statement in the candidate table
776 assuming RHS1 is the base expression and RHS2 is the stride. */
777 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
778
779 /* Add the first interpretation to the statement-candidate mapping. */
780 add_cand_for_stmt (gs, c);
781
782 /* Record another interpretation of this statement assuming RHS1
783 is the stride and RHS2 is the base expression. */
784 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
785 c->next_interp = c2->cand_num;
786 }
787 else
788 {
789 /* Record an interpretation for the multiply-immediate. */
790 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
791
792 /* Add the interpretation to the statement-candidate mapping. */
793 add_cand_for_stmt (gs, c);
794 }
795 }
796
/* Create a candidate entry for a statement GS, where GS adds two
   SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
   subtracts ADDEND_IN from BASE_IN otherwise.  Propagate any known
   information about the two SSA names into the new candidate.
   Return the new candidate.  */

static slsr_cand_t
create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
		     bool subtract_p, bool speed)
{
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
  double_int index;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);
  slsr_cand_t addend_cand = base_cand_from_table (addend_in);

  /* The most useful transformation is a multiply-immediate feeding
     an add or subtract.  Look for that first.  */
  while (addend_cand && !base)
    {
      if (addend_cand->kind == CAND_MULT
	  && double_int_zero_p (addend_cand->index)
	  && TREE_CODE (addend_cand->stride) == INTEGER_CST)
	{
	  /* Z = (B + 0) * S, S constant
	     X = Y +/- Z
	     ===========================
	     X = Y + ((+/-1 * S) * B)  */
	  base = base_in;
	  index = tree_to_double_int (addend_cand->stride);
	  if (subtract_p)
	    index = double_int_neg (index);
	  stride = addend_cand->base_expr;
	  ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
	  /* If GS is the only use, the feeding multiply becomes dead
	     after replacement; credit its cost as a savings.  */
	  if (has_single_use (addend_in))
	    savings = (addend_cand->dead_savings
		       + stmt_cost (addend_cand->cand_stmt, speed));
	}

      if (addend_cand->next_interp)
	addend_cand = lookup_cand (addend_cand->next_interp);
      else
	addend_cand = NULL;
    }

  /* Otherwise look at interpretations of the first operand.  */
  while (base_cand && !base)
    {
      if (base_cand->kind == CAND_ADD
	  && (double_int_zero_p (base_cand->index)
	      || operand_equal_p (base_cand->stride,
				  integer_zero_node, 0)))
	{
	  /* Y = B + (i' * S), i' * S = 0
	     X = Y +/- Z
	     ============================
	     X = B + (+/-1 * Z)  */
	  base = base_cand->base_expr;
	  index = subtract_p ? double_int_minus_one : double_int_one;
	  stride = addend_in;
	  ctype = base_cand->cand_type;
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}
      else if (subtract_p)
	{
	  /* For a subtract, a multiply-immediate feeding the
	     subtrahend can also be folded in; walk all of its
	     interpretations.  */
	  slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);

	  while (subtrahend_cand && !base)
	    {
	      if (subtrahend_cand->kind == CAND_MULT
		  && double_int_zero_p (subtrahend_cand->index)
		  && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
		{
		  /* Z = (B + 0) * S, S constant
		     X = Y - Z
		     ===========================
		     Value:  X = Y + ((-1 * S) * B)  */
		  base = base_in;
		  index = tree_to_double_int (subtrahend_cand->stride);
		  index = double_int_neg (index);
		  stride = subtrahend_cand->base_expr;
		  ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
		  if (has_single_use (addend_in))
		    savings = (subtrahend_cand->dead_savings
			       + stmt_cost (subtrahend_cand->cand_stmt, speed));
		}

	      if (subtrahend_cand->next_interp)
		subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
	      else
		subtrahend_cand = NULL;
	    }
	}

      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = Y + (1 * Z).  */
      base = base_in;
      index = subtract_p ? double_int_minus_one : double_int_one;
      stride = addend_in;
      ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
    }

  c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
				 ctype, savings);
  return c;
}
913
/* Create a candidate entry for a statement GS, where GS adds SSA
   name BASE_IN to constant INDEX_IN.  Propagate any known information
   about BASE_IN into the new candidate.  Return the new candidate.  */

static slsr_cand_t
create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed)
{
  enum cand_kind kind = CAND_ADD;
  tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
  double_int index, multiple;
  unsigned savings = 0;
  slsr_cand_t c;
  slsr_cand_t base_cand = base_cand_from_table (base_in);

  /* Walk the interpretations of the base candidate, stopping at the
     first one that lets the constant addend fold into the index.  */
  while (base_cand && !base)
    {
      bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride));

      /* The addend folds in only when it is an exact multiple of the
	 (constant) stride:  c = kS with MULTIPLE receiving k.  */
      if (TREE_CODE (base_cand->stride) == INTEGER_CST
	  && double_int_multiple_of (index_in,
				     tree_to_double_int (base_cand->stride),
				     unsigned_p,
				     &multiple))
	{
	  /* Y = (B + i') * S, S constant, c = kS for some integer k
	     X = Y + c
	     ============================
	     X = (B + (i'+ k)) * S
	  OR
	     Y = B + (i' * S), S constant, c = kS for some integer k
	     X = Y + c
	     ============================
	     X = (B + (i'+ k)) * S  */
	  kind = base_cand->kind;
	  base = base_cand->base_expr;
	  index = double_int_add (base_cand->index, multiple);
	  stride = base_cand->stride;
	  ctype = base_cand->cand_type;
	  /* If GS is the only use, the feeding statement becomes dead
	     after replacement; credit its cost as a savings.  */
	  if (has_single_use (base_in))
	    savings = (base_cand->dead_savings
		       + stmt_cost (base_cand->cand_stmt, speed));
	}

      if (base_cand->next_interp)
	base_cand = lookup_cand (base_cand->next_interp);
      else
	base_cand = NULL;
    }

  if (!base)
    {
      /* No interpretations had anything useful to propagate, so
	 produce X = Y + (c * 1).  */
      kind = CAND_ADD;
      base = base_in;
      index = index_in;
      stride = integer_one_node;
      ctype = TREE_TYPE (SSA_NAME_VAR (base_in));
    }

  c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
				 ctype, savings);
  return c;
}
978
/* Given GS which is an add or subtract of scalar integers or pointers,
   make at least one appropriate entry in the candidate table.  */

static void
slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
{
  bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
  slsr_cand_t c = NULL, c2;

  if (TREE_CODE (rhs2) == SSA_NAME)
    {
      /* First record an interpretation assuming RHS1 is the base expression
	 and RHS2 is the stride.  But it doesn't make sense for the
	 stride to be a pointer, so don't record a candidate in that case.  */
      if (!POINTER_TYPE_P (TREE_TYPE (SSA_NAME_VAR (rhs2))))
	{
	  c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);

	  /* Add the first interpretation to the statement-candidate
	     mapping.  */
	  add_cand_for_stmt (gs, c);
	}

      /* If the two RHS operands are identical, or this is a subtract,
	 we're done.  */
      if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
	return;

      /* Otherwise, record another interpretation assuming RHS2 is the
	 base expression and RHS1 is the stride, again provided that the
	 stride is not a pointer.  */
      if (!POINTER_TYPE_P (TREE_TYPE (SSA_NAME_VAR (rhs1))))
	{
	  c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
	  /* Chain to the first interpretation if one was recorded;
	     otherwise this becomes the statement's first candidate.  */
	  if (c)
	    c->next_interp = c2->cand_num;
	  else
	    add_cand_for_stmt (gs, c2);
	}
    }
  else
    {
      double_int index;

      /* Record an interpretation for the add-immediate.  A subtract
	 of a constant is handled as an add of the negated constant.  */
      index = tree_to_double_int (rhs2);
      if (subtract_p)
	index = double_int_neg (index);

      c = create_add_imm_cand (gs, rhs1, index, speed);

      /* Add the interpretation to the statement-candidate mapping.  */
      add_cand_for_stmt (gs, c);
    }
}
1034
1035 /* Given GS which is a negate of a scalar integer, make an appropriate
1036 entry in the candidate table. A negate is equivalent to a multiply
1037 by -1. */
1038
1039 static void
1040 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1041 {
1042 /* Record a CAND_MULT interpretation for the multiply by -1. */
1043 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1044
1045 /* Add the interpretation to the statement-candidate mapping. */
1046 add_cand_for_stmt (gs, c);
1047 }
1048
1049 /* Return TRUE if GS is a statement that defines an SSA name from
1050 a conversion and is legal for us to combine with an add and multiply
1051 in the candidate table. For example, suppose we have:
1052
1053 A = B + i;
1054 C = (type) A;
1055 D = C * S;
1056
1057 Without the type-cast, we would create a CAND_MULT for D with base B,
1058 index i, and stride S. We want to record this candidate only if it
1059 is equivalent to apply the type cast following the multiply:
1060
1061 A = B + i;
1062 E = A * S;
1063 D = (type) E;
1064
1065 We will record the type with the candidate for D. This allows us
1066 to use a similar previous candidate as a basis. If we have earlier seen
1067
1068 A' = B + i';
1069 C' = (type) A';
1070 D' = C' * S;
1071
1072 we can replace D with
1073
1074 D = D' + (i - i') * S;
1075
1076 But if moving the type-cast would change semantics, we mustn't do this.
1077
1078 This is legitimate for casts from a non-wrapping integral type to
1079 any integral type of the same or larger size. It is not legitimate
1080 to convert a wrapping type to a non-wrapping type, or to a wrapping
1081 type of a different size. I.e., with a wrapping type, we must
1082 assume that the addition B + i could wrap, in which case performing
1083 the multiply before or after one of the "illegal" type casts will
1084 have different semantics. */
1085
1086 static bool
1087 legal_cast_p (gimple gs, tree rhs)
1088 {
1089 tree lhs, lhs_type, rhs_type;
1090 unsigned lhs_size, rhs_size;
1091 bool lhs_wraps, rhs_wraps;
1092
1093 if (!is_gimple_assign (gs)
1094 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1095 return false;
1096
1097 lhs = gimple_assign_lhs (gs);
1098 lhs_type = TREE_TYPE (lhs);
1099 rhs_type = TREE_TYPE (rhs);
1100 lhs_size = TYPE_PRECISION (lhs_type);
1101 rhs_size = TYPE_PRECISION (rhs_type);
1102 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1103 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1104
1105 if (lhs_size < rhs_size
1106 || (rhs_wraps && !lhs_wraps)
1107 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1108 return false;
1109
1110 return true;
1111 }
1112
1113 /* Given GS which is a cast to a scalar integer type, determine whether
1114 the cast is legal for strength reduction. If so, make at least one
1115 appropriate entry in the candidate table. */
1116
1117 static void
1118 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1119 {
1120 tree lhs, ctype;
1121 slsr_cand_t base_cand, c, c2;
1122 unsigned savings = 0;
1123
1124 if (!legal_cast_p (gs, rhs1))
1125 return;
1126
1127 lhs = gimple_assign_lhs (gs);
1128 base_cand = base_cand_from_table (rhs1);
1129 ctype = TREE_TYPE (lhs);
1130
1131 if (base_cand)
1132 {
1133 while (base_cand)
1134 {
1135 /* Propagate all data from the base candidate except the type,
1136 which comes from the cast, and the base candidate's cast,
1137 which is no longer applicable. */
1138 if (has_single_use (rhs1))
1139 savings = (base_cand->dead_savings
1140 + stmt_cost (base_cand->cand_stmt, speed));
1141
1142 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1143 base_cand->base_expr,
1144 base_cand->index, base_cand->stride,
1145 ctype, savings);
1146 if (base_cand->next_interp)
1147 base_cand = lookup_cand (base_cand->next_interp);
1148 else
1149 base_cand = NULL;
1150 }
1151 }
1152 else
1153 {
1154 /* If nothing is known about the RHS, create fresh CAND_ADD and
1155 CAND_MULT interpretations:
1156
1157 X = Y + (0 * 1)
1158 X = (Y + 0) * 1
1159
1160 The first of these is somewhat arbitrary, but the choice of
1161 1 for the stride simplifies the logic for propagating casts
1162 into their uses. */
1163 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1164 integer_one_node, ctype, 0);
1165 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1166 integer_one_node, ctype, 0);
1167 c->next_interp = c2->cand_num;
1168 }
1169
1170 /* Add the first (or only) interpretation to the statement-candidate
1171 mapping. */
1172 add_cand_for_stmt (gs, c);
1173 }
1174
1175 /* Given GS which is a copy of a scalar integer type, make at least one
1176 appropriate entry in the candidate table.
1177
1178 This interface is included for completeness, but is unnecessary
1179 if this pass immediately follows a pass that performs copy
1180 propagation, such as DOM. */
1181
1182 static void
1183 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1184 {
1185 slsr_cand_t base_cand, c, c2;
1186 unsigned savings = 0;
1187
1188 base_cand = base_cand_from_table (rhs1);
1189
1190 if (base_cand)
1191 {
1192 while (base_cand)
1193 {
1194 /* Propagate all data from the base candidate. */
1195 if (has_single_use (rhs1))
1196 savings = (base_cand->dead_savings
1197 + stmt_cost (base_cand->cand_stmt, speed));
1198
1199 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1200 base_cand->base_expr,
1201 base_cand->index, base_cand->stride,
1202 base_cand->cand_type, savings);
1203 if (base_cand->next_interp)
1204 base_cand = lookup_cand (base_cand->next_interp);
1205 else
1206 base_cand = NULL;
1207 }
1208 }
1209 else
1210 {
1211 /* If nothing is known about the RHS, create fresh CAND_ADD and
1212 CAND_MULT interpretations:
1213
1214 X = Y + (0 * 1)
1215 X = (Y + 0) * 1
1216
1217 The first of these is somewhat arbitrary, but the choice of
1218 1 for the stride simplifies the logic for propagating casts
1219 into their uses. */
1220 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, double_int_zero,
1221 integer_one_node, TREE_TYPE (rhs1), 0);
1222 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, double_int_zero,
1223 integer_one_node, TREE_TYPE (rhs1), 0);
1224 c->next_interp = c2->cand_num;
1225 }
1226
1227 /* Add the first (or only) interpretation to the statement-candidate
1228 mapping. */
1229 add_cand_for_stmt (gs, c);
1230 }
1231 \f
1232 /* Find strength-reduction candidates in block BB. */
1233
1234 static void
1235 find_candidates_in_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
1236 basic_block bb)
1237 {
1238 bool speed = optimize_bb_for_speed_p (bb);
1239 gimple_stmt_iterator gsi;
1240
1241 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1242 {
1243 gimple gs = gsi_stmt (gsi);
1244
1245 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1246 slsr_process_ref (gs);
1247
1248 else if (is_gimple_assign (gs)
1249 && SCALAR_INT_MODE_P
1250 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1251 {
1252 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1253
1254 switch (gimple_assign_rhs_code (gs))
1255 {
1256 case MULT_EXPR:
1257 case PLUS_EXPR:
1258 rhs1 = gimple_assign_rhs1 (gs);
1259 rhs2 = gimple_assign_rhs2 (gs);
1260 /* Should never happen, but currently some buggy situations
1261 in earlier phases put constants in rhs1. */
1262 if (TREE_CODE (rhs1) != SSA_NAME)
1263 continue;
1264 break;
1265
1266 /* Possible future opportunity: rhs1 of a ptr+ can be
1267 an ADDR_EXPR. */
1268 case POINTER_PLUS_EXPR:
1269 case MINUS_EXPR:
1270 rhs2 = gimple_assign_rhs2 (gs);
1271 /* Fall-through. */
1272
1273 case NOP_EXPR:
1274 case MODIFY_EXPR:
1275 case NEGATE_EXPR:
1276 rhs1 = gimple_assign_rhs1 (gs);
1277 if (TREE_CODE (rhs1) != SSA_NAME)
1278 continue;
1279 break;
1280
1281 default:
1282 ;
1283 }
1284
1285 switch (gimple_assign_rhs_code (gs))
1286 {
1287 case MULT_EXPR:
1288 slsr_process_mul (gs, rhs1, rhs2, speed);
1289 break;
1290
1291 case PLUS_EXPR:
1292 case POINTER_PLUS_EXPR:
1293 case MINUS_EXPR:
1294 slsr_process_add (gs, rhs1, rhs2, speed);
1295 break;
1296
1297 case NEGATE_EXPR:
1298 slsr_process_neg (gs, rhs1, speed);
1299 break;
1300
1301 case NOP_EXPR:
1302 slsr_process_cast (gs, rhs1, speed);
1303 break;
1304
1305 case MODIFY_EXPR:
1306 slsr_process_copy (gs, rhs1, speed);
1307 break;
1308
1309 default:
1310 ;
1311 }
1312 }
1313 }
1314 }
1315 \f
1316 /* Dump a candidate for debug. */
1317
1318 static void
1319 dump_candidate (slsr_cand_t c)
1320 {
1321 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1322 gimple_bb (c->cand_stmt)->index);
1323 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1324 switch (c->kind)
1325 {
1326 case CAND_MULT:
1327 fputs (" MULT : (", dump_file);
1328 print_generic_expr (dump_file, c->base_expr, 0);
1329 fputs (" + ", dump_file);
1330 dump_double_int (dump_file, c->index, false);
1331 fputs (") * ", dump_file);
1332 print_generic_expr (dump_file, c->stride, 0);
1333 fputs (" : ", dump_file);
1334 break;
1335 case CAND_ADD:
1336 fputs (" ADD : ", dump_file);
1337 print_generic_expr (dump_file, c->base_expr, 0);
1338 fputs (" + (", dump_file);
1339 dump_double_int (dump_file, c->index, false);
1340 fputs (" * ", dump_file);
1341 print_generic_expr (dump_file, c->stride, 0);
1342 fputs (") : ", dump_file);
1343 break;
1344 case CAND_REF:
1345 fputs (" REF : ", dump_file);
1346 print_generic_expr (dump_file, c->base_expr, 0);
1347 fputs (" + (", dump_file);
1348 print_generic_expr (dump_file, c->stride, 0);
1349 fputs (") + ", dump_file);
1350 dump_double_int (dump_file, c->index, false);
1351 fputs (" : ", dump_file);
1352 break;
1353 default:
1354 gcc_unreachable ();
1355 }
1356 print_generic_expr (dump_file, c->cand_type, 0);
1357 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1358 c->basis, c->dependent, c->sibling);
1359 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1360 c->next_interp, c->dead_savings);
1361 if (c->def_phi)
1362 {
1363 fputs (" phi: ", dump_file);
1364 print_gimple_stmt (dump_file, c->def_phi, 0, 0);
1365 }
1366 fputs ("\n", dump_file);
1367 }
1368
1369 /* Dump the candidate vector for debug. */
1370
1371 static void
1372 dump_cand_vec (void)
1373 {
1374 unsigned i;
1375 slsr_cand_t c;
1376
1377 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1378
1379 FOR_EACH_VEC_ELT (slsr_cand_t, cand_vec, i, c)
1380 dump_candidate (c);
1381 }
1382
1383 /* Callback used to dump the candidate chains hash table. */
1384
1385 static int
1386 base_cand_dump_callback (void **slot, void *ignored ATTRIBUTE_UNUSED)
1387 {
1388 const_cand_chain_t chain = *((const_cand_chain_t *) slot);
1389 cand_chain_t p;
1390
1391 print_generic_expr (dump_file, chain->base_expr, 0);
1392 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1393
1394 for (p = chain->next; p; p = p->next)
1395 fprintf (dump_file, " -> %d", p->cand->cand_num);
1396
1397 fputs ("\n", dump_file);
1398 return 1;
1399 }
1400
1401 /* Dump the candidate chains. */
1402
1403 static void
1404 dump_cand_chains (void)
1405 {
1406 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1407 htab_traverse_noresize (base_cand_map, base_cand_dump_callback, NULL);
1408 fputs ("\n", dump_file);
1409 }
1410 \f
1411 /* Recursive helper for unconditional_cands_with_known_stride_p.
1412 Returns TRUE iff C, its siblings, and its dependents are all
1413 unconditional candidates. */
1414
1415 static bool
1416 unconditional_cands (slsr_cand_t c)
1417 {
1418 if (c->def_phi)
1419 return false;
1420
1421 if (c->sibling && !unconditional_cands (lookup_cand (c->sibling)))
1422 return false;
1423
1424 if (c->dependent && !unconditional_cands (lookup_cand (c->dependent)))
1425 return false;
1426
1427 return true;
1428 }
1429
1430 /* Determine whether or not the tree of candidates rooted at
1431 ROOT consists entirely of unconditional increments with
1432 an INTEGER_CST stride. */
1433
1434 static bool
1435 unconditional_cands_with_known_stride_p (slsr_cand_t root)
1436 {
1437 /* The stride is identical for all related candidates, so
1438 check it once. */
1439 if (TREE_CODE (root->stride) != INTEGER_CST)
1440 return false;
1441
1442 return unconditional_cands (lookup_cand (root->dependent));
1443 }
1444
1445 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1446 data reference. */
1447
1448 static void
1449 replace_ref (tree *expr, slsr_cand_t c)
1450 {
1451 tree add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1452 c->base_expr, c->stride);
1453 tree mem_ref = fold_build2 (MEM_REF, TREE_TYPE (*expr), add_expr,
1454 double_int_to_tree (c->cand_type, c->index));
1455
1456 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1457 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1458 TREE_OPERAND (mem_ref, 0)
1459 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1460 /*simple_p=*/true, NULL,
1461 /*before=*/true, GSI_SAME_STMT);
1462 copy_ref_info (mem_ref, *expr);
1463 *expr = mem_ref;
1464 update_stmt (c->cand_stmt);
1465 }
1466
1467 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1468 dependent of candidate C with an equivalent strength-reduced data
1469 reference. */
1470
1471 static void
1472 replace_refs (slsr_cand_t c)
1473 {
1474 if (gimple_vdef (c->cand_stmt))
1475 {
1476 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1477 replace_ref (lhs, c);
1478 }
1479 else
1480 {
1481 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1482 replace_ref (rhs, c);
1483 }
1484
1485 if (c->sibling)
1486 replace_refs (lookup_cand (c->sibling));
1487
1488 if (c->dependent)
1489 replace_refs (lookup_cand (c->dependent));
1490 }
1491
1492 /* Calculate the increment required for candidate C relative to
1493 its basis. */
1494
1495 static double_int
1496 cand_increment (slsr_cand_t c)
1497 {
1498 slsr_cand_t basis;
1499
1500 /* If the candidate doesn't have a basis, just return its own
1501 index. This is useful in record_increments to help us find
1502 an existing initializer. */
1503 if (!c->basis)
1504 return c->index;
1505
1506 basis = lookup_cand (c->basis);
1507 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1508 return double_int_sub (c->index, basis->index);
1509 }
1510
1511 /* Return TRUE iff candidate C has already been replaced under
1512 another interpretation. */
1513
1514 static inline bool
1515 cand_already_replaced (slsr_cand_t c)
1516 {
1517 return (gimple_bb (c->cand_stmt) == 0);
1518 }
1519
1520 /* Helper routine for replace_dependents, doing the work for a
1521 single candidate C. */
1522
1523 static void
1524 replace_dependent (slsr_cand_t c, enum tree_code cand_code)
1525 {
1526 double_int stride = tree_to_double_int (c->stride);
1527 double_int bump = double_int_mul (cand_increment (c), stride);
1528 gimple stmt_to_print = NULL;
1529 slsr_cand_t basis;
1530 tree basis_name, incr_type, bump_tree;
1531 enum tree_code code;
1532
1533 /* It is highly unlikely, but possible, that the resulting
1534 bump doesn't fit in a HWI. Abandon the replacement
1535 in this case. Restriction to signed HWI is conservative
1536 for unsigned types but allows for safe negation without
1537 twisted logic. */
1538 if (!double_int_fits_in_shwi_p (bump))
1539 return;
1540
1541 basis = lookup_cand (c->basis);
1542 basis_name = gimple_assign_lhs (basis->cand_stmt);
1543 incr_type = TREE_TYPE (gimple_assign_rhs1 (c->cand_stmt));
1544 code = PLUS_EXPR;
1545
1546 if (double_int_negative_p (bump))
1547 {
1548 code = MINUS_EXPR;
1549 bump = double_int_neg (bump);
1550 }
1551
1552 bump_tree = double_int_to_tree (incr_type, bump);
1553
1554 if (dump_file && (dump_flags & TDF_DETAILS))
1555 {
1556 fputs ("Replacing: ", dump_file);
1557 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1558 }
1559
1560 if (double_int_zero_p (bump))
1561 {
1562 tree lhs = gimple_assign_lhs (c->cand_stmt);
1563 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
1564 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1565 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
1566 gsi_replace (&gsi, copy_stmt, false);
1567 if (dump_file && (dump_flags & TDF_DETAILS))
1568 stmt_to_print = copy_stmt;
1569 }
1570 else
1571 {
1572 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
1573 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
1574 if (cand_code != NEGATE_EXPR
1575 && ((operand_equal_p (rhs1, basis_name, 0)
1576 && operand_equal_p (rhs2, bump_tree, 0))
1577 || (operand_equal_p (rhs1, bump_tree, 0)
1578 && operand_equal_p (rhs2, basis_name, 0))))
1579 {
1580 if (dump_file && (dump_flags & TDF_DETAILS))
1581 {
1582 fputs ("(duplicate, not actually replacing)", dump_file);
1583 stmt_to_print = c->cand_stmt;
1584 }
1585 }
1586 else
1587 {
1588 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1589 gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
1590 update_stmt (gsi_stmt (gsi));
1591 if (dump_file && (dump_flags & TDF_DETAILS))
1592 stmt_to_print = gsi_stmt (gsi);
1593 }
1594 }
1595
1596 if (dump_file && (dump_flags & TDF_DETAILS))
1597 {
1598 fputs ("With: ", dump_file);
1599 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
1600 fputs ("\n", dump_file);
1601 }
1602 }
1603
1604 /* Replace candidate C, each sibling of candidate C, and each
1605 dependent of candidate C with an add or subtract. Note that we
1606 only operate on CAND_MULTs with known strides, so we will never
1607 generate a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is
1608 replaced by X = Y + ((i - i') * S), as described in the module
1609 commentary. The folded value ((i - i') * S) is referred to here
1610 as the "bump." */
1611
1612 static void
1613 replace_dependents (slsr_cand_t c)
1614 {
1615 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
1616
1617 /* It is not useful to replace casts, copies, or adds of an SSA name
1618 and a constant. Also skip candidates that have already been
1619 replaced under another interpretation. */
1620 if (cand_code != MODIFY_EXPR
1621 && cand_code != NOP_EXPR
1622 && c->kind == CAND_MULT
1623 && !cand_already_replaced (c))
1624 replace_dependent (c, cand_code);
1625
1626 if (c->sibling)
1627 replace_dependents (lookup_cand (c->sibling));
1628
1629 if (c->dependent)
1630 replace_dependents (lookup_cand (c->dependent));
1631 }
1632 \f
1633 /* Analyze costs of related candidates in the candidate vector,
1634 and make beneficial replacements. */
1635
1636 static void
1637 analyze_candidates_and_replace (void)
1638 {
1639 unsigned i;
1640 slsr_cand_t c;
1641
1642 /* Each candidate that has a null basis and a non-null
1643 dependent is the root of a tree of related statements.
1644 Analyze each tree to determine a subset of those
1645 statements that can be replaced with maximum benefit. */
1646 FOR_EACH_VEC_ELT (slsr_cand_t, cand_vec, i, c)
1647 {
1648 slsr_cand_t first_dep;
1649
1650 if (c->basis != 0 || c->dependent == 0)
1651 continue;
1652
1653 if (dump_file && (dump_flags & TDF_DETAILS))
1654 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
1655 c->cand_num);
1656
1657 first_dep = lookup_cand (c->dependent);
1658
1659 /* If this is a chain of CAND_REFs, unconditionally replace
1660 each of them with a strength-reduced data reference. */
1661 if (c->kind == CAND_REF)
1662 replace_refs (c);
1663
1664 /* If the common stride of all related candidates is a
1665 known constant, and none of these has a phi-dependence,
1666 then all replacements are considered profitable.
1667 Each replaces a multiply by a single add, with the
1668 possibility that a feeding add also goes dead as a
1669 result. */
1670 else if (unconditional_cands_with_known_stride_p (c))
1671 replace_dependents (first_dep);
1672
1673 /* TODO: When the stride is an SSA name, it may still be
1674 profitable to replace some or all of the dependent
1675 candidates, depending on whether the introduced increments
1676 can be reused, or are less expensive to calculate than
1677 the replaced statements. */
1678
1679 /* TODO: When conditional increments occur so that a
1680 candidate is dependent upon a phi-basis, the cost of
1681 introducing a temporary must be accounted for. */
1682 }
1683 }
1684
1685 static unsigned
1686 execute_strength_reduction (void)
1687 {
1688 struct dom_walk_data walk_data;
1689
1690 /* Create the obstack where candidates will reside. */
1691 gcc_obstack_init (&cand_obstack);
1692
1693 /* Allocate the candidate vector. */
1694 cand_vec = VEC_alloc (slsr_cand_t, heap, 128);
1695
1696 /* Allocate the mapping from statements to candidate indices. */
1697 stmt_cand_map = pointer_map_create ();
1698
1699 /* Create the obstack where candidate chains will reside. */
1700 gcc_obstack_init (&chain_obstack);
1701
1702 /* Allocate the mapping from base expressions to candidate chains. */
1703 base_cand_map = htab_create (500, base_cand_hash,
1704 base_cand_eq, base_cand_free);
1705
1706 /* Initialize the loop optimizer. We need to detect flow across
1707 back edges, and this gives us dominator information as well. */
1708 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
1709
1710 /* Set up callbacks for the generic dominator tree walker. */
1711 walk_data.dom_direction = CDI_DOMINATORS;
1712 walk_data.initialize_block_local_data = NULL;
1713 walk_data.before_dom_children = find_candidates_in_block;
1714 walk_data.after_dom_children = NULL;
1715 walk_data.global_data = NULL;
1716 walk_data.block_local_data_size = 0;
1717 init_walk_dominator_tree (&walk_data);
1718
1719 /* Walk the CFG in predominator order looking for strength reduction
1720 candidates. */
1721 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
1722
1723 if (dump_file && (dump_flags & TDF_DETAILS))
1724 {
1725 dump_cand_vec ();
1726 dump_cand_chains ();
1727 }
1728
1729 /* Analyze costs and make appropriate replacements. */
1730 analyze_candidates_and_replace ();
1731
1732 /* Free resources. */
1733 fini_walk_dominator_tree (&walk_data);
1734 loop_optimizer_finalize ();
1735 htab_delete (base_cand_map);
1736 obstack_free (&chain_obstack, NULL);
1737 pointer_map_destroy (stmt_cand_map);
1738 VEC_free (slsr_cand_t, heap, cand_vec);
1739 obstack_free (&cand_obstack, NULL);
1740
1741 return 0;
1742 }
1743
1744 static bool
1745 gate_strength_reduction (void)
1746 {
1747 return flag_tree_slsr;
1748 }
1749
1750 struct gimple_opt_pass pass_strength_reduction =
1751 {
1752 {
1753 GIMPLE_PASS,
1754 "slsr", /* name */
1755 gate_strength_reduction, /* gate */
1756 execute_strength_reduction, /* execute */
1757 NULL, /* sub */
1758 NULL, /* next */
1759 0, /* static_pass_number */
1760 TV_GIMPLE_SLSR, /* tv_id */
1761 PROP_cfg | PROP_ssa, /* properties_required */
1762 0, /* properties_provided */
1763 0, /* properties_destroyed */
1764 0, /* todo_flags_start */
1765 TODO_verify_ssa /* todo_flags_finish */
1766 }
1767 };