gcc/loop-iv.c
1 /* Rtl-level induction variable analysis.
2 Copyright (C) 2004 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 /* This is just a very simplistic analysis of induction variables of the loop.
22 The major use is for determining the number of iterations of a loop for
23 loop unrolling, doloop optimization and branch prediction. For this we
24 are only interested in bivs and a fairly limited set of givs that are
25 needed in the exit condition. We also only compute the iv information on
26 demand.
27
28 The interesting registers are determined. A register is interesting if
29
30 -- it is set only in the blocks that dominate the latch of the current loop
31 -- all its sets are simple -- i.e. in the form we understand
32
 33    We also number the insns sequentially in each basic block.  For a use of an
 34    interesting register it is then easy to find the reaching definition (there
 35    is at most one).
36
 37    An induction variable is then analyzed simply by walking the use-def
 38    chains.
39
40 Usage:
41
42 iv_analysis_loop_init (loop);
43 insn = iv_get_reaching_def (where, reg);
44 if (iv_analyze (insn, reg, &iv))
45 {
46 ...
47 }
48 iv_analysis_done (); */
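
/* As a quick illustration (a hypothetical example, not tied to any particular
   testcase): in a loop like

     for (i = 10; i != n; i += 4)

   the register holding i is a biv whose description has base 10 and step 4,
   i.e. conceptually its value in iteration I is 10 + 4 * I, which is exactly
   the expression get_iv_value computes.  */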
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "hard-reg-set.h"
56 #include "basic-block.h"
57 #include "cfgloop.h"
58 #include "expr.h"
59 #include "output.h"
60
61 /* The insn information. */
62
63 struct insn_info
64 {
65 /* Id of the insn. */
66 unsigned luid;
67
68 /* The previous definition of the register defined by the single
69 set in the insn. */
70 rtx prev_def;
71
72 /* The description of the iv. */
73 struct rtx_iv iv;
74 };
75
76 static struct insn_info *insn_info;
77
 78 /* The last definition of each register.  */
79
80 static rtx *last_def;
81
82 /* The bivs. */
83
84 static struct rtx_iv *bivs;
85
 86 /* Maximal insn number for which there is space in the insn_info array.  */
87
88 static unsigned max_insn_no;
89
 90 /* Maximal register number for which there is space in the bivs and last_def
 91    arrays.  */
92
93 static unsigned max_reg_no;
94
95 /* Dumps information about IV to FILE. */
96
97 extern void dump_iv_info (FILE *, struct rtx_iv *);
98 void
99 dump_iv_info (FILE *file, struct rtx_iv *iv)
100 {
101 if (!iv->base)
102 {
103 fprintf (file, "not simple");
104 return;
105 }
106
107 if (iv->step == const0_rtx
108 && !iv->first_special)
109 fprintf (file, "invariant ");
110
111 print_rtl (file, iv->base);
112 if (iv->step != const0_rtx)
113 {
114 fprintf (file, " + ");
115 print_rtl (file, iv->step);
116 fprintf (file, " * iteration");
117 }
118 fprintf (file, " (in %s)", GET_MODE_NAME (iv->mode));
119
120 if (iv->mode != iv->extend_mode)
121 fprintf (file, " %s to %s",
122 rtx_name[iv->extend],
123 GET_MODE_NAME (iv->extend_mode));
124
125 if (iv->mult != const1_rtx)
126 {
127 fprintf (file, " * ");
128 print_rtl (file, iv->mult);
129 }
130 if (iv->delta != const0_rtx)
131 {
132 fprintf (file, " + ");
133 print_rtl (file, iv->delta);
134 }
135 if (iv->first_special)
136 fprintf (file, " (first special)");
137 }
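
/* For illustration (hypothetical values): an iv with base (const_int 10),
   step (const_int 4), mode SImode and no extend is dumped as

     (const_int 10) + (const_int 4) * iteration (in SI)

   while an invariant (zero step, not first_special) gets the "invariant "
   prefix and no " * iteration" part.  */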
138
139 /* Assigns luids to insns in basic block BB. */
140
141 static void
142 assign_luids (basic_block bb)
143 {
144 unsigned i = 0, uid;
145 rtx insn;
146
147 FOR_BB_INSNS (bb, insn)
148 {
149 uid = INSN_UID (insn);
150 insn_info[uid].luid = i++;
151 insn_info[uid].prev_def = NULL_RTX;
152 insn_info[uid].iv.analysed = false;
153 }
154 }
155
156 /* Generates a subreg to get the least significant part of EXPR (in mode
157 INNER_MODE) to OUTER_MODE. */
158
159 static rtx
160 lowpart_subreg (enum machine_mode outer_mode, rtx expr,
161 enum machine_mode inner_mode)
162 {
163 return simplify_gen_subreg (outer_mode, expr, inner_mode,
164 subreg_lowpart_offset (outer_mode, inner_mode));
165 }
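
/* E.g. for a DImode pseudo X, lowpart_subreg (SImode, X, DImode) produces
   (subreg:SI X N), where the byte offset N chosen by subreg_lowpart_offset
   is 0 on little-endian and 4 on big-endian targets; constants are folded
   directly instead of being wrapped in a subreg.  */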
166
167 /* Checks whether REG is a well-behaved register. */
168
169 static bool
170 simple_reg_p (rtx reg)
171 {
172 unsigned r;
173
174 if (GET_CODE (reg) == SUBREG)
175 {
176 if (!subreg_lowpart_p (reg))
177 return false;
178 reg = SUBREG_REG (reg);
179 }
180
181 if (!REG_P (reg))
182 return false;
183
184 r = REGNO (reg);
185 if (HARD_REGISTER_NUM_P (r))
186 return false;
187
188 if (GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
189 return false;
190
191 if (last_def[r] == const0_rtx)
192 return false;
193
194 return true;
195 }
196
197 /* Checks whether assignment LHS = RHS is simple enough for us to process. */
198
199 static bool
200 simple_set_p (rtx lhs, rtx rhs)
201 {
202 rtx op0, op1;
203
204 if (!REG_P (lhs)
205 || !simple_reg_p (lhs))
206 return false;
207
208 if (CONSTANT_P (rhs))
209 return true;
210
211 switch (GET_CODE (rhs))
212 {
213 case SUBREG:
214 case REG:
215 return simple_reg_p (rhs);
216
217 case SIGN_EXTEND:
218 case ZERO_EXTEND:
219 case NEG:
220 return simple_reg_p (XEXP (rhs, 0));
221
222 case PLUS:
223 case MINUS:
224 case MULT:
225 case ASHIFT:
226 op0 = XEXP (rhs, 0);
227 op1 = XEXP (rhs, 1);
228
229 if (!simple_reg_p (op0)
230 && !CONSTANT_P (op0))
231 return false;
232
233 if (!simple_reg_p (op1)
234 && !CONSTANT_P (op1))
235 return false;
236
237 if (GET_CODE (rhs) == MULT
238 && !CONSTANT_P (op0)
239 && !CONSTANT_P (op1))
240 return false;
241
242 if (GET_CODE (rhs) == ASHIFT
243 && CONSTANT_P (op0))
244 return false;
245
246 return true;
247
248 default:
249 return false;
250 }
251 }
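
/* Illustrative examples (assuming the registers involved are well-behaved
   pseudos in the sense of simple_reg_p): a set like

     (set (reg:SI i) (plus:SI (reg:SI i) (const_int 1)))

   is simple, whereas a source like (mult:SI (reg:SI a) (reg:SI b)) is not,
   because a MULT of two non-constant operands is rejected above.  */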
252
253 /* Mark single SET in INSN. */
254
255 static rtx
256 mark_single_set (rtx insn, rtx set)
257 {
258 rtx def = SET_DEST (set), src;
259 unsigned regno, uid;
260
261 src = find_reg_equal_equiv_note (insn);
262 if (src)
263 src = XEXP (src, 0);
264 else
265 src = SET_SRC (set);
266
267 if (!simple_set_p (SET_DEST (set), src))
268 return NULL_RTX;
269
270 regno = REGNO (def);
271 uid = INSN_UID (insn);
272
273 bivs[regno].analysed = false;
274 insn_info[uid].prev_def = last_def[regno];
275 last_def[regno] = insn;
276
277 return def;
278 }
279
280 /* Invalidate register REG unless it is equal to EXCEPT. */
281
282 static void
283 kill_sets (rtx reg, rtx by ATTRIBUTE_UNUSED, void *except)
284 {
285 if (GET_CODE (reg) == SUBREG)
286 reg = SUBREG_REG (reg);
287 if (!REG_P (reg))
288 return;
289 if (reg == except)
290 return;
291
292 last_def[REGNO (reg)] = const0_rtx;
293 }
294
295 /* Marks sets in basic block BB. If DOM is true, BB dominates the loop
296 latch. */
297
298 static void
299 mark_sets (basic_block bb, bool dom)
300 {
301 rtx insn, set, def;
302
303 FOR_BB_INSNS (bb, insn)
304 {
305 if (!INSN_P (insn))
306 continue;
307
308 if (dom
309 && (set = single_set (insn)))
310 def = mark_single_set (insn, set);
311 else
312 def = NULL_RTX;
313
314 note_stores (PATTERN (insn), kill_sets, def);
315 }
316 }
317
318 /* Prepare the data for an induction variable analysis of a LOOP. */
319
320 void
321 iv_analysis_loop_init (struct loop *loop)
322 {
323 basic_block *body = get_loop_body_in_dom_order (loop);
324 unsigned b;
325
326 if ((unsigned) get_max_uid () >= max_insn_no)
327 {
328 /* Add some reserve for insns and registers produced in optimizations. */
329 max_insn_no = get_max_uid () + 100;
330 if (insn_info)
331 free (insn_info);
332 insn_info = xmalloc (max_insn_no * sizeof (struct insn_info));
333 }
334
335 if ((unsigned) max_reg_num () >= max_reg_no)
336 {
337 max_reg_no = max_reg_num () + 100;
338 if (last_def)
339 free (last_def);
340 last_def = xmalloc (max_reg_no * sizeof (rtx));
341 if (bivs)
342 free (bivs);
343 bivs = xmalloc (max_reg_no * sizeof (struct rtx_iv));
344 }
345
346 memset (last_def, 0, max_reg_num () * sizeof (rtx));
347
348 for (b = 0; b < loop->num_nodes; b++)
349 {
350 assign_luids (body[b]);
351 mark_sets (body[b], just_once_each_iteration_p (loop, body[b]));
352 }
353
354 free (body);
355 }
356
357 /* Gets definition of REG reaching the INSN. If REG is not simple, const0_rtx
358 is returned. If INSN is before the first def in the loop, NULL_RTX is
359 returned. */
360
361 rtx
362 iv_get_reaching_def (rtx insn, rtx reg)
363 {
364 unsigned regno, luid, auid;
365 rtx ainsn;
366 basic_block bb, abb;
367
368 if (GET_CODE (reg) == SUBREG)
369 {
370 if (!subreg_lowpart_p (reg))
371 return const0_rtx;
372 reg = SUBREG_REG (reg);
373 }
374 if (!REG_P (reg))
375 return NULL_RTX;
376
377 regno = REGNO (reg);
378 if (!last_def[regno]
379 || last_def[regno] == const0_rtx)
380 return last_def[regno];
381
382 bb = BLOCK_FOR_INSN (insn);
383 luid = insn_info[INSN_UID (insn)].luid;
384
385 ainsn = last_def[regno];
386 while (1)
387 {
388 abb = BLOCK_FOR_INSN (ainsn);
389
390 if (dominated_by_p (CDI_DOMINATORS, bb, abb))
391 break;
392
393 auid = INSN_UID (ainsn);
394 ainsn = insn_info[auid].prev_def;
395
396 if (!ainsn)
397 return NULL_RTX;
398 }
399
400 while (1)
401 {
402 abb = BLOCK_FOR_INSN (ainsn);
403 if (abb != bb)
404 return ainsn;
405
406 auid = INSN_UID (ainsn);
407 if (luid > insn_info[auid].luid)
408 return ainsn;
409
410 ainsn = insn_info[auid].prev_def;
411 if (!ainsn)
412 return NULL_RTX;
413 }
414 }
415
416 /* Sets IV to invariant CST in MODE. Always returns true (just for
417 consistency with other iv manipulation functions that may fail). */
418
419 static bool
420 iv_constant (struct rtx_iv *iv, rtx cst, enum machine_mode mode)
421 {
422 if (mode == VOIDmode)
423 mode = GET_MODE (cst);
424
425 iv->analysed = true;
426 iv->mode = mode;
427 iv->base = cst;
428 iv->step = const0_rtx;
429 iv->first_special = false;
430 iv->extend = UNKNOWN;
431 iv->extend_mode = iv->mode;
432 iv->delta = const0_rtx;
433 iv->mult = const1_rtx;
434
435 return true;
436 }
437
438 /* Evaluates application of subreg to MODE on IV. */
439
440 static bool
441 iv_subreg (struct rtx_iv *iv, enum machine_mode mode)
442 {
443 /* If iv is invariant, just calculate the new value. */
444 if (iv->step == const0_rtx
445 && !iv->first_special)
446 {
447 rtx val = get_iv_value (iv, const0_rtx);
448 val = lowpart_subreg (mode, val, iv->extend_mode);
449
450 iv->base = val;
451 iv->extend = UNKNOWN;
452 iv->mode = iv->extend_mode = mode;
453 iv->delta = const0_rtx;
454 iv->mult = const1_rtx;
455 return true;
456 }
457
458 if (iv->extend_mode == mode)
459 return true;
460
461 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (iv->mode))
462 return false;
463
464 iv->extend = UNKNOWN;
465 iv->mode = mode;
466
467 iv->base = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
468 simplify_gen_binary (MULT, iv->extend_mode,
469 iv->base, iv->mult));
470 iv->step = simplify_gen_binary (MULT, iv->extend_mode, iv->step, iv->mult);
471 iv->mult = const1_rtx;
472 iv->delta = const0_rtx;
473 iv->first_special = false;
474
475 return true;
476 }
477
478 /* Evaluates application of EXTEND to MODE on IV. */
479
480 static bool
481 iv_extend (struct rtx_iv *iv, enum rtx_code extend, enum machine_mode mode)
482 {
483 /* If iv is invariant, just calculate the new value. */
484 if (iv->step == const0_rtx
485 && !iv->first_special)
486 {
487 rtx val = get_iv_value (iv, const0_rtx);
488 val = simplify_gen_unary (extend, mode, val, iv->extend_mode);
489
490 iv->base = val;
491 iv->extend = UNKNOWN;
492 iv->mode = iv->extend_mode = mode;
493 iv->delta = const0_rtx;
494 iv->mult = const1_rtx;
495 return true;
496 }
497
498 if (mode != iv->extend_mode)
499 return false;
500
501 if (iv->extend != UNKNOWN
502 && iv->extend != extend)
503 return false;
504
505 iv->extend = extend;
506
507 return true;
508 }
509
510 /* Evaluates negation of IV. */
511
512 static bool
513 iv_neg (struct rtx_iv *iv)
514 {
515 if (iv->extend == UNKNOWN)
516 {
517 iv->base = simplify_gen_unary (NEG, iv->extend_mode,
518 iv->base, iv->extend_mode);
519 iv->step = simplify_gen_unary (NEG, iv->extend_mode,
520 iv->step, iv->extend_mode);
521 }
522 else
523 {
524 iv->delta = simplify_gen_unary (NEG, iv->extend_mode,
525 iv->delta, iv->extend_mode);
526 iv->mult = simplify_gen_unary (NEG, iv->extend_mode,
527 iv->mult, iv->extend_mode);
528 }
529
530 return true;
531 }
532
533 /* Evaluates addition or subtraction (according to OP) of IV1 to IV0. */
534
535 static bool
536 iv_add (struct rtx_iv *iv0, struct rtx_iv *iv1, enum rtx_code op)
537 {
538 enum machine_mode mode;
539 rtx arg;
540
541 /* Extend the constant to extend_mode of the other operand if necessary. */
542 if (iv0->extend == UNKNOWN
543 && iv0->mode == iv0->extend_mode
544 && iv0->step == const0_rtx
545 && GET_MODE_SIZE (iv0->extend_mode) < GET_MODE_SIZE (iv1->extend_mode))
546 {
547 iv0->extend_mode = iv1->extend_mode;
548 iv0->base = simplify_gen_unary (ZERO_EXTEND, iv0->extend_mode,
549 iv0->base, iv0->mode);
550 }
551 if (iv1->extend == UNKNOWN
552 && iv1->mode == iv1->extend_mode
553 && iv1->step == const0_rtx
554 && GET_MODE_SIZE (iv1->extend_mode) < GET_MODE_SIZE (iv0->extend_mode))
555 {
556 iv1->extend_mode = iv0->extend_mode;
557 iv1->base = simplify_gen_unary (ZERO_EXTEND, iv1->extend_mode,
558 iv1->base, iv1->mode);
559 }
560
561 mode = iv0->extend_mode;
562 if (mode != iv1->extend_mode)
563 return false;
564
565 if (iv0->extend == UNKNOWN && iv1->extend == UNKNOWN)
566 {
567 if (iv0->mode != iv1->mode)
568 return false;
569
570 iv0->base = simplify_gen_binary (op, mode, iv0->base, iv1->base);
571 iv0->step = simplify_gen_binary (op, mode, iv0->step, iv1->step);
572
573 return true;
574 }
575
576 /* Handle addition of constant. */
577 if (iv1->extend == UNKNOWN
578 && iv1->mode == mode
579 && iv1->step == const0_rtx)
580 {
581 iv0->delta = simplify_gen_binary (op, mode, iv0->delta, iv1->base);
582 return true;
583 }
584
585 if (iv0->extend == UNKNOWN
586 && iv0->mode == mode
587 && iv0->step == const0_rtx)
588 {
589 arg = iv0->base;
590 *iv0 = *iv1;
591 if (op == MINUS
592 && !iv_neg (iv0))
593 return false;
594
595 iv0->delta = simplify_gen_binary (PLUS, mode, iv0->delta, arg);
596 return true;
597 }
598
599 return false;
600 }
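
/* E.g. (a sketch) adding an invariant (const_int 8) to an iv with base B and
   step S in the same mode yields base B + 8 with step S unchanged; if the
   first iv carries an extend, the constant is instead accumulated into its
   delta field (provided the modes agree).  */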
601
 602 /* Evaluates multiplication of IV by the constant MBY.  */
603
604 static bool
605 iv_mult (struct rtx_iv *iv, rtx mby)
606 {
607 enum machine_mode mode = iv->extend_mode;
608
609 if (GET_MODE (mby) != VOIDmode
610 && GET_MODE (mby) != mode)
611 return false;
612
613 if (iv->extend == UNKNOWN)
614 {
615 iv->base = simplify_gen_binary (MULT, mode, iv->base, mby);
616 iv->step = simplify_gen_binary (MULT, mode, iv->step, mby);
617 }
618 else
619 {
620 iv->delta = simplify_gen_binary (MULT, mode, iv->delta, mby);
621 iv->mult = simplify_gen_binary (MULT, mode, iv->mult, mby);
622 }
623
624 return true;
625 }
626
 627 /* Evaluates shift of IV by the constant MBY.  */
628
629 static bool
630 iv_shift (struct rtx_iv *iv, rtx mby)
631 {
632 enum machine_mode mode = iv->extend_mode;
633
634 if (GET_MODE (mby) != VOIDmode
635 && GET_MODE (mby) != mode)
636 return false;
637
638 if (iv->extend == UNKNOWN)
639 {
640 iv->base = simplify_gen_binary (ASHIFT, mode, iv->base, mby);
641 iv->step = simplify_gen_binary (ASHIFT, mode, iv->step, mby);
642 }
643 else
644 {
645 iv->delta = simplify_gen_binary (ASHIFT, mode, iv->delta, mby);
646 iv->mult = simplify_gen_binary (ASHIFT, mode, iv->mult, mby);
647 }
648
649 return true;
650 }
651
 652 /* The recursive part of get_biv_step.  Gets the value of the single value
 653    defined in INSN with respect to the initial value of REG inside the loop,
 654    in the shape described at get_biv_step.  */
655
656 static bool
657 get_biv_step_1 (rtx insn, rtx reg,
658 rtx *inner_step, enum machine_mode *inner_mode,
659 enum rtx_code *extend, enum machine_mode outer_mode,
660 rtx *outer_step)
661 {
662 rtx set, lhs, rhs, op0 = NULL_RTX, op1 = NULL_RTX;
663 rtx next, nextr, def_insn, tmp;
664 enum rtx_code code;
665
666 set = single_set (insn);
667 rhs = find_reg_equal_equiv_note (insn);
668 if (rhs)
669 rhs = XEXP (rhs, 0);
670 else
671 rhs = SET_SRC (set);
672 lhs = SET_DEST (set);
673
674 code = GET_CODE (rhs);
675 switch (code)
676 {
677 case SUBREG:
678 case REG:
679 next = rhs;
680 break;
681
682 case PLUS:
683 case MINUS:
684 op0 = XEXP (rhs, 0);
685 op1 = XEXP (rhs, 1);
686
687 if (code == PLUS && CONSTANT_P (op0))
688 {
689 tmp = op0; op0 = op1; op1 = tmp;
690 }
691
692 if (!simple_reg_p (op0)
693 || !CONSTANT_P (op1))
694 return false;
695
696 if (GET_MODE (rhs) != outer_mode)
697 {
698 /* ppc64 uses expressions like
699
700 (set x:SI (plus:SI (subreg:SI y:DI) 1)).
701
702 this is equivalent to
703
704 (set x':DI (plus:DI y:DI 1))
705 (set x:SI (subreg:SI (x':DI)). */
706 if (GET_CODE (op0) != SUBREG)
707 return false;
708 if (GET_MODE (SUBREG_REG (op0)) != outer_mode)
709 return false;
710 }
711
712 next = op0;
713 break;
714
715 case SIGN_EXTEND:
716 case ZERO_EXTEND:
717 if (GET_MODE (rhs) != outer_mode)
718 return false;
719
720 op0 = XEXP (rhs, 0);
721 if (!simple_reg_p (op0))
722 return false;
723
724 next = op0;
725 break;
726
727 default:
728 return false;
729 }
730
731 if (GET_CODE (next) == SUBREG)
732 {
733 if (!subreg_lowpart_p (next))
734 return false;
735
736 nextr = SUBREG_REG (next);
737 if (GET_MODE (nextr) != outer_mode)
738 return false;
739 }
740 else
741 nextr = next;
742
743 def_insn = iv_get_reaching_def (insn, nextr);
744 if (def_insn == const0_rtx)
745 return false;
746
747 if (!def_insn)
748 {
749 if (!rtx_equal_p (nextr, reg))
750 return false;
751
752 *inner_step = const0_rtx;
753 *extend = UNKNOWN;
754 *inner_mode = outer_mode;
755 *outer_step = const0_rtx;
756 }
757 else if (!get_biv_step_1 (def_insn, reg,
758 inner_step, inner_mode, extend, outer_mode,
759 outer_step))
760 return false;
761
762 if (GET_CODE (next) == SUBREG)
763 {
764 enum machine_mode amode = GET_MODE (next);
765
766 if (GET_MODE_SIZE (amode) > GET_MODE_SIZE (*inner_mode))
767 return false;
768
769 *inner_mode = amode;
770 *inner_step = simplify_gen_binary (PLUS, outer_mode,
771 *inner_step, *outer_step);
772 *outer_step = const0_rtx;
773 *extend = UNKNOWN;
774 }
775
776 switch (code)
777 {
778 case REG:
779 case SUBREG:
780 break;
781
782 case PLUS:
783 case MINUS:
784 if (*inner_mode == outer_mode
785 /* See comment in previous switch. */
786 || GET_MODE (rhs) != outer_mode)
787 *inner_step = simplify_gen_binary (code, outer_mode,
788 *inner_step, op1);
789 else
790 *outer_step = simplify_gen_binary (code, outer_mode,
791 *outer_step, op1);
792 break;
793
794 case SIGN_EXTEND:
795 case ZERO_EXTEND:
796 if (GET_MODE (op0) != *inner_mode
797 || *extend != UNKNOWN
798 || *outer_step != const0_rtx)
799 abort ();
800
801 *extend = code;
802 break;
803
804 default:
805 abort ();
806 }
807
808 return true;
809 }
810
811 /* Gets the operation on register REG inside loop, in shape
812
813 OUTER_STEP + EXTEND_{OUTER_MODE} (SUBREG_{INNER_MODE} (REG + INNER_STEP))
814
815 If the operation cannot be described in this shape, return false. */
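
/* A concrete (hypothetical) instance: for a counter updated only by
   (set (reg:SI i) (plus:SI (reg:SI i) (const_int 1))) inside the loop, the
   shape degenerates to INNER_STEP = 1, INNER_MODE = OUTER_MODE = SImode,
   EXTEND = UNKNOWN and OUTER_STEP = 0; the subreg/extend parts only matter
   when the register is updated in a narrower mode, as in the ppc64 example
   in get_biv_step_1.  */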
816
817 static bool
818 get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
819 enum rtx_code *extend, enum machine_mode *outer_mode,
820 rtx *outer_step)
821 {
822 *outer_mode = GET_MODE (reg);
823
824 if (!get_biv_step_1 (last_def[REGNO (reg)], reg,
825 inner_step, inner_mode, extend, *outer_mode,
826 outer_step))
827 return false;
828
829 if (*inner_mode != *outer_mode
830 && *extend == UNKNOWN)
831 abort ();
832
833 if (*inner_mode == *outer_mode
834 && *extend != UNKNOWN)
835 abort ();
836
837 if (*inner_mode == *outer_mode
838 && *outer_step != const0_rtx)
839 abort ();
840
841 return true;
842 }
843
844 /* Determines whether DEF is a biv and if so, stores its description
845 to *IV. */
846
847 static bool
848 iv_analyze_biv (rtx def, struct rtx_iv *iv)
849 {
850 unsigned regno;
851 rtx inner_step, outer_step;
852 enum machine_mode inner_mode, outer_mode;
853 enum rtx_code extend;
854
855 if (dump_file)
856 {
857 fprintf (dump_file, "Analysing ");
858 print_rtl (dump_file, def);
859 fprintf (dump_file, " for bivness.\n");
860 }
861
862 if (!REG_P (def))
863 {
864 if (!CONSTANT_P (def))
865 return false;
866
867 return iv_constant (iv, def, VOIDmode);
868 }
869
870 regno = REGNO (def);
871 if (last_def[regno] == const0_rtx)
872 {
873 if (dump_file)
874 fprintf (dump_file, " not simple.\n");
875 return false;
876 }
877
878 if (last_def[regno] && bivs[regno].analysed)
879 {
880 if (dump_file)
881 fprintf (dump_file, " already analysed.\n");
882
883 *iv = bivs[regno];
884 return iv->base != NULL_RTX;
885 }
886
887 if (!last_def[regno])
888 {
889 iv_constant (iv, def, VOIDmode);
890 goto end;
891 }
892
893 iv->analysed = true;
894 if (!get_biv_step (def, &inner_step, &inner_mode, &extend,
895 &outer_mode, &outer_step))
896 {
897 iv->base = NULL_RTX;
898 goto end;
899 }
900
901 /* Loop transforms base to es (base + inner_step) + outer_step,
902 where es means extend of subreg between inner_mode and outer_mode.
903 The corresponding induction variable is
904
905 es ((base - outer_step) + i * (inner_step + outer_step)) + outer_step */
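
  /* In the common case INNER_MODE == OUTER_MODE (no extend, OUTER_STEP zero)
     this reduces to the plain base + i * inner_step, e.g. base 0 and step 1
     for an ordinary counter.  */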
906
907 iv->base = simplify_gen_binary (MINUS, outer_mode, def, outer_step);
908 iv->step = simplify_gen_binary (PLUS, outer_mode, inner_step, outer_step);
909 iv->mode = inner_mode;
910 iv->extend_mode = outer_mode;
911 iv->extend = extend;
912 iv->mult = const1_rtx;
913 iv->delta = outer_step;
914 iv->first_special = inner_mode != outer_mode;
915
916 end:
917 if (dump_file)
918 {
919 fprintf (dump_file, " ");
920 dump_iv_info (dump_file, iv);
921 fprintf (dump_file, "\n");
922 }
923
924 bivs[regno] = *iv;
925
926 return iv->base != NULL_RTX;
927 }
928
929 /* Analyzes operand OP of INSN and stores the result to *IV. */
930
931 static bool
932 iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
933 {
934 rtx def_insn;
935 unsigned regno;
936 bool inv = CONSTANT_P (op);
937
938 if (dump_file)
939 {
940 fprintf (dump_file, "Analysing operand ");
941 print_rtl (dump_file, op);
942 fprintf (dump_file, " of insn ");
943 print_rtl_single (dump_file, insn);
944 }
945
946 if (GET_CODE (op) == SUBREG)
947 {
948 if (!subreg_lowpart_p (op))
949 return false;
950
951 if (!iv_analyze_op (insn, SUBREG_REG (op), iv))
952 return false;
953
954 return iv_subreg (iv, GET_MODE (op));
955 }
956
957 if (!inv)
958 {
959 regno = REGNO (op);
960 if (!last_def[regno])
961 inv = true;
962 else if (last_def[regno] == const0_rtx)
963 {
964 if (dump_file)
965 fprintf (dump_file, " not simple.\n");
966 return false;
967 }
968 }
969
970 if (inv)
971 {
972 iv_constant (iv, op, VOIDmode);
973
974 if (dump_file)
975 {
976 fprintf (dump_file, " ");
977 dump_iv_info (dump_file, iv);
978 fprintf (dump_file, "\n");
979 }
980 return true;
981 }
982
983 def_insn = iv_get_reaching_def (insn, op);
984 if (def_insn == const0_rtx)
985 {
986 if (dump_file)
987 fprintf (dump_file, " not simple.\n");
988 return false;
989 }
990
991 return iv_analyze (def_insn, op, iv);
992 }
993
994 /* Analyzes iv DEF defined in INSN and stores the result to *IV. */
995
996 bool
997 iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
998 {
999 unsigned uid;
1000 rtx set, rhs, mby = NULL_RTX, tmp;
1001 rtx op0 = NULL_RTX, op1 = NULL_RTX;
1002 struct rtx_iv iv0, iv1;
1003 enum machine_mode amode;
1004 enum rtx_code code;
1005
1006 if (insn == const0_rtx)
1007 return false;
1008
1009 if (GET_CODE (def) == SUBREG)
1010 {
1011 if (!subreg_lowpart_p (def))
1012 return false;
1013
1014 if (!iv_analyze (insn, SUBREG_REG (def), iv))
1015 return false;
1016
1017 return iv_subreg (iv, GET_MODE (def));
1018 }
1019
1020 if (!insn)
1021 return iv_analyze_biv (def, iv);
1022
1023 if (dump_file)
1024 {
1025 fprintf (dump_file, "Analysing def of ");
1026 print_rtl (dump_file, def);
1027 fprintf (dump_file, " in insn ");
1028 print_rtl_single (dump_file, insn);
1029 }
1030
1031 uid = INSN_UID (insn);
1032 if (insn_info[uid].iv.analysed)
1033 {
1034 if (dump_file)
1035 fprintf (dump_file, " already analysed.\n");
1036 *iv = insn_info[uid].iv;
1037 return iv->base != NULL_RTX;
1038 }
1039
1040 iv->mode = VOIDmode;
1041 iv->base = NULL_RTX;
1042 iv->step = NULL_RTX;
1043
1044 set = single_set (insn);
1045 rhs = find_reg_equal_equiv_note (insn);
1046 if (rhs)
1047 rhs = XEXP (rhs, 0);
1048 else
1049 rhs = SET_SRC (set);
1050 code = GET_CODE (rhs);
1051
1052 if (CONSTANT_P (rhs))
1053 {
1054 op0 = rhs;
1055 amode = GET_MODE (def);
1056 }
1057 else
1058 {
1059 switch (code)
1060 {
1061 case SUBREG:
1062 if (!subreg_lowpart_p (rhs))
1063 goto end;
1064 op0 = rhs;
1065 break;
1066
1067 case REG:
1068 op0 = rhs;
1069 break;
1070
1071 case SIGN_EXTEND:
1072 case ZERO_EXTEND:
1073 case NEG:
1074 op0 = XEXP (rhs, 0);
1075 break;
1076
1077 case PLUS:
1078 case MINUS:
1079 op0 = XEXP (rhs, 0);
1080 op1 = XEXP (rhs, 1);
1081 break;
1082
1083 case MULT:
1084 op0 = XEXP (rhs, 0);
1085 mby = XEXP (rhs, 1);
1086 if (!CONSTANT_P (mby))
1087 {
1088 if (!CONSTANT_P (op0))
1089 abort ();
1090 tmp = op0;
1091 op0 = mby;
1092 mby = tmp;
1093 }
1094 break;
1095
1096 case ASHIFT:
1097 if (CONSTANT_P (XEXP (rhs, 0)))
1098 abort ();
1099 op0 = XEXP (rhs, 0);
1100 mby = XEXP (rhs, 1);
1101 break;
1102
1103 default:
1104 abort ();
1105 }
1106
1107 amode = GET_MODE (rhs);
1108 }
1109
1110 if (op0)
1111 {
1112 if (!iv_analyze_op (insn, op0, &iv0))
1113 goto end;
1114
1115 if (iv0.mode == VOIDmode)
1116 {
1117 iv0.mode = amode;
1118 iv0.extend_mode = amode;
1119 }
1120 }
1121
1122 if (op1)
1123 {
1124 if (!iv_analyze_op (insn, op1, &iv1))
1125 goto end;
1126
1127 if (iv1.mode == VOIDmode)
1128 {
1129 iv1.mode = amode;
1130 iv1.extend_mode = amode;
1131 }
1132 }
1133
1134 switch (code)
1135 {
1136 case SIGN_EXTEND:
1137 case ZERO_EXTEND:
1138 if (!iv_extend (&iv0, code, amode))
1139 goto end;
1140 break;
1141
1142 case NEG:
1143 if (!iv_neg (&iv0))
1144 goto end;
1145 break;
1146
1147 case PLUS:
1148 case MINUS:
1149 if (!iv_add (&iv0, &iv1, code))
1150 goto end;
1151 break;
1152
1153 case MULT:
1154 if (!iv_mult (&iv0, mby))
1155 goto end;
1156 break;
1157
1158 case ASHIFT:
1159 if (!iv_shift (&iv0, mby))
1160 goto end;
1161 break;
1162
1163 default:
1164 break;
1165 }
1166
1167 *iv = iv0;
1168
1169 end:
1170 iv->analysed = true;
1171 insn_info[uid].iv = *iv;
1172
1173 if (dump_file)
1174 {
1175 print_rtl (dump_file, def);
1176 fprintf (dump_file, " in insn ");
1177 print_rtl_single (dump_file, insn);
1178 fprintf (dump_file, " is ");
1179 dump_iv_info (dump_file, iv);
1180 fprintf (dump_file, "\n");
1181 }
1182
1183 return iv->base != NULL_RTX;
1184 }
1185
1186 /* Checks whether the definition of register REG in INSN is a basic induction
1187 variable. IV analysis must have been initialized (via a call to
1188 iv_analysis_loop_init) for this function to produce a result. */
1189
1190 bool
1191 biv_p (rtx insn, rtx reg)
1192 {
1193 struct rtx_iv iv;
1194
1195 if (!REG_P (reg))
1196 return false;
1197
1198 if (last_def[REGNO (reg)] != insn)
1199 return false;
1200
1201 return iv_analyze_biv (reg, &iv);
1202 }
1203
1204 /* Calculates value of IV at ITERATION-th iteration. */
1205
1206 rtx
1207 get_iv_value (struct rtx_iv *iv, rtx iteration)
1208 {
1209 rtx val;
1210
1211 /* We would need to generate some if_then_else patterns, and so far
1212 it is not needed anywhere. */
1213 if (iv->first_special)
1214 abort ();
1215
1216 if (iv->step != const0_rtx && iteration != const0_rtx)
1217 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
1218 simplify_gen_binary (MULT, iv->extend_mode,
1219 iv->step, iteration));
1220 else
1221 val = iv->base;
1222
1223 if (iv->extend_mode == iv->mode)
1224 return val;
1225
1226 val = lowpart_subreg (iv->mode, val, iv->extend_mode);
1227
1228 if (iv->extend == UNKNOWN)
1229 return val;
1230
1231 val = simplify_gen_unary (iv->extend, iv->extend_mode, val, iv->mode);
1232 val = simplify_gen_binary (PLUS, iv->extend_mode, iv->delta,
1233 simplify_gen_binary (MULT, iv->extend_mode,
1234 iv->mult, val));
1235
1236 return val;
1237 }
1238
1239 /* Free the data for an induction variable analysis. */
1240
1241 void
1242 iv_analysis_done (void)
1243 {
1244 max_insn_no = 0;
1245 max_reg_no = 0;
1246 if (insn_info)
1247 {
1248 free (insn_info);
1249 insn_info = NULL;
1250 }
1251 if (last_def)
1252 {
1253 free (last_def);
1254 last_def = NULL;
1255 }
1256 if (bivs)
1257 {
1258 free (bivs);
1259 bivs = NULL;
1260 }
1261 }
1262
1263 /* Computes the inverse of X modulo (1 << MOD).  */
1264
1265 static unsigned HOST_WIDEST_INT
1266 inverse (unsigned HOST_WIDEST_INT x, int mod)
1267 {
1268 unsigned HOST_WIDEST_INT mask =
1269 ((unsigned HOST_WIDEST_INT) 1 << (mod - 1) << 1) - 1;
1270 unsigned HOST_WIDEST_INT rslt = 1;
1271 int i;
1272
1273 for (i = 0; i < mod - 1; i++)
1274 {
1275 rslt = (rslt * x) & mask;
1276 x = (x * x) & mask;
1277 }
1278
1279 return rslt;
1280 }
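
/* A worked example (for illustration): inverse (3, 4) uses mask 0xf and the
   iterations produce rslt = 3, 11, 11, so 11 is returned; indeed
   3 * 11 = 33 == 1 (mod 16), hence 11 is the inverse of 3 modulo 1 << 4.  */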
1281
1282 /* Tries to estimate the maximum number of iterations. */
1283
1284 static unsigned HOST_WIDEST_INT
1285 determine_max_iter (struct niter_desc *desc)
1286 {
1287 rtx niter = desc->niter_expr;
1288 rtx mmin, mmax, left, right;
1289 unsigned HOST_WIDEST_INT nmax, inc;
1290
1291 if (GET_CODE (niter) == AND
1292 && GET_CODE (XEXP (niter, 0)) == CONST_INT)
1293 {
1294 nmax = INTVAL (XEXP (niter, 0));
1295 if (!(nmax & (nmax + 1)))
1296 {
1297 desc->niter_max = nmax;
1298 return nmax;
1299 }
1300 }
1301
1302 get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
1303 nmax = INTVAL (mmax) - INTVAL (mmin);
1304
1305 if (GET_CODE (niter) == UDIV)
1306 {
1307 if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
1308 {
1309 desc->niter_max = nmax;
1310 return nmax;
1311 }
1312 inc = INTVAL (XEXP (niter, 1));
1313 niter = XEXP (niter, 0);
1314 }
1315 else
1316 inc = 1;
1317
1318 if (GET_CODE (niter) == PLUS)
1319 {
1320 left = XEXP (niter, 0);
1321 	      right = XEXP (niter, 1);
1322
1323 if (GET_CODE (right) == CONST_INT)
1324 right = GEN_INT (-INTVAL (right));
1325 }
1326 else if (GET_CODE (niter) == MINUS)
1327 {
1328 left = XEXP (niter, 0);
1329 	      right = XEXP (niter, 1);
1330 }
1331 else
1332 {
1333 left = niter;
1334 right = mmin;
1335 }
1336
1337 if (GET_CODE (left) == CONST_INT)
1338 mmax = left;
1339 if (GET_CODE (right) == CONST_INT)
1340 mmin = right;
1341 nmax = INTVAL (mmax) - INTVAL (mmin);
1342
1343 desc->niter_max = nmax / inc;
1344 return nmax / inc;
1345 }
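
/* A rough illustration: for a niter expression like
   (udiv:SI (minus:SI b a) (const_int 4)) with neither b nor a constant,
   inc becomes 4 and the recorded estimate is simply the full range of the
   mode divided by 4.  */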
1346
1347 /* Checks whether register *REG is in set ALT. Callback for for_each_rtx. */
1348
1349 static int
1350 altered_reg_used (rtx *reg, void *alt)
1351 {
1352 if (!REG_P (*reg))
1353 return 0;
1354
1355 return REGNO_REG_SET_P (alt, REGNO (*reg));
1356 }
1357
1358 /* Marks registers altered by EXPR in set ALT. */
1359
1360 static void
1361 mark_altered (rtx expr, rtx by ATTRIBUTE_UNUSED, void *alt)
1362 {
1363 if (GET_CODE (expr) == SUBREG)
1364 expr = SUBREG_REG (expr);
1365 if (!REG_P (expr))
1366 return;
1367
1368 SET_REGNO_REG_SET (alt, REGNO (expr));
1369 }
1370
1371 /* Checks whether RHS is simple enough to process. */
1372
1373 static bool
1374 simple_rhs_p (rtx rhs)
1375 {
1376 rtx op0, op1;
1377
1378 if (CONSTANT_P (rhs)
1379 || REG_P (rhs))
1380 return true;
1381
1382 switch (GET_CODE (rhs))
1383 {
1384 case PLUS:
1385 case MINUS:
1386 op0 = XEXP (rhs, 0);
1387 op1 = XEXP (rhs, 1);
1388 /* Allow reg + const sets only. */
1389 if (REG_P (op0) && CONSTANT_P (op1))
1390 return true;
1391 if (REG_P (op1) && CONSTANT_P (op0))
1392 return true;
1393
1394 return false;
1395
1396 default:
1397 return false;
1398 }
1399 }
1400
1401 /* Simplifies *EXPR using assignment in INSN. ALTERED is the set of registers
1402 altered so far. */
1403
1404 static void
1405 simplify_using_assignment (rtx insn, rtx *expr, regset altered)
1406 {
1407 rtx set = single_set (insn);
1408 rtx lhs = NULL_RTX, rhs;
1409 bool ret = false;
1410
1411 if (set)
1412 {
1413 lhs = SET_DEST (set);
1414 if (!REG_P (lhs)
1415 || altered_reg_used (&lhs, altered))
1416 ret = true;
1417 }
1418 else
1419 ret = true;
1420
1421 note_stores (PATTERN (insn), mark_altered, altered);
1422 if (CALL_P (insn))
1423 {
1424 int i;
1425
1426 /* Kill all call clobbered registers. */
1427 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1428 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1429 SET_REGNO_REG_SET (altered, i);
1430 }
1431
1432 if (ret)
1433 return;
1434
1435 rhs = find_reg_equal_equiv_note (insn);
1436 if (rhs)
1437 rhs = XEXP (rhs, 0);
1438 else
1439 rhs = SET_SRC (set);
1440
1441 if (!simple_rhs_p (rhs))
1442 return;
1443
1444 if (for_each_rtx (&rhs, altered_reg_used, altered))
1445 return;
1446
1447 *expr = simplify_replace_rtx (*expr, lhs, rhs);
1448 }
1449
1450 /* Checks whether A implies B. */
1451
1452 static bool
1453 implies_p (rtx a, rtx b)
1454 {
1455 rtx op0, op1, opb0, opb1, r;
1456 enum machine_mode mode;
1457
1458 if (GET_CODE (a) == EQ)
1459 {
1460 op0 = XEXP (a, 0);
1461 op1 = XEXP (a, 1);
1462
1463 if (REG_P (op0))
1464 {
1465 r = simplify_replace_rtx (b, op0, op1);
1466 if (r == const_true_rtx)
1467 return true;
1468 }
1469
1470 if (REG_P (op1))
1471 {
1472 r = simplify_replace_rtx (b, op1, op0);
1473 if (r == const_true_rtx)
1474 return true;
1475 }
1476 }
1477
1478 /* A < B implies A + 1 <= B. */
1479 if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
1480 && (GET_CODE (b) == GE || GET_CODE (b) == LE))
1481 {
1482 op0 = XEXP (a, 0);
1483 op1 = XEXP (a, 1);
1484 opb0 = XEXP (b, 0);
1485 opb1 = XEXP (b, 1);
1486
1487 if (GET_CODE (a) == GT)
1488 {
1489 r = op0;
1490 op0 = op1;
1491 op1 = r;
1492 }
1493
1494 if (GET_CODE (b) == GE)
1495 {
1496 r = opb0;
1497 opb0 = opb1;
1498 opb1 = r;
1499 }
1500
1501 mode = GET_MODE (op0);
1502 if (mode != GET_MODE (opb0))
1503 mode = VOIDmode;
1504 else if (mode == VOIDmode)
1505 {
1506 mode = GET_MODE (op1);
1507 if (mode != GET_MODE (opb1))
1508 mode = VOIDmode;
1509 }
1510
1511 if (mode != VOIDmode
1512 && rtx_equal_p (op1, opb1)
1513 && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
1514 return true;
1515 }
1516
1517 return false;
1518 }
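
/* Examples (illustrative): (eq (reg r) (const_int 0)) implies any condition
   that folds to const_true_rtx once r is replaced by 0, and
   (lt (reg a) (reg b)) implies (le (plus (reg a) (const_int 1)) (reg b)),
   an instance of the A < B implies A + 1 <= B rule above.  */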
1519
1520 /* Canonicalizes COND so that
1521
1522    (1) the operands are ordered according to
1523        swap_commutative_operands_p;
1524 (2) (LE x const) will be replaced with (LT x <const+1>) and similarly
1525 for GE, GEU, and LEU. */
1526
1527 rtx
1528 canon_condition (rtx cond)
1529 {
1530 rtx tem;
1531 rtx op0, op1;
1532 enum rtx_code code;
1533 enum machine_mode mode;
1534
1535 code = GET_CODE (cond);
1536 op0 = XEXP (cond, 0);
1537 op1 = XEXP (cond, 1);
1538
1539 if (swap_commutative_operands_p (op0, op1))
1540 {
1541 code = swap_condition (code);
1542 tem = op0;
1543 op0 = op1;
1544 op1 = tem;
1545 }
1546
1547 mode = GET_MODE (op0);
1548 if (mode == VOIDmode)
1549 mode = GET_MODE (op1);
1550 if (mode == VOIDmode)
1551 abort ();
1552
1553 if (GET_CODE (op1) == CONST_INT
1554 && GET_MODE_CLASS (mode) != MODE_CC
1555 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1556 {
1557 HOST_WIDE_INT const_val = INTVAL (op1);
1558 unsigned HOST_WIDE_INT uconst_val = const_val;
1559 unsigned HOST_WIDE_INT max_val
1560 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode);
1561
1562 switch (code)
1563 {
1564 case LE:
1565 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
1566 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
1567 break;
1568
1569 /* When cross-compiling, const_val might be sign-extended from
1570 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
1571 case GE:
1572 if ((HOST_WIDE_INT) (const_val & max_val)
1573 != (((HOST_WIDE_INT) 1
1574 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
1575 code = GT, op1 = gen_int_mode (const_val - 1, mode);
1576 break;
1577
1578 case LEU:
1579 if (uconst_val < max_val)
1580 code = LTU, op1 = gen_int_mode (uconst_val + 1, mode);
1581 break;
1582
1583 case GEU:
1584 if (uconst_val != 0)
1585 code = GTU, op1 = gen_int_mode (uconst_val - 1, mode);
1586 break;
1587
1588 default:
1589 break;
1590 }
1591 }
1592
1593 if (op0 != XEXP (cond, 0)
1594 || op1 != XEXP (cond, 1)
1595 || code != GET_CODE (cond)
1596 || GET_MODE (cond) != SImode)
1597 cond = gen_rtx_fmt_ee (code, SImode, op0, op1);
1598
1599 return cond;
1600 }
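
/* E.g. (le (reg:SI x) (const_int 9)) becomes (lt (reg:SI x) (const_int 10))
   and (geu (reg:SI x) (const_int 1)) becomes (gtu (reg:SI x) (const_int 0));
   the returned comparison is always expressed in SImode.  */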
1601
1602 /* Tries to use the fact that COND holds to simplify EXPR. ALTERED is the
1603 set of altered regs. */
1604
1605 void
1606 simplify_using_condition (rtx cond, rtx *expr, regset altered)
1607 {
1608 rtx rev, reve, exp = *expr;
1609
1610 if (!COMPARISON_P (exp))
1611 return;
1612
1613 /* If some register gets altered later, we do not really speak about its
1614 value at the time of comparison. */
1615 if (altered
1616 && for_each_rtx (&cond, altered_reg_used, altered))
1617 return;
1618
1619 rev = reversed_condition (cond);
1620 reve = reversed_condition (exp);
1621
1622 cond = canon_condition (cond);
1623 exp = canon_condition (exp);
1624 if (rev)
1625 rev = canon_condition (rev);
1626 if (reve)
1627 reve = canon_condition (reve);
1628
1629 if (rtx_equal_p (exp, cond))
1630 {
1631 *expr = const_true_rtx;
1632 return;
1633 }
1634
1635
1636 if (rev && rtx_equal_p (exp, rev))
1637 {
1638 *expr = const0_rtx;
1639 return;
1640 }
1641
1642 if (implies_p (cond, exp))
1643 {
1644 *expr = const_true_rtx;
1645 return;
1646 }
1647
1648 if (reve && implies_p (cond, reve))
1649 {
1650 *expr = const0_rtx;
1651 return;
1652 }
1653
1654 /* A proof by contradiction. If *EXPR implies (not cond), *EXPR must
1655 be false. */
1656 if (rev && implies_p (exp, rev))
1657 {
1658 *expr = const0_rtx;
1659 return;
1660 }
1661
1662   /* Similarly, if (not *EXPR) implies (not cond), *EXPR must be true.  */
1663 if (rev && reve && implies_p (reve, rev))
1664 {
1665 *expr = const_true_rtx;
1666 return;
1667 }
1668
1669 /* We would like to have some other tests here. TODO. */
1670
1671 return;
1672 }
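
/* For example (illustrative): if COND is (ne (reg i) (const_int 0)) and
   *EXPR is (eq (reg i) (const_int 0)), *EXPR is the reversed condition and
   becomes const0_rtx; if *EXPR is equal to COND itself, it becomes
   const_true_rtx (provided the register is not in ALTERED).  */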
1673
1674 /* Use the relationship between A and *B to possibly eliminate *B.
1675    OP is the operation we consider.  */
1676
1677 static void
1678 eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
1679 {
1680 if (op == AND)
1681 {
1682 /* If A implies *B, we may replace *B by true. */
1683 if (implies_p (a, *b))
1684 *b = const_true_rtx;
1685 }
1686 else if (op == IOR)
1687 {
1688 /* If *B implies A, we may replace *B by false. */
1689 if (implies_p (*b, a))
1690 *b = const0_rtx;
1691 }
1692 else
1693 abort ();
1694 }
1695
1696 /* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
1697 operation we consider. */
1698
1699 static void
1700 eliminate_implied_conditions (enum rtx_code op, rtx *head, rtx tail)
1701 {
1702 rtx elt;
1703
1704 for (elt = tail; elt; elt = XEXP (elt, 1))
1705 eliminate_implied_condition (op, *head, &XEXP (elt, 0));
1706 for (elt = tail; elt; elt = XEXP (elt, 1))
1707 eliminate_implied_condition (op, XEXP (elt, 0), head);
1708 }
1709
1710 /* Simplifies *EXPR using initial values at the start of the LOOP. If *EXPR
1711 is a list, its elements are assumed to be combined using OP. */
1712
1713 static void
1714 simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
1715 {
1716 rtx head, tail, insn;
1717 rtx neutral, aggr;
1718 regset altered;
1719 regset_head altered_head;
1720 edge e;
1721
1722 if (!*expr)
1723 return;
1724
1725 if (CONSTANT_P (*expr))
1726 return;
1727
1728 if (GET_CODE (*expr) == EXPR_LIST)
1729 {
1730 head = XEXP (*expr, 0);
1731 tail = XEXP (*expr, 1);
1732
1733 eliminate_implied_conditions (op, &head, tail);
1734
1735 if (op == AND)
1736 {
1737 neutral = const_true_rtx;
1738 aggr = const0_rtx;
1739 }
1740 else if (op == IOR)
1741 {
1742 neutral = const0_rtx;
1743 aggr = const_true_rtx;
1744 }
1745 else
1746 abort ();
1747
1748 simplify_using_initial_values (loop, UNKNOWN, &head);
1749 if (head == aggr)
1750 {
1751 XEXP (*expr, 0) = aggr;
1752 XEXP (*expr, 1) = NULL_RTX;
1753 return;
1754 }
1755 else if (head == neutral)
1756 {
1757 *expr = tail;
1758 simplify_using_initial_values (loop, op, expr);
1759 return;
1760 }
1761 simplify_using_initial_values (loop, op, &tail);
1762
1763 if (tail && XEXP (tail, 0) == aggr)
1764 {
1765 *expr = tail;
1766 return;
1767 }
1768
1769 XEXP (*expr, 0) = head;
1770 XEXP (*expr, 1) = tail;
1771 return;
1772 }
1773
1774 if (op != UNKNOWN)
1775 abort ();
1776
1777 e = loop_preheader_edge (loop);
1778 if (e->src == ENTRY_BLOCK_PTR)
1779 return;
1780
1781 altered = INITIALIZE_REG_SET (altered_head);
1782
1783 while (1)
1784 {
1785 basic_block tmp_bb;
1786
1787 insn = BB_END (e->src);
1788 if (any_condjump_p (insn))
1789 {
1790 rtx cond = get_condition (BB_END (e->src), NULL, false, true);
1791
1792 if (cond && (e->flags & EDGE_FALLTHRU))
1793 cond = reversed_condition (cond);
1794 if (cond)
1795 {
1796 simplify_using_condition (cond, expr, altered);
1797 if (CONSTANT_P (*expr))
1798 {
1799 FREE_REG_SET (altered);
1800 return;
1801 }
1802 }
1803 }
1804
1805 FOR_BB_INSNS_REVERSE (e->src, insn)
1806 {
1807 if (!INSN_P (insn))
1808 continue;
1809
1810 simplify_using_assignment (insn, expr, altered);
1811 if (CONSTANT_P (*expr))
1812 {
1813 FREE_REG_SET (altered);
1814 return;
1815 }
1816 }
1817
1818 /* This is a bit subtle. Store away e->src in tmp_bb, since we
1819 modify `e' and this can invalidate the subsequent count of
1820 e->src's predecessors by looking at the wrong block. */
1821 tmp_bb = e->src;
1822 e = EDGE_PRED (tmp_bb, 0);
1823 if (EDGE_COUNT (tmp_bb->preds) > 1
1824 || e->src == ENTRY_BLOCK_PTR)
1825 break;
1826 }
1827
1828 FREE_REG_SET (altered);
1829 }
1830
1831 /* Transforms invariant IV into MODE.  Adds assumptions to DESC based on the
1832    fact that IV occurs as the left operand of comparison COND and that its
1833    signedness is SIGNED_P.  */
1834
1835 static void
1836 shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
1837 enum rtx_code cond, bool signed_p, struct niter_desc *desc)
1838 {
1839 rtx mmin, mmax, cond_over, cond_under;
1840
1841 get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
1842 cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
1843 iv->base, mmin);
1844 cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
1845 iv->base, mmax);
1846
1847 switch (cond)
1848 {
1849 case LE:
1850 case LT:
1851 case LEU:
1852 case LTU:
1853 if (cond_under != const0_rtx)
1854 desc->infinite =
1855 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1856 if (cond_over != const0_rtx)
1857 desc->noloop_assumptions =
1858 alloc_EXPR_LIST (0, cond_over, desc->noloop_assumptions);
1859 break;
1860
1861 case GE:
1862 case GT:
1863 case GEU:
1864 case GTU:
1865 if (cond_over != const0_rtx)
1866 desc->infinite =
1867 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1868 if (cond_under != const0_rtx)
1869 desc->noloop_assumptions =
1870 alloc_EXPR_LIST (0, cond_under, desc->noloop_assumptions);
1871 break;
1872
1873 case NE:
1874 if (cond_over != const0_rtx)
1875 desc->infinite =
1876 alloc_EXPR_LIST (0, cond_over, desc->infinite);
1877 if (cond_under != const0_rtx)
1878 desc->infinite =
1879 alloc_EXPR_LIST (0, cond_under, desc->infinite);
1880 break;
1881
1882 default:
1883 abort ();
1884 }
1885
1886 iv->mode = mode;
1887 iv->extend = signed_p ? SIGN_EXTEND : ZERO_EXTEND;
1888 }
1889
1890 /* Transforms IV0 and IV1 compared by COND so that they are both compared as
1891 subregs of the same mode if possible (sometimes it is necessary to add
1892 some assumptions to DESC). */
1893
1894 static bool
1895 canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
1896 enum rtx_code cond, struct niter_desc *desc)
1897 {
1898 enum machine_mode comp_mode;
1899 bool signed_p;
1900
1901 /* If the ivs behave specially in the first iteration, or are
1902 added/multiplied after extending, we ignore them. */
1903 if (iv0->first_special || iv0->mult != const1_rtx || iv0->delta != const0_rtx)
1904 return false;
1905 if (iv1->first_special || iv1->mult != const1_rtx || iv1->delta != const0_rtx)
1906 return false;
1907
1908 /* If there is some extend, it must match signedness of the comparison. */
1909 switch (cond)
1910 {
1911 case LE:
1912 case LT:
1913 if (iv0->extend == ZERO_EXTEND
1914 || iv1->extend == ZERO_EXTEND)
1915 return false;
1916 signed_p = true;
1917 break;
1918
1919 case LEU:
1920 case LTU:
1921 if (iv0->extend == SIGN_EXTEND
1922 || iv1->extend == SIGN_EXTEND)
1923 return false;
1924 signed_p = false;
1925 break;
1926
1927 case NE:
1928 if (iv0->extend != UNKNOWN
1929 && iv1->extend != UNKNOWN
1930 && iv0->extend != iv1->extend)
1931 return false;
1932
1933 signed_p = false;
1934 if (iv0->extend != UNKNOWN)
1935 signed_p = iv0->extend == SIGN_EXTEND;
1936 if (iv1->extend != UNKNOWN)
1937 signed_p = iv1->extend == SIGN_EXTEND;
1938 break;
1939
1940 default:
1941 abort ();
1942 }
1943
1944 /* Values of both variables should be computed in the same mode. These
1945 might indeed be different, if we have comparison like
1946
1947 (compare (subreg:SI (iv0)) (subreg:SI (iv1)))
1948
1949 and iv0 and iv1 are both ivs iterating in SI mode, but calculated
1950 in different modes. This does not seem impossible to handle, but
1951 it hardly ever occurs in practice.
1952
1953      The only exception is the case when one of the operands is invariant.
1954      For example, Pentium 3 generates comparisons like
1955      (lt (subreg:HI (reg:SI)) 100).  Here we assign HImode to 100, but we
1956      definitely do not want this to prevent the optimization.  */
1957 comp_mode = iv0->extend_mode;
1958 if (GET_MODE_BITSIZE (comp_mode) < GET_MODE_BITSIZE (iv1->extend_mode))
1959 comp_mode = iv1->extend_mode;
1960
1961 if (iv0->extend_mode != comp_mode)
1962 {
1963 if (iv0->mode != iv0->extend_mode
1964 || iv0->step != const0_rtx)
1965 return false;
1966
1967 iv0->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1968 comp_mode, iv0->base, iv0->mode);
1969 iv0->extend_mode = comp_mode;
1970 }
1971
1972 if (iv1->extend_mode != comp_mode)
1973 {
1974 if (iv1->mode != iv1->extend_mode
1975 || iv1->step != const0_rtx)
1976 return false;
1977
1978 iv1->base = simplify_gen_unary (signed_p ? SIGN_EXTEND : ZERO_EXTEND,
1979 comp_mode, iv1->base, iv1->mode);
1980 iv1->extend_mode = comp_mode;
1981 }
1982
1983 /* Check that both ivs belong to a range of a single mode. If one of the
1984 operands is an invariant, we may need to shorten it into the common
1985 mode. */
1986 if (iv0->mode == iv0->extend_mode
1987 && iv0->step == const0_rtx
1988 && iv0->mode != iv1->mode)
1989 shorten_into_mode (iv0, iv1->mode, cond, signed_p, desc);
1990
1991 if (iv1->mode == iv1->extend_mode
1992 && iv1->step == const0_rtx
1993 && iv0->mode != iv1->mode)
1994 shorten_into_mode (iv1, iv0->mode, swap_condition (cond), signed_p, desc);
1995
1996 if (iv0->mode != iv1->mode)
1997 return false;
1998
1999 desc->mode = iv0->mode;
2000 desc->signed_p = signed_p;
2001
2002 return true;
2003 }
2004
2005 /* Computes number of iterations of the CONDITION in INSN in LOOP and stores
2006 the result into DESC. Very similar to determine_number_of_iterations
2007 (basically its rtl version), complicated by things like subregs. */
2008
2009 void
2010 iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
2011 struct niter_desc *desc)
2012 {
2013 rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
2014 struct rtx_iv iv0, iv1, tmp_iv;
2015 rtx assumption, may_not_xform;
2016 enum rtx_code cond;
2017 enum machine_mode mode, comp_mode;
2018 rtx mmin, mmax, mode_mmin, mode_mmax;
2019 unsigned HOST_WIDEST_INT s, size, d, inv;
2020 HOST_WIDEST_INT up, down, inc;
2021 int was_sharp = false;
2022 rtx old_niter;
2023
2024 /* The meaning of these assumptions is this:
2025 if !assumptions
2026 then the rest of information does not have to be valid
2027 if noloop_assumptions then the loop does not roll
2028 if infinite then this exit is never used */
2029
2030 desc->assumptions = NULL_RTX;
2031 desc->noloop_assumptions = NULL_RTX;
2032 desc->infinite = NULL_RTX;
2033 desc->simple_p = true;
2034
2035 desc->const_iter = false;
2036 desc->niter_expr = NULL_RTX;
2037 desc->niter_max = 0;
2038
2039 cond = GET_CODE (condition);
2040 if (!COMPARISON_P (condition))
2041 abort ();
2042
2043 mode = GET_MODE (XEXP (condition, 0));
2044 if (mode == VOIDmode)
2045 mode = GET_MODE (XEXP (condition, 1));
2046 /* The constant comparisons should be folded. */
2047 if (mode == VOIDmode)
2048 abort ();
2049
2050 /* We only handle integers or pointers. */
2051 if (GET_MODE_CLASS (mode) != MODE_INT
2052 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
2053 goto fail;
2054
2055 op0 = XEXP (condition, 0);
2056 def_insn = iv_get_reaching_def (insn, op0);
2057 if (!iv_analyze (def_insn, op0, &iv0))
2058 goto fail;
2059 if (iv0.extend_mode == VOIDmode)
2060 iv0.mode = iv0.extend_mode = mode;
2061
2062 op1 = XEXP (condition, 1);
2063 def_insn = iv_get_reaching_def (insn, op1);
2064 if (!iv_analyze (def_insn, op1, &iv1))
2065 goto fail;
2066 if (iv1.extend_mode == VOIDmode)
2067 iv1.mode = iv1.extend_mode = mode;
2068
2069 if (GET_MODE_BITSIZE (iv0.extend_mode) > HOST_BITS_PER_WIDE_INT
2070 || GET_MODE_BITSIZE (iv1.extend_mode) > HOST_BITS_PER_WIDE_INT)
2071 goto fail;
2072
2073 /* Check condition and normalize it. */
2074
2075 switch (cond)
2076 {
2077 case GE:
2078 case GT:
2079 case GEU:
2080 case GTU:
2081 tmp_iv = iv0; iv0 = iv1; iv1 = tmp_iv;
2082 cond = swap_condition (cond);
2083 break;
2084 case NE:
2085 case LE:
2086 case LEU:
2087 case LT:
2088 case LTU:
2089 break;
2090 default:
2091 goto fail;
2092 }
2093
2094 /* Handle extends. This is relatively nontrivial, so we only try in some
2095 easy cases, when we can canonicalize the ivs (possibly by adding some
2096 assumptions) to shape subreg (base + i * step). This function also fills
2097 in desc->mode and desc->signed_p. */
2098
2099 if (!canonicalize_iv_subregs (&iv0, &iv1, cond, desc))
2100 goto fail;
2101
2102 comp_mode = iv0.extend_mode;
2103 mode = iv0.mode;
2104 size = GET_MODE_BITSIZE (mode);
2105 get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
2106 mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
2107 mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
2108
2109 if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
2110 goto fail;
2111
2112 /* We can take care of the case of two induction variables chasing each other
2113 if the test is NE. I have never seen a loop using it, but still it is
2114 cool. */
2115 if (iv0.step != const0_rtx && iv1.step != const0_rtx)
2116 {
2117 if (cond != NE)
2118 goto fail;
2119
2120 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2121 iv1.step = const0_rtx;
2122 }
2123
2124   /* This is either an infinite loop or one that ends immediately, depending
2125      on the initial values.  Unswitching should remove this kind of condition.  */
2126 if (iv0.step == const0_rtx && iv1.step == const0_rtx)
2127 goto fail;
2128
2129 /* Ignore loops of while (i-- < 10) type. */
2130 if (cond != NE
2131 && (INTVAL (iv0.step) < 0 || INTVAL (iv1.step) > 0))
2132 goto fail;
2133
2134 /* Some more condition normalization. We must record some assumptions
2135 due to overflows. */
2136 switch (cond)
2137 {
2138 case LT:
2139 case LTU:
2140       /* We want to take care only of non-sharp relationals; this is easy,
2141 	 as in the cases where the overflow would make the transformation unsafe
2142 	 the loop does not roll.  Seemingly it would make more sense to take
2143 	 care of sharp relationals instead, as NE is more similar to them, but
2144 	 the problem is that here the transformation would be more difficult
2145 	 due to possibly infinite loops.  */
2146 if (iv0.step == const0_rtx)
2147 {
2148 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2149 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2150 mode_mmax);
2151 if (assumption == const_true_rtx)
2152 goto zero_iter;
2153 iv0.base = simplify_gen_binary (PLUS, comp_mode,
2154 iv0.base, const1_rtx);
2155 }
2156 else
2157 {
2158 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2159 assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
2160 mode_mmin);
2161 if (assumption == const_true_rtx)
2162 goto zero_iter;
2163 iv1.base = simplify_gen_binary (PLUS, comp_mode,
2164 iv1.base, constm1_rtx);
2165 }
2166
2167 if (assumption != const0_rtx)
2168 desc->noloop_assumptions =
2169 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2170 cond = (cond == LT) ? LE : LEU;
2171
2172 /* It will be useful to be able to tell the difference once more in
2173 LE -> NE reduction. */
2174 was_sharp = true;
2175 break;
2176 default: ;
2177 }
2178
2179 /* Take care of trivially infinite loops. */
2180 if (cond != NE)
2181 {
2182 if (iv0.step == const0_rtx)
2183 {
2184 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2185 if (rtx_equal_p (tmp, mode_mmin))
2186 {
2187 desc->infinite =
2188 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2189 return;
2190 }
2191 }
2192 else
2193 {
2194 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2195 if (rtx_equal_p (tmp, mode_mmax))
2196 {
2197 desc->infinite =
2198 alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
2199 return;
2200 }
2201 }
2202 }
2203
2204   /* If we can, we want to take care of NE conditions instead of size
2205      comparisons, as they are much more friendly (most importantly
2206      this takes care of the special handling of loops with step 1).  We can
2207      do it if we first check that the upper bound is greater than or equal to
2208      the lower bound, that their difference is a constant c modulo the step,
2209      and that there is no overflow.  */
2210 if (cond != NE)
2211 {
2212 if (iv0.step == const0_rtx)
2213 step = simplify_gen_unary (NEG, comp_mode, iv1.step, comp_mode);
2214 else
2215 step = iv0.step;
2216 delta = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2217 delta = lowpart_subreg (mode, delta, comp_mode);
2218 delta = simplify_gen_binary (UMOD, mode, delta, step);
2219 may_xform = const0_rtx;
2220 may_not_xform = const_true_rtx;
2221
2222 if (GET_CODE (delta) == CONST_INT)
2223 {
2224 if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
2225 {
2226 /* A special case. We have transformed condition of type
2227 for (i = 0; i < 4; i += 4)
2228 into
2229 for (i = 0; i <= 3; i += 4)
2230 obviously if the test for overflow during that transformation
2231 passed, we cannot overflow here. Most importantly any
2232 loop with sharp end condition and step 1 falls into this
2233 category, so handling this case specially is definitely
2234 		 worth the trouble.  */
2235 may_xform = const_true_rtx;
2236 }
2237 else if (iv0.step == const0_rtx)
2238 {
2239 bound = simplify_gen_binary (PLUS, comp_mode, mmin, step);
2240 bound = simplify_gen_binary (MINUS, comp_mode, bound, delta);
2241 bound = lowpart_subreg (mode, bound, comp_mode);
2242 tmp = lowpart_subreg (mode, iv0.base, comp_mode);
2243 may_xform = simplify_gen_relational (cond, SImode, mode,
2244 bound, tmp);
2245 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2246 SImode, mode,
2247 bound, tmp);
2248 }
2249 else
2250 {
2251 bound = simplify_gen_binary (MINUS, comp_mode, mmax, step);
2252 bound = simplify_gen_binary (PLUS, comp_mode, bound, delta);
2253 bound = lowpart_subreg (mode, bound, comp_mode);
2254 tmp = lowpart_subreg (mode, iv1.base, comp_mode);
2255 may_xform = simplify_gen_relational (cond, SImode, mode,
2256 tmp, bound);
2257 may_not_xform = simplify_gen_relational (reverse_condition (cond),
2258 SImode, mode,
2259 tmp, bound);
2260 }
2261 }
2262
2263 if (may_xform != const0_rtx)
2264 {
2265 /* We perform the transformation always provided that it is not
2266 completely senseless. This is OK, as we would need this assumption
2267 to determine the number of iterations anyway. */
2268 if (may_xform != const_true_rtx)
2269 {
2270 /* If the step is a power of two and the final value we have
2271 computed overflows, the cycle is infinite. Otherwise it
2272 is nontrivial to compute the number of iterations. */
2273 s = INTVAL (step);
2274 if ((s & (s - 1)) == 0)
2275 desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
2276 desc->infinite);
2277 else
2278 desc->assumptions = alloc_EXPR_LIST (0, may_xform,
2279 desc->assumptions);
2280 }
2281
2282 /* We are going to lose some information about upper bound on
2283 number of iterations in this step, so record the information
2284 here. */
2285 inc = INTVAL (iv0.step) - INTVAL (iv1.step);
2286 if (GET_CODE (iv1.base) == CONST_INT)
2287 up = INTVAL (iv1.base);
2288 else
2289 up = INTVAL (mode_mmax) - inc;
2290 down = INTVAL (GET_CODE (iv0.base) == CONST_INT
2291 ? iv0.base
2292 : mode_mmin);
2293 desc->niter_max = (up - down) / inc + 1;
2294
2295 if (iv0.step == const0_rtx)
2296 {
2297 iv0.base = simplify_gen_binary (PLUS, comp_mode, iv0.base, delta);
2298 iv0.base = simplify_gen_binary (MINUS, comp_mode, iv0.base, step);
2299 }
2300 else
2301 {
2302 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, delta);
2303 iv1.base = simplify_gen_binary (PLUS, comp_mode, iv1.base, step);
2304 }
2305
2306 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2307 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2308 assumption = simplify_gen_relational (reverse_condition (cond),
2309 SImode, mode, tmp0, tmp1);
2310 if (assumption == const_true_rtx)
2311 goto zero_iter;
2312 else if (assumption != const0_rtx)
2313 desc->noloop_assumptions =
2314 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2315 cond = NE;
2316 }
2317 }
2318
2319 /* Count the number of iterations. */
2320 if (cond == NE)
2321 {
2322 /* Everything we do here is just arithmetic modulo the size of the mode.
2323 This allows us to do more involved computations of the number of
2324 iterations than in the other cases.  First transform the condition
2325 into the shape s * i <> c, with s positive. */
2326 iv1.base = simplify_gen_binary (MINUS, comp_mode, iv1.base, iv0.base);
2327 iv0.base = const0_rtx;
2328 iv0.step = simplify_gen_binary (MINUS, comp_mode, iv0.step, iv1.step);
2329 iv1.step = const0_rtx;
2330 if (INTVAL (iv0.step) < 0)
2331 {
2332 iv0.step = simplify_gen_unary (NEG, comp_mode, iv0.step, mode);
2333 iv1.base = simplify_gen_unary (NEG, comp_mode, iv1.base, mode);
2334 }
2335 iv0.step = lowpart_subreg (mode, iv0.step, comp_mode);
2336
2337 /* Let d = gcd (s, size of mode).  If d does not divide c, the loop
2338 is infinite.  Otherwise, the number of iterations is
2339 (inverse(s/d) * (c/d)) mod (size of mode / d). */
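 /* For instance, in an 8-bit mode with s = 6 and c = 4 we have
 d = gcd (6, 256) = 2, s/d = 3, c/d = 2, and the inverse of 3
 modulo 128 is 43, so the number of iterations is
 (43 * 2) mod 128 = 86; indeed, 6 * 86 = 516 == 4 (mod 256).  */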
2340 s = INTVAL (iv0.step); d = 1;
2341 while (s % 2 != 1)
2342 {
2343 s /= 2;
2344 d *= 2;
2345 size--;
2346 }
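 /* bound = 2^size - 1, built with two shifts so that we never shift by
 the full width of HOST_WIDEST_INT (which would be undefined when
 size equals that width).  */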
2347 bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1) << 1) - 1);
2348
2349 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2350 tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
2351 assumption = simplify_gen_relational (NE, SImode, mode, tmp, const0_rtx);
2352 desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
2353
2354 tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
2355 inv = inverse (s, size);
2356 inv = trunc_int_for_mode (inv, mode);
2357 tmp = simplify_gen_binary (MULT, mode, tmp, GEN_INT (inv));
2358 desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
2359 }
2360 else
2361 {
2362 if (iv1.step == const0_rtx)
2363 /* Condition of the form a + s * i <= b.
2364 We must know that b + s does not overflow and that a <= b + s; then we
2365 can compute the number of iterations as (b + s - a) / s.  (It might
2366 seem that we could in fact be more clever about testing the b + s
2367 overflow condition using some information about b - a mod s,
2368 but that was already taken into account during the LE -> NE transform.) */
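 /* For instance, with a = 0, b = 10 and s = 3 this gives
 (10 + 3 - 0) / 3 = 4 iterations, during which a + s * i takes
 the values 0, 3, 6 and 9.  */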
2369 {
2370 step = iv0.step;
2371 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2372 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2373
2374 bound = simplify_gen_binary (MINUS, mode, mode_mmax,
2375 lowpart_subreg (mode, step, comp_mode));
2376 assumption = simplify_gen_relational (cond, SImode, mode,
2377 tmp1, bound);
2378 desc->assumptions =
2379 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2380
2381 tmp = simplify_gen_binary (PLUS, comp_mode, iv1.base, iv0.step);
2382 tmp = lowpart_subreg (mode, tmp, comp_mode);
2383 assumption = simplify_gen_relational (reverse_condition (cond),
2384 SImode, mode, tmp0, tmp);
2385
2386 delta = simplify_gen_binary (PLUS, mode, tmp1, step);
2387 delta = simplify_gen_binary (MINUS, mode, delta, tmp0);
2388 }
2389 else
2390 {
2391 /* Condition of the form a <= b - s * i.
2392 We must know that a - s does not overflow and that a - s <= b; then
2393 we can again compute the number of iterations as (b - (a - s)) / s. */
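 /* For instance, with a = 2, b = 10 and s = 3:
 (10 - (2 - 3)) / 3 = 3 iterations, during which b - s * i takes
 the values 10, 7 and 4.  */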
2394 step = simplify_gen_unary (NEG, mode, iv1.step, mode);
2395 tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
2396 tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
2397
2398 bound = simplify_gen_binary (MINUS, mode, mode_mmin,
2399 lowpart_subreg (mode, step, comp_mode));
2400 assumption = simplify_gen_relational (cond, SImode, mode,
2401 bound, tmp0);
2402 desc->assumptions =
2403 alloc_EXPR_LIST (0, assumption, desc->assumptions);
2404
2405 tmp = simplify_gen_binary (PLUS, comp_mode, iv0.base, iv1.step);
2406 tmp = lowpart_subreg (mode, tmp, comp_mode);
2407 assumption = simplify_gen_relational (reverse_condition (cond),
2408 SImode, mode,
2409 tmp, tmp1);
2410 delta = simplify_gen_binary (MINUS, mode, tmp0, step);
2411 delta = simplify_gen_binary (MINUS, mode, tmp1, delta);
2412 }
2413 if (assumption == const_true_rtx)
2414 goto zero_iter;
2415 else if (assumption != const0_rtx)
2416 desc->noloop_assumptions =
2417 alloc_EXPR_LIST (0, assumption, desc->noloop_assumptions);
2418 delta = simplify_gen_binary (UDIV, mode, delta, step);
2419 desc->niter_expr = delta;
2420 }
2421
2422 old_niter = desc->niter_expr;
2423
2424 simplify_using_initial_values (loop, AND, &desc->assumptions);
2425 if (desc->assumptions
2426 && XEXP (desc->assumptions, 0) == const0_rtx)
2427 goto fail;
2428 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2429 simplify_using_initial_values (loop, IOR, &desc->infinite);
2430 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2431
2432 /* Rerun the simplification. Consider code (created by copying loop headers)
2433
2434 i = 0;
2435
2436 if (0 < n)
2437 {
2438 do
2439 {
2440 i++;
2441 } while (i < n);
2442 }
2443
2444 The first pass determines that i = 0; the second pass uses that to eliminate
2445 the noloop assumption. */
2446
2447 simplify_using_initial_values (loop, AND, &desc->assumptions);
2448 if (desc->assumptions
2449 && XEXP (desc->assumptions, 0) == const0_rtx)
2450 goto fail;
2451 simplify_using_initial_values (loop, IOR, &desc->noloop_assumptions);
2452 simplify_using_initial_values (loop, IOR, &desc->infinite);
2453 simplify_using_initial_values (loop, UNKNOWN, &desc->niter_expr);
2454
2455 if (desc->noloop_assumptions
2456 && XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
2457 goto zero_iter;
2458
2459 if (GET_CODE (desc->niter_expr) == CONST_INT)
2460 {
2461 unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
2462
2463 desc->const_iter = true;
2464 desc->niter_max = desc->niter = val & GET_MODE_MASK (desc->mode);
2465 }
2466 else
2467 {
2468 if (!desc->niter_max)
2469 desc->niter_max = determine_max_iter (desc);
2470
2471 /* simplify_using_initial_values performs copy propagation on the registers
2472 in the expression for the number of iterations.  This prolongs the live
2473 ranges of those registers and increases register pressure, and usually
2474 brings no gain (and if it happens to, the cse pass will take care
2475 of it anyway).  So prevent this behavior, unless it enabled us to
2476 derive that the number of iterations is a constant. */
2477 desc->niter_expr = old_niter;
2478 }
2479
2480 return;
2481
2482 fail:
2483 desc->simple_p = false;
2484 return;
2485
2486 zero_iter:
2487 desc->const_iter = true;
2488 desc->niter = 0;
2489 desc->niter_max = 0;
2490 desc->niter_expr = const0_rtx;
2491 return;
2492 }
2493
2494 /* Checks whether E is a simple exit from LOOP and stores its description
2495 into DESC. */
2496
2497 static void
2498 check_simple_exit (struct loop *loop, edge e, struct niter_desc *desc)
2499 {
2500 basic_block exit_bb;
2501 rtx condition, at;
2502 edge ein;
2503
2504 exit_bb = e->src;
2505 desc->simple_p = false;
2506
2507 /* It must belong directly to the loop. */
2508 if (exit_bb->loop_father != loop)
2509 return;
2510
2511 /* It must be tested (at least) once during any iteration. */
2512 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit_bb))
2513 return;
2514
2515 /* It must end in a simple conditional jump. */
2516 if (!any_condjump_p (BB_END (exit_bb)))
2517 return;
2518
2519 ein = EDGE_SUCC (exit_bb, 0);
2520 if (ein == e)
2521 ein = EDGE_SUCC (exit_bb, 1);
2522
2523 desc->out_edge = e;
2524 desc->in_edge = ein;
2525
2526 /* Test whether the condition is suitable. */
2527 if (!(condition = get_condition (BB_END (ein->src), &at, false, false)))
2528 return;
2529
2530 if (ein->flags & EDGE_FALLTHRU)
2531 {
2532 condition = reversed_condition (condition);
2533 if (!condition)
2534 return;
2535 }
2536
2537 /* Check that we are able to determine the number of iterations and fill
2538 in the information about it. */
2539 iv_number_of_iterations (loop, at, condition, desc);
2540 }
2541
2542 /* Finds a simple exit of LOOP and stores its description into DESC. */
2543
2544 void
2545 find_simple_exit (struct loop *loop, struct niter_desc *desc)
2546 {
2547 unsigned i;
2548 basic_block *body;
2549 edge e;
2550 struct niter_desc act;
2551 bool any = false;
2552 edge_iterator ei;
2553
2554 desc->simple_p = false;
2555 body = get_loop_body (loop);
2556
2557 for (i = 0; i < loop->num_nodes; i++)
2558 {
2559 FOR_EACH_EDGE (e, ei, body[i]->succs)
2560 {
2561 if (flow_bb_inside_loop_p (loop, e->dest))
2562 continue;
2563
2564 check_simple_exit (loop, e, &act);
2565 if (!act.simple_p)
2566 continue;
2567
2568 /* Prefer constant iteration counts; the fewer the better. */
2569 if (!any)
2570 any = true;
2571 else if (!act.const_iter
2572 || (desc->const_iter && act.niter >= desc->niter))
2573 continue;
2574 *desc = act;
2575 }
2576 }
2577
2578 if (dump_file)
2579 {
2580 if (desc->simple_p)
2581 {
2582 fprintf (dump_file, "Loop %d is simple:\n", loop->num);
2583 fprintf (dump_file, " simple exit %d -> %d\n",
2584 desc->out_edge->src->index,
2585 desc->out_edge->dest->index);
2586 if (desc->assumptions)
2587 {
2588 fprintf (dump_file, " assumptions: ");
2589 print_rtl (dump_file, desc->assumptions);
2590 fprintf (dump_file, "\n");
2591 }
2592 if (desc->noloop_assumptions)
2593 {
2594 fprintf (dump_file, " does not roll if: ");
2595 print_rtl (dump_file, desc->noloop_assumptions);
2596 fprintf (dump_file, "\n");
2597 }
2598 if (desc->infinite)
2599 {
2600 fprintf (dump_file, " infinite if: ");
2601 print_rtl (dump_file, desc->infinite);
2602 fprintf (dump_file, "\n");
2603 }
2604
2605 fprintf (dump_file, " number of iterations: ");
2606 print_rtl (dump_file, desc->niter_expr);
2607 fprintf (dump_file, "\n");
2608
2609 fprintf (dump_file, " upper bound: ");
2610 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, desc->niter_max);
2611 fprintf (dump_file, "\n");
2612 }
2613 else
2614 fprintf (dump_file, "Loop %d is not simple.\n", loop->num);
2615 }
2616
2617 free (body);
2618 }
2619
2620 /* Creates a simple loop description of LOOP if it was not computed
2621 already. */
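 /* A typical caller might do something like
 
 struct niter_desc *desc = get_simple_loop_desc (loop);
 if (desc->simple_p && desc->const_iter)
 ... use desc->niter ...
 
 The description is cached in loop->aux and released by
 free_simple_loop_desc.  */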
2622
2623 struct niter_desc *
2624 get_simple_loop_desc (struct loop *loop)
2625 {
2626 struct niter_desc *desc = simple_loop_desc (loop);
2627
2628 if (desc)
2629 return desc;
2630
2631 desc = xmalloc (sizeof (struct niter_desc));
2632 iv_analysis_loop_init (loop);
2633 find_simple_exit (loop, desc);
2634 loop->aux = desc;
2635
2636 return desc;
2637 }
2638
2639 /* Releases the simple loop description for LOOP. */
2640
2641 void
2642 free_simple_loop_desc (struct loop *loop)
2643 {
2644 struct niter_desc *desc = simple_loop_desc (loop);
2645
2646 if (!desc)
2647 return;
2648
2649 free (desc);
2650 loop->aux = NULL;
2651 }