/* tree-affine.h (print_aff): New prototype.
   [gcc.git] / gcc / tree-affine.c  */
1 /* Operations with affine combinations of trees.
2 Copyright (C) 2005, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "hard-reg-set.h"
28 #include "output.h"
29 #include "diagnostic.h"
30 #include "tree-dump.h"
31 #include "pointer-set.h"
32 #include "tree-affine.h"
33 #include "tree-gimple.h"
34
35 /* Extends CST as appropriate for the affine combinations COMB. */
36
37 double_int
38 double_int_ext_for_comb (double_int cst, aff_tree *comb)
39 {
40 return double_int_sext (cst, TYPE_PRECISION (comb->type));
41 }
42
43 /* Initializes affine combination COMB so that its value is zero in TYPE. */
44
45 static void
46 aff_combination_zero (aff_tree *comb, tree type)
47 {
48 comb->type = type;
49 comb->offset = double_int_zero;
50 comb->n = 0;
51 comb->rest = NULL_TREE;
52 }
53
54 /* Sets COMB to CST. */
55
56 void
57 aff_combination_const (aff_tree *comb, tree type, double_int cst)
58 {
59 aff_combination_zero (comb, type);
60 comb->offset = double_int_ext_for_comb (cst, comb);
61 }
62
63 /* Sets COMB to single element ELT. */
64
65 void
66 aff_combination_elt (aff_tree *comb, tree type, tree elt)
67 {
68 aff_combination_zero (comb, type);
69
70 comb->n = 1;
71 comb->elts[0].val = elt;
72 comb->elts[0].coef = double_int_one;
73 }
74
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  /* Scaling by one leaves COMB unchanged.  */
  if (double_int_one_p (scale))
    return;

  /* Scaling by zero yields the zero combination.  */
  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  /* Scale each coefficient in place, compacting the array with J as the
     write index.  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      /* Arithmetic on the rest expression of a pointer combination is
	 done in sizetype.  */
      if (POINTER_TYPE_P (type))
	type = sizetype;
      /* If removing zeroed elements freed a slot, move the rest into the
	 element array with SCALE as its coefficient; otherwise fold the
	 multiplication into the rest expression itself.  */
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
128
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  /* Adding a zero multiple is a no-op.  */
  if (double_int_zero_p (scale))
    return;

  /* If ELT already occurs in COMB, just adjust its coefficient.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients canceled; remove the element by overwriting
	   it with the last one.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* The freed slot lets the rest expression move back into the
	   element array (a non-NULL rest implies the array was full).  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT is new; append it if there is room.  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* No room; fold ELT * SCALE into the rest expression.  Arithmetic on
     the rest of a pointer combination is done in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
192
193 /* Adds CST to C. */
194
195 static void
196 aff_combination_add_cst (aff_tree *c, double_int cst)
197 {
198 c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
199 }
200
201 /* Adds COMB2 to COMB1. */
202
203 void
204 aff_combination_add (aff_tree *comb1, aff_tree *comb2)
205 {
206 unsigned i;
207
208 aff_combination_add_cst (comb1, comb2->offset);
209 for (i = 0; i < comb2->n; i++)
210 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
211 if (comb2->rest)
212 aff_combination_add_elt (comb1, comb2->rest, double_int_one);
213 }
214
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done per-term (the extension does not distribute
     over the truncated sum), so rebuild the combination from the
     converted tree form.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: nothing further to adjust.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and coefficients to the new
     precision, dropping elements whose coefficient becomes zero.  */
  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* If dropping elements freed a slot, move the rest expression into
     the element array.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
257
/* Splits EXPR into an affine combination of parts, stored in COMB with
   type TYPE.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The second operand of POINTER_PLUS_EXPR is in sizetype; convert
	 its combination to TYPE before adding.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_convert (&tmp, type);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Subtraction is handled as addition of the negated second
	 operand.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by an integer constant keeps the result
	 affine.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Decompose the address into a constant byte offset, a base
	 address, and a variable offset, and sum their combinations.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* Give up on references that are not byte-aligned.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  /* Folding turned the address into something else (e.g. the
	     base was already a pointer); decompose it recursively.  */
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  /* EXPR is not decomposable; record it as a single opaque element.  */
  aff_combination_elt (comb, type, expr);
}
344
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB (and may be NULL, meaning no terms so far).  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type, elt);

  /* Scale one: EXPR + ELT, or just ELT when EXPR is empty.  */
  if (double_int_one_p (scale))
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  /* Scale minus one: EXPR - ELT, or -ELT when EXPR is empty.  */
  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt,
			double_int_to_tree (type, scale));

  /* For a negative scale emit EXPR - ELT * |SCALE| rather than
     EXPR + ELT * SCALE, so the result reads naturally.  */
  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt,
		     double_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
389
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;

  /* A non-NULL rest is only valid when the element array is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  /* Accumulate all elements on top of the rest expression.  */
  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type, off), sgn,
			  comb);
}
421
422 /* Copies the tree elements of COMB to ensure that they are not shared. */
423
424 void
425 unshare_aff_combination (aff_tree *comb)
426 {
427 unsigned i;
428
429 for (i = 0; i < comb->n; i++)
430 comb->elts[i].val = unshare_expr (comb->elts[i].val);
431 if (comb->rest)
432 comb->rest = unshare_expr (comb->rest);
433 }
434
435 /* Remove M-th element from COMB. */
436
437 void
438 aff_combination_remove_elt (aff_tree *comb, unsigned m)
439 {
440 comb->n--;
441 if (m <= comb->n)
442 comb->elts[m] = comb->elts[comb->n];
443 if (comb->rest)
444 {
445 comb->elts[comb->n].coef = double_int_one;
446 comb->elts[comb->n].val = comb->rest;
447 comb->rest = NULL_TREE;
448 comb->n++;
449 }
450 }
451
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */


static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Each element of C contributes (elt.val * VAL) with coefficient
     COEF * elt.coef.  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  /* The rest expression carries an implicit coefficient of one.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  /* C's offset contributes COEF * offset * VAL, which is an element of
     R when VAL is given and a constant otherwise.  */
  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
496
497 /* Multiplies C1 by C2, storing the result to R */
498
499 void
500 aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
501 {
502 unsigned i;
503 gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));
504
505 aff_combination_zero (r, c1->type);
506
507 for (i = 0; i < c2->n; i++)
508 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
509 if (c2->rest)
510 aff_combination_add_product (c1, double_int_one, c2->rest, r);
511 aff_combination_add_product (c1, c2->offset, NULL, r);
512 }
513
514 /* Returns the element of COMB whose value is VAL, or NULL if no such
515 element exists. If IDX is not NULL, it is set to the index of VAL in
516 COMB. */
517
518 static struct aff_comb_elt *
519 aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
520 {
521 unsigned i;
522
523 for (i = 0; i < comb->n; i++)
524 if (operand_equal_p (comb->elts[i].val, val, 0))
525 {
526 if (idx)
527 *idx = i;
528
529 return &comb->elts[i];
530 }
531
532 return NULL;
533 }
534
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  /* The cached affine expansion of the name.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
545
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, def, rhs;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  tree_to_aff_combination (expr, type, comb);
  aff_combination_zero (&to_add, type);
  for (i = 0; i < comb->n; i++)
    {
      e = comb->elts[i].val;
      if (TREE_CODE (e) != SSA_NAME)
	continue;
      /* Only expand names defined by a plain assignment whose LHS is E.  */
      def = SSA_NAME_DEF_STMT (e);
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != e)
	continue;

      rhs = GIMPLE_STMT_OPERAND (def, 1);
      if (TREE_CODE (rhs) != SSA_NAME
	  && !EXPR_P (rhs)
	  && !is_gimple_min_invariant (rhs))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (REFERENCE_CLASS_P (rhs))
	continue;

      /* Look the expansion of E up in the cache, computing and caching
	 it on a miss.  */
      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = *slot;

      if (!exp)
	{
	  /* Mark the entry in-progress before recursing so a cycle
	     would be detected by the assertion below.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  tree_to_aff_combination_expand (rhs, type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
624
625 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for
626 pointer_map_traverse. */
627
628 static bool
629 free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
630 void *data ATTRIBUTE_UNUSED)
631 {
632 struct name_expansion *exp = *value;
633
634 free (exp);
635 return true;
636 }
637
638 /* Frees memory allocated for the CACHE used by
639 tree_to_aff_combination_expand. */
640
641 void
642 free_affine_expand_cache (struct pointer_map_t **cache)
643 {
644 if (!*cache)
645 return;
646
647 pointer_map_traverse (*cache, free_name_expansion, NULL);
648 pointer_map_destroy (*cache);
649 *cache = NULL;
650 }
651
652 /* If VAL != CST * DIV for any constant CST, returns false.
653 Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
654 additionally compares CST and MULT, and if they are different,
655 returns false. Finally, if neither of these two cases occur,
656 true is returned, and if CST != 0, CST is stored to MULT and
657 MULT_SET is set to true. */
658
659 static bool
660 double_int_constant_multiple_p (double_int val, double_int div,
661 bool *mult_set, double_int *mult)
662 {
663 double_int rem, cst;
664
665 if (double_int_zero_p (val))
666 return true;
667
668 if (double_int_zero_p (div))
669 return false;
670
671 cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
672 if (!double_int_zero_p (rem))
673 return false;
674
675 if (*mult_set && !double_int_equal_p (*mult, cst))
676 return false;
677
678 *mult_set = true;
679 *mult = cst;
680 return true;
681 }
682
683 /* Returns true if VAL = X * DIV for some constant X. If this is the case,
684 X is stored to MULT. */
685
686 bool
687 aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
688 double_int *mult)
689 {
690 bool mult_set = false;
691 unsigned i;
692
693 if (val->n == 0 && double_int_zero_p (val->offset))
694 {
695 *mult = double_int_zero;
696 return true;
697 }
698 if (val->n != div->n)
699 return false;
700
701 if (val->rest || div->rest)
702 return false;
703
704 if (!double_int_constant_multiple_p (val->offset, div->offset,
705 &mult_set, mult))
706 return false;
707
708 for (i = 0; i < div->n; i++)
709 {
710 struct aff_comb_elt *elt
711 = aff_combination_find_elt (val, div->elts[i].val, NULL);
712 if (!elt)
713 return false;
714 if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
715 &mult_set, mult))
716 return false;
717 }
718
719 gcc_assert (mult_set);
720 return true;
721 }
722
723 /* Prints the affine VAL to the FILE. */
724
725 void
726 print_aff (FILE *file, aff_tree *val)
727 {
728 unsigned i;
729 bool uns = TYPE_UNSIGNED (val->type);
730 if (POINTER_TYPE_P (val->type))
731 uns = false;
732 fprintf (file, "{\n type = ");
733 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
734 fprintf (file, "\n offset = ");
735 dump_double_int (file, val->offset, uns);
736 if (val->n > 0)
737 {
738 fprintf (file, "\n elements = {\n");
739 for (i = 0; i < val->n; i++)
740 {
741 fprintf (file, " [%d] = ", i);
742 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
743
744 fprintf (file, " * ");
745 dump_double_int (file, val->elts[i].coef, uns);
746 if (i != val->n - 1)
747 fprintf (file, ", \n");
748 }
749 fprintf (file, "\n }");
750 }
751 if (val->rest)
752 {
753 fprintf (file, "\n rest = ");
754 print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
755 }
756 fprintf (file, "\n}");
757 }
758
759 /* Prints the affine VAL to the standard error, used for debugging. */
760
761 void
762 debug_aff (aff_tree *val)
763 {
764 print_aff (stderr, val);
765 fprintf (stderr, "\n");
766 }