/* Operations with affine combinations of trees.
   Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "gimple.h"
#include "flags.h"

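/* An affine combination (aff_tree, declared in tree-affine.h) represents
   the value

     OFFSET + COEF_1 * VAL_1 + ... + COEF_N * VAL_N + REST

   in TYPE, where at most MAX_AFF_ELTS of the COEF_i * VAL_i terms are
   tracked explicitly in ELTS, and any part that does not fit is
   accumulated in the REST tree with an implicit coefficient of 1.  */
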
/* Extends CST as appropriate for the affine combination COMB.  */

double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb)
{
  return double_int_sext (cst, TYPE_PRECISION (comb->type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->offset = double_int_zero;
  comb->n = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, double_int cst)
{
  aff_combination_zero (comb, type);
  comb->offset = double_int_ext_for_comb (cst, comb);
}

/* Sets COMB to the single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = double_int_one;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
        = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
                                   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  double_int_to_tree (type, scale));
    }
}

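/* Example: scaling the combination 1 + 2*x by 3 yields 3 + 6*x, while
   scaling by 0 resets the combination to zero in its type.  */
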
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        double_int new_coef;

        new_coef = double_int_add (comb->elts[i].coef, scale);
        new_coef = double_int_ext_for_comb (new_coef, comb);
        if (!double_int_zero_p (new_coef))
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = double_int_one;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}

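/* Example: adding x * 3 to the combination 1 + 2*x merges the two x terms
   and yields 1 + 5*x; adding x * -2 instead cancels the x term entirely,
   leaving just the offset 1.  */
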
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, double_int cst)
{
  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type,
                             uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, double_int_one);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

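/* Example: the expression (x + 3) * 2 - y decomposes into the combination
   6 + 2*x + (-1)*y; an expression the switch above cannot decompose simply
   becomes a single element with coefficient 1.  */
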
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
        return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
        {
          elt = fold_build1 (NEGATE_EXPR, type1, elt);
          return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
                         fold_build2 (MULT_EXPR, type1, elt,
                                      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
                          comb);
}

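/* Example: rebuilding the combination 6 + 2*x + (-1)*y produces a tree
   equivalent to x * 2 - y + 6; a negative offset is emitted as an explicit
   subtraction rather than as an addition of a huge unsigned constant.  */
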
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval,
                               double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
                             double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

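/* Example: multiplying 1 + x by 2 + y distributes each term of C2 over C1
   and yields 2 + 2*x + y + (x * y); the nonlinear product x * y is kept as
   an ordinary element with coefficient 1.  */
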
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb,
                        struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

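/* Example: if COMB contains the term 3*a1 and the SSA name a1 is defined
   by a1 = a0 + a0, the expansion replaces 3*a1 with 6*a0; the cached
   expansion of a1 is reused if a1 occurs again.  */
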
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0) and *MULT_SET is true,
   additionally compares CST and *MULT, and returns false if they
   differ.  Finally, if neither of these two cases occurs, true is
   returned, and if CST != 0, CST is stored to *MULT and *MULT_SET is
   set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
                                bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  if (double_int_zero_p (val))
    return true;

  if (double_int_zero_p (div))
    return false;

  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
  if (!double_int_zero_p (rem))
    return false;

  if (*mult_set && !double_int_equal_p (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

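/* Example: with VAL = 6 and DIV = 3 the quotient 2 is exact, so *MULT is
   set to 2; a subsequent call with VAL = 4 and DIV = 2 then succeeds as
   well, since it computes the same multiple.  */
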
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
                                       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                           &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

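/* Example: VAL = 6 + 4*x is exactly twice DIV = 3 + 2*x, so the function
   returns true with *MULT = 2; VAL = 6 + 5*x is not a constant multiple of
   DIV, since the offsets suggest the factor 2 but the x coefficients do
   not match it.  */
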
/* Prints the affine combination VAL to FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%u] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          dump_double_int (file, val->elts[i].coef, uns);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine combination VAL to the standard error, used for
   debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes the address of the reference REF and stores it to ADDR.  The
   size of the accessed location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
                         shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}

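/* Example: for a reference a.f where the field f sits at byte offset 4,
   ADDR becomes &a + 4 and SIZE is the size of f in bytes, rounded up to
   whole units.  */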