/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
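
/* For instance (an illustrative note, not part of the original file):
   if COMB->type has precision 8, the constant 255 sign-extends to -1
   and 128 to -128, so offsets and coefficients are effectively kept as
   signed values modulo 2^8.  */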

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
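
/* Usage sketch: an illustrative fragment, not part of the original
   file.  X stands for some caller-provided tree of integer type TYPE.
   It builds 2*X + 3 and scales it by 4, giving 8*X + 12.  */
#if 0
  aff_tree c, cst;
  aff_combination_elt (&c, type, x);      /* c = X */
  aff_combination_scale (&c, 2);          /* c = 2*X */
  aff_combination_const (&cst, type, 3);  /* cst = 3 */
  aff_combination_add (&c, &cst);         /* c = 2*X + 3 */
  aff_combination_scale (&c, 4);          /* c = 8*X + 12 */
#endif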

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
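
/* Behavior sketch (illustrative fragment, not part of the original
   file; X is a hypothetical tree): adding an element that is already
   present merges the coefficients, and a zero sum removes it.  */
#if 0
  aff_tree c;
  aff_combination_elt (&c, type, x);     /* c = X, c.n == 1 */
  aff_combination_add_elt (&c, x, 2);    /* c = 3*X, c.n == 1 */
  aff_combination_add_elt (&c, x, -3);   /* c = 0, c.n == 0 */
#endif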

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                     (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
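
/* Decomposition sketch (illustrative fragment, not part of the
   original file): for EXPR = (X + 3) * 4 - Y of integer type TYPE, the
   result has offset 12 and the two elements {4, X} and {-1, Y}.  */
#if 0
  aff_tree c;
  tree_to_aff_combination (expr, type, &c);
  /* Now c.offset == 12 and c.n == 2.  */
#endif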

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
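
/* Round-trip sketch (illustrative, not part of the original file):
   rebuilding the combination from the previous example yields a tree
   equivalent to X * 4 - Y + 12.  A negative offset is emitted as a
   subtraction, so x - 1 is produced rather than x + 0xff..f.  */
#if 0
  tree rebuilt = aff_combination_to_tree (&c);
#endif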

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
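
/* Worked sketch (illustrative, not part of the original file): for
   C1 = 2*X + 3 and C2 = Y + 1, the product accumulates to
   2*(X*Y) + 2*X + 3*Y + 3, where X*Y is a newly built MULT_EXPR.  */
#if 0
  aff_tree r;
  aff_combination_mult (&c1, &c2, &r);
#endif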

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
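
/* Expansion sketch (illustrative, not part of the original file): if
   COMB contains the element {1, A1} and the IL defines A1 = A0 + A0,
   the element is replaced by {2, A0}; the cache records the expansion
   of A1 so that it is computed only once.  */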

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
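
/* Usage sketch (a hypothetical caller, for illustration only): the
   cache is created lazily on first use and must be released with
   free_affine_expand_cache once no more expansions are needed.  */
#if 0
static void
example_expand (tree expr, tree type)
{
  hash_map<tree, name_expansion *> *cache = NULL;
  aff_tree c;

  tree_to_aff_combination_expand (expr, type, &c, &cache);
  /* ... use C, possibly expanding further trees with the same cache ...  */
  free_affine_expand_cache (&cache);
}
#endif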

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

/* If VAL is not equal to CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and *MULT,
   and if they differ, returns false.  Finally, if neither of these two
   cases occurs, returns true, stores CST to *MULT and sets *MULT_SET to
   true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
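
/* Worked sketch (illustrative fragment, not part of the original
   file): for VAL = 8*X + 4 and DIV = 2*X + 1, every coefficient of VAL
   is 4 times the corresponding coefficient of DIV, so the function
   returns true and stores 4 in *MULT.  */
#if 0
  widest_int mult;
  bool ok = aff_combination_constant_multiple_p (&val, &div, &mult);
#endif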

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%u] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
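
/* Worked sketch (illustrative, not part of the original file): DIFF is
   the constant difference of the two start addresses.  With
   DIFF->offset == 4, SIZE1 == 4 and SIZE2 == 2, the second region
   starts exactly where the first one ends, so they cannot overlap and
   the function returns true.  */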