gcc/tree-ssa-address.c
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
21 that map directly to addressing modes of the target.  */
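/* As an illustration: a TARGET_MEM_REF denotes a memory reference whose
   address is computed as

       TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET

   (see tree_mem_ref_addr below).  For a hypothetical access a[i], where
   a is a global array of 4-byte elements, this could be expressed with
   base = &a, index = (sizetype) i, step = 4 and offset = 0.  */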
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "tm_p.h"
33 #include "predict.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "basic-block.h"
37 #include "tree-pretty-print.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "gimple.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "stringpool.h"
45 #include "tree-ssanames.h"
46 #include "tree-ssa-loop-ivopts.h"
47 #include "rtl.h"
48 #include "flags.h"
49 #include "insn-config.h"
50 #include "expmed.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "calls.h"
54 #include "emit-rtl.h"
55 #include "varasm.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "tree-dfa.h"
59 #include "dumpfile.h"
60 #include "tree-inline.h"
61 #include "tree-affine.h"
62
63 /* FIXME: We compute address costs using RTL. */
64 #include "recog.h"
65 #include "target.h"
66 #include "tree-ssa-address.h"
67
68 /* TODO -- handling of symbols (according to Richard Henderson's
69 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
70
71 There are at least 5 different kinds of symbols that we can run up against:
72
73 (1) binds_local_p, small data area.
74 (2) binds_local_p, eg local statics
75 (3) !binds_local_p, eg global variables
76 (4) thread local, local_exec
77 (5) thread local, !local_exec
78
79 Now, (1) won't appear often in an array context, but it certainly can.
80 All you have to do is set -GN high enough, or explicitly mark any
81 random object __attribute__((section (".sdata"))).
82
83 All of these affect whether or not a symbol is in fact a valid address.
84 The only one tested here is (3). And that result may very well
85 be incorrect for (4) or (5).
86
87 An incorrect result here does not cause incorrect results from the
88 back end, because the expander in expr.c validates the address.  However,
89 it would be nice to improve the handling here in order to produce more
90 precise results.  */
91
92 /* A "template" for a memory address, used to determine whether the address
93 is valid for a given mode.  */
94
95 typedef struct GTY (()) mem_addr_template {
96 rtx ref; /* The template. */
97 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
98 filled in. */
99 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
100 be filled in. */
101 } mem_addr_template;
102
103
104 /* The templates. Each of the low five bits of the index corresponds to one
105 component of TARGET_MEM_REF being present, while the high bits identify
106 the address space. See TEMPL_IDX. */
107
108 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
109
110 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
111 (((int) (AS) << 5) \
112 | ((SYMBOL != 0) << 4) \
113 | ((BASE != 0) << 3) \
114 | ((INDEX != 0) << 2) \
115 | ((STEP != 0) << 1) \
116 | (OFFSET != 0))
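/* For example (purely illustrative): an address of the form
   BASE + INDEX * STEP in the default address space, with no symbol and
   no constant offset, maps to the template index

       TEMPL_IDX (0, NULL, base, index, step, NULL)
         = (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (1 << 1) | 0
         = 14.  */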
117
118 /* Stores the address for a memory reference with parameters SYMBOL, BASE,
119 INDEX, STEP and OFFSET to *ADDR, using address mode ADDRESS_MODE.  Stores
120 pointers to the step in *STEP_P and to the offset in *OFFSET_P.  */
121
122 static void
123 gen_addr_rtx (machine_mode address_mode,
124 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
125 rtx *addr, rtx **step_p, rtx **offset_p)
126 {
127 rtx act_elem;
128
129 *addr = NULL_RTX;
130 if (step_p)
131 *step_p = NULL;
132 if (offset_p)
133 *offset_p = NULL;
134
135 if (index)
136 {
137 act_elem = index;
138 if (step)
139 {
140 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
141
142 if (step_p)
143 *step_p = &XEXP (act_elem, 1);
144 }
145
146 *addr = act_elem;
147 }
148
149 if (base && base != const0_rtx)
150 {
151 if (*addr)
152 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
153 else
154 *addr = base;
155 }
156
157 if (symbol)
158 {
159 act_elem = symbol;
160 if (offset)
161 {
162 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
163
164 if (offset_p)
165 *offset_p = &XEXP (act_elem, 1);
166
167 if (GET_CODE (symbol) == SYMBOL_REF
168 || GET_CODE (symbol) == LABEL_REF
169 || GET_CODE (symbol) == CONST)
170 act_elem = gen_rtx_CONST (address_mode, act_elem);
171 }
172
173 if (*addr)
174 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
175 else
176 *addr = act_elem;
177 }
178 else if (offset)
179 {
180 if (*addr)
181 {
182 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
183 if (offset_p)
184 *offset_p = &XEXP (*addr, 1);
185 }
186 else
187 {
188 *addr = offset;
189 if (offset_p)
190 *offset_p = addr;
191 }
192 }
193
194 if (!*addr)
195 *addr = const0_rtx;
196 }
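/* Illustratively, for a reference with all components present, the RTX
   built above has (modulo canonicalization by simplify_gen_binary) the
   shape

       (plus (plus base (mult index step))
             (const (plus symbol offset)))

   with *STEP_P pointing at the step operand of the MULT and *OFFSET_P at
   the offset operand inside the CONST.  */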
197
198 /* Description of a memory address. */
199
200 struct mem_address
201 {
202 tree symbol, base, index, step, offset;
203 };
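/* Informally, the parts describe an address of the form

       symbol + base + index * step + offset

   where any of the components may be absent (NULL).  */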
204
205 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
206 in address space AS.
207 If REALLY_EXPAND is false, just make fake registers instead
208 of really expanding the operands, and perform the expansion in-place
209 by using one of the "templates". */
210
211 rtx
212 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
213 bool really_expand)
214 {
215 machine_mode address_mode = targetm.addr_space.address_mode (as);
216 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
217 rtx address, sym, bse, idx, st, off;
218 struct mem_addr_template *templ;
219
220 if (addr->step && !integer_onep (addr->step))
221 st = immed_wide_int_const (addr->step, pointer_mode);
222 else
223 st = NULL_RTX;
224
225 if (addr->offset && !integer_zerop (addr->offset))
226 {
227 offset_int dc = offset_int::from (addr->offset, SIGNED);
228 off = immed_wide_int_const (dc, pointer_mode);
229 }
230 else
231 off = NULL_RTX;
232
233 if (!really_expand)
234 {
235 unsigned int templ_index
236 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
237
238 if (templ_index >= vec_safe_length (mem_addr_template_list))
239 vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
240
241 /* Reuse the templates for addresses, so that we do not waste memory. */
242 templ = &(*mem_addr_template_list)[templ_index];
243 if (!templ->ref)
244 {
245 sym = (addr->symbol ?
246 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
247 : NULL_RTX);
248 bse = (addr->base ?
249 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
250 : NULL_RTX);
251 idx = (addr->index ?
252 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
253 : NULL_RTX);
254
255 gen_addr_rtx (pointer_mode, sym, bse, idx,
256 st? const0_rtx : NULL_RTX,
257 off? const0_rtx : NULL_RTX,
258 &templ->ref,
259 &templ->step_p,
260 &templ->off_p);
261 }
262
263 if (st)
264 *templ->step_p = st;
265 if (off)
266 *templ->off_p = off;
267
268 return templ->ref;
269 }
270
271 /* Otherwise really expand the expressions. */
272 sym = (addr->symbol
273 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
274 : NULL_RTX);
275 bse = (addr->base
276 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
277 : NULL_RTX);
278 idx = (addr->index
279 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
280 : NULL_RTX);
281
282 gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
283 if (pointer_mode != address_mode)
284 address = convert_memory_address (address_mode, address);
285 return address;
286 }
287
288 /* Implements addr_for_mem_ref() directly from a tree, which avoids
289 exporting the mem_address structure.  */
290
291 rtx
292 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
293 {
294 struct mem_address addr;
295 get_address_description (exp, &addr);
296 return addr_for_mem_ref (&addr, as, really_expand);
297 }
298
299 /* Returns address of MEM_REF in TYPE. */
300
301 tree
302 tree_mem_ref_addr (tree type, tree mem_ref)
303 {
304 tree addr;
305 tree act_elem;
306 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
307 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
308
309 addr_base = fold_convert (type, TMR_BASE (mem_ref));
310
311 act_elem = TMR_INDEX (mem_ref);
312 if (act_elem)
313 {
314 if (step)
315 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
316 act_elem, step);
317 addr_off = act_elem;
318 }
319
320 act_elem = TMR_INDEX2 (mem_ref);
321 if (act_elem)
322 {
323 if (addr_off)
324 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
325 addr_off, act_elem);
326 else
327 addr_off = act_elem;
328 }
329
330 if (offset && !integer_zerop (offset))
331 {
332 if (addr_off)
333 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
334 fold_convert (TREE_TYPE (addr_off), offset));
335 else
336 addr_off = offset;
337 }
338
339 if (addr_off)
340 addr = fold_build_pointer_plus (addr_base, addr_off);
341 else
342 addr = addr_base;
343
344 return addr;
345 }
346
347 /* Returns true if a memory reference in MODE and with parameters given by
348 ADDR is valid on the current target. */
349
350 static bool
351 valid_mem_ref_p (machine_mode mode, addr_space_t as,
352 struct mem_address *addr)
353 {
354 rtx address;
355
356 address = addr_for_mem_ref (addr, as, false);
357 if (!address)
358 return false;
359
360 return memory_address_addr_space_p (mode, address, as);
361 }
362
363 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
364 is valid on the current target and if so, creates and returns the
365 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
366
367 static tree
368 create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
369 bool verify)
370 {
371 tree base, index2;
372
373 if (verify
374 && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
375 return NULL_TREE;
376
377 if (addr->step && integer_onep (addr->step))
378 addr->step = NULL_TREE;
379
380 if (addr->offset)
381 addr->offset = fold_convert (alias_ptr_type, addr->offset);
382 else
383 addr->offset = build_int_cst (alias_ptr_type, 0);
384
385 if (addr->symbol)
386 {
387 base = addr->symbol;
388 index2 = addr->base;
389 }
390 else if (addr->base
391 && POINTER_TYPE_P (TREE_TYPE (addr->base)))
392 {
393 base = addr->base;
394 index2 = NULL_TREE;
395 }
396 else
397 {
398 base = build_int_cst (ptr_type_node, 0);
399 index2 = addr->base;
400 }
401
402 /* If possible, use a plain MEM_REF instead of a TARGET_MEM_REF.
403 ??? Since IVOPTs does not respect the restrictions on where the
404 base pointer may point, create a MEM_REF only if we know that the
405 base is valid.  */
406 if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
407 && (!index2 || integer_zerop (index2))
408 && (!addr->index || integer_zerop (addr->index)))
409 return fold_build2 (MEM_REF, type, base, addr->offset);
410
411 return build5 (TARGET_MEM_REF, type,
412 base, addr->offset, addr->index, addr->step, index2);
413 }
414
415 /* Returns true if OBJ is an object whose address is a link time constant. */
416
417 static bool
418 fixed_address_object_p (tree obj)
419 {
420 return (TREE_CODE (obj) == VAR_DECL
421 && (TREE_STATIC (obj)
422 || DECL_EXTERNAL (obj))
423 && ! DECL_DLLIMPORT_P (obj));
424 }
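/* For example, a file-scope or function-scope "static int x;" qualifies
   (TREE_STATIC), as does an extern declaration (DECL_EXTERNAL), while an
   ordinary automatic variable does not, and neither does a dllimport'ed
   variable, whose address is only resolved via the import table at load
   time.  */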
425
426 /* If ADDR contains the address of an object that is a link-time constant,
427 move it to PARTS->symbol.  */
428
429 static void
430 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
431 {
432 unsigned i;
433 tree val = NULL_TREE;
434
435 for (i = 0; i < addr->n; i++)
436 {
437 if (addr->elts[i].coef != 1)
438 continue;
439
440 val = addr->elts[i].val;
441 if (TREE_CODE (val) == ADDR_EXPR
442 && fixed_address_object_p (TREE_OPERAND (val, 0)))
443 break;
444 }
445
446 if (i == addr->n)
447 return;
448
449 parts->symbol = val;
450 aff_combination_remove_elt (addr, i);
451 }
452
453 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
454
455 static void
456 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
457 aff_tree *addr)
458 {
459 unsigned i;
460 tree val = NULL_TREE;
461 int qual;
462
463 for (i = 0; i < addr->n; i++)
464 {
465 if (addr->elts[i].coef != 1)
466 continue;
467
468 val = addr->elts[i].val;
469 if (operand_equal_p (val, base_hint, 0))
470 break;
471 }
472
473 if (i == addr->n)
474 return;
475
476 /* Cast the value to an appropriate pointer type.  We cannot use a
477 pointer to TYPE directly, as the back end will assume registers of
478 pointer type are aligned, whereas the base itself may not actually be.
479 We use a void pointer in the type's address space instead.  */
480 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
481 type = build_qualified_type (void_type_node, qual);
482 parts->base = fold_convert (build_pointer_type (type), val);
483 aff_combination_remove_elt (addr, i);
484 }
485
486 /* If ADDR contains an address of a dereferenced pointer, move it to
487 PARTS->base. */
488
489 static void
490 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
491 {
492 unsigned i;
493 tree val = NULL_TREE;
494
495 for (i = 0; i < addr->n; i++)
496 {
497 if (addr->elts[i].coef != 1)
498 continue;
499
500 val = addr->elts[i].val;
501 if (POINTER_TYPE_P (TREE_TYPE (val)))
502 break;
503 }
504
505 if (i == addr->n)
506 return;
507
508 parts->base = val;
509 aff_combination_remove_elt (addr, i);
510 }
511
512 /* Moves the loop variant part V in linear address ADDR to be the index
513 of PARTS. */
514
515 static void
516 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
517 {
518 unsigned i;
519 tree val = NULL_TREE;
520
521 gcc_assert (!parts->index);
522 for (i = 0; i < addr->n; i++)
523 {
524 val = addr->elts[i].val;
525 if (operand_equal_p (val, v, 0))
526 break;
527 }
528
529 if (i == addr->n)
530 return;
531
532 parts->index = fold_convert (sizetype, val);
533 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
534 aff_combination_remove_elt (addr, i);
535 }
536
537 /* Adds ELT to PARTS. */
538
539 static void
540 add_to_parts (struct mem_address *parts, tree elt)
541 {
542 tree type;
543
544 if (!parts->index)
545 {
546 parts->index = fold_convert (sizetype, elt);
547 return;
548 }
549
550 if (!parts->base)
551 {
552 parts->base = elt;
553 return;
554 }
555
556 /* Add ELT to base. */
557 type = TREE_TYPE (parts->base);
558 if (POINTER_TYPE_P (type))
559 parts->base = fold_build_pointer_plus (parts->base, elt);
560 else
561 parts->base = fold_build2 (PLUS_EXPR, type,
562 parts->base, elt);
563 }
564
565 /* Finds the most expensive multiplication in ADDR that can be
566 expressed in an addressing mode and moves the corresponding
567 element(s) to PARTS.  */
568
569 static void
570 most_expensive_mult_to_index (tree type, struct mem_address *parts,
571 aff_tree *addr, bool speed)
572 {
573 addr_space_t as = TYPE_ADDR_SPACE (type);
574 machine_mode address_mode = targetm.addr_space.address_mode (as);
575 HOST_WIDE_INT coef;
576 unsigned best_mult_cost = 0, acost;
577 tree mult_elt = NULL_TREE, elt;
578 unsigned i, j;
579 enum tree_code op_code;
580
581 offset_int best_mult = 0;
582 for (i = 0; i < addr->n; i++)
583 {
584 if (!wi::fits_shwi_p (addr->elts[i].coef))
585 continue;
586
587 coef = addr->elts[i].coef.to_shwi ();
588 if (coef == 1
589 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
590 continue;
591
592 acost = mult_by_coeff_cost (coef, address_mode, speed);
593
594 if (acost > best_mult_cost)
595 {
596 best_mult_cost = acost;
597 best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
598 }
599 }
600
601 if (!best_mult_cost)
602 return;
603
604 /* Collect elements multiplied by best_mult. */
605 for (i = j = 0; i < addr->n; i++)
606 {
607 offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
608 offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
609
610 if (amult == best_mult)
611 op_code = PLUS_EXPR;
612 else if (amult_neg == best_mult)
613 op_code = MINUS_EXPR;
614 else
615 {
616 addr->elts[j] = addr->elts[i];
617 j++;
618 continue;
619 }
620
621 elt = fold_convert (sizetype, addr->elts[i].val);
622 if (mult_elt)
623 mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
624 else if (op_code == PLUS_EXPR)
625 mult_elt = elt;
626 else
627 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
628 }
629 addr->n = j;
630
631 parts->index = mult_elt;
632 parts->step = wide_int_to_tree (sizetype, best_mult);
633 }
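/* A hypothetical example: for ADDR = 4*i + 4*j - 4*k + n, with a best
   multiplier of 4 (assuming the target allows it in addresses), the loop
   above collects index = i + j - k and step = 4, leaving only the
   element n in ADDR.  */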
634
635 /* Splits address ADDR for a memory access of type TYPE into PARTS.
636 If BASE_HINT is non-NULL, it specifies an SSA name to be used
637 preferentially as base of the reference, and IV_CAND is the selected
638 iv candidate used in ADDR.
639
640 TODO -- be more clever about the distribution of the elements of ADDR
641 to PARTS.  Some architectures do not support anything but a single
642 register in an address, possibly with a small integer offset; while
643 create_mem_ref will simplify the address to an acceptable shape
644 later, it would be more efficient to know that asking for complicated
645 addressing modes is useless. */
646
647 static void
648 addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
649 tree base_hint, struct mem_address *parts,
650 bool speed)
651 {
652 tree part;
653 unsigned i;
654
655 parts->symbol = NULL_TREE;
656 parts->base = NULL_TREE;
657 parts->index = NULL_TREE;
658 parts->step = NULL_TREE;
659
660 if (addr->offset != 0)
661 parts->offset = wide_int_to_tree (sizetype, addr->offset);
662 else
663 parts->offset = NULL_TREE;
664
665 /* Try to find a symbol. */
666 move_fixed_address_to_symbol (parts, addr);
667
668 /* No need to do address parts reassociation if the number of parts
669 is <= 2 -- in that case, no loop invariant code motion can be
670 exposed. */
671
672 if (!base_hint && (addr->n > 2))
673 move_variant_to_index (parts, addr, iv_cand);
674
675 /* First move the most expensive feasible multiplication
676 to index. */
677 if (!parts->index)
678 most_expensive_mult_to_index (type, parts, addr, speed);
679
680 /* Try to find a base of the reference.  Since at the moment there is
681 no reliable way to distinguish between a pointer and its offset,
682 this is just a guess.  */
683 if (!parts->symbol && base_hint)
684 move_hint_to_base (type, parts, base_hint, addr);
685 if (!parts->symbol && !parts->base)
686 move_pointer_to_base (parts, addr);
687
688 /* Then try to process the remaining elements. */
689 for (i = 0; i < addr->n; i++)
690 {
691 part = fold_convert (sizetype, addr->elts[i].val);
692 if (addr->elts[i].coef != 1)
693 part = fold_build2 (MULT_EXPR, sizetype, part,
694 wide_int_to_tree (sizetype, addr->elts[i].coef));
695 add_to_parts (parts, part);
696 }
697 if (addr->rest)
698 add_to_parts (parts, fold_convert (sizetype, addr->rest));
699 }
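/* A purely illustrative example: splitting the affine combination
   p + 4 * i + 32 (with BASE_HINT being the SSA name p) would typically
   yield parts->base = p, parts->index = (sizetype) i, parts->step = 4
   and parts->offset = 32, with the symbol left NULL.  */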
700
701 /* Force the PARTS into registers.  */
702
703 static void
704 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
705 {
706 if (parts->base)
707 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
708 is_gimple_mem_ref_addr, NULL_TREE,
709 true, GSI_SAME_STMT);
710 if (parts->index)
711 parts->index = force_gimple_operand_gsi (gsi, parts->index,
712 true, NULL_TREE,
713 true, GSI_SAME_STMT);
714 }
715
716 /* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
717 computations are emitted in front of GSI.  TYPE is the type of the
718 created memory reference.  IV_CAND is the selected iv candidate in ADDR,
719 and BASE_HINT is non-NULL if IV_CAND comes from a base address
720 object.  */
721
722 tree
723 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
724 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
725 {
726 tree mem_ref, tmp;
727 struct mem_address parts;
728
729 addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
730 gimplify_mem_ref_parts (gsi, &parts);
731 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
732 if (mem_ref)
733 return mem_ref;
734
735 /* The expression is too complicated. Try making it simpler. */
736
737 if (parts.step && !integer_onep (parts.step))
738 {
739 /* Move the multiplication to index. */
740 gcc_assert (parts.index);
741 parts.index = force_gimple_operand_gsi (gsi,
742 fold_build2 (MULT_EXPR, sizetype,
743 parts.index, parts.step),
744 true, NULL_TREE, true, GSI_SAME_STMT);
745 parts.step = NULL_TREE;
746
747 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
748 if (mem_ref)
749 return mem_ref;
750 }
751
752 if (parts.symbol)
753 {
754 tmp = parts.symbol;
755 gcc_assert (is_gimple_val (tmp));
756
757 /* Add the symbol to the base, forcing it to a register if necessary.  */
758 if (parts.base)
759 {
760 gcc_assert (useless_type_conversion_p
761 (sizetype, TREE_TYPE (parts.base)));
762
763 if (parts.index)
764 {
765 parts.base = force_gimple_operand_gsi_1 (gsi,
766 fold_build_pointer_plus (tmp, parts.base),
767 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
768 }
769 else
770 {
771 parts.index = parts.base;
772 parts.base = tmp;
773 }
774 }
775 else
776 parts.base = tmp;
777 parts.symbol = NULL_TREE;
778
779 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
780 if (mem_ref)
781 return mem_ref;
782 }
783
784 if (parts.index)
785 {
786 /* Add index to base. */
787 if (parts.base)
788 {
789 parts.base = force_gimple_operand_gsi_1 (gsi,
790 fold_build_pointer_plus (parts.base, parts.index),
791 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
792 }
793 else
794 parts.base = parts.index;
795 parts.index = NULL_TREE;
796
797 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
798 if (mem_ref)
799 return mem_ref;
800 }
801
802 if (parts.offset && !integer_zerop (parts.offset))
803 {
804 /* Try adding offset to base. */
805 if (parts.base)
806 {
807 parts.base = force_gimple_operand_gsi_1 (gsi,
808 fold_build_pointer_plus (parts.base, parts.offset),
809 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
810 }
811 else
812 parts.base = parts.offset;
813
814 parts.offset = NULL_TREE;
815
816 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
817 if (mem_ref)
818 return mem_ref;
819 }
820
821 /* Verify that the address is in the simplest possible shape
822 (only a register). If we cannot create such a memory reference,
823 something is really wrong. */
824 gcc_assert (parts.symbol == NULL_TREE);
825 gcc_assert (parts.index == NULL_TREE);
826 gcc_assert (!parts.step || integer_onep (parts.step));
827 gcc_assert (!parts.offset || integer_zerop (parts.offset));
828 gcc_unreachable ();
829 }
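/* To illustrate the fallback cascade above: if the initial
   base + index * step + offset form is not a valid address on the target,
   the multiplication is first folded into the index, then the symbol (if
   any) and the index are folded into the base, and finally the offset is
   folded into the base as well, so that in the worst case the reference
   degenerates to a single base register.  */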
830
831 /* Copies components of the address from OP to ADDR. */
832
833 void
834 get_address_description (tree op, struct mem_address *addr)
835 {
836 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
837 {
838 addr->symbol = TMR_BASE (op);
839 addr->base = TMR_INDEX2 (op);
840 }
841 else
842 {
843 addr->symbol = NULL_TREE;
844 if (TMR_INDEX2 (op))
845 {
846 gcc_assert (integer_zerop (TMR_BASE (op)));
847 addr->base = TMR_INDEX2 (op);
848 }
849 else
850 addr->base = TMR_BASE (op);
851 }
852 addr->index = TMR_INDEX (op);
853 addr->step = TMR_STEP (op);
854 addr->offset = TMR_OFFSET (op);
855 }
856
857 /* Copies the reference information from OLD_REF to NEW_REF, where
858 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
859
860 void
861 copy_ref_info (tree new_ref, tree old_ref)
862 {
863 tree new_ptr_base = NULL_TREE;
864
865 gcc_assert (TREE_CODE (new_ref) == MEM_REF
866 || TREE_CODE (new_ref) == TARGET_MEM_REF);
867
868 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
869 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
870
871 new_ptr_base = TREE_OPERAND (new_ref, 0);
872
873 /* We can transfer points-to information from an old pointer
874 or decl base to the new one. */
875 if (new_ptr_base
876 && TREE_CODE (new_ptr_base) == SSA_NAME
877 && !SSA_NAME_PTR_INFO (new_ptr_base))
878 {
879 tree base = get_base_address (old_ref);
880 if (!base)
881 ;
882 else if ((TREE_CODE (base) == MEM_REF
883 || TREE_CODE (base) == TARGET_MEM_REF)
884 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
885 && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
886 {
887 struct ptr_info_def *new_pi;
888 unsigned int align, misalign;
889
890 duplicate_ssa_name_ptr_info
891 (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
892 new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
893 /* We have to be careful about transferring alignment information. */
894 if (get_ptr_info_alignment (new_pi, &align, &misalign)
895 && TREE_CODE (old_ref) == MEM_REF
896 && !(TREE_CODE (new_ref) == TARGET_MEM_REF
897 && (TMR_INDEX2 (new_ref)
898 || (TMR_STEP (new_ref)
899 && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
900 < align)))))
901 {
902 unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
903 - mem_ref_offset (new_ref).to_short_addr ());
904 adjust_ptr_info_misalignment (new_pi, inc);
905 }
906 else
907 mark_ptr_info_alignment_unknown (new_pi);
908 }
909 else if (TREE_CODE (base) == VAR_DECL
910 || TREE_CODE (base) == PARM_DECL
911 || TREE_CODE (base) == RESULT_DECL)
912 {
913 struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
914 pt_solution_set_var (&pi->pt, base);
915 }
916 }
917 }
918
919 /* Move constants in TARGET_MEM_REF REF to the offset.  Returns the new
920 TARGET_MEM_REF if anything changes, NULL_TREE otherwise.  */
921
922 tree
923 maybe_fold_tmr (tree ref)
924 {
925 struct mem_address addr;
926 bool changed = false;
927 tree new_ref, off;
928
929 get_address_description (ref, &addr);
930
931 if (addr.base
932 && TREE_CODE (addr.base) == INTEGER_CST
933 && !integer_zerop (addr.base))
934 {
935 addr.offset = fold_binary_to_constant (PLUS_EXPR,
936 TREE_TYPE (addr.offset),
937 addr.offset, addr.base);
938 addr.base = NULL_TREE;
939 changed = true;
940 }
941
942 if (addr.symbol
943 && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
944 {
945 addr.offset = fold_binary_to_constant
946 (PLUS_EXPR, TREE_TYPE (addr.offset),
947 addr.offset,
948 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
949 addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
950 changed = true;
951 }
952 else if (addr.symbol
953 && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
954 {
955 HOST_WIDE_INT offset;
956 addr.symbol = build_fold_addr_expr
957 (get_addr_base_and_unit_offset
958 (TREE_OPERAND (addr.symbol, 0), &offset));
959 addr.offset = int_const_binop (PLUS_EXPR,
960 addr.offset, size_int (offset));
961 changed = true;
962 }
963
964 if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
965 {
966 off = addr.index;
967 if (addr.step)
968 {
969 off = fold_binary_to_constant (MULT_EXPR, sizetype,
970 off, addr.step);
971 addr.step = NULL_TREE;
972 }
973
974 addr.offset = fold_binary_to_constant (PLUS_EXPR,
975 TREE_TYPE (addr.offset),
976 addr.offset, off);
977 addr.index = NULL_TREE;
978 changed = true;
979 }
980
981 if (!changed)
982 return NULL_TREE;
983
984 /* If we have propagated something into this TARGET_MEM_REF and thus
985 ended up folding it, always create a new TARGET_MEM_REF regardless
986 of whether it is valid in this form on the target - the propagation
987 result wouldn't be valid anyway.  */
988 new_ref = create_mem_ref_raw (TREE_TYPE (ref),
989 TREE_TYPE (addr.offset), &addr, false);
990 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
991 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
992 return new_ref;
993 }
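/* For instance (hypothetically), if constant propagation turned TMR_INDEX
   of a reference into the constant 3 while TMR_STEP is 4, the code above
   folds 3 * 4 = 12 into TMR_OFFSET and clears the index and step, then
   rebuilds the reference without re-verifying it against the target.  */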
994
995 /* Dump PARTS to FILE. */
996
997 extern void dump_mem_address (FILE *, struct mem_address *);
998 void
999 dump_mem_address (FILE *file, struct mem_address *parts)
1000 {
1001 if (parts->symbol)
1002 {
1003 fprintf (file, "symbol: ");
1004 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1005 fprintf (file, "\n");
1006 }
1007 if (parts->base)
1008 {
1009 fprintf (file, "base: ");
1010 print_generic_expr (file, parts->base, TDF_SLIM);
1011 fprintf (file, "\n");
1012 }
1013 if (parts->index)
1014 {
1015 fprintf (file, "index: ");
1016 print_generic_expr (file, parts->index, TDF_SLIM);
1017 fprintf (file, "\n");
1018 }
1019 if (parts->step)
1020 {
1021 fprintf (file, "step: ");
1022 print_generic_expr (file, parts->step, TDF_SLIM);
1023 fprintf (file, "\n");
1024 }
1025 if (parts->offset)
1026 {
1027 fprintf (file, "offset: ");
1028 print_generic_expr (file, parts->offset, TDF_SLIM);
1029 fprintf (file, "\n");
1030 }
1031 }
1032
1033 #include "gt-tree-ssa-address.h"