/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
22 that directly map to addressing modes of the target. */
26 #include "coretypes.h"
30 #include "basic-block.h"
31 #include "tree-pretty-print.h"
32 #include "tree-flow.h"
35 #include "tree-inline.h"
36 #include "tree-affine.h"
38 /* FIXME: We compute address costs using RTL. */
39 #include "insn-config.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  [NOTE(review): the tail of this sentence was lost in
   extraction -- confirm wording against the original tree-ssa-address.c.]  */
/* NOTE(review): this chunk is a garbled extraction -- original line numbers
   are fused into the code text and interior source lines are missing (e.g.
   the struct's closing brace and the head/tail of TEMPL_IDX).  Code is kept
   byte-identical; only comments were added.  */
/* Template record for a cached RTL address shape: REF is the template rtx,
   and step_p/off_p point at the spots inside it where the step and offset
   constants are patched in (both GTY-skipped, per the visible markers).  */
74 typedef struct GTY (()) mem_addr_template
{
75 rtx ref
; /* The template. */
76 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
78 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
82 DEF_VEC_O (mem_addr_template
);
83 DEF_VEC_ALLOC_O (mem_addr_template
, gc
);
85 /* The templates. Each of the low five bits of the index corresponds to one
86 component of TARGET_MEM_REF being present, while the high bits identify
87 the address space. See TEMPL_IDX. */
89 static GTY(()) VEC (mem_addr_template
, gc
) *mem_addr_template_list
;
/* Index into mem_addr_template_list: one bit per present component
   (SYMBOL/BASE/INDEX/STEP), high bits for the address space.  The macro's
   first line and the OFFSET bit are missing from this extraction.  */
91 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
93 | ((SYMBOL != 0) << 4) \
94 | ((BASE != 0) << 3) \
95 | ((INDEX != 0) << 2) \
96 | ((STEP != 0) << 1) \
/* NOTE(review): fragmentary extraction -- the original's declarations,
   braces and several control-flow lines between the visible statements are
   missing, so this body is left byte-identical rather than rewritten.
   Purpose (from the surviving header comment): build the RTL address
   SYMBOL + BASE + INDEX*STEP + OFFSET in *ADDR, recording in *STEP_P and
   *OFFSET_P where the step/offset operands live inside the expression.  */
99 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
100 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
101 to where step is placed to *STEP_P and offset to *OFFSET_P. */
104 gen_addr_rtx (enum machine_mode address_mode
,
105 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
106 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
121 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
124 *step_p
= &XEXP (act_elem
, 1);
130 if (base
&& base
!= const0_rtx
)
133 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
143 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
146 *offset_p
= &XEXP (act_elem
, 1);
/* Wrap symbolic parts in CONST so the backend treats them as constants.  */
148 if (GET_CODE (symbol
) == SYMBOL_REF
149 || GET_CODE (symbol
) == LABEL_REF
150 || GET_CODE (symbol
) == CONST
)
151 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
155 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
163 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
165 *offset_p
= &XEXP (*addr
, 1);
/* NOTE(review): fragmentary extraction -- conditionals, braces and the
   template-caching control flow between these statements are missing;
   kept byte-identical.  Purpose (from the surviving comment): produce an
   RTL address for a TARGET_MEM_REF; when REALLY_EXPAND is false, reuse a
   cached template with fake registers instead of expanding operands.  */
179 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
181 If REALLY_EXPAND is false, just make fake registers instead
182 of really expanding the operands, and perform the expansion in-place
183 by using one of the "templates". */
186 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
189 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
190 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
191 rtx address
, sym
, bse
, idx
, st
, off
;
192 struct mem_addr_template
*templ
;
/* Only a non-unit step is materialized as a constant.  */
194 if (addr
->step
&& !integer_onep (addr
->step
))
195 st
= immed_double_int_const (tree_to_double_int (addr
->step
), pointer_mode
);
/* Likewise only a nonzero offset, sign-extended to its type's precision.  */
199 if (addr
->offset
&& !integer_zerop (addr
->offset
))
200 off
= immed_double_int_const
201 (double_int_sext (tree_to_double_int (addr
->offset
),
202 TYPE_PRECISION (TREE_TYPE (addr
->offset
))),
209 unsigned int templ_index
210 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
213 >= VEC_length (mem_addr_template
, mem_addr_template_list
))
214 VEC_safe_grow_cleared (mem_addr_template
, gc
, mem_addr_template_list
,
217 /* Reuse the templates for addresses, so that we do not waste memory. */
218 templ
= VEC_index (mem_addr_template
, mem_addr_template_list
, templ_index
);
/* Template path: placeholder symbol and raw virtual registers stand in
   for the real operands.  */
221 sym
= (addr
->symbol
?
222 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
225 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
228 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
231 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
232 st
? const0_rtx
: NULL_RTX
,
233 off
? const0_rtx
: NULL_RTX
,
247 /* Otherwise really expand the expressions. */
249 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
252 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
255 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
258 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
/* Addresses are built in pointer_mode; convert if the target's address
   mode differs.  */
259 if (pointer_mode
!= address_mode
)
260 address
= convert_memory_address (address_mode
, address
);
/* NOTE(review): fragmentary extraction -- guards and several operands
   between statements are missing; kept byte-identical.  Purpose: rebuild
   the address of a TARGET_MEM_REF as a tree of type TYPE, combining base,
   index*step, index2 and offset via fold_build_pointer_plus.  */
264 /* Returns address of MEM_REF in TYPE. */
267 tree_mem_ref_addr (tree type
, tree mem_ref
)
271 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
272 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
274 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
276 act_elem
= TMR_INDEX (mem_ref
);
/* Scale the index by the step (second operand lost in extraction).  */
280 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
285 act_elem
= TMR_INDEX2 (mem_ref
);
289 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
295 if (offset
&& !integer_zerop (offset
))
298 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
299 fold_convert (TREE_TYPE (addr_off
), offset
));
305 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
/* NOTE(review): fragmentary extraction -- return type line, opening brace
   and the rtx declaration are missing; kept byte-identical.  Builds the
   RTL address (without real expansion) and asks the target whether it is
   a legitimate address in MODE / address space AS.  */
312 /* Returns true if a memory reference in MODE and with parameters given by
313 ADDR is valid on the current target. */
316 valid_mem_ref_p (enum machine_mode mode
, addr_space_t as
,
317 struct mem_address
*addr
)
321 address
= addr_for_mem_ref (addr
, as
, false);
325 return memory_address_addr_space_p (mode
, address
, as
);
/* NOTE(review): fragmentary extraction -- the verify guard's first line,
   several assignments and the base/index2 setup are missing; kept
   byte-identical.  Purpose (from the surviving comment): validate the
   address parts (unless VERIFY is false) and build either a plain MEM_REF
   (when only base+offset remain) or a 5-operand TARGET_MEM_REF.  */
328 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
329 is valid on the current target and if so, creates and returns the
330 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
333 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
339 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
/* A step of 1 carries no information; drop it.  */
342 if (addr
->step
&& integer_onep (addr
->step
))
343 addr
->step
= NULL_TREE
;
346 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
348 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
356 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
363 base
= build_int_cst (ptr_type_node
, 0);
367 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
368 ??? As IVOPTs does not follow restrictions to where the base
369 pointer may point to create a MEM_REF only if we know that
371 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
372 && (!index2
|| integer_zerop (index2
))
373 && (!addr
->index
|| integer_zerop (addr
->index
)))
374 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
376 return build5 (TARGET_MEM_REF
, type
,
377 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
/* NOTE(review): fragmentary extraction -- the return-type line and braces
   are missing; kept byte-identical.  True for a static or external
   VAR_DECL that is not dllimported, i.e. its address is a link-time
   constant.  */
380 /* Returns true if OBJ is an object whose address is a link time constant. */
383 fixed_address_object_p (tree obj
)
385 return (TREE_CODE (obj
) == VAR_DECL
386 && (TREE_STATIC (obj
)
387 || DECL_EXTERNAL (obj
))
388 && ! DECL_DLLIMPORT_P (obj
));
/* NOTE(review): fragmentary extraction -- loop body braces, the `continue`
   paths and the parts->symbol assignment are missing; kept byte-identical.
   Scans the affine combination ADDR for a unit-coefficient ADDR_EXPR of a
   link-time-constant object and removes that element (presumably moving it
   into PARTS->symbol, per the comment -- the assignment itself is lost).  */
391 /* If ADDR contains an address of object that is a link time constant,
392 move it to PARTS->symbol. */
395 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
398 tree val
= NULL_TREE
;
400 for (i
= 0; i
< addr
->n
; i
++)
/* Only elements with coefficient 1 are candidates.  */
402 if (!double_int_one_p (addr
->elts
[i
].coef
))
405 val
= addr
->elts
[i
].val
;
406 if (TREE_CODE (val
) == ADDR_EXPR
407 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
415 aff_combination_remove_elt (addr
, i
);
/* NOTE(review): fragmentary extraction -- parameter tail, loop braces and
   early-exit paths are missing; kept byte-identical.  Finds a unit-
   coefficient element equal to BASE_HINT, converts it to a void pointer in
   TYPE's address space (see the alignment rationale below), stores it as
   PARTS->base and removes it from ADDR.  */
418 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
421 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
425 tree val
= NULL_TREE
;
428 for (i
= 0; i
< addr
->n
; i
++)
430 if (!double_int_one_p (addr
->elts
[i
].coef
))
433 val
= addr
->elts
[i
].val
;
434 if (operand_equal_p (val
, base_hint
, 0))
441 /* Cast value to appropriate pointer type. We cannot use a pointer
442 to TYPE directly, as the back-end will assume registers of pointer
443 type are aligned, and just the base itself may not actually be.
444 We use void pointer to the type's address space instead. */
445 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
446 type
= build_qualified_type (void_type_node
, qual
);
447 parts
->base
= fold_convert (build_pointer_type (type
), val
);
448 aff_combination_remove_elt (addr
, i
);
/* NOTE(review): fragmentary extraction -- the comment's tail, loop braces
   and the parts->base assignment are missing; kept byte-identical.  Scans
   ADDR for a unit-coefficient element of pointer type and removes it
   (presumably moving it to PARTS->base, per the surviving comment).  */
451 /* If ADDR contains an address of a dereferenced pointer, move it to
455 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
458 tree val
= NULL_TREE
;
460 for (i
= 0; i
< addr
->n
; i
++)
462 if (!double_int_one_p (addr
->elts
[i
].coef
))
465 val
= addr
->elts
[i
].val
;
466 if (POINTER_TYPE_P (TREE_TYPE (val
)))
474 aff_combination_remove_elt (addr
, i
);
/* NOTE(review): fragmentary extraction -- the comment's tail, loop braces
   and the not-found early return are missing; kept byte-identical.  Finds
   the element of ADDR equal to V, installs it as PARTS->index with its
   coefficient as PARTS->step, and removes it from ADDR.  Asserts no index
   has been chosen yet.  */
477 /* Moves the loop variant part V in linear address ADDR to be the index
481 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
484 tree val
= NULL_TREE
;
486 gcc_assert (!parts
->index
);
487 for (i
= 0; i
< addr
->n
; i
++)
489 val
= addr
->elts
[i
].val
;
490 if (operand_equal_p (val
, v
, 0))
497 parts
->index
= fold_convert (sizetype
, val
);
498 parts
->step
= double_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
499 aff_combination_remove_elt (addr
, i
);
/* NOTE(review): fragmentary extraction -- the guards choosing between the
   index/base slots and the final fold_build2 operands are missing; kept
   byte-identical.  Folds ELT into PARTS, either as the index or added onto
   the base (pointer-plus for pointer bases, PLUS_EXPR otherwise).  */
502 /* Adds ELT to PARTS. */
505 add_to_parts (struct mem_address
*parts
, tree elt
)
511 parts
->index
= fold_convert (sizetype
, elt
);
521 /* Add ELT to base. */
522 type
= TREE_TYPE (parts
->base
);
523 if (POINTER_TYPE_P (type
))
524 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
526 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
/* NOTE(review): fragmentary extraction -- loop braces, `continue`s, the
   op_code = PLUS_EXPR branch and the first mult_elt assignment are
   missing; kept byte-identical.  Two passes over ADDR: first find the
   costliest coefficient that the target's addressing modes allow
   (mult_by_coeff_cost / multiplier_allowed_in_address_p); then collect all
   elements whose coefficient is +/- that value into one expression, which
   becomes PARTS->index with PARTS->step = best_mult.  */
530 /* Finds the most expensive multiplication in ADDR that can be
531 expressed in an addressing mode and move the corresponding
532 element(s) to PARTS. */
535 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
536 aff_tree
*addr
, bool speed
)
538 addr_space_t as
= TYPE_ADDR_SPACE (type
);
539 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
541 double_int best_mult
, amult
, amult_neg
;
542 unsigned best_mult_cost
= 0, acost
;
543 tree mult_elt
= NULL_TREE
, elt
;
545 enum tree_code op_code
;
547 best_mult
= double_int_zero
;
/* Pass 1: find the most expensive addressable coefficient.  */
548 for (i
= 0; i
< addr
->n
; i
++)
550 if (!double_int_fits_in_shwi_p (addr
->elts
[i
].coef
))
553 coef
= double_int_to_shwi (addr
->elts
[i
].coef
);
555 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
558 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
560 if (acost
> best_mult_cost
)
562 best_mult_cost
= acost
;
563 best_mult
= addr
->elts
[i
].coef
;
570 /* Collect elements multiplied by best_mult. */
571 for (i
= j
= 0; i
< addr
->n
; i
++)
573 amult
= addr
->elts
[i
].coef
;
574 amult_neg
= double_int_ext_for_comb (double_int_neg (amult
), addr
);
576 if (double_int_equal_p (amult
, best_mult
))
578 else if (double_int_equal_p (amult_neg
, best_mult
))
579 op_code
= MINUS_EXPR
;
/* Elements with other coefficients are compacted in place.  */
582 addr
->elts
[j
] = addr
->elts
[i
];
587 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
589 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
590 else if (op_code
== PLUS_EXPR
)
593 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
597 parts
->index
= mult_elt
;
598 parts
->step
= double_int_to_tree (sizetype
, best_mult
);
/* NOTE(review): fragmentary extraction -- the parameter list's tail, loop
   braces and several guard lines are missing; kept byte-identical.
   Pipeline (visible in the surviving calls): clear PARTS, peel off the
   constant offset, then in order try symbol, loop-variant index, the most
   expensive multiplication, a base (hint first, then any pointer), and
   finally fold the leftover elements and ADDR->rest in via add_to_parts.  */
601 /* Splits address ADDR for a memory access of type TYPE into PARTS.
602 If BASE_HINT is non-NULL, it specifies an SSA name to be used
603 preferentially as base of the reference, and IV_CAND is the selected
604 iv candidate used in ADDR.
606 TODO -- be more clever about the distribution of the elements of ADDR
607 to PARTS. Some architectures do not support anything but single
608 register in address, possibly with a small integer offset; while
609 create_mem_ref will simplify the address to an acceptable shape
610 later, it would be more efficient to know that asking for complicated
611 addressing modes is useless. */
614 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
615 tree base_hint
, struct mem_address
*parts
,
621 parts
->symbol
= NULL_TREE
;
622 parts
->base
= NULL_TREE
;
623 parts
->index
= NULL_TREE
;
624 parts
->step
= NULL_TREE
;
626 if (!double_int_zero_p (addr
->offset
))
627 parts
->offset
= double_int_to_tree (sizetype
, addr
->offset
);
629 parts
->offset
= NULL_TREE
;
631 /* Try to find a symbol. */
632 move_fixed_address_to_symbol (parts
, addr
);
634 /* No need to do address parts reassociation if the number of parts
635 is <= 2 -- in that case, no loop invariant code motion can be
638 if (!base_hint
&& (addr
->n
> 2))
639 move_variant_to_index (parts
, addr
, iv_cand
);
641 /* First move the most expensive feasible multiplication
644 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
646 /* Try to find a base of the reference. Since at the moment
647 there is no reliable way how to distinguish between pointer and its
648 offset, this is just a guess. */
649 if (!parts
->symbol
&& base_hint
)
650 move_hint_to_base (type
, parts
, base_hint
, addr
);
651 if (!parts
->symbol
&& !parts
->base
)
652 move_pointer_to_base (parts
, addr
);
654 /* Then try to process the remaining elements. */
655 for (i
= 0; i
< addr
->n
; i
++)
657 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
658 if (!double_int_one_p (addr
->elts
[i
].coef
))
659 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
660 double_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
661 add_to_parts (parts
, part
);
664 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
/* NOTE(review): fragmentary extraction -- the null guards around each part
   and the middle arguments of the index call are missing; kept
   byte-identical.  Gimplifies PARTS->base and PARTS->index into forms
   acceptable as memory-reference addresses, emitting statements before
   GSI.  */
667 /* Force the PARTS to register. */
670 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
673 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
674 is_gimple_mem_ref_addr
, NULL_TREE
,
675 true, GSI_SAME_STMT
);
677 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
679 true, GSI_SAME_STMT
);
/* NOTE(review): fragmentary extraction -- the success-return checks after
   every create_mem_ref_raw call, the symbol-handling setup and several
   braces are missing; kept byte-identical.  Visible structure: split ADDR
   into parts, gimplify them, try to build the TARGET_MEM_REF, and on each
   failure progressively simplify -- fold step into index, fold symbol into
   base, fold index into base, fold offset into base -- retrying after each
   step; the final asserts demand the address has collapsed to a bare
   register.  */
682 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
683 computations are emitted in front of GSI. TYPE is the mode
684 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
685 and BASE_HINT is non NULL if IV_CAND comes from a base address
689 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
690 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
693 struct mem_address parts
;
695 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
696 gimplify_mem_ref_parts (gsi
, &parts
);
697 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
701 /* The expression is too complicated. Try making it simpler. */
703 if (parts
.step
&& !integer_onep (parts
.step
))
705 /* Move the multiplication to index. */
706 gcc_assert (parts
.index
);
707 parts
.index
= force_gimple_operand_gsi (gsi
,
708 fold_build2 (MULT_EXPR
, sizetype
,
709 parts
.index
, parts
.step
),
710 true, NULL_TREE
, true, GSI_SAME_STMT
);
711 parts
.step
= NULL_TREE
;
713 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
721 gcc_assert (is_gimple_val (tmp
));
723 /* Add the symbol to base, eventually forcing it to register. */
726 gcc_assert (useless_type_conversion_p
727 (sizetype
, TREE_TYPE (parts
.base
)));
731 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
732 fold_build_pointer_plus (tmp
, parts
.base
),
733 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
737 parts
.index
= parts
.base
;
743 parts
.symbol
= NULL_TREE
;
745 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
752 /* Add index to base. */
755 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
756 fold_build_pointer_plus (parts
.base
, parts
.index
),
757 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
760 parts
.base
= parts
.index
;
761 parts
.index
= NULL_TREE
;
763 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
768 if (parts
.offset
&& !integer_zerop (parts
.offset
))
770 /* Try adding offset to base. */
773 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
774 fold_build_pointer_plus (parts
.base
, parts
.offset
),
775 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
778 parts
.base
= parts
.offset
;
780 parts
.offset
= NULL_TREE
;
782 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
787 /* Verify that the address is in the simplest possible shape
788 (only a register). If we cannot create such a memory reference,
789 something is really wrong. */
790 gcc_assert (parts
.symbol
== NULL_TREE
);
791 gcc_assert (parts
.index
== NULL_TREE
);
792 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
793 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
/* NOTE(review): fragmentary extraction -- the else/else-if scaffolding
   between the three base/symbol cases is missing; kept byte-identical.
   Decomposes a TARGET_MEM_REF OP into the mem_address fields: an
   ADDR_EXPR base becomes the symbol (with INDEX2 as base); otherwise base
   comes from TMR_BASE or TMR_INDEX2; index/step/offset copy directly.  */
797 /* Copies components of the address from OP to ADDR. */
800 get_address_description (tree op
, struct mem_address
*addr
)
802 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
804 addr
->symbol
= TMR_BASE (op
);
805 addr
->base
= TMR_INDEX2 (op
);
809 addr
->symbol
= NULL_TREE
;
812 gcc_assert (integer_zerop (TMR_BASE (op
)));
813 addr
->base
= TMR_INDEX2 (op
);
816 addr
->base
= TMR_BASE (op
);
818 addr
->index
= TMR_INDEX (op
);
819 addr
->step
= TMR_STEP (op
);
820 addr
->offset
= TMR_OFFSET (op
);
/* NOTE(review): fragmentary extraction -- return-type line and braces are
   missing; kept byte-identical.  Propagates the side-effects and
   volatility flags from one target_mem_ref tree to another.  */
823 /* Copies the additional information attached to target_mem_ref FROM to TO. */
826 copy_mem_ref_info (tree to
, tree from
)
828 /* And the info about the original reference. */
829 TREE_SIDE_EFFECTS (to
) = TREE_SIDE_EFFECTS (from
);
830 TREE_THIS_VOLATILE (to
) = TREE_THIS_VOLATILE (from
);
/* NOTE(review): fragmentary extraction -- the condition heads before
   several `&&` continuations, braces and an `if` around the SSA_NAME
   check are missing; kept byte-identical.  Copies side-effect/volatile
   flags, then transfers points-to and (carefully) alignment information
   from OLD_REF's base pointer or decl to NEW_REF's SSA-name base, marking
   alignment unknown when the new access pattern (index2/step) could
   invalidate it.  */
833 /* Copies the reference information from OLD_REF to NEW_REF, where
834 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
837 copy_ref_info (tree new_ref
, tree old_ref
)
839 tree new_ptr_base
= NULL_TREE
;
841 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
842 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
844 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
845 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
847 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
849 /* We can transfer points-to information from an old pointer
850 or decl base to the new one. */
852 && TREE_CODE (new_ptr_base
) == SSA_NAME
853 && !SSA_NAME_PTR_INFO (new_ptr_base
))
855 tree base
= get_base_address (old_ref
);
858 else if ((TREE_CODE (base
) == MEM_REF
859 || TREE_CODE (base
) == TARGET_MEM_REF
)
860 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
861 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
863 struct ptr_info_def
*new_pi
;
864 unsigned int align
, misalign
;
866 duplicate_ssa_name_ptr_info
867 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
868 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
)
;
869 /* We have to be careful about transferring alignment information. */
870 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
871 && TREE_CODE (old_ref
) == MEM_REF
872 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
873 && (TMR_INDEX2 (new_ref
)
874 || (TMR_STEP (new_ref
)
875 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
878 unsigned int inc
= double_int_sub (mem_ref_offset (old_ref
),
879 mem_ref_offset (new_ref
)).low
;
880 adjust_ptr_info_misalignment (new_pi
, inc
);
883 mark_ptr_info_alignment_unknown (new_pi
);
885 else if (TREE_CODE (base
) == VAR_DECL
886 || TREE_CODE (base
) == PARM_DECL
887 || TREE_CODE (base
) == RESULT_DECL
)
889 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
890 pt_solution_set_var (&pi
->pt
, base
);
/* NOTE(review): fragmentary extraction -- the `changed` bookkeeping, the
   guards opening each folding case and several operand lines are missing;
   kept byte-identical.  Visible logic: decompose REF, then fold constant
   base / MEM_REF-symbol / handled-component-symbol / constant index (times
   step) into the offset, and finally rebuild the reference without
   validation (rationale in the surviving comment near the end).  */
895 /* Move constants in target_mem_ref REF to offset. Returns the new target
896 mem ref if anything changes, NULL_TREE otherwise. */
899 maybe_fold_tmr (tree ref
)
901 struct mem_address addr
;
902 bool changed
= false;
905 get_address_description (ref
, &addr
);
908 && TREE_CODE (addr
.base
) == INTEGER_CST
909 && !integer_zerop (addr
.base
))
911 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
912 TREE_TYPE (addr
.offset
),
913 addr
.offset
, addr
.base
);
914 addr
.base
= NULL_TREE
;
919 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
921 addr
.offset
= fold_binary_to_constant
922 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
924 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
925 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
929 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
931 HOST_WIDE_INT offset
;
932 addr
.symbol
= build_fold_addr_expr
933 (get_addr_base_and_unit_offset
934 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
935 addr
.offset
= int_const_binop (PLUS_EXPR
,
936 addr
.offset
, size_int (offset
));
940 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
945 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
947 addr
.step
= NULL_TREE
;
950 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
951 TREE_TYPE (addr
.offset
),
953 addr
.index
= NULL_TREE
;
960 /* If we have propagated something into this TARGET_MEM_REF and thus
961 ended up folding it, always create a new TARGET_MEM_REF regardless
962 if it is valid in this for on the target - the propagation result
963 wouldn't be anyway. */
964 ret
= create_mem_ref_raw (TREE_TYPE (ref
),
965 TREE_TYPE (addr
.offset
), &addr
, false);
966 copy_mem_ref_info (ret
, ref
);
/* NOTE(review): fragmentary extraction -- the null guards before each
   field's dump and the surrounding braces are missing; kept
   byte-identical.  Debug helper: prints each present component of PARTS
   (symbol/base/index/step/offset) to FILE, one per line.  */
970 /* Dump PARTS to FILE. */
972 extern void dump_mem_address (FILE *, struct mem_address
*);
974 dump_mem_address (FILE *file
, struct mem_address
*parts
)
978 fprintf (file
, "symbol: ");
979 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
980 fprintf (file
, "\n");
984 fprintf (file
, "base: ");
985 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
986 fprintf (file
, "\n");
990 fprintf (file
, "index: ");
991 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
992 fprintf (file
, "\n");
996 fprintf (file
, "step: ");
997 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
998 fprintf (file
, "\n");
1002 fprintf (file
, "offset: ");
1003 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1004 fprintf (file
, "\n");
1008 #include "gt-tree-ssa-address.h"