#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
-#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "langhooks.h"
rtx new_from;
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
+ convert_optab ctab = unsignedp ? zext_optab : sext_optab;
+ enum insn_code icode;
- gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
- != CODE_FOR_nothing);
+ icode = convert_optab_handler (ctab, full_mode, from_mode);
+ gcc_assert (icode != CODE_FOR_nothing);
if (to_mode == full_mode)
{
- emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
- from_mode),
- to, from, UNKNOWN);
+ emit_unop_insn (icode, to, from, UNKNOWN);
return;
}
new_from = gen_reg_rtx (full_mode);
- emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
- new_from, from, UNKNOWN);
+ emit_unop_insn (icode, new_from, from, UNKNOWN);
/* else proceed to integer conversions below. */
from_mode = full_mode;
/* No special multiword conversion insn; do it by hand. */
start_sequence ();
- /* Since we will turn this into a no conflict block, we must ensure
- that the source does not overlap the target. */
+  /* Since we will turn this into a no conflict block, we must ensure that
+     the source does not overlap the target, so force it into an isolated
+     register if it might.  Likewise for any MEM input, since the
+     conversion sequence might require several references to it and we
+     must ensure we're getting the same value every time.  */
- if (reg_overlap_mentioned_p (to, from))
+ if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
from = force_reg (from_mode, from);
/* Get a copy of FROM widened to a word, if necessary. */
make the high-order word of the constant zero, not all ones. */
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
+ && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
&& CONST_INT_P (x) && INTVAL (x) < 0)
{
double_int val = uhwi_to_double_int (INTVAL (x));
unsigned int align, int endp)
{
struct move_by_pieces_d data;
- enum machine_mode to_addr_mode, from_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
+ enum machine_mode to_addr_mode;
+ enum machine_mode from_addr_mode = get_address_mode (from);
rtx to_addr, from_addr = XEXP (from, 0);
unsigned int max_size = MOVE_MAX_PIECES + 1;
enum insn_code icode;
data.from_addr = from_addr;
if (to)
{
- to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
+ to_addr_mode = get_address_mode (to);
to_addr = XEXP (to, 0);
data.to = to;
data.autinc_to
if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
{
data.from_addr = copy_to_mode_reg (from_addr_mode,
- plus_constant (from_addr, len));
+ plus_constant (from_addr_mode,
+ from_addr, len));
data.autinc_from = 1;
data.explicit_inc_from = -1;
}
if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
{
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (to_addr, len));
+ plus_constant (to_addr_mode,
+ to_addr, len));
data.autinc_to = 1;
data.explicit_inc_to = -1;
}
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (data.to_addr,
+ plus_constant (to_addr_mode,
+ data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
pseudos. We can then place those new pseudos into a VAR_DECL and
use them later. */
- dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
- src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
+ dst_addr = copy_addr_to_reg (XEXP (dst, 0));
+ src_addr = copy_addr_to_reg (XEXP (src, 0));
dst_addr = convert_memory_address (ptr_mode, dst_addr);
src_addr = convert_memory_address (ptr_mode, src_addr);
}
/* A subroutine of emit_block_move_via_libcall. Create the tree node
- for the function we use for block copies. The first time FOR_CALL
- is true, we call assemble_external. */
+ for the function we use for block copies. */
static GTY(()) tree block_move_fn;
{
if (!block_move_fn)
{
- tree args, fn;
+ tree args, fn, attrs, attr_args;
fn = get_identifier ("memcpy");
args = build_function_type_list (ptr_type_node, ptr_type_node,
DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
DECL_VISIBILITY_SPECIFIED (fn) = 1;
+ attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
+ attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
+
+ decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
+
block_move_fn = fn;
}
{
emitted_extern = true;
make_decl_rtl (block_move_fn);
- assemble_external (block_move_fn);
}
return block_move_fn;
unsigned int align ATTRIBUTE_UNUSED)
{
rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
- enum machine_mode x_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
- enum machine_mode y_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
+ enum machine_mode x_addr_mode = get_address_mode (x);
+ enum machine_mode y_addr_mode = get_address_mode (y);
enum machine_mode iter_mode;
iter_mode = GET_MODE (size);
{
enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
if (imode == BLKmode)
- src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
+ src = assign_stack_temp (GET_MODE (orig_src), ssize);
else
src = gen_reg_rtx (imode);
if (imode != BLKmode)
rtx mem;
gcc_assert (!bytepos);
- mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ mem = assign_stack_temp (GET_MODE (src), slen);
emit_move_insn (mem, src);
tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
0, 1, false, NULL_RTX, mode, mode);
int slen = GET_MODE_SIZE (GET_MODE (src));
rtx mem;
- mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ mem = assign_stack_temp (GET_MODE (src), slen);
emit_move_insn (mem, src);
tmps[i] = adjust_address (mem, mode, (int) bytepos);
}
{
enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
if (imode == BLKmode)
- dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
+ dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
else
dst = gen_reg_rtx (imode);
emit_group_store (dst, src, type, ssize);
it. Allocate a temporary, and split this into a store/load to/from
the temporary. */
- temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
+ temp = assign_stack_temp (GET_MODE (dst), ssize);
emit_group_store (temp, src, type, ssize);
emit_group_load (dst, temp, type, ssize);
return;
>= GET_MODE_ALIGNMENT (tmp_mode))
{
dest = assign_stack_temp (dest_mode,
- GET_MODE_SIZE (dest_mode),
- 0);
+ GET_MODE_SIZE (dest_mode));
emit_move_insn (adjust_address (dest,
tmp_mode,
bytepos),
else
{
dest = assign_stack_temp (tmp_mode,
- GET_MODE_SIZE (tmp_mode),
- 0);
+ GET_MODE_SIZE (tmp_mode));
emit_move_insn (dest, tmps[i]);
dst = adjust_address (dest, dest_mode, bytepos);
}
tgtblk = assign_temp (build_qualified_type (type,
(TYPE_QUALS (type)
| TYPE_QUAL_CONST)),
- 0, 1, 1);
+ 1, 1);
preserve_temp_slots (tgtblk);
}
return def_stmt;
}
+
+#ifdef HAVE_conditional_move
+/* Return the defining gimple statement for SSA_NAME NAME if it is an
+   assignment and the class of the expression on the RHS is TCLASS.  Return
+   NULL otherwise.  */
+
+static gimple
+get_def_for_expr_class (tree name, enum tree_code_class tclass)
+{
+  gimple def_stmt;
+
+  if (TREE_CODE (name) != SSA_NAME)
+    return NULL;
+
+  def_stmt = get_gimple_for_ssa_name (name);
+  if (!def_stmt
+      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
+    return NULL;
+
+  return def_stmt;
+}
+#endif
\f
/* Determine whether the LEN bytes generated by CONSTFUN can be
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
void *constfundata, unsigned int align, bool memsetp, int endp)
{
- enum machine_mode to_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
+ enum machine_mode to_addr_mode = get_address_mode (to);
struct store_by_pieces_d data;
if (len == 0)
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (data.to_addr,
+ plus_constant (to_addr_mode,
+ data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
unsigned int align ATTRIBUTE_UNUSED)
{
- enum machine_mode to_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
+ enum machine_mode to_addr_mode = get_address_mode (data->to);
rtx to_addr = XEXP (data->to, 0);
unsigned int max_size = STORE_MAX_PIECES + 1;
enum insn_code icode;
if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
{
data->to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (to_addr, data->len));
+ plus_constant (to_addr_mode,
+ to_addr,
+ data->len));
data->autinc_to = 1;
data->explicit_inc_to = -1;
}
/* Emit code to copy OBJECT and SIZE into new pseudos. We can then
place those into new pseudos into a VAR_DECL and use them later. */
- object = copy_to_mode_reg (Pmode, XEXP (object, 0));
+ object = copy_addr_to_reg (XEXP (object, 0));
size_mode = TYPE_MODE (sizetype);
size = convert_to_mode (size_mode, size, 1);
}
/* A subroutine of set_storage_via_libcall. Create the tree node
- for the function we use for block clears. The first time FOR_CALL
- is true, we call assemble_external. */
+ for the function we use for block clears. */
tree block_clear_fn;
{
emitted_extern = true;
make_decl_rtl (block_clear_fn);
- assemble_external (block_clear_fn);
}
return block_clear_fn;
case POST_INC:
case POST_DEC:
case POST_MODIFY:
- temp = plus_constant (stack_pointer_rtx, -adjust);
+ temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
break;
default:
gcc_unreachable ();
size = convert_modes (Pmode, ptr_mode, size, 1);
if (CONSTANT_P (size))
- anti_adjust_stack (plus_constant (size, extra));
+ anti_adjust_stack (plus_constant (Pmode, size, extra));
else if (REG_P (size) && extra == 0)
anti_adjust_stack (size);
else
{
temp = virtual_outgoing_args_rtx;
if (extra != 0 && below)
- temp = plus_constant (temp, extra);
+ temp = plus_constant (Pmode, temp, extra);
}
else
{
if (CONST_INT_P (size))
- temp = plus_constant (virtual_outgoing_args_rtx,
+ temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
-INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- negate_rtx (Pmode, plus_constant (size, extra)));
+ negate_rtx (Pmode, plus_constant (Pmode, size,
+ extra)));
else
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
size = GEN_INT (GET_MODE_SIZE (mode));
if (!MEM_P (xinner))
{
- temp = assign_temp (type, 0, 1, 1);
+ temp = assign_temp (type, 1, 1);
emit_move_insn (temp, xinner);
xinner = temp;
}
}
else if (CONST_INT_P (args_so_far))
temp = memory_address (BLKmode,
- plus_constant (args_addr,
+ plus_constant (Pmode, args_addr,
skip + INTVAL (args_so_far)));
else
temp = memory_address (BLKmode,
- plus_constant (gen_rtx_PLUS (Pmode,
+ plus_constant (Pmode,
+ gen_rtx_PLUS (Pmode,
args_addr,
args_so_far),
skip));
if (CONST_INT_P (args_so_far))
addr
= memory_address (mode,
- plus_constant (args_addr,
+ plus_constant (Pmode, args_addr,
INTVAL (args_so_far)));
else
addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
value = expand_and (str_mode, value, const1_rtx, NULL);
binop = xor_optab;
}
- value = expand_shift (LSHIFT_EXPR, str_mode, value,
- bitpos, NULL_RTX, 1);
+ value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
result = expand_binop (str_mode, binop, str_rtx,
value, str_rtx, 1, OPTAB_WIDEN);
if (result != str_rtx)
case BIT_XOR_EXPR:
if (TREE_CODE (op1) != INTEGER_CST)
break;
- value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
- value = convert_modes (GET_MODE (str_rtx),
+ value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
+ value = convert_modes (str_mode,
TYPE_MODE (TREE_TYPE (op1)), value,
TYPE_UNSIGNED (TREE_TYPE (op1)));
}
binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
- if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
+ if (bitpos + bitsize != str_bitsize)
{
- rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
- - 1);
- value = expand_and (GET_MODE (str_rtx), value, mask,
- NULL_RTX);
+ rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
+ value = expand_and (str_mode, value, mask, NULL_RTX);
}
- value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
- bitpos, NULL_RTX, 1);
- result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
+ value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
+ result = expand_binop (str_mode, binop, str_rtx,
value, str_rtx, 1, OPTAB_WIDEN);
if (result != str_rtx)
emit_move_insn (str_rtx, result);
/* In the C++ memory model, consecutive bit fields in a structure are
considered one memory location.
- Given a COMPONENT_REF EXP at bit position BITPOS, this function
+ Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
returns the bit range of consecutive bits in which this COMPONENT_REF
- belongs in. The values are returned in *BITSTART and *BITEND.
- If the access does not need to be restricted 0 is returned in
+ belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
+ and *OFFSET may be adjusted in the process.
+
+ If the access does not need to be restricted, 0 is returned in both
*BITSTART and *BITEND. */
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
unsigned HOST_WIDE_INT *bitend,
tree exp,
- HOST_WIDE_INT bitpos)
+ HOST_WIDE_INT *bitpos,
+ tree *offset)
{
- unsigned HOST_WIDE_INT bitoffset;
- tree field, repr, offset;
+ HOST_WIDE_INT bitoffset;
+ tree field, repr;
gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
return;
}
+ /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
+ part of a larger bit field, then the representative does not serve any
+ useful purpose. This can occur in Ada. */
+ if (handled_component_p (TREE_OPERAND (exp, 0)))
+ {
+ enum machine_mode rmode;
+ HOST_WIDE_INT rbitsize, rbitpos;
+ tree roffset;
+ int unsignedp;
+ int volatilep = 0;
+ get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
+ &roffset, &rmode, &unsignedp, &volatilep, false);
+ if ((rbitpos % BITS_PER_UNIT) != 0)
+ {
+ *bitstart = *bitend = 0;
+ return;
+ }
+ }
+
/* Compute the adjustment to bitpos from the offset of the field
- relative to the representative. */
- offset = size_diffop (DECL_FIELD_OFFSET (field),
- DECL_FIELD_OFFSET (repr));
- bitoffset = (tree_low_cst (offset, 1) * BITS_PER_UNIT
- + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
-
- *bitstart = bitpos - bitoffset;
+ relative to the representative. DECL_FIELD_OFFSET of field and
+ repr are the same by construction if they are not constants,
+ see finish_bitfield_layout. */
+ if (host_integerp (DECL_FIELD_OFFSET (field), 1)
+ && host_integerp (DECL_FIELD_OFFSET (repr), 1))
+ bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
+ else
+ bitoffset = 0;
+ bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+
+ /* If the adjustment is larger than bitpos, we would have a negative bit
+ position for the lower bound and this may wreak havoc later. This can
+ occur only if we have a non-null offset, so adjust offset and bitpos
+ to make the lower bound non-negative. */
+ if (bitoffset > *bitpos)
+ {
+ HOST_WIDE_INT adjust = bitoffset - *bitpos;
+
+ gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+ gcc_assert (*offset != NULL_TREE);
+
+ *bitpos += adjust;
+ *offset
+ = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+ *bitstart = 0;
+ }
+ else
+ *bitstart = *bitpos - bitoffset;
+
*bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
}
!= CODE_FOR_nothing)
|| SLOW_UNALIGNED_ACCESS (mode, align)))
{
- addr_space_t as
- = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
- struct expand_operand ops[2];
- enum machine_mode address_mode;
- rtx reg, op0, mem;
+ rtx reg, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg);
-
- if (TREE_CODE (to) == MEM_REF)
- {
- tree base = TREE_OPERAND (to, 0);
- address_mode = targetm.addr_space.address_mode (as);
- op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op0 = convert_memory_address_addr_space (address_mode, op0, as);
- if (!integer_zerop (TREE_OPERAND (to, 1)))
- {
- rtx off
- = immed_double_int_const (mem_ref_offset (to), address_mode);
- op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
- }
- op0 = memory_address_addr_space (mode, op0, as);
- mem = gen_rtx_MEM (mode, op0);
- set_mem_attributes (mem, to, 0);
- set_mem_addr_space (mem, as);
- }
- else if (TREE_CODE (to) == TARGET_MEM_REF)
- {
- struct mem_address addr;
- get_address_description (to, &addr);
- op0 = addr_for_mem_ref (&addr, as, true);
- op0 = memory_address_addr_space (mode, op0, as);
- mem = gen_rtx_MEM (mode, op0);
- set_mem_attributes (mem, to, 0);
- set_mem_addr_space (mem, as);
- }
- else
- gcc_unreachable ();
- if (TREE_THIS_VOLATILE (to))
- MEM_VOLATILE_P (mem) = 1;
+ mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
if (icode != CODE_FOR_nothing)
{
+ struct expand_operand ops[2];
+
create_fixed_operand (&ops[0], mem);
create_input_operand (&ops[1], reg, mode);
/* The movmisalign<mode> pattern cannot fail, else the assignment
if (TREE_CODE (to) == COMPONENT_REF
&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
- get_bit_range (&bitregion_start, &bitregion_end, to, bitpos);
+ get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
&& ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing))
{
- enum machine_mode address_mode;
- rtx op0;
struct expand_operand ops[2];
- addr_space_t as = TYPE_ADDR_SPACE
- (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
- tree base = TREE_OPERAND (tem, 0);
misalignp = true;
to_rtx = gen_reg_rtx (mode);
-
- address_mode = targetm.addr_space.address_mode (as);
- op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op0 = convert_memory_address_addr_space (address_mode, op0, as);
- if (!integer_zerop (TREE_OPERAND (tem, 1)))
- {
- rtx off = immed_double_int_const (mem_ref_offset (tem),
- address_mode);
- op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
- }
- op0 = memory_address_addr_space (mode, op0, as);
- mem = gen_rtx_MEM (mode, op0);
- set_mem_attributes (mem, tem, 0);
- set_mem_addr_space (mem, as);
- if (TREE_THIS_VOLATILE (tem))
- MEM_VOLATILE_P (mem) = 1;
+ mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
/* If the misaligned store doesn't overwrite all bits, perform
rmw cycle on MEM. */
else
{
misalignp = false;
- to_rtx = expand_normal (tem);
+ to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
}
/* If the bitfield is volatile, we want to access it in the
}
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ address_mode = get_address_mode (to_rtx);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
else
{
rtx temp = assign_stack_temp (GET_MODE (to_rtx),
- GET_MODE_SIZE (GET_MODE (to_rtx)),
- 0);
+ GET_MODE_SIZE (GET_MODE (to_rtx)));
write_complex_part (temp, XEXP (to_rtx, 0), false);
write_complex_part (temp, XEXP (to_rtx, 1), true);
result = store_field (temp, bitsize, bitpos,
if (result)
preserve_temp_slots (result);
- free_temp_slots ();
pop_temp_slots ();
return;
}
emit_move_insn (to_rtx, value);
}
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
emit_move_insn (to_rtx, temp);
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
TYPE_MODE (sizetype));
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
push_temp_slots ();
result = store_expr (from, to_rtx, 0, nontemporal);
preserve_temp_slots (result);
- free_temp_slots ();
pop_temp_slots ();
return;
}
{
enum machine_mode pointer_mode
= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
+ enum machine_mode address_mode = get_address_mode (target);
/* Compute the size of the data to copy from the string. */
tree copy_size
Do all calculations in pointer_mode. */
if (CONST_INT_P (copy_size_rtx))
{
- size = plus_constant (size, -INTVAL (copy_size_rtx));
+ size = plus_constant (address_mode, size,
+ -INTVAL (copy_size_rtx));
target = adjust_address (target, BLKmode,
INTVAL (copy_size_rtx));
}
case VECTOR_CST:
{
- tree v;
- for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
+ unsigned i;
+ for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
{
- if (!initializer_zerop (TREE_VALUE (v)))
+ tree v = VECTOR_CST_ELT (value, i);
+ if (!initializer_zerop (v))
nz_elts += mult;
init_elts += mult;
}
offset_rtx = expand_normal (offset);
gcc_assert (MEM_P (to_rtx));
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ address_mode = get_address_mode (to_rtx);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
if (mode == BLKmode
&& (REG_P (target) || GET_CODE (target) == SUBREG))
{
- rtx object = assign_temp (type, 0, 1, 1);
+ rtx object = assign_temp (type, 1, 1);
rtx blk_object = adjust_address (object, BLKmode, 0);
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
NULL_RTX, 1);
- /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
- MODE. */
+ /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
if (mode != VOIDmode && mode != BLKmode
&& mode != TYPE_MODE (TREE_TYPE (exp)))
temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
+/* Returns true if REF is an array reference to an array at the end of
+   a structure.  If this is the case, the array may be allocated larger
+   than its upper bound implies.  */
+
+bool
+array_at_struct_end_p (tree ref)
+{
+  if (TREE_CODE (ref) != ARRAY_REF
+      && TREE_CODE (ref) != ARRAY_RANGE_REF)
+    return false;
+
+  while (handled_component_p (ref))
+    {
+      /* If the reference chain contains a component reference to a
+         non-union type and another FIELD_DECL follows it, then the
+         reference is not at the end of a structure.  */
+      if (TREE_CODE (ref) == COMPONENT_REF
+          && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
+        {
+          tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
+          while (nextf && TREE_CODE (nextf) != FIELD_DECL)
+            nextf = DECL_CHAIN (nextf);
+          if (nextf)
+            return false;
+        }
+
+      ref = TREE_OPERAND (ref, 0);
+    }
+
+  /* If the reference is based on a declared entity, the size of the array
+     is constrained by its given domain.  */
+  if (DECL_P (ref))
+    return false;
+
+  return true;
+}
+
/* Return a tree representing the upper bound of the array mentioned in
EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
return MAX (factor, talign);
}
\f
+#ifdef HAVE_conditional_move
+/* Convert the tree comparison code TCODE to the corresponding rtl
+   comparison code, using UNSIGNEDP to select the unsigned variant of
+   the ordering comparisons.  */
+
+static enum rtx_code
+convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
+{
+  enum rtx_code code;
+  switch (tcode)
+    {
+    case EQ_EXPR:
+      code = EQ;
+      break;
+    case NE_EXPR:
+      code = NE;
+      break;
+    case LT_EXPR:
+      code = unsignedp ? LTU : LT;
+      break;
+    case LE_EXPR:
+      code = unsignedp ? LEU : LE;
+      break;
+    case GT_EXPR:
+      code = unsignedp ? GTU : GT;
+      break;
+    case GE_EXPR:
+      code = unsignedp ? GEU : GE;
+      break;
+    case UNORDERED_EXPR:
+      code = UNORDERED;
+      break;
+    case ORDERED_EXPR:
+      code = ORDERED;
+      break;
+    case UNLT_EXPR:
+      code = UNLT;
+      break;
+    case UNLE_EXPR:
+      code = UNLE;
+      break;
+    case UNGT_EXPR:
+      code = UNGT;
+      break;
+    case UNGE_EXPR:
+      code = UNGE;
+      break;
+    case UNEQ_EXPR:
+      code = UNEQ;
+      break;
+    case LTGT_EXPR:
+      code = LTGT;
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+  return code;
+}
+#endif
+
/* Subroutine of expand_expr. Expand the two operands of a binary
expression EXP0 and EXP1 placing the results in OP0 and OP1.
The value may be stored in TARGET if TARGET is nonzero. The
result = XEXP (result, 0);
/* ??? Is this needed anymore? */
- if (DECL_P (exp) && !TREE_USED (exp) == 0)
- {
- assemble_external (exp);
- TREE_USED (exp) = 1;
- }
+ if (DECL_P (exp))
+ TREE_USED (exp) = 1;
if (modifier != EXPAND_INITIALIZER
&& modifier != EXPAND_CONST_ADDRESS
of such an object. */
gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
- result = plus_constant (result, bitpos / BITS_PER_UNIT);
+ result = convert_memory_address_addr_space (tmode, result, as);
+ result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
if (modifier < EXPAND_SUM)
result = force_operand (result, target);
}
= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
| (TREE_READONLY (exp)
* TYPE_QUAL_CONST))),
- 0, TREE_ADDRESSABLE (exp), 1);
+ TREE_ADDRESSABLE (exp), 1);
}
store_constructor (exp, target, 0, int_expr_size (exp));
return ret;
}
+/* Try to expand the conditional expression which is represented by
+   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
+   return the rtl reg which represents the result.  Otherwise return
+   NULL_RTX.  */
+
+static rtx
+expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
+                              tree treeop1 ATTRIBUTE_UNUSED,
+                              tree treeop2 ATTRIBUTE_UNUSED)
+{
+#ifdef HAVE_conditional_move
+  rtx insn;
+  rtx op00, op01, op1, op2;
+  enum rtx_code comparison_code;
+  enum machine_mode comparison_mode;
+  gimple srcstmt;
+  rtx temp;
+  tree type = TREE_TYPE (treeop1);
+  int unsignedp = TYPE_UNSIGNED (type);
+  enum machine_mode mode = TYPE_MODE (type);
+
+  temp = assign_temp (type, 0, 1);
+
+  /* If we cannot do a conditional move on the mode, try doing it
+     with the promoted mode.  */
+  if (!can_conditionally_move_p (mode))
+    mode = promote_mode (type, mode, &unsignedp);
+
+  if (!can_conditionally_move_p (mode))
+    return NULL_RTX;
+
+  start_sequence ();
+  expand_operands (treeop1, treeop2,
+                   temp, &op1, &op2, EXPAND_NORMAL);
+
+  if (TREE_CODE (treeop0) == SSA_NAME
+      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
+    {
+      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
+      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
+      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
+      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
+      comparison_mode = TYPE_MODE (type);
+      unsignedp = TYPE_UNSIGNED (type);
+      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
+    }
+  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
+    {
+      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
+      enum tree_code cmpcode = TREE_CODE (treeop0);
+      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
+      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
+      unsignedp = TYPE_UNSIGNED (type);
+      comparison_mode = TYPE_MODE (type);
+      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
+    }
+  else
+    {
+      op00 = expand_normal (treeop0);
+      op01 = const0_rtx;
+      comparison_code = NE;
+      comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
+    }
+
+  if (GET_MODE (op1) != mode)
+    op1 = gen_lowpart (mode, op1);
+
+  if (GET_MODE (op2) != mode)
+    op2 = gen_lowpart (mode, op2);
+
+  /* Try to emit the conditional move.  */
+  insn = emit_conditional_move (temp, comparison_code,
+                                op00, op01, comparison_mode,
+                                op1, op2, mode,
+                                unsignedp);
+
+  /* If we could do the conditional move, emit the sequence,
+     and return.  */
+  if (insn)
+    {
+      rtx seq = get_insns ();
+      end_sequence ();
+      emit_insn (seq);
+      return temp;
+    }
+
+  /* Otherwise discard the sequence and fall back to code with
+     branches.  */
+  end_sequence ();
+#endif
+  return NULL_RTX;
+}
+
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
enum expand_modifier modifier)
if (TYPE_MODE (type) != BLKmode)
target = gen_reg_rtx (TYPE_MODE (type));
else
- target = assign_temp (type, 0, 1, 1);
+ target = assign_temp (type, 1, 1);
}
if (MEM_P (target))
treeop1 = fold_convert_loc (loc, type,
fold_convert_loc (loc, ssizetype,
treeop1));
+ /* If sizetype precision is larger than pointer precision, truncate the
+ offset to have matching modes. */
+ else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
+ treeop1 = fold_convert_loc (loc, type, treeop1);
+
case PLUS_EXPR:
/* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
something else, make sure we add the register to the constant and
|| DECL_RTL (treeop1) == stack_pointer_rtx
|| DECL_RTL (treeop1) == arg_pointer_rtx))
{
- tree t = treeop1;
-
- treeop1 = TREE_OPERAND (treeop0, 0);
- TREE_OPERAND (treeop0, 0) = t;
+ gcc_unreachable ();
}
/* If the result is to be ptr_mode and we are adding an integer to
= immed_double_const (TREE_INT_CST_LOW (treeop0),
(HOST_WIDE_INT) 0,
TYPE_MODE (TREE_TYPE (treeop1)));
- op1 = plus_constant (op1, INTVAL (constant_part));
+ op1 = plus_constant (mode, op1, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op1 = force_operand (op1, target);
return REDUCE_BIT_FIELD (op1);
= immed_double_const (TREE_INT_CST_LOW (treeop1),
(HOST_WIDE_INT) 0,
TYPE_MODE (TREE_TYPE (treeop0)));
- op0 = plus_constant (op0, INTVAL (constant_part));
+ op0 = plus_constant (mode, op0, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op0 = force_operand (op0, target);
return REDUCE_BIT_FIELD (op0);
/* If the last operand is a CONST_INT, use plus_constant of
the negated constant. Else make the MINUS. */
if (CONST_INT_P (op1))
- return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
+ return REDUCE_BIT_FIELD (plus_constant (mode, op0,
+ -INTVAL (op1)));
else
return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
}
return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
case RDIV_EXPR:
+ case MULT_HIGHPART_EXPR:
goto binop;
case TRUNC_MOD_EXPR:
&& TREE_TYPE (treeop1) != void_type_node
&& TREE_TYPE (treeop2) != void_type_node);
+ temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
+ if (temp)
+ return temp;
+
/* If we are not to produce a result, we have no target. Otherwise,
if a target was specified use it; it will not be used as an
intermediate target unless it is safe. If no target, use a
&& original_target
&& safe_from_p (original_target, treeop0, 1)
&& GET_MODE (original_target) == mode
-#ifdef HAVE_conditional_move
- && (! can_conditionally_move_p (mode)
- || REG_P (original_target))
-#endif
&& !MEM_P (original_target))
temp = original_target;
else
- temp = assign_temp (type, 0, 0, 1);
+ temp = assign_temp (type, 0, 1);
do_pending_stack_adjust ();
NO_DEFER_POP;
&& stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
g = SSA_NAME_DEF_STMT (exp);
if (g)
- return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
- modifier, NULL);
+ {
+ rtx r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
+ tmode, modifier, NULL);
+ if (REG_P (r) && !REG_EXPR (r))
+ set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
+ return r;
+ }
ssa_name = exp;
decl_rtl = get_rtx_for_ssa_name (ssa_name);
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external
definition. */
- if (! TREE_USED (exp))
- {
- assemble_external (exp);
- TREE_USED (exp) = 1;
- }
+ TREE_USED (exp) = 1;
/* Show we haven't gotten RTL for this yet. */
temp = 0;
/* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
- See expand_decl. */
+ ??? We aren't parsing while expanding anymore. */
if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
temp = validize_mem (decl_rtl);
tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
}
if (!tmp)
- tmp = build_constructor_from_list (type,
- TREE_VECTOR_CST_ELTS (exp));
+ {
+ VEC(constructor_elt,gc) *v;
+ unsigned i;
+ v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
+ for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
+ CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
+ tmp = build_constructor (type, v);
+ }
return expand_expr (tmp, ignore ? const0_rtx : target,
tmode, modifier);
}
set_mem_attributes (temp, exp, 0);
set_mem_addr_space (temp, as);
align = get_object_or_type_alignment (exp);
- if (mode != BLKmode
+ if (modifier != EXPAND_WRITE
+ && mode != BLKmode
&& align < GET_MODE_ALIGNMENT (mode)
/* If the target does not have special handling for unaligned
loads of mode then it can use regular moves for them. */
else
{
temp = assign_stack_temp (DECL_MODE (base),
- GET_MODE_SIZE (DECL_MODE (base)),
- 0);
+ GET_MODE_SIZE (DECL_MODE (base)));
store_expr (base, temp, 0, false);
temp = adjust_address (temp, BLKmode, offset);
set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
set_mem_addr_space (temp, as);
if (TREE_THIS_VOLATILE (exp))
MEM_VOLATILE_P (temp) = 1;
- if (mode != BLKmode
- && align < GET_MODE_ALIGNMENT (mode)
- /* If the target does not have special handling for unaligned
- loads of mode then it can use regular moves for them. */
- && ((icode = optab_handler (movmisalign_optab, mode))
- != CODE_FOR_nothing))
+ if (modifier != EXPAND_WRITE
+ && mode != BLKmode
+ && align < GET_MODE_ALIGNMENT (mode))
{
- struct expand_operand ops[2];
-
- /* We've already validated the memory, and we're creating a
- new pseudo destination. The predicates really can't fail,
- nor can the generator. */
- create_output_operand (&ops[0], NULL_RTX, mode);
- create_fixed_operand (&ops[1], temp);
- expand_insn (icode, 2, ops);
- return ops[0].value;
+ if ((icode = optab_handler (movmisalign_optab, mode))
+ != CODE_FOR_nothing)
+ {
+ struct expand_operand ops[2];
+
+ /* We've already validated the memory, and we're creating a
+ new pseudo destination. The predicates really can't fail,
+ nor can the generator. */
+ create_output_operand (&ops[0], NULL_RTX, mode);
+ create_fixed_operand (&ops[1], temp);
+ expand_insn (icode, 2, ops);
+ return ops[0].value;
+ }
+ else if (SLOW_UNALIGNED_ACCESS (mode, align))
+ temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
+ 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
+ true, (modifier == EXPAND_STACK_PARM
+ ? NULL_RTX : target),
+ mode, mode);
}
return temp;
}
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep, true);
rtx orig_op0, memloc;
+ bool mem_attrs_from_type = false;
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
orig_op0 = op0
= expand_expr (tem,
(TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+ && COMPLETE_TYPE_P (TREE_TYPE (tem))
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
!= INTEGER_CST)
&& modifier != EXPAND_STACK_PARM
tree nt = build_qualified_type (TREE_TYPE (tem),
(TYPE_QUALS (TREE_TYPE (tem))
| TYPE_QUAL_CONST));
- memloc = assign_temp (nt, 1, 1, 1);
+ memloc = assign_temp (nt, 1, 1);
emit_move_insn (memloc, op0);
op0 = memloc;
+ mem_attrs_from_type = true;
}
if (offset)
gcc_assert (MEM_P (op0));
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
+ address_mode = get_address_mode (op0);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
if (ext_mode == BLKmode)
{
if (target == 0)
- target = assign_temp (type, 0, 1, 1);
+ target = assign_temp (type, 1, 1);
if (bitsize == 0)
return target;
necessarily be constant. */
if (mode == BLKmode)
{
- HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
rtx new_rtx;
- /* If the reference doesn't use the alias set of its type,
- we cannot create the temporary using that type. */
- if (component_uses_parent_alias_set (exp))
- {
- new_rtx = assign_stack_local (ext_mode, size, 0);
- set_mem_alias_set (new_rtx, get_alias_set (exp));
- }
- else
- new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
-
+ new_rtx = assign_stack_temp_for_type (ext_mode,
+ GET_MODE_BITSIZE (ext_mode),
+ type);
emit_move_insn (new_rtx, op0);
op0 = copy_rtx (new_rtx);
PUT_MODE (op0, BLKmode);
- set_mem_attributes (op0, exp, 1);
}
return op0;
if (op0 == orig_op0)
op0 = copy_rtx (op0);
- set_mem_attributes (op0, exp, 0);
+ /* If op0 is a temporary because of forcing to memory, pass only the
+ type to set_mem_attributes so that the original expression is never
+ marked as ADDRESSABLE through MEM_EXPR of the temporary. */
+ if (mem_attrs_from_type)
+ set_mem_attributes (op0, type, 0);
+ else
+ set_mem_attributes (op0, exp, 0);
+
if (REG_P (XEXP (op0, 0)))
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
target
= assign_stack_temp_for_type
(TYPE_MODE (inner_type),
- GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
+ GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
emit_move_insn (target, op0);
op0 = target;
= MAX (int_size_in_bytes (inner_type),
(HOST_WIDE_INT) GET_MODE_SIZE (mode));
rtx new_rtx
- = assign_stack_temp_for_type (mode, temp_size, 0, type);
+ = assign_stack_temp_for_type (mode, temp_size, type);
rtx new_with_op0_mode
= adjust_address (new_rtx, GET_MODE (op0), 0);
case POSTDECREMENT_EXPR:
case LOOP_EXPR:
case EXIT_EXPR:
+ case COMPOUND_LITERAL_EXPR:
/* Lowered by gimplify.c. */
gcc_unreachable ();
return expand_expr_real (treeop0, original_target, tmode,
modifier, alt_rtl);
- case COMPOUND_LITERAL_EXPR:
- {
- /* Initialize the anonymous variable declared in the compound
- literal, then return the variable. */
- tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
-
- /* Create RTL for this variable. */
- if (!DECL_RTL_SET_P (decl))
- {
- if (DECL_HARD_REGISTER (decl))
- /* The user specified an assembler name for this variable.
- Set that up now. */
- rest_of_decl_compilation (decl, 0, 0);
- else
- expand_decl (decl);
- }
-
- return expand_expr_real (decl, original_target, tmode,
- modifier, alt_rtl);
- }
-
default:
return expand_expr_real_2 (&ops, target, tmode, modifier);
}
const_vector_from_tree (tree exp)
{
rtvec v;
- int units, i;
- tree link, elt;
+ unsigned i;
+ int units;
+ tree elt;
enum machine_mode inner, mode;
mode = TYPE_MODE (TREE_TYPE (exp));
v = rtvec_alloc (units);
- link = TREE_VECTOR_CST_ELTS (exp);
- for (i = 0; link; link = TREE_CHAIN (link), ++i)
+ for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
{
- elt = TREE_VALUE (link);
+ elt = VECTOR_CST_ELT (exp, i);
if (TREE_CODE (elt) == REAL_CST)
RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
inner);
}
- /* Initialize remaining elements to 0. */
- for (; i < units; ++i)
- RTVEC_ELT (v, i) = CONST0_RTX (inner);
-
return gen_rtx_CONST_VECTOR (mode, v);
}