/* Convert tree expression to rtl instructions, for GNU compiler.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
This file is part of GCC.
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
-#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
+#include "params.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
int reverse;
};
-static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
- unsigned int,
- unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
HOST_WIDE_INT, enum machine_mode,
tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
-static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
+static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
+ unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
+ enum machine_mode,
tree, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
rtx new_from;
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
+ convert_optab ctab = unsignedp ? zext_optab : sext_optab;
+ enum insn_code icode;
- gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
- != CODE_FOR_nothing);
+ icode = convert_optab_handler (ctab, full_mode, from_mode);
+ gcc_assert (icode != CODE_FOR_nothing);
if (to_mode == full_mode)
{
- emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
- from_mode),
- to, from, UNKNOWN);
+ emit_unop_insn (icode, to, from, UNKNOWN);
return;
}
new_from = gen_reg_rtx (full_mode);
- emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
- new_from, from, UNKNOWN);
+ emit_unop_insn (icode, new_from, from, UNKNOWN);
/* else proceed to integer conversions below. */
from_mode = full_mode;
/* No special multiword conversion insn; do it by hand. */
start_sequence ();
- /* Since we will turn this into a no conflict block, we must ensure
- that the source does not overlap the target. */
+ /* Since we will turn this into a no conflict block, we must ensure that
+ the source does not overlap the target, so force it into an isolated
+ register if that might be the case. Likewise for any MEM input, since
+ the conversion sequence might require several references to it and we
+ must ensure we're getting the same value every time. */
- if (reg_overlap_mentioned_p (to, from))
+ if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
from = force_reg (from_mode, from);
/* Get a copy of FROM widened to a word, if necessary. */
make the high-order word of the constant zero, not all ones. */
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
+ && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
&& CONST_INT_P (x) && INTVAL (x) < 0)
{
double_int val = uhwi_to_double_int (INTVAL (x));
unsigned int align, int endp)
{
struct move_by_pieces_d data;
- enum machine_mode to_addr_mode, from_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
+ enum machine_mode to_addr_mode;
+ enum machine_mode from_addr_mode = get_address_mode (from);
rtx to_addr, from_addr = XEXP (from, 0);
unsigned int max_size = MOVE_MAX_PIECES + 1;
enum insn_code icode;
data.from_addr = from_addr;
if (to)
{
- to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
+ to_addr_mode = get_address_mode (to);
to_addr = XEXP (to, 0);
data.to = to;
data.autinc_to
if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
{
data.from_addr = copy_to_mode_reg (from_addr_mode,
- plus_constant (from_addr, len));
+ plus_constant (from_addr_mode,
+ from_addr, len));
data.autinc_from = 1;
data.explicit_inc_from = -1;
}
if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
{
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (to_addr, len));
+ plus_constant (to_addr_mode,
+ to_addr, len));
data.autinc_to = 1;
data.explicit_inc_to = -1;
}
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (data.to_addr,
+ plus_constant (to_addr_mode,
+ data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
/* Return number of insns required to move L bytes by pieces.
ALIGN (in bits) is maximum alignment we can assume. */
-static unsigned HOST_WIDE_INT
+unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
unsigned int max_size)
{
{
x = shallow_copy_rtx (x);
y = shallow_copy_rtx (y);
- set_mem_size (x, size);
- set_mem_size (y, size);
+ set_mem_size (x, INTVAL (size));
+ set_mem_size (y, INTVAL (size));
}
if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
pseudos. We can then place those new pseudos into a VAR_DECL and
use them later. */
- dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
- src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
+ dst_addr = copy_addr_to_reg (XEXP (dst, 0));
+ src_addr = copy_addr_to_reg (XEXP (src, 0));
dst_addr = convert_memory_address (ptr_mode, dst_addr);
src_addr = convert_memory_address (ptr_mode, src_addr);
}
/* A subroutine of emit_block_move_via_libcall. Create the tree node
- for the function we use for block copies. The first time FOR_CALL
- is true, we call assemble_external. */
+ for the function we use for block copies. */
static GTY(()) tree block_move_fn;
{
if (!block_move_fn)
{
- tree args, fn;
+ tree args, fn, attrs, attr_args;
fn = get_identifier ("memcpy");
args = build_function_type_list (ptr_type_node, ptr_type_node,
DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
DECL_VISIBILITY_SPECIFIED (fn) = 1;
+ attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
+ attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
+
+ decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
+
block_move_fn = fn;
}
{
emitted_extern = true;
make_decl_rtl (block_move_fn);
- assemble_external (block_move_fn);
}
return block_move_fn;
unsigned int align ATTRIBUTE_UNUSED)
{
rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
- enum machine_mode x_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
- enum machine_mode y_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
+ enum machine_mode x_addr_mode = get_address_mode (x);
+ enum machine_mode y_addr_mode = get_address_mode (y);
enum machine_mode iter_mode;
iter_mode = GET_MODE (size);
{
enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
if (imode == BLKmode)
- src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
+ src = assign_stack_temp (GET_MODE (orig_src), ssize);
else
src = gen_reg_rtx (imode);
if (imode != BLKmode)
rtx mem;
gcc_assert (!bytepos);
- mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ mem = assign_stack_temp (GET_MODE (src), slen);
emit_move_insn (mem, src);
tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
0, 1, false, NULL_RTX, mode, mode);
int slen = GET_MODE_SIZE (GET_MODE (src));
rtx mem;
- mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ mem = assign_stack_temp (GET_MODE (src), slen);
emit_move_insn (mem, src);
tmps[i] = adjust_address (mem, mode, (int) bytepos);
}
{
enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
if (imode == BLKmode)
- dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
+ dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
else
dst = gen_reg_rtx (imode);
emit_group_store (dst, src, type, ssize);
it. Allocate a temporary, and split this into a store/load to/from
the temporary. */
- temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
+ temp = assign_stack_temp (GET_MODE (dst), ssize);
emit_group_store (temp, src, type, ssize);
emit_group_load (dst, temp, type, ssize);
return;
>= GET_MODE_ALIGNMENT (tmp_mode))
{
dest = assign_stack_temp (dest_mode,
- GET_MODE_SIZE (dest_mode),
- 0);
+ GET_MODE_SIZE (dest_mode));
emit_move_insn (adjust_address (dest,
tmp_mode,
bytepos),
else
{
dest = assign_stack_temp (tmp_mode,
- GET_MODE_SIZE (tmp_mode),
- 0);
+ GET_MODE_SIZE (tmp_mode));
emit_move_insn (dest, tmps[i]);
dst = adjust_address (dest, dest_mode, bytepos);
}
emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
else
store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
- mode, tmps[i]);
+ 0, 0, mode, tmps[i]);
}
/* Copy from the pseudo into the (probable) hard reg. */
tgtblk = assign_temp (build_qualified_type (type,
(TYPE_QUALS (type)
| TYPE_QUAL_CONST)),
- 0, 1, 1);
+ 1, 1);
preserve_temp_slots (tgtblk);
}
/* Use xbitpos for the source extraction (right justified) and
bitpos for the destination store (left justified). */
- store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
+ store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
extract_bit_field (src, bitsize,
xbitpos % BITS_PER_WORD, 1, false,
NULL_RTX, copy_mode, copy_mode));
return tgtblk;
}
+/* Copy BLKmode value SRC into a register of mode MODE. Return the
+ register if it contains any data, otherwise return null.
+
+ This is used on targets that return BLKmode values in registers. */
+
+rtx
+copy_blkmode_to_reg (enum machine_mode mode, tree src)
+{
+ int i, n_regs;
+ unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
+ unsigned int bitsize;
+ rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
+ enum machine_mode dst_mode;
+
+ gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
+
+ x = expand_normal (src);
+
+ bytes = int_size_in_bytes (TREE_TYPE (src));
+ if (bytes == 0)
+ return NULL_RTX;
+
+ /* If the structure doesn't take up a whole number of words, see
+ whether the register value should be padded on the left or on
+ the right. Set PADDING_CORRECTION to the number of padding
+ bits needed on the left side.
+
+ In most ABIs, the structure is returned at the least significant
+ end of the register, which translates to right padding on
+ little-endian targets and left padding on big-endian targets.
+ The opposite holds if the structure is returned at the most
+ significant end of the register. */
+ if (bytes % UNITS_PER_WORD != 0
+ && (targetm.calls.return_in_msb (TREE_TYPE (src))
+ ? !BYTES_BIG_ENDIAN
+ : BYTES_BIG_ENDIAN))
+ padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
+ * BITS_PER_UNIT));
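+
+ /* For instance (illustrative numbers): with 32-bit words
+ (UNITS_PER_WORD == 4) on a big-endian target that returns values
+ at the least significant end, a 6-byte structure needs
+ 32 - (6 % 4) * 8 = 16 bits of padding on the left. */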
+
+ n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+ dst_words = XALLOCAVEC (rtx, n_regs);
+ bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
+
+ /* Copy the structure BITSIZE bits at a time. */
+ for (bitpos = 0, xbitpos = padding_correction;
+ bitpos < bytes * BITS_PER_UNIT;
+ bitpos += bitsize, xbitpos += bitsize)
+ {
+ /* We need a new destination pseudo each time xbitpos is
+ on a word boundary and when xbitpos == padding_correction
+ (the first time through). */
+ if (xbitpos % BITS_PER_WORD == 0
+ || xbitpos == padding_correction)
+ {
+ /* Generate an appropriate register. */
+ dst_word = gen_reg_rtx (word_mode);
+ dst_words[xbitpos / BITS_PER_WORD] = dst_word;
+
+ /* Clear the destination before we move anything into it. */
+ emit_move_insn (dst_word, CONST0_RTX (word_mode));
+ }
+
+ /* We need a new source operand each time bitpos is on a word
+ boundary. */
+ if (bitpos % BITS_PER_WORD == 0)
+ src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
+
+ /* Use bitpos for the source extraction (left justified) and
+ xbitpos for the destination store (right justified). */
+ store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
+ 0, 0, word_mode,
+ extract_bit_field (src_word, bitsize,
+ bitpos % BITS_PER_WORD, 1, false,
+ NULL_RTX, word_mode, word_mode));
+ }
+
+ if (mode == BLKmode)
+ {
+ /* Find the smallest integer mode large enough to hold the
+ entire structure. */
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ /* Have we found a large enough mode? */
+ if (GET_MODE_SIZE (mode) >= bytes)
+ break;
+
+ /* A suitable mode should have been found. */
+ gcc_assert (mode != VOIDmode);
+ }
+
+ if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
+ dst_mode = word_mode;
+ else
+ dst_mode = mode;
+ dst = gen_reg_rtx (dst_mode);
+
+ for (i = 0; i < n_regs; i++)
+ emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
+
+ if (mode != dst_mode)
+ dst = gen_lowpart (mode, dst);
+
+ return dst;
+}
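+
+/* A typical use of copy_blkmode_to_reg appears in expand_assignment
+ below, where a BLKmode source is stored into a register destination
+ (simplified):
+
+ temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
+ if (temp)
+ emit_move_insn (to_rtx, temp); */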
+
/* Add a USE expression for REG to the (possibly empty) list pointed
to by CALL_FUSAGE. REG must denote a hard register. */
void
-use_reg (rtx *call_fusage, rtx reg)
+use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
*call_fusage
- = gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_USE (VOIDmode, reg), *call_fusage);
+ = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
return def_stmt;
}
+
+#ifdef HAVE_conditional_move
+/* Return the defining gimple statement for SSA_NAME NAME if it is an
+ assignment and the class of the expression on the RHS is TCLASS.
+ Return NULL otherwise. */
+
+static gimple
+get_def_for_expr_class (tree name, enum tree_code_class tclass)
+{
+ gimple def_stmt;
+
+ if (TREE_CODE (name) != SSA_NAME)
+ return NULL;
+
+ def_stmt = get_gimple_for_ssa_name (name);
+ if (!def_stmt
+ || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
+ return NULL;
+
+ return def_stmt;
+}
+#endif
\f
/* Determine whether the LEN bytes generated by CONSTFUN can be
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
void *constfundata, unsigned int align, bool memsetp, int endp)
{
- enum machine_mode to_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
+ enum machine_mode to_addr_mode = get_address_mode (to);
struct store_by_pieces_d data;
if (len == 0)
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
data.to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (data.to_addr,
+ plus_constant (to_addr_mode,
+ data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
unsigned int align ATTRIBUTE_UNUSED)
{
- enum machine_mode to_addr_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
+ enum machine_mode to_addr_mode = get_address_mode (data->to);
rtx to_addr = XEXP (data->to, 0);
unsigned int max_size = STORE_MAX_PIECES + 1;
enum insn_code icode;
if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
{
data->to_addr = copy_to_mode_reg (to_addr_mode,
- plus_constant (to_addr, data->len));
+ plus_constant (to_addr_mode,
+ to_addr,
+ data->len));
data->autinc_to = 1;
data->explicit_inc_to = -1;
}
/* Emit code to copy OBJECT and SIZE into new pseudos. We can then
place those new pseudos into a VAR_DECL and use them later. */
- object = copy_to_mode_reg (Pmode, XEXP (object, 0));
+ object = copy_addr_to_reg (XEXP (object, 0));
size_mode = TYPE_MODE (sizetype);
size = convert_to_mode (size_mode, size, 1);
}
/* A subroutine of set_storage_via_libcall. Create the tree node
- for the function we use for block clears. The first time FOR_CALL
- is true, we call assemble_external. */
+ for the function we use for block clears. */
tree block_clear_fn;
{
emitted_extern = true;
make_decl_rtl (block_clear_fn);
- assemble_external (block_clear_fn);
}
return block_clear_fn;
gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
}
- store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
+ store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX. Extract the
case POST_INC:
case POST_DEC:
case POST_MODIFY:
- temp = plus_constant (stack_pointer_rtx, -adjust);
+ temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
break;
default:
gcc_unreachable ();
REAL_VALUE_FROM_CONST_DOUBLE (r, y);
if (targetm.legitimate_constant_p (dstmode, y))
- oldcost = rtx_cost (y, SET, speed);
+ oldcost = set_src_cost (y, speed);
else
- oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
+ oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
srcmode != orig_srcmode;
if (!insn_operand_matches (ic, 1, trunc_y))
continue;
/* This is valid, but may not be cheaper than the original. */
- newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
+ newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
+ speed);
if (oldcost < newcost)
continue;
}
{
trunc_y = force_const_mem (srcmode, trunc_y);
/* This is valid, but may not be cheaper than the original. */
- newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
+ newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
+ speed);
if (oldcost < newcost)
continue;
trunc_y = validize_mem (trunc_y);
size = convert_modes (Pmode, ptr_mode, size, 1);
if (CONSTANT_P (size))
- anti_adjust_stack (plus_constant (size, extra));
+ anti_adjust_stack (plus_constant (Pmode, size, extra));
else if (REG_P (size) && extra == 0)
anti_adjust_stack (size);
else
{
temp = virtual_outgoing_args_rtx;
if (extra != 0 && below)
- temp = plus_constant (temp, extra);
+ temp = plus_constant (Pmode, temp, extra);
}
else
{
if (CONST_INT_P (size))
- temp = plus_constant (virtual_outgoing_args_rtx,
+ temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
-INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- negate_rtx (Pmode, plus_constant (size, extra)));
+ negate_rtx (Pmode, plus_constant (Pmode, size,
+ extra)));
else
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
-#ifdef PUSH_ROUNDING
+/* A utility routine that returns the base of an auto-inc memory, or NULL. */
+
+static rtx
+mem_autoinc_base (rtx mem)
+{
+ if (MEM_P (mem))
+ {
+ rtx addr = XEXP (mem, 0);
+ if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
+ return XEXP (addr, 0);
+ }
+ return NULL;
+}
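+
+/* For example, mem_autoinc_base applied to (mem:SI (pre_dec (reg sp)))
+ yields (reg sp), whereas a plain (mem:SI (reg sp)) yields NULL. */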
+
+/* A utility routine used here, in reload, and in try_split. The insns
+ after PREV up to and including LAST are known to adjust the stack,
+ with a final value of END_ARGS_SIZE. Iterate backward from LAST
+ placing notes as appropriate. PREV may be NULL, indicating the
+ entire insn sequence prior to LAST should be scanned.
+
+ The set of allowed stack pointer modifications is small:
+ (1) One or more auto-inc style memory references (aka pushes),
+ (2) One or more addition/subtraction with the SP as destination,
+ (3) A single move insn with the SP as destination,
+ (4) A call_pop insn,
+ (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
+
+ Insns in the sequence that do not modify the SP are ignored,
+ except for noreturn calls.
+
+ The return value is the amount of adjustment that can be trivially
+ verified, via immediate operand or auto-inc. If the adjustment
+ cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
+
+HOST_WIDE_INT
+find_args_size_adjust (rtx insn)
+{
+ rtx dest, set, pat;
+ int i;
+
+ pat = PATTERN (insn);
+ set = NULL;
+
+ /* Look for a call_pop pattern. */
+ if (CALL_P (insn))
+ {
+ /* We have to allow non-call_pop patterns for the case
+ of emit_single_push_insn of a TLS address. */
+ if (GET_CODE (pat) != PARALLEL)
+ return 0;
+
+ /* All call_pop have a stack pointer adjust in the parallel.
+ The call itself is always first, and the stack adjust is
+ usually last, so search from the end. */
+ for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
+ {
+ set = XVECEXP (pat, 0, i);
+ if (GET_CODE (set) != SET)
+ continue;
+ dest = SET_DEST (set);
+ if (dest == stack_pointer_rtx)
+ break;
+ }
+ /* We'd better have found the stack pointer adjust. */
+ if (i == 0)
+ return 0;
+ /* Fall through to process the extracted SET and DEST
+ as if it was a standalone insn. */
+ }
+ else if (GET_CODE (pat) == SET)
+ set = pat;
+ else if ((set = single_set (insn)) != NULL)
+ ;
+ else if (GET_CODE (pat) == PARALLEL)
+ {
+ /* ??? Some older ports use a parallel with a stack adjust
+ and a store for a PUSH_ROUNDING pattern, rather than a
+ PRE/POST_MODIFY rtx. Don't force them to update yet... */
+ /* ??? See h8300 and m68k, pushqi1. */
+ for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
+ {
+ set = XVECEXP (pat, 0, i);
+ if (GET_CODE (set) != SET)
+ continue;
+ dest = SET_DEST (set);
+ if (dest == stack_pointer_rtx)
+ break;
+
+ /* We do not expect an auto-inc of the sp in the parallel. */
+ gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
+ gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
+ != stack_pointer_rtx);
+ }
+ if (i < 0)
+ return 0;
+ }
+ else
+ return 0;
+
+ dest = SET_DEST (set);
+
+ /* Look for direct modifications of the stack pointer. */
+ if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
+ {
+ /* Look for a trivial adjustment, otherwise assume nothing. */
+ /* Note that the SPU restore_stack_block pattern refers to
+ the stack pointer in V4SImode. Consider that non-trivial. */
+ if (SCALAR_INT_MODE_P (GET_MODE (dest))
+ && GET_CODE (SET_SRC (set)) == PLUS
+ && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
+ && CONST_INT_P (XEXP (SET_SRC (set), 1)))
+ return INTVAL (XEXP (SET_SRC (set), 1));
+ /* ??? Reload can generate no-op moves, which will be cleaned
+ up later. Recognize it and continue searching. */
+ else if (rtx_equal_p (dest, SET_SRC (set)))
+ return 0;
+ else
+ return HOST_WIDE_INT_MIN;
+ }
+ else
+ {
+ rtx mem, addr;
+
+ /* Otherwise only think about autoinc patterns. */
+ if (mem_autoinc_base (dest) == stack_pointer_rtx)
+ {
+ mem = dest;
+ gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
+ != stack_pointer_rtx);
+ }
+ else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
+ mem = SET_SRC (set);
+ else
+ return 0;
+
+ addr = XEXP (mem, 0);
+ switch (GET_CODE (addr))
+ {
+ case PRE_INC:
+ case POST_INC:
+ return GET_MODE_SIZE (GET_MODE (mem));
+ case PRE_DEC:
+ case POST_DEC:
+ return -GET_MODE_SIZE (GET_MODE (mem));
+ case PRE_MODIFY:
+ case POST_MODIFY:
+ addr = XEXP (addr, 1);
+ gcc_assert (GET_CODE (addr) == PLUS);
+ gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
+ gcc_assert (CONST_INT_P (XEXP (addr, 1)));
+ return INTVAL (XEXP (addr, 1));
+ default:
+ gcc_unreachable ();
+ }
+ }
+}
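+
+/* Illustrative results of find_args_size_adjust:
+ (set (reg sp) (plus (reg sp) (const_int -16))) -> -16
+ an SImode push via (mem (pre_dec (reg sp))) -> -4
+ a non-constant or V4SImode write to the sp -> HOST_WIDE_INT_MIN
+ an insn that does not modify the sp -> 0 */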
+
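+/* Walk the insns after PREV up to and including LAST, attaching a
+ REG_ARGS_SIZE note to each insn that adjusts the stack (and to
+ noreturn calls when !ACCUMULATE_OUTGOING_ARGS); END_ARGS_SIZE is the
+ args size after LAST, and each note records the args size after its
+ insn. Return the args size before the sequence, or INT_MIN if an
+ adjustment could not be determined. */
+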
+int
+fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
+{
+ int args_size = end_args_size;
+ bool saw_unknown = false;
+ rtx insn;
+
+ for (insn = last; insn != prev; insn = PREV_INSN (insn))
+ {
+ HOST_WIDE_INT this_delta;
+ if (!NONDEBUG_INSN_P (insn))
+ continue;
+
+ this_delta = find_args_size_adjust (insn);
+ if (this_delta == 0)
+ {
+ if (!CALL_P (insn)
+ || ACCUMULATE_OUTGOING_ARGS
+ || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
+ continue;
+ }
+
+ gcc_assert (!saw_unknown);
+ if (this_delta == HOST_WIDE_INT_MIN)
+ saw_unknown = true;
+
+ add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
+#ifdef STACK_GROWS_DOWNWARD
+ this_delta = -this_delta;
+#endif
+ args_size -= this_delta;
+ }
+
+ return saw_unknown ? INT_MIN : args_size;
+}
+
+#ifdef PUSH_ROUNDING
/* Emit single push insn. */
static void
-emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
+emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
rtx dest_addr;
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
}
emit_move_insn (dest, x);
}
+
+/* Emit and annotate a single push insn. */
+
+static void
+emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
+{
+ int delta, old_delta = stack_pointer_delta;
+ rtx prev = get_last_insn ();
+ rtx last;
+
+ emit_single_push_insn_1 (mode, x, type);
+
+ last = get_last_insn ();
+
+ /* Notice the common case where we emitted exactly one insn. */
+ if (PREV_INSN (last) == prev)
+ {
+ add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
+ return;
+ }
+
+ delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
+ gcc_assert (delta == INT_MIN || delta == old_delta);
+}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
size = GEN_INT (GET_MODE_SIZE (mode));
if (!MEM_P (xinner))
{
- temp = assign_temp (type, 0, 1, 1);
+ temp = assign_temp (type, 1, 1);
emit_move_insn (temp, xinner);
xinner = temp;
}
}
else if (CONST_INT_P (args_so_far))
temp = memory_address (BLKmode,
- plus_constant (args_addr,
+ plus_constant (Pmode, args_addr,
skip + INTVAL (args_so_far)));
else
temp = memory_address (BLKmode,
- plus_constant (gen_rtx_PLUS (Pmode,
+ plus_constant (Pmode,
+ gen_rtx_PLUS (Pmode,
args_addr,
args_so_far),
skip));
if (CONST_INT_P (args_so_far))
addr
= memory_address (mode,
- plus_constant (args_addr,
+ plus_constant (Pmode, args_addr,
INTVAL (args_so_far)));
else
addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitpos,
+ unsigned HOST_WIDE_INT bitregion_start,
+ unsigned HOST_WIDE_INT bitregion_end,
enum machine_mode mode1, rtx str_rtx,
tree to, tree src)
{
if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
str_mode = word_mode;
str_mode = get_best_mode (bitsize, bitpos,
+ bitregion_start, bitregion_end,
MEM_ALIGN (str_rtx), str_mode, 0);
if (str_mode == VOIDmode)
return false;
value = expand_and (str_mode, value, const1_rtx, NULL);
binop = xor_optab;
}
- value = expand_shift (LSHIFT_EXPR, str_mode, value,
- bitpos, NULL_RTX, 1);
+ value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
result = expand_binop (str_mode, binop, str_rtx,
value, str_rtx, 1, OPTAB_WIDEN);
if (result != str_rtx)
case BIT_XOR_EXPR:
if (TREE_CODE (op1) != INTEGER_CST)
break;
- value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
- value = convert_modes (GET_MODE (str_rtx),
+ value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
+ value = convert_modes (str_mode,
TYPE_MODE (TREE_TYPE (op1)), value,
TYPE_UNSIGNED (TREE_TYPE (op1)));
}
binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
- if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
+ if (bitpos + bitsize != str_bitsize)
{
- rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
- - 1);
- value = expand_and (GET_MODE (str_rtx), value, mask,
- NULL_RTX);
+ rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
+ value = expand_and (str_mode, value, mask, NULL_RTX);
}
- value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
- bitpos, NULL_RTX, 1);
- result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
+ value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
+ result = expand_binop (str_mode, binop, str_rtx,
value, str_rtx, 1, OPTAB_WIDEN);
if (result != str_rtx)
emit_move_insn (str_rtx, result);
return false;
}
+/* In the C++ memory model, consecutive bit fields in a structure are
+ considered one memory location.
+
+ Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
+ returns the bit range of consecutive bits in which this COMPONENT_REF
+ belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
+ and *OFFSET may be adjusted in the process.
+
+ If the access does not need to be restricted, 0 is returned in both
+ *BITSTART and *BITEND. */
+
+static void
+get_bit_range (unsigned HOST_WIDE_INT *bitstart,
+ unsigned HOST_WIDE_INT *bitend,
+ tree exp,
+ HOST_WIDE_INT *bitpos,
+ tree *offset)
+{
+ HOST_WIDE_INT bitoffset;
+ tree field, repr;
+
+ gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
+
+ field = TREE_OPERAND (exp, 1);
+ repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
+ need to limit the range we can access. */
+ if (!repr)
+ {
+ *bitstart = *bitend = 0;
+ return;
+ }
+
+ /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
+ part of a larger bit field, then the representative does not serve any
+ useful purpose. This can occur in Ada. */
+ if (handled_component_p (TREE_OPERAND (exp, 0)))
+ {
+ enum machine_mode rmode;
+ HOST_WIDE_INT rbitsize, rbitpos;
+ tree roffset;
+ int unsignedp;
+ int volatilep = 0;
+ get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
+ &roffset, &rmode, &unsignedp, &volatilep, false);
+ if ((rbitpos % BITS_PER_UNIT) != 0)
+ {
+ *bitstart = *bitend = 0;
+ return;
+ }
+ }
+
+ /* Compute the adjustment to bitpos from the offset of the field
+ relative to the representative. DECL_FIELD_OFFSET of field and
+ repr are the same by construction if they are not constants,
+ see finish_bitfield_layout. */
+ if (host_integerp (DECL_FIELD_OFFSET (field), 1)
+ && host_integerp (DECL_FIELD_OFFSET (repr), 1))
+ bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
+ else
+ bitoffset = 0;
+ bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+
+ /* If the adjustment is larger than bitpos, we would have a negative bit
+ position for the lower bound and this may wreak havoc later. This can
+ occur only if we have a non-null offset, so adjust offset and bitpos
+ to make the lower bound non-negative. */
+ if (bitoffset > *bitpos)
+ {
+ HOST_WIDE_INT adjust = bitoffset - *bitpos;
+
+ gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+ gcc_assert (*offset != NULL_TREE);
+
+ *bitpos += adjust;
+ *offset
+ = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+ *bitstart = 0;
+ }
+ else
+ *bitstart = *bitpos - bitoffset;
+
+ *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
+}
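+
+/* For example (illustrative): in struct { char a; int b : 7; int c : 9;
+ char d; }, the adjacent bit fields B and C share one representative
+ field, so a store to C may touch the bits of B but must not touch A
+ or D, which are separate memory locations in the C++ model. */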
+
+/* Returns true if the MEM_REF REF refers to an object that does not
+ reside in memory and has non-BLKmode. */
+
+static bool
+mem_ref_refers_to_non_mem_p (tree ref)
+{
+ tree base = TREE_OPERAND (ref, 0);
+ if (TREE_CODE (base) != ADDR_EXPR)
+ return false;
+ base = TREE_OPERAND (base, 0);
+ return (DECL_P (base)
+ && !TREE_ADDRESSABLE (base)
+ && DECL_MODE (base) != BLKmode
+ && DECL_RTL_SET_P (base)
+ && !MEM_P (DECL_RTL (base)));
+}
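+
+/* E.g. a MEM_REF based on the address of a non-addressable local
+ variable whose DECL_RTL is a (non-MEM) register satisfies this
+ predicate. */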
/* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
is true, try generating a nontemporal store. */
rtx to_rtx = 0;
rtx result;
enum machine_mode mode;
- int align;
+ unsigned int align;
enum insn_code icode;
/* Don't crash if the lhs of the assignment was erroneous. */
if (operand_equal_p (to, from, 0))
return;
+ /* Handle misaligned stores. */
mode = TYPE_MODE (TREE_TYPE (to));
if ((TREE_CODE (to) == MEM_REF
|| TREE_CODE (to) == TARGET_MEM_REF)
&& mode != BLKmode
- && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
- get_object_alignment (to, BIGGEST_ALIGNMENT)))
- < (signed) GET_MODE_ALIGNMENT (mode))
- && ((icode = optab_handler (movmisalign_optab, mode))
- != CODE_FOR_nothing))
+ && !mem_ref_refers_to_non_mem_p (to)
+ && ((align = get_object_or_type_alignment (to))
+ < GET_MODE_ALIGNMENT (mode))
+ && (((icode = optab_handler (movmisalign_optab, mode))
+ != CODE_FOR_nothing)
+ || SLOW_UNALIGNED_ACCESS (mode, align)))
{
- struct expand_operand ops[2];
- enum machine_mode address_mode;
- rtx reg, op0, mem;
+ rtx reg, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg);
+ mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
- if (TREE_CODE (to) == MEM_REF)
- {
- addr_space_t as
- = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
- tree base = TREE_OPERAND (to, 0);
- address_mode = targetm.addr_space.address_mode (as);
- op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- op0 = convert_memory_address_addr_space (address_mode, op0, as);
- if (!integer_zerop (TREE_OPERAND (to, 1)))
- {
- rtx off
- = immed_double_int_const (mem_ref_offset (to), address_mode);
- op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
- }
- op0 = memory_address_addr_space (mode, op0, as);
- mem = gen_rtx_MEM (mode, op0);
- set_mem_attributes (mem, to, 0);
- set_mem_addr_space (mem, as);
- }
- else if (TREE_CODE (to) == TARGET_MEM_REF)
+ if (icode != CODE_FOR_nothing)
{
- addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
- struct mem_address addr;
+ struct expand_operand ops[2];
- get_address_description (to, &addr);
- op0 = addr_for_mem_ref (&addr, as, true);
- op0 = memory_address_addr_space (mode, op0, as);
- mem = gen_rtx_MEM (mode, op0);
- set_mem_attributes (mem, to, 0);
- set_mem_addr_space (mem, as);
+ create_fixed_operand (&ops[0], mem);
+ create_input_operand (&ops[1], reg, mode);
+ /* The movmisalign<mode> pattern cannot fail, else the assignment
+ would silently be omitted. */
+ expand_insn (icode, 2, ops);
}
else
- gcc_unreachable ();
- if (TREE_THIS_VOLATILE (to))
- MEM_VOLATILE_P (mem) = 1;
-
- create_fixed_operand (&ops[0], mem);
- create_input_operand (&ops[1], reg, mode);
- /* The movmisalign<mode> pattern cannot fail, else the assignment would
- silently be omitted. */
- expand_insn (icode, 2, ops);
+ store_bit_field (mem, GET_MODE_BITSIZE (mode),
+ 0, 0, 0, mode, reg);
return;
}
if the structure component's rtx is not simply a MEM.
Assignment of an array element at a constant index, and assignment of
an array element in an unaligned packed structure field, has the same
- problem. */
+ problem. Same for (partially) storing into a non-memory object. */
if (handled_component_p (to)
- /* ??? We only need to handle MEM_REF here if the access is not
- a full access of the base object. */
|| (TREE_CODE (to) == MEM_REF
- && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
+ && mem_ref_refers_to_non_mem_p (to))
|| TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
{
enum machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
+ unsigned HOST_WIDE_INT bitregion_start = 0;
+ unsigned HOST_WIDE_INT bitregion_end = 0;
tree offset;
int unsignedp;
int volatilep = 0;
tree tem;
+ bool misalignp;
+ rtx mem = NULL_RTX;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
&unsignedp, &volatilep, true);
+ if (TREE_CODE (to) == COMPONENT_REF
+ && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
+ get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
+
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
+ mode = TYPE_MODE (TREE_TYPE (tem));
+ if (TREE_CODE (tem) == MEM_REF
+ && mode != BLKmode
+ && ((align = get_object_or_type_alignment (tem))
+ < GET_MODE_ALIGNMENT (mode))
+ && ((icode = optab_handler (movmisalign_optab, mode))
+ != CODE_FOR_nothing))
+ {
+ struct expand_operand ops[2];
+
+ misalignp = true;
+ to_rtx = gen_reg_rtx (mode);
+ mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
+
+ /* If the misaligned store doesn't overwrite all bits, perform a
+ read-modify-write cycle on MEM. */
+ if (bitsize != GET_MODE_BITSIZE (mode))
+ {
+ create_input_operand (&ops[0], to_rtx, mode);
+ create_fixed_operand (&ops[1], mem);
+ /* The movmisalign<mode> pattern cannot fail, else the assignment
+ would silently be omitted. */
+ expand_insn (icode, 2, ops);
- to_rtx = expand_normal (tem);
+ mem = copy_rtx (mem);
+ }
+ }
+ else
+ {
+ misalignp = false;
+ to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ }
/* If the bitfield is volatile, we want to access it in the
field's mode, not the computed mode.
}
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ address_mode = get_address_mode (to_rtx);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
nontemporal);
else if (bitpos + bitsize <= mode_bitsize / 2)
result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
+ bitregion_start, bitregion_end,
mode1, from, TREE_TYPE (tem),
get_alias_set (to), nontemporal);
else if (bitpos >= mode_bitsize / 2)
result = store_field (XEXP (to_rtx, 1), bitsize,
- bitpos - mode_bitsize / 2, mode1, from,
+ bitpos - mode_bitsize / 2,
+ bitregion_start, bitregion_end,
+ mode1, from,
TREE_TYPE (tem), get_alias_set (to),
nontemporal);
else if (bitpos == 0 && bitsize == mode_bitsize)
else
{
rtx temp = assign_stack_temp (GET_MODE (to_rtx),
- GET_MODE_SIZE (GET_MODE (to_rtx)),
- 0);
+ GET_MODE_SIZE (GET_MODE (to_rtx)));
write_complex_part (temp, XEXP (to_rtx, 0), false);
write_complex_part (temp, XEXP (to_rtx, 1), true);
- result = store_field (temp, bitsize, bitpos, mode1, from,
+ result = store_field (temp, bitsize, bitpos,
+ bitregion_start, bitregion_end,
+ mode1, from,
TREE_TYPE (tem), get_alias_set (to),
nontemporal);
emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
}
- if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
+ if (optimize_bitfield_assignment_op (bitsize, bitpos,
+ bitregion_start, bitregion_end,
+ mode1,
to_rtx, to, from))
result = NULL;
else
- result = store_field (to_rtx, bitsize, bitpos, mode1, from,
+ result = store_field (to_rtx, bitsize, bitpos,
+ bitregion_start, bitregion_end,
+ mode1, from,
TREE_TYPE (tem), get_alias_set (to),
nontemporal);
}
+ if (misalignp)
+ {
+ struct expand_operand ops[2];
+
+ create_fixed_operand (&ops[0], mem);
+ create_input_operand (&ops[1], to_rtx, mode);
+ /* The movmisalign<mode> pattern cannot fail, else the assignment
+ would silently be omitted. */
+ expand_insn (icode, 2, ops);
+ }
+
if (result)
preserve_temp_slots (result);
- free_temp_slots ();
pop_temp_slots ();
return;
}
if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
&& COMPLETE_TYPE_P (TREE_TYPE (from))
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
- && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
+ && ! (((TREE_CODE (to) == VAR_DECL
+ || TREE_CODE (to) == PARM_DECL
+ || TREE_CODE (to) == RESULT_DECL)
&& REG_P (DECL_RTL (to)))
|| TREE_CODE (to) == SSA_NAME))
{
emit_move_insn (to_rtx, value);
}
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
- /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
- Don't re-expand if it was expanded already (in COMPONENT_REF case). */
-
- if (to_rtx == 0)
- to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
/* Don't move directly into a return register. */
if (TREE_CODE (to) == RESULT_DECL
rtx temp;
push_temp_slots ();
- temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
+ if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
+ temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
+ else
+ temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
if (GET_CODE (to_rtx) == PARALLEL)
emit_group_load (to_rtx, temp, TREE_TYPE (from),
int_size_in_bytes (TREE_TYPE (from)));
- else
+ else if (temp)
emit_move_insn (to_rtx, temp);
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
TYPE_MODE (sizetype));
preserve_temp_slots (to_rtx);
- free_temp_slots ();
pop_temp_slots ();
return;
}
push_temp_slots ();
result = store_expr (from, to_rtx, 0, nontemporal);
preserve_temp_slots (result);
- free_temp_slots ();
pop_temp_slots ();
return;
}
: BLOCK_OP_NORMAL));
else if (GET_MODE (target) == BLKmode)
store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
- 0, GET_MODE (temp), temp);
+ 0, 0, 0, GET_MODE (temp), temp);
else
convert_move (target, temp, unsignedp);
}
{
enum machine_mode pointer_mode
= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
- enum machine_mode address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
+ enum machine_mode address_mode = get_address_mode (target);
/* Compute the size of the data to copy from the string. */
tree copy_size
Do all calculations in pointer_mode. */
if (CONST_INT_P (copy_size_rtx))
{
- size = plus_constant (size, -INTVAL (copy_size_rtx));
+ size = plus_constant (address_mode, size,
+ -INTVAL (copy_size_rtx));
target = adjust_address (target, BLKmode,
INTVAL (copy_size_rtx));
}
return NULL_RTX;
}
\f
-/* Helper for categorize_ctor_elements. Identical interface. */
+/* Return true if field F of structure TYPE is a flexible array member. */
static bool
-categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
- HOST_WIDE_INT *p_elt_count,
- bool *p_must_clear)
+flexible_array_member_p (const_tree f, const_tree type)
{
- unsigned HOST_WIDE_INT idx;
- HOST_WIDE_INT nz_elts, elt_count;
- tree value, purpose;
+ const_tree tf;
+
+ tf = TREE_TYPE (f);
+ return (DECL_CHAIN (f) == NULL
+ && TREE_CODE (tf) == ARRAY_TYPE
+ && TYPE_DOMAIN (tf)
+ && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
+ && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
+ && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
+ && int_size_in_bytes (type) >= 0);
+}
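+
+/* E.g. the trailing member F of struct S { int n; int f[]; } is a
+ flexible array member, whereas a trailing fixed-size array such as
+ int f[4] is not, since its domain has a maximum value. */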
- /* Whether CTOR is a valid constant initializer, in accordance with what
- initializer_constant_valid_p does. If inferred from the constructor
- elements, true until proven otherwise. */
- bool const_from_elts_p = constructor_static_from_elts_p (ctor);
- bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
+/* If FOR_CTOR_P, return the number of top-level elements that a constructor
+ must have in order for it to completely initialize a value of type TYPE.
+ Return -1 if the number isn't known.
- nz_elts = 0;
- elt_count = 0;
+ If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
- FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
+static HOST_WIDE_INT
+count_type_elements (const_tree type, bool for_ctor_p)
+{
+ switch (TREE_CODE (type))
{
- HOST_WIDE_INT mult = 1;
-
- if (TREE_CODE (purpose) == RANGE_EXPR)
- {
- tree lo_index = TREE_OPERAND (purpose, 0);
- tree hi_index = TREE_OPERAND (purpose, 1);
-
- if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
- mult = (tree_low_cst (hi_index, 1)
- - tree_low_cst (lo_index, 1) + 1);
- }
+ case ARRAY_TYPE:
+ {
+ tree nelts;
- switch (TREE_CODE (value))
- {
- case CONSTRUCTOR:
+ nelts = array_type_nelts (type);
+ if (nelts && host_integerp (nelts, 1))
{
- HOST_WIDE_INT nz = 0, ic = 0;
-
- bool const_elt_p
- = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
-
- nz_elts += mult * nz;
- elt_count += mult * ic;
+ unsigned HOST_WIDE_INT n;
- if (const_from_elts_p && const_p)
- const_p = const_elt_p;
+ n = tree_low_cst (nelts, 1) + 1;
+ if (n == 0 || for_ctor_p)
+ return n;
+ else
+ return n * count_type_elements (TREE_TYPE (type), false);
}
- break;
+ return for_ctor_p ? -1 : 1;
+ }
- case INTEGER_CST:
- case REAL_CST:
+ case RECORD_TYPE:
+ {
+ unsigned HOST_WIDE_INT n;
+ tree f;
+
+ n = 0;
+ for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
+ if (TREE_CODE (f) == FIELD_DECL)
+ {
+ if (!for_ctor_p)
+ n += count_type_elements (TREE_TYPE (f), false);
+ else if (!flexible_array_member_p (f, type))
+ /* Don't count flexible arrays, which are not supposed
+ to be initialized. */
+ n += 1;
+ }
+
+ return n;
+ }
+
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ tree f;
+ HOST_WIDE_INT n, m;
+
+ gcc_assert (!for_ctor_p);
+ /* Estimate the number of scalars in each field and pick the
+ maximum. Other estimates would do instead; the idea is simply
+ to make sure that the estimate is not sensitive to the ordering
+ of the fields. */
+ n = 1;
+ for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
+ if (TREE_CODE (f) == FIELD_DECL)
+ {
+ m = count_type_elements (TREE_TYPE (f), false);
+ /* If the field doesn't span the whole union, add an extra
+ scalar for the rest. */
+ if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
+ TYPE_SIZE (type)) != 1)
+ m++;
+ if (n < m)
+ n = m;
+ }
+ return n;
+ }
+
+ case COMPLEX_TYPE:
+ return 2;
+
+ case VECTOR_TYPE:
+ return TYPE_VECTOR_SUBPARTS (type);
+
+ case INTEGER_TYPE:
+ case REAL_TYPE:
+ case FIXED_POINT_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ case POINTER_TYPE:
+ case OFFSET_TYPE:
+ case REFERENCE_TYPE:
+ case NULLPTR_TYPE:
+ return 1;
+
+ case ERROR_MARK:
+ return 0;
+
+ case VOID_TYPE:
+ case METHOD_TYPE:
+ case FUNCTION_TYPE:
+ case LANG_TYPE:
+ default:
+ gcc_unreachable ();
+ }
+}
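+
+/* For example (illustrative): for struct { int a[3]; _Complex double z; },
+ count_type_elements returns 2 if FOR_CTOR_P (a complete constructor
+ needs two top-level elements) and 3 + 2 = 5 if not (the total number
+ of scalars). */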
+
+/* Helper for categorize_ctor_elements. Identical interface. */
+
+static bool
+categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
+ HOST_WIDE_INT *p_init_elts, bool *p_complete)
+{
+ unsigned HOST_WIDE_INT idx;
+ HOST_WIDE_INT nz_elts, init_elts, num_fields;
+ tree value, purpose, elt_type;
+
+ /* Whether CTOR is a valid constant initializer, in accordance with what
+ initializer_constant_valid_p does. If inferred from the constructor
+ elements, true until proven otherwise. */
+ bool const_from_elts_p = constructor_static_from_elts_p (ctor);
+ bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
+
+ nz_elts = 0;
+ init_elts = 0;
+ num_fields = 0;
+ elt_type = NULL_TREE;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
+ {
+ HOST_WIDE_INT mult = 1;
+
+ if (TREE_CODE (purpose) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (purpose, 0);
+ tree hi_index = TREE_OPERAND (purpose, 1);
+
+ if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
+ mult = (tree_low_cst (hi_index, 1)
+ - tree_low_cst (lo_index, 1) + 1);
+ }
+ num_fields += mult;
+ elt_type = TREE_TYPE (value);
+
+ switch (TREE_CODE (value))
+ {
+ case CONSTRUCTOR:
+ {
+ HOST_WIDE_INT nz = 0, ic = 0;
+
+ bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
+ p_complete);
+
+ nz_elts += mult * nz;
+ init_elts += mult * ic;
+
+ if (const_from_elts_p && const_p)
+ const_p = const_elt_p;
+ }
+ break;
+
+ case INTEGER_CST:
+ case REAL_CST:
case FIXED_CST:
if (!initializer_zerop (value))
nz_elts += mult;
- elt_count += mult;
+ init_elts += mult;
break;
case STRING_CST:
nz_elts += mult * TREE_STRING_LENGTH (value);
- elt_count += mult * TREE_STRING_LENGTH (value);
+ init_elts += mult * TREE_STRING_LENGTH (value);
break;
case COMPLEX_CST:
nz_elts += mult;
if (!initializer_zerop (TREE_IMAGPART (value)))
nz_elts += mult;
- elt_count += mult;
+ init_elts += mult;
break;
case VECTOR_CST:
{
- tree v;
- for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
+ unsigned i;
+ for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
{
- if (!initializer_zerop (TREE_VALUE (v)))
+ tree v = VECTOR_CST_ELT (value, i);
+ if (!initializer_zerop (v))
nz_elts += mult;
- elt_count += mult;
+ init_elts += mult;
}
}
break;
default:
{
- HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
- if (tc < 1)
- tc = 1;
+ HOST_WIDE_INT tc = count_type_elements (elt_type, false);
nz_elts += mult * tc;
- elt_count += mult * tc;
+ init_elts += mult * tc;
if (const_from_elts_p && const_p)
- const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
+ const_p = initializer_constant_valid_p (value, elt_type)
!= NULL_TREE;
}
break;
}
}
- if (!*p_must_clear
- && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
- || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
- {
- tree init_sub_type;
- bool clear_this = true;
-
- if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
- {
- /* We don't expect more than one element of the union to be
- initialized. Not sure what we should do otherwise... */
- gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
- == 1);
-
- init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (ctor),
- 0)->value);
-
- /* ??? We could look at each element of the union, and find the
- largest element. Which would avoid comparing the size of the
- initialized element against any tail padding in the union.
- Doesn't seem worth the effort... */
- if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
- TYPE_SIZE (init_sub_type)) == 1)
- {
- /* And now we have to find out if the element itself is fully
- constructed. E.g. for union { struct { int a, b; } s; } u
- = { .s = { .a = 1 } }. */
- if (elt_count == count_type_elements (init_sub_type, false))
- clear_this = false;
- }
- }
-
- *p_must_clear = clear_this;
- }
+ if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
+ num_fields, elt_type))
+ *p_complete = false;
*p_nz_elts += nz_elts;
- *p_elt_count += elt_count;
+ *p_init_elts += init_elts;
return const_p;
}
and place it in *P_NZ_ELTS;
* how many scalar fields in total are in CTOR,
and place it in *P_ELT_COUNT.
- * if a type is a union, and the initializer from the constructor
- is not the largest element in the union, then set *p_must_clear.
+ * whether the constructor is complete -- in the sense that every
+ meaningful byte is explicitly given a value --
+ and place it in *P_COMPLETE.
Return whether or not CTOR is a valid static constant initializer, the same
as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
- HOST_WIDE_INT *p_elt_count,
- bool *p_must_clear)
+ HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
*p_nz_elts = 0;
- *p_elt_count = 0;
- *p_must_clear = false;
+ *p_init_elts = 0;
+ *p_complete = true;
- return
- categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
+ return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
-/* Count the number of scalars in TYPE. Return -1 on overflow or
- variable-sized. If ALLOW_FLEXARR is true, don't count flexible
- array member at the end of the structure. */
+/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
+ of which had type LAST_TYPE. Each element was itself a complete
+ initializer, in the sense that every meaningful byte was explicitly
+ given a value. Return true if the same is true for the constructor
+ as a whole. */
-HOST_WIDE_INT
-count_type_elements (const_tree type, bool allow_flexarr)
+bool
+complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
+ const_tree last_type)
{
- const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
- switch (TREE_CODE (type))
+ if (TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
{
- case ARRAY_TYPE:
- {
- tree telts = array_type_nelts (type);
- if (telts && host_integerp (telts, 1))
- {
- HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
- HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
- if (n == 0)
- return 0;
- else if (max / n > m)
- return n * m;
- }
- return -1;
- }
-
- case RECORD_TYPE:
- {
- HOST_WIDE_INT n = 0, t;
- tree f;
-
- for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
- if (TREE_CODE (f) == FIELD_DECL)
- {
- t = count_type_elements (TREE_TYPE (f), false);
- if (t < 0)
- {
- /* Check for structures with flexible array member. */
- tree tf = TREE_TYPE (f);
- if (allow_flexarr
- && DECL_CHAIN (f) == NULL
- && TREE_CODE (tf) == ARRAY_TYPE
- && TYPE_DOMAIN (tf)
- && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
- && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
- && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
- && int_size_in_bytes (type) >= 0)
- break;
-
- return -1;
- }
- n += t;
- }
-
- return n;
- }
-
- case UNION_TYPE:
- case QUAL_UNION_TYPE:
- return -1;
-
- case COMPLEX_TYPE:
- return 2;
-
- case VECTOR_TYPE:
- return TYPE_VECTOR_SUBPARTS (type);
-
- case INTEGER_TYPE:
- case REAL_TYPE:
- case FIXED_POINT_TYPE:
- case ENUMERAL_TYPE:
- case BOOLEAN_TYPE:
- case POINTER_TYPE:
- case OFFSET_TYPE:
- case REFERENCE_TYPE:
- return 1;
+ if (num_elts == 0)
+ return false;
- case ERROR_MARK:
- return 0;
+ gcc_assert (num_elts == 1 && last_type);
- case VOID_TYPE:
- case METHOD_TYPE:
- case FUNCTION_TYPE:
- case LANG_TYPE:
- default:
- gcc_unreachable ();
+ /* ??? We could look at each element of the union, and find the
+ largest element. Which would avoid comparing the size of the
+ initialized element against any tail padding in the union.
+ Doesn't seem worth the effort... */
+ return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
}
+
+ return count_type_elements (type, true) == num_elts;
}
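+
+/* E.g. for union { struct { int a, b; } s; } u = { .s = { .a = 1 } },
+ the inner constructor covers only one of the two fields of S, so the
+ constructor is not complete at that level and the object will be
+ cleared before the store. */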
/* Return 1 if EXP contains mostly (3/4) zeros. */
mostly_zeros_p (const_tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
-
{
- HOST_WIDE_INT nz_elts, count, elts;
- bool must_clear;
-
- categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
- if (must_clear)
- return 1;
+ HOST_WIDE_INT nz_elts, init_elts;
+ bool complete_p;
- elts = count_type_elements (TREE_TYPE (exp), false);
-
- return nz_elts < elts / 4;
+ categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
+ return !complete_p || nz_elts < init_elts / 4;
}
return initializer_zerop (exp);
all_zeros_p (const_tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
-
{
- HOST_WIDE_INT nz_elts, count;
- bool must_clear;
+ HOST_WIDE_INT nz_elts, init_elts;
+ bool complete_p;
- categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
+ categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
return nz_elts == 0;
}
store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
}
else
- store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
+ store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
+ false);
}
/* Store the value of constructor EXP into the rtx TARGET.
offset_rtx = expand_normal (offset);
gcc_assert (MEM_P (to_rtx));
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ address_mode = get_address_mode (to_rtx);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
- type = lang_hooks.types.type_for_size
- (BITS_PER_WORD, TYPE_UNSIGNED (type));
+ type = lang_hooks.types.type_for_mode
+ (word_mode, TYPE_UNSIGNED (type));
value = fold_convert (type, value);
}
BITSIZE bits, starting BITPOS bits from the start of TARGET.
If MODE is VOIDmode, it means that we are storing into a bit-field.
+ BITREGION_START is the bitpos of the first bitfield in this region.
+ BITREGION_END is the bitpos of the ending bitfield in this region.
+ These two fields are 0 if the C++ memory model does not apply,
+ or if we are not interested in keeping track of bitfield regions.
+
Always return const0_rtx unless we have something particular to
return.
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
+ unsigned HOST_WIDE_INT bitregion_start,
+ unsigned HOST_WIDE_INT bitregion_end,
enum machine_mode mode, tree exp, tree type,
alias_set_type alias_set, bool nontemporal)
{
if (mode == BLKmode
&& (REG_P (target) || GET_CODE (target) == SUBREG))
{
- rtx object = assign_temp (type, 0, 1, 1);
+ rtx object = assign_temp (type, 1, 1);
rtx blk_object = adjust_address (object, BLKmode, 0);
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
emit_move_insn (object, target);
- store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
- nontemporal);
+ store_field (blk_object, bitsize, bitpos,
+ bitregion_start, bitregion_end,
+ mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
emit_move_insn (target, object);
|| bitpos % GET_MODE_ALIGNMENT (mode))
&& SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
|| (bitpos % BITS_PER_UNIT != 0)))
+ || (bitsize >= 0 && mode != BLKmode
+ && GET_MODE_BITSIZE (mode) > bitsize)
/* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
operations. */
GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
NULL_RTX, 1);
- /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
- MODE. */
+ /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
if (mode != VOIDmode && mode != BLKmode
&& mode != TYPE_MODE (TREE_TYPE (exp)))
temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
}
/* Store the value in the bitfield. */
- store_bit_field (target, bitsize, bitpos, mode, temp);
+ store_bit_field (target, bitsize, bitpos,
+ bitregion_start, bitregion_end,
+ mode, temp);
return const0_rtx;
}
if (to_rtx == target)
to_rtx = copy_rtx (to_rtx);
- if (!MEM_SCALAR_P (to_rtx))
- MEM_IN_STRUCT_P (to_rtx) = 1;
if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
set_mem_alias_set (to_rtx, alias_set);
/* If OFFSET is constant, see if we can return the whole thing as a
constant bit position. Make sure to handle overflow during
this conversion. */
- if (host_integerp (offset, 0))
- {
- double_int tem = double_int_lshift (tree_to_double_int (offset),
- BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT),
- HOST_BITS_PER_DOUBLE_INT, true);
+ if (TREE_CODE (offset) == INTEGER_CST)
+ {
+ double_int tem = tree_to_double_int (offset);
+ tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
+ tem = double_int_lshift (tem,
+ BITS_PER_UNIT == 8
+ ? 3 : exact_log2 (BITS_PER_UNIT),
+ HOST_BITS_PER_DOUBLE_INT, true);
tem = double_int_add (tem, bit_offset);
if (double_int_fits_in_shwi_p (tem))
{
/* Otherwise, split it up. */
if (offset)
{
+ /* Avoid returning a negative bitpos as this may wreak havoc later. */
+ if (double_int_negative_p (bit_offset))
+ {
+ double_int mask
+ = double_int_mask (BITS_PER_UNIT == 8
+ ? 3 : exact_log2 (BITS_PER_UNIT));
+ double_int tem = double_int_and_not (bit_offset, mask);
+ /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
+ Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
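+ /* Worked example, assuming BITS_PER_UNIT == 8: BIT_OFFSET = -11
+ gives TEM = -16, so BIT_OFFSET becomes 5 and OFFSET is
+ decreased by 2 bytes. */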
+ bit_offset = double_int_sub (bit_offset, tem);
+ tem = double_int_rshift (tem,
+ BITS_PER_UNIT == 8
+ ? 3 : exact_log2 (BITS_PER_UNIT),
+ HOST_BITS_PER_DOUBLE_INT, true);
+ offset = size_binop (PLUS_EXPR, offset,
+ double_int_to_tree (sizetype, tem));
+ }
+
*pbitpos = double_int_to_shwi (bit_offset);
*poffset = offset;
}
return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
+/* Returns true if REF is an array reference to an array at the end of
+ a structure. If this is the case, the array may be allocated larger
+ than its upper bound implies. */
+
+bool
+array_at_struct_end_p (tree ref)
+{
+ if (TREE_CODE (ref) != ARRAY_REF
+ && TREE_CODE (ref) != ARRAY_RANGE_REF)
+ return false;
+
+ while (handled_component_p (ref))
+ {
+ /* If the reference chain contains a component reference to a
+ non-union type and another field follows it, the reference
+ is not at the end of a structure. */
+ if (TREE_CODE (ref) == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
+ {
+ tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
+ while (nextf && TREE_CODE (nextf) != FIELD_DECL)
+ nextf = DECL_CHAIN (nextf);
+ if (nextf)
+ return false;
+ }
+
+ ref = TREE_OPERAND (ref, 0);
+ }
+
+ /* If the reference is based on a declared entity, the size of the array
+ is constrained by its given domain. */
+ if (DECL_P (ref))
+ return false;
+
+ return true;
+}
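+
+/* For instance (illustrative only): with
+ struct msg { int len; char data[1]; };
+ an access P->data[i] through a pointer P satisfies this predicate,
+ since such trailing arrays are often allocated larger than their
+ declared bound, whereas the same access to a declared object
+ "struct msg m" does not. */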
+
/* Return a tree representing the upper bound of the array mentioned in
EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
are memory and they conflict. */
return ! (rtx_equal_p (x, exp_rtl)
|| (MEM_P (x) && MEM_P (exp_rtl)
- && true_dependence (exp_rtl, VOIDmode, x,
- rtx_addr_varies_p)));
+ && true_dependence (exp_rtl, VOIDmode, x)));
}
/* If we reach here, it is safe. */
return MAX (factor, talign);
}
\f
+#ifdef HAVE_conditional_move
+/* Convert the tree comparison code TCODE to the rtl one, taking the
+ signedness from UNSIGNEDP. */
+
+static enum rtx_code
+convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
+{
+ enum rtx_code code;
+ switch (tcode)
+ {
+ case EQ_EXPR:
+ code = EQ;
+ break;
+ case NE_EXPR:
+ code = NE;
+ break;
+ case LT_EXPR:
+ code = unsignedp ? LTU : LT;
+ break;
+ case LE_EXPR:
+ code = unsignedp ? LEU : LE;
+ break;
+ case GT_EXPR:
+ code = unsignedp ? GTU : GT;
+ break;
+ case GE_EXPR:
+ code = unsignedp ? GEU : GE;
+ break;
+ case UNORDERED_EXPR:
+ code = UNORDERED;
+ break;
+ case ORDERED_EXPR:
+ code = ORDERED;
+ break;
+ case UNLT_EXPR:
+ code = UNLT;
+ break;
+ case UNLE_EXPR:
+ code = UNLE;
+ break;
+ case UNGT_EXPR:
+ code = UNGT;
+ break;
+ case UNGE_EXPR:
+ code = UNGE;
+ break;
+ case UNEQ_EXPR:
+ code = UNEQ;
+ break;
+ case LTGT_EXPR:
+ code = LTGT;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ return code;
+}
+#endif
+
/* Subroutine of expand_expr. Expand the two operands of a binary
expression EXP0 and EXP1 placing the results in OP0 and OP1.
The value may be stored in TARGET if TARGET is nonzero. The
generating ADDR_EXPR of something that isn't an LVALUE. The only
exception here is STRING_CST. */
if (CONSTANT_CLASS_P (exp))
- return XEXP (expand_expr_constant (exp, 0, modifier), 0);
+ {
+ result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
+ if (modifier < EXPAND_SUM)
+ result = force_operand (result, target);
+ return result;
+ }
/* Everything must be something allowed by is_gimple_addressable. */
switch (TREE_CODE (exp))
{
tree tem = TREE_OPERAND (exp, 0);
if (!integer_zerop (TREE_OPERAND (exp, 1)))
- tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
- tem,
- double_int_to_tree (sizetype, mem_ref_offset (exp)));
+ tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
return expand_expr (tem, target, tmode, modifier);
}
case CONST_DECL:
/* Expand the initializer like constants above. */
- return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
+ result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
+ 0, modifier), 0);
+ if (modifier < EXPAND_SUM)
+ result = force_operand (result, target);
+ return result;
case REALPART_EXPR:
/* The real part of the complex number is always first, therefore
/* If the DECL isn't in memory, then the DECL wasn't properly
marked TREE_ADDRESSABLE, which will be either a front-end
or a tree optimizer bug. */
- gcc_assert (MEM_P (result));
- result = XEXP (result, 0);
- /* ??? Is this needed anymore? */
- if (DECL_P (exp) && !TREE_USED (exp) == 0)
+ if (TREE_ADDRESSABLE (exp)
+ && ! MEM_P (result)
+ && ! targetm.calls.allocate_stack_slots_for_args())
{
- assemble_external (exp);
- TREE_USED (exp) = 1;
+ error ("local frame unavailable (naked function?)");
+ return result;
}
+ else
+ gcc_assert (MEM_P (result));
+ result = XEXP (result, 0);
+
+ /* ??? Is this needed anymore? */
+ if (DECL_P (exp))
+ TREE_USED (exp) = 1;
if (modifier != EXPAND_INITIALIZER
- && modifier != EXPAND_CONST_ADDRESS)
+ && modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_SUM)
result = force_operand (result, target);
return result;
}
of such an object. */
gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
- result = plus_constant (result, bitpos / BITS_PER_UNIT);
+ result = convert_memory_address_addr_space (tmode, result, as);
+ result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
if (modifier < EXPAND_SUM)
result = force_operand (result, target);
}
= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
| (TREE_READONLY (exp)
* TYPE_QUAL_CONST))),
- 0, TREE_ADDRESSABLE (exp), 1);
+ TREE_ADDRESSABLE (exp), 1);
}
store_constructor (exp, target, 0, int_expr_size (exp));
return ret;
}
+/* Try to expand the conditional expression which is represented by
+ TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it
+ succeeds, return the rtl reg which represents the result.
+ Otherwise return NULL_RTX. */
+
+static rtx
+expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
+ tree treeop1 ATTRIBUTE_UNUSED,
+ tree treeop2 ATTRIBUTE_UNUSED)
+{
+#ifdef HAVE_conditional_move
+ rtx insn;
+ rtx op00, op01, op1, op2;
+ enum rtx_code comparison_code;
+ enum machine_mode comparison_mode;
+ gimple srcstmt;
+ rtx temp;
+ tree type = TREE_TYPE (treeop1);
+ int unsignedp = TYPE_UNSIGNED (type);
+ enum machine_mode mode = TYPE_MODE (type);
+
+ temp = assign_temp (type, 0, 1);
+
+ /* If we cannot do a conditional move on the mode, try doing it
+ with the promoted mode. */
+ if (!can_conditionally_move_p (mode))
+ mode = promote_mode (type, mode, &unsignedp);
+
+ if (!can_conditionally_move_p (mode))
+ return NULL_RTX;
+
+ start_sequence ();
+ expand_operands (treeop1, treeop2,
+ temp, &op1, &op2, EXPAND_NORMAL);
+
+ if (TREE_CODE (treeop0) == SSA_NAME
+ && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
+ {
+ tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
+ enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
+ op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
+ op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
+ comparison_mode = TYPE_MODE (type);
+ unsignedp = TYPE_UNSIGNED (type);
+ comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
+ }
+ else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
+ enum tree_code cmpcode = TREE_CODE (treeop0);
+ op00 = expand_normal (TREE_OPERAND (treeop0, 0));
+ op01 = expand_normal (TREE_OPERAND (treeop0, 1));
+ unsignedp = TYPE_UNSIGNED (type);
+ comparison_mode = TYPE_MODE (type);
+ comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
+ }
+ else
+ {
+ op00 = expand_normal (treeop0);
+ op01 = const0_rtx;
+ comparison_code = NE;
+ comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
+ }
+
+ if (GET_MODE (op1) != mode)
+ op1 = gen_lowpart (mode, op1);
+
+ if (GET_MODE (op2) != mode)
+ op2 = gen_lowpart (mode, op2);
+
+ /* Try to emit the conditional move. */
+ insn = emit_conditional_move (temp, comparison_code,
+ op00, op01, comparison_mode,
+ op1, op2, mode,
+ unsignedp);
+
+ /* If we could do the conditional move, emit the sequence,
+ and return. */
+ if (insn)
+ {
+ rtx seq = get_insns ();
+ end_sequence ();
+ emit_insn (seq);
+ return temp;
+ }
+
+ /* Otherwise discard the sequence and fall back to code with
+ branches. */
+ end_sequence ();
+#endif
+ return NULL_RTX;
+}
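+
+/* For illustration (a sketch, not taken from the sources): given
+ x = a < b ? c : d, expand_cond_expr_using_cmove emits roughly
+ (set x (if_then_else (lt a b) c d))
+ instead of the compare-and-branch sequence that the COND_EXPR case
+ in expand_expr_real_2 would otherwise produce. */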
+
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
enum expand_modifier modifier)
if (TYPE_MODE (type) != BLKmode)
target = gen_reg_rtx (TYPE_MODE (type));
else
- target = assign_temp (type, 0, 1, 1);
+ target = assign_temp (type, 1, 1);
}
if (MEM_P (target))
(treeop0))
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
- 0, TYPE_MODE (valtype), treeop0,
+ 0, 0, 0, TYPE_MODE (valtype), treeop0,
type, 0, false);
}
treeop1 = fold_convert_loc (loc, type,
fold_convert_loc (loc, ssizetype,
treeop1));
+ /* If sizetype precision is larger than pointer precision, truncate the
+ offset to have matching modes. */
+ else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
+ treeop1 = fold_convert_loc (loc, type, treeop1);
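+ /* (E.g., on a target whose pointers are narrower than sizetype,
+ say 24-bit pointers with a 32-bit sizetype.) */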
+
case PLUS_EXPR:
/* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
something else, make sure we add the register to the constant and
|| DECL_RTL (treeop1) == stack_pointer_rtx
|| DECL_RTL (treeop1) == arg_pointer_rtx))
{
- tree t = treeop1;
-
- treeop1 = TREE_OPERAND (treeop0, 0);
- TREE_OPERAND (treeop0, 0) = t;
+ gcc_unreachable ();
}
/* If the result is to be ptr_mode and we are adding an integer to
= immed_double_const (TREE_INT_CST_LOW (treeop0),
(HOST_WIDE_INT) 0,
TYPE_MODE (TREE_TYPE (treeop1)));
- op1 = plus_constant (op1, INTVAL (constant_part));
+ op1 = plus_constant (mode, op1, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op1 = force_operand (op1, target);
return REDUCE_BIT_FIELD (op1);
= immed_double_const (TREE_INT_CST_LOW (treeop1),
(HOST_WIDE_INT) 0,
TYPE_MODE (TREE_TYPE (treeop0)));
- op0 = plus_constant (op0, INTVAL (constant_part));
+ op0 = plus_constant (mode, op0, INTVAL (constant_part));
if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
op0 = force_operand (op0, target);
return REDUCE_BIT_FIELD (op0);
/* If the last operand is a CONST_INT, use plus_constant of
the negated constant. Else make the MINUS. */
if (CONST_INT_P (op1))
- return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
+ return REDUCE_BIT_FIELD (plus_constant (mode, op0,
+ -INTVAL (op1)));
else
return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
}
{
enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
this_optab = usmul_widen_optab;
- if (mode == GET_MODE_2XWIDER_MODE (innermode))
+ if (find_widening_optab_handler (this_optab, mode, innermode, 0)
+ != CODE_FOR_nothing)
{
- if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
- {
- if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
- expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
- EXPAND_NORMAL);
- else
- expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
- EXPAND_NORMAL);
- goto binop3;
- }
+ if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
+ expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
+ EXPAND_NORMAL);
+ else
+ expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
+ EXPAND_NORMAL);
+ goto binop3;
}
}
/* Check for a multiplication with matching signedness. */
optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
- if (mode == GET_MODE_2XWIDER_MODE (innermode)
- && TREE_CODE (treeop0) != INTEGER_CST)
+ if (TREE_CODE (treeop0) != INTEGER_CST)
{
- if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
+ if (find_widening_optab_handler (this_optab, mode, innermode, 0)
+ != CODE_FOR_nothing)
{
expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
EXPAND_NORMAL);
unsignedp, this_optab);
return REDUCE_BIT_FIELD (temp);
}
- if (optab_handler (other_optab, mode) != CODE_FOR_nothing
+ if (find_widening_optab_handler (other_optab, mode, innermode, 0)
+ != CODE_FOR_nothing
&& innermode == word_mode)
{
rtx htem, hipart;
return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
case RDIV_EXPR:
+ case MULT_HIGHPART_EXPR:
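+ /* MULT_HIGHPART_EXPR computes the high half of the widened
+ product, e.g. bits 32-63 of a 32x32->64 multiplication. */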
goto binop;
case TRUNC_MOD_EXPR:
VOIDmode, EXPAND_NORMAL);
if (modifier == EXPAND_STACK_PARM)
target = 0;
- temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
+ /* If we have to reduce the result to bitfield precision and the
+ bitfield is unsigned, expand this as an XOR with the appropriate
+ mask constant instead. */
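+ /* (E.g., for a 3-bit unsigned field, ~X becomes X ^ 7, which
+ keeps the result truncated to the field's precision.) */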
+ if (reduce_bit_field && TYPE_UNSIGNED (type))
+ temp = expand_binop (mode, xor_optab, op0,
+ immed_double_int_const
+ (double_int_mask (TYPE_PRECISION (type)), mode),
+ target, 1, OPTAB_LIB_WIDEN);
+ else
+ temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
gcc_assert (temp);
return temp;
and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worth while. */
- /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
- boolean values when we want in all cases to compute both of them. In
- general it is fastest to do TRUTH_AND_EXPR by computing both operands
- as actual zero-or-1 values and then bitwise anding. In cases where
- there cannot be any side effects, better code would be made by
- treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
- how to recognize those cases. */
-
- case TRUTH_AND_EXPR:
- code = BIT_AND_EXPR;
case BIT_AND_EXPR:
- goto binop;
-
- case TRUTH_OR_EXPR:
- code = BIT_IOR_EXPR;
case BIT_IOR_EXPR:
- goto binop;
-
- case TRUTH_XOR_EXPR:
- code = BIT_XOR_EXPR;
case BIT_XOR_EXPR:
goto binop;
emit_label (op1);
return target;
- case TRUTH_NOT_EXPR:
- if (modifier == EXPAND_STACK_PARM)
- target = 0;
- op0 = expand_expr (treeop0, target,
- VOIDmode, EXPAND_NORMAL);
- /* The parser is careful to generate TRUTH_NOT_EXPR
- only with operands that are always zero or one. */
- temp = expand_binop (mode, xor_optab, op0, const1_rtx,
- target, 1, OPTAB_LIB_WIDEN);
- gcc_assert (temp);
- return temp;
-
case COMPLEX_EXPR:
/* Get the rtx code of the operands. */
op0 = expand_normal (treeop0);
return temp;
}
- case VEC_EXTRACT_EVEN_EXPR:
- case VEC_EXTRACT_ODD_EXPR:
- {
- expand_operands (treeop0, treeop1,
- NULL_RTX, &op0, &op1, EXPAND_NORMAL);
- this_optab = optab_for_tree_code (code, type, optab_default);
- temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
- OPTAB_WIDEN);
- gcc_assert (temp);
- return temp;
- }
-
- case VEC_INTERLEAVE_HIGH_EXPR:
- case VEC_INTERLEAVE_LOW_EXPR:
- {
- expand_operands (treeop0, treeop1,
- NULL_RTX, &op0, &op1, EXPAND_NORMAL);
- this_optab = optab_for_tree_code (code, type, optab_default);
- temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
- OPTAB_WIDEN);
- gcc_assert (temp);
- return temp;
- }
-
case VEC_LSHIFT_EXPR:
case VEC_RSHIFT_EXPR:
{
return target;
}
+ case VEC_WIDEN_LSHIFT_HI_EXPR:
+ case VEC_WIDEN_LSHIFT_LO_EXPR:
+ {
+ tree oprnd0 = treeop0;
+ tree oprnd1 = treeop1;
+
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+ target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
+ target, unsignedp);
+ gcc_assert (target);
+ return target;
+ }
+
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
mode = TYPE_MODE (TREE_TYPE (treeop0));
goto binop;
+ case VEC_PERM_EXPR:
+ expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
+ op2 = expand_normal (treeop2);
+
+ /* Careful here: if the target doesn't support integral vector modes,
+ a constant selection vector could wind up smooshed into a normal
+ integral constant. */
+ if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
+ {
+ tree sel_type = TREE_TYPE (treeop2);
+ enum machine_mode vmode
+ = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
+ TYPE_VECTOR_SUBPARTS (sel_type));
+ gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
+ op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
+ gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
+ }
+ else
+ gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
+
+ temp = expand_vec_perm (mode, op0, op1, op2, target);
+ gcc_assert (temp);
+ return temp;
+
case DOT_PROD_EXPR:
{
tree oprnd0 = treeop0;
return temp;
}
+ case COND_EXPR:
+ /* A COND_EXPR with its type being VOID_TYPE represents a
+ conditional jump and is handled in
+ expand_gimple_cond_expr. */
+ gcc_assert (!VOID_TYPE_P (type));
+
+ /* Note that COND_EXPRs whose type is a structure or union
+ are required to be constructed to contain assignments of
+ a temporary variable, so that we can evaluate them here
+ for side effect only. If type is void, we must do likewise. */
+
+ gcc_assert (!TREE_ADDRESSABLE (type)
+ && !ignore
+ && TREE_TYPE (treeop1) != void_type_node
+ && TREE_TYPE (treeop2) != void_type_node);
+
+ temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
+ if (temp)
+ return temp;
+
+ /* If we are not to produce a result, we have no target. Otherwise,
+ if a target was specified use it; it will not be used as an
+ intermediate target unless it is safe. If no target, use a
+ temporary. */
+
+ if (modifier != EXPAND_STACK_PARM
+ && original_target
+ && safe_from_p (original_target, treeop0, 1)
+ && GET_MODE (original_target) == mode
+ && !MEM_P (original_target))
+ temp = original_target;
+ else
+ temp = assign_temp (type, 0, 1);
+
+ do_pending_stack_adjust ();
+ NO_DEFER_POP;
+ op0 = gen_label_rtx ();
+ op1 = gen_label_rtx ();
+ jumpifnot (treeop0, op0, -1);
+ store_expr (treeop1, temp,
+ modifier == EXPAND_STACK_PARM,
+ false);
+
+ emit_jump_insn (gen_jump (op1));
+ emit_barrier ();
+ emit_label (op0);
+ store_expr (treeop2, temp,
+ modifier == EXPAND_STACK_PARM,
+ false);
+
+ emit_label (op1);
+ OK_DEFER_POP;
+ return temp;
+
+ case VEC_COND_EXPR:
+ target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
+ return target;
+
default:
gcc_unreachable ();
}
temp = expand_binop (mode, this_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
gcc_assert (temp);
+ /* Bitwise operations do not need bitfield reduction as we expect
+ their operands to be properly truncated. */
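+ /* (E.g., if both operands are already truncated to a 5-bit field,
+ so are X & Y, X | Y and X ^ Y, hence no extra masking.) */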
+ if (code == BIT_XOR_EXPR
+ || code == BIT_AND_EXPR
+ || code == BIT_IOR_EXPR)
+ return temp;
return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
&& stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
g = SSA_NAME_DEF_STMT (exp);
if (g)
- return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
- modifier, NULL);
+ {
+ rtx r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
+ tmode, modifier, NULL);
+ if (REG_P (r) && !REG_EXPR (r))
+ set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
+ return r;
+ }
ssa_name = exp;
decl_rtl = get_rtx_for_ssa_name (ssa_name);
/* Ensure the variable is marked as used even if it doesn't go
through a parser. */
- if (! TREE_USED (exp))
- {
- assemble_external (exp);
- TREE_USED (exp) = 1;
- }
+ TREE_USED (exp) = 1;
/* Show we haven't gotten RTL for this yet. */
temp = 0;
/* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
- See expand_decl. */
+ ??? We aren't parsing while expanding anymore. */
if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
temp = validize_mem (decl_rtl);
return temp;
}
- /* If the mode of DECL_RTL does not match that of the decl, it
- must be a promoted value. We return a SUBREG of the wanted mode,
- but mark it so that we know that it was already extended. */
- if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
+ /* If the mode of DECL_RTL does not match that of the decl,
+ there are two cases: we are dealing with a BLKmode value
+ that is returned in a register, or we are dealing with
+ a promoted value. In the latter case, return a SUBREG
+ of the wanted mode, but mark it so that we know that it
+ was already extended. */
+ if (REG_P (decl_rtl)
+ && DECL_MODE (exp) != BLKmode
+ && GET_MODE (decl_rtl) != DECL_MODE (exp))
{
enum machine_mode pmode;
tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
}
if (!tmp)
- tmp = build_constructor_from_list (type,
- TREE_VECTOR_CST_ELTS (exp));
+ {
+ VEC(constructor_elt,gc) *v;
+ unsigned i;
+ v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
+ for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
+ CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
+ tmp = build_constructor (type, v);
+ }
return expand_expr (tmp, ignore ? const0_rtx : target,
tmode, modifier);
}
case TARGET_MEM_REF:
{
- addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
+ addr_space_t as
+ = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
struct mem_address addr;
- int icode, align;
+ enum insn_code icode;
+ unsigned int align;
get_address_description (exp, &addr);
op0 = addr_for_mem_ref (&addr, as, true);
temp = gen_rtx_MEM (mode, op0);
set_mem_attributes (temp, exp, 0);
set_mem_addr_space (temp, as);
- align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
- get_object_alignment (exp, BIGGEST_ALIGNMENT));
- if (mode != BLKmode
- && (unsigned) align < GET_MODE_ALIGNMENT (mode)
+ align = get_object_or_type_alignment (exp);
+ if (modifier != EXPAND_WRITE
+ && mode != BLKmode
+ && align < GET_MODE_ALIGNMENT (mode)
/* If the target does not have special handling for unaligned
loads of mode then it can use regular moves for them. */
&& ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing))
{
- rtx reg, insn;
+ struct expand_operand ops[2];
/* We've already validated the memory, and we're creating a
- new pseudo destination. The predicates really can't fail. */
- reg = gen_reg_rtx (mode);
-
- /* Nor can the insn generator. */
- insn = GEN_FCN (icode) (reg, temp);
- gcc_assert (insn != NULL_RTX);
- emit_insn (insn);
-
- return reg;
+ new pseudo destination. The predicates really can't fail,
+ nor can the generator. */
+ create_output_operand (&ops[0], NULL_RTX, mode);
+ create_fixed_operand (&ops[1], temp);
+ expand_insn (icode, 2, ops);
+ return ops[0].value;
}
return temp;
}
case MEM_REF:
{
addr_space_t as
- = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
+ = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
enum machine_mode address_mode;
tree base = TREE_OPERAND (exp, 0);
gimple def_stmt;
- int icode, align;
+ enum insn_code icode;
+ unsigned align;
/* Handle expansion of non-aliased memory with non-BLKmode. That
might end up in a register. */
- if (TREE_CODE (base) == ADDR_EXPR)
+ if (mem_ref_refers_to_non_mem_p (exp))
{
HOST_WIDE_INT offset = mem_ref_offset (exp).low;
tree bit_offset;
+ tree bftype;
base = TREE_OPERAND (base, 0);
- if (!DECL_P (base))
- {
- HOST_WIDE_INT off;
- base = get_addr_base_and_unit_offset (base, &off);
- gcc_assert (base);
- offset += off;
- }
- /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
- decl we must use bitfield operations. */
- if (DECL_P (base)
- && !TREE_ADDRESSABLE (base)
- && DECL_MODE (base) != BLKmode
- && DECL_RTL_SET_P (base)
- && !MEM_P (DECL_RTL (base)))
+ if (offset == 0
+ && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+ && (GET_MODE_BITSIZE (DECL_MODE (base))
+ == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
+ return expand_expr (build1 (VIEW_CONVERT_EXPR,
+ TREE_TYPE (exp), base),
+ target, tmode, modifier);
+ bit_offset = bitsize_int (offset * BITS_PER_UNIT);
+ bftype = TREE_TYPE (base);
+ if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
+ bftype = TREE_TYPE (exp);
+ else
{
- tree bftype;
- if (offset == 0
- && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
- && (GET_MODE_BITSIZE (DECL_MODE (base))
- == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
- return expand_expr (build1 (VIEW_CONVERT_EXPR,
- TREE_TYPE (exp), base),
- target, tmode, modifier);
- bit_offset = bitsize_int (offset * BITS_PER_UNIT);
- bftype = TREE_TYPE (base);
- if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
- bftype = TREE_TYPE (exp);
- return expand_expr (build3 (BIT_FIELD_REF, bftype,
- base,
- TYPE_SIZE (TREE_TYPE (exp)),
- bit_offset),
- target, tmode, modifier);
+ temp = assign_stack_temp (DECL_MODE (base),
+ GET_MODE_SIZE (DECL_MODE (base)));
+ store_expr (base, temp, 0, false);
+ temp = adjust_address (temp, BLKmode, offset);
+ set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
+ return temp;
}
+ return expand_expr (build3 (BIT_FIELD_REF, bftype,
+ base,
+ TYPE_SIZE (TREE_TYPE (exp)),
+ bit_offset),
+ target, tmode, modifier);
}
address_mode = targetm.addr_space.address_mode (as);
base = TREE_OPERAND (exp, 0);
gimple_assign_rhs1 (def_stmt), mask);
TREE_OPERAND (exp, 0) = base;
}
- align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
- get_object_alignment (exp, BIGGEST_ALIGNMENT));
+ align = get_object_or_type_alignment (exp);
op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address_addr_space (address_mode, op0, as);
if (!integer_zerop (TREE_OPERAND (exp, 1)))
set_mem_addr_space (temp, as);
if (TREE_THIS_VOLATILE (exp))
MEM_VOLATILE_P (temp) = 1;
- if (mode != BLKmode
- && (unsigned) align < GET_MODE_ALIGNMENT (mode)
- /* If the target does not have special handling for unaligned
- loads of mode then it can use regular moves for them. */
- && ((icode = optab_handler (movmisalign_optab, mode))
- != CODE_FOR_nothing))
+ if (modifier != EXPAND_WRITE
+ && mode != BLKmode
+ && align < GET_MODE_ALIGNMENT (mode))
{
- rtx reg, insn;
-
- /* We've already validated the memory, and we're creating a
- new pseudo destination. The predicates really can't fail. */
- reg = gen_reg_rtx (mode);
-
- /* Nor can the insn generator. */
- insn = GEN_FCN (icode) (reg, temp);
- emit_insn (insn);
-
- return reg;
+ if ((icode = optab_handler (movmisalign_optab, mode))
+ != CODE_FOR_nothing)
+ {
+ struct expand_operand ops[2];
+
+ /* We've already validated the memory, and we're creating a
+ new pseudo destination. The predicates really can't fail,
+ nor can the generator. */
+ create_output_operand (&ops[0], NULL_RTX, mode);
+ create_fixed_operand (&ops[1], temp);
+ expand_insn (icode, 2, ops);
+ return ops[0].value;
+ }
+ else if (SLOW_UNALIGNED_ACCESS (mode, align))
+ temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
+ 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
+ true, (modifier == EXPAND_STACK_PARM
+ ? NULL_RTX : target),
+ mode, mode);
}
return temp;
}
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep, true);
rtx orig_op0, memloc;
+ bool mem_attrs_from_type = false;
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
orig_op0 = op0
= expand_expr (tem,
(TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+ && COMPLETE_TYPE_P (TREE_TYPE (tem))
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
!= INTEGER_CST)
&& modifier != EXPAND_STACK_PARM
tree nt = build_qualified_type (TREE_TYPE (tem),
(TYPE_QUALS (TREE_TYPE (tem))
| TYPE_QUAL_CONST));
- memloc = assign_temp (nt, 1, 1, 1);
+ memloc = assign_temp (nt, 1, 1);
emit_move_insn (memloc, op0);
op0 = memloc;
+ mem_attrs_from_type = true;
}
if (offset)
gcc_assert (MEM_P (op0));
- address_mode
- = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
+ address_mode = get_address_mode (op0);
if (GET_MODE (offset_rtx) != address_mode)
offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
&& modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER)
/* If the field is volatile, we always want an aligned
- access. Only do this if the access is not already naturally
+ access. Do this in the following two situations:
+ 1. the access is not already naturally
aligned, otherwise "normal" (non-bitfield) volatile fields
- become non-addressable. */
+ become non-addressable.
+ 2. the bitsize is narrower than the access size, so the
+ bitfield must be extracted from the wider access. */
|| (volatilep && flag_strict_volatile_bitfields > 0
- && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
+ || (mode1 != BLKmode
+ && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (mode1 != BLKmode
if (ext_mode == BLKmode)
{
if (target == 0)
- target = assign_temp (type, 0, 1, 1);
+ target = assign_temp (type, 1, 1);
if (bitsize == 0)
return target;
necessarily be constant. */
if (mode == BLKmode)
{
- HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
rtx new_rtx;
- /* If the reference doesn't use the alias set of its type,
- we cannot create the temporary using that type. */
- if (component_uses_parent_alias_set (exp))
- {
- new_rtx = assign_stack_local (ext_mode, size, 0);
- set_mem_alias_set (new_rtx, get_alias_set (exp));
- }
- else
- new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
-
+ new_rtx = assign_stack_temp_for_type (ext_mode,
+ GET_MODE_BITSIZE (ext_mode),
+ type);
emit_move_insn (new_rtx, op0);
op0 = copy_rtx (new_rtx);
PUT_MODE (op0, BLKmode);
- set_mem_attributes (op0, exp, 1);
}
return op0;
if (op0 == orig_op0)
op0 = copy_rtx (op0);
- set_mem_attributes (op0, exp, 0);
+ /* If op0 is a temporary because of forcing to memory, pass only the
+ type to set_mem_attributes so that the original expression is never
+ marked as ADDRESSABLE through MEM_EXPR of the temporary. */
+ if (mem_attrs_from_type)
+ set_mem_attributes (op0, type, 0);
+ else
+ set_mem_attributes (op0, exp, 0);
+
if (REG_P (XEXP (op0, 0)))
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
target
= assign_stack_temp_for_type
(TYPE_MODE (inner_type),
- GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
+ GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
emit_move_insn (target, op0);
op0 = target;
results. */
if (MEM_P (op0))
{
+ enum insn_code icode;
+
op0 = copy_rtx (op0);
if (TYPE_ALIGN_OK (type))
set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
+ else if (mode != BLKmode
+ && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
+ /* If the target does have special handling for unaligned
+ loads of mode then use them. */
+ && ((icode = optab_handler (movmisalign_optab, mode))
+ != CODE_FOR_nothing))
+ {
+ rtx reg, insn;
+
+ op0 = adjust_address (op0, mode, 0);
+ /* We've already validated the memory, and we're creating a
+ new pseudo destination. The predicates really can't
+ fail. */
+ reg = gen_reg_rtx (mode);
+
+ /* Nor can the insn generator. */
+ insn = GEN_FCN (icode) (reg, op0);
+ emit_insn (insn);
+ return reg;
+ }
else if (STRICT_ALIGNMENT
&& mode != BLKmode
&& MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
= MAX (int_size_in_bytes (inner_type),
(HOST_WIDE_INT) GET_MODE_SIZE (mode));
rtx new_rtx
- = assign_stack_temp_for_type (mode, temp_size, 0, type);
+ = assign_stack_temp_for_type (mode, temp_size, type);
rtx new_with_op0_mode
= adjust_address (new_rtx, GET_MODE (op0), 0);
return op0;
- /* Use a compare and a jump for BLKmode comparisons, or for function
- type comparisons is HAVE_canonicalize_funcptr_for_compare. */
-
- /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
- are occassionally created by folding during expansion. */
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- if (! ignore
- && (target == 0
- || modifier == EXPAND_STACK_PARM
- || ! safe_from_p (target, treeop0, 1)
- || ! safe_from_p (target, treeop1, 1)
- /* Make sure we don't have a hard reg (such as function's return
- value) live across basic blocks, if not optimizing. */
- || (!optimize && REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER)))
- target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
-
- if (target)
- emit_move_insn (target, const0_rtx);
-
- op1 = gen_label_rtx ();
- jumpifnot_1 (code, treeop0, treeop1, op1, -1);
-
- if (target)
- emit_move_insn (target, const1_rtx);
-
- emit_label (op1);
- return ignore ? const0_rtx : target;
-
- case STATEMENT_LIST:
- {
- tree_stmt_iterator iter;
-
- gcc_assert (ignore);
-
- for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
- expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
- }
- return const0_rtx;
-
- case COND_EXPR:
- /* A COND_EXPR with its type being VOID_TYPE represents a
- conditional jump and is handled in
- expand_gimple_cond_expr. */
- gcc_assert (!VOID_TYPE_P (type));
-
- /* Note that COND_EXPRs whose type is a structure or union
- are required to be constructed to contain assignments of
- a temporary variable, so that we can evaluate them here
- for side effect only. If type is void, we must do likewise. */
-
- gcc_assert (!TREE_ADDRESSABLE (type)
- && !ignore
- && TREE_TYPE (treeop1) != void_type_node
- && TREE_TYPE (treeop2) != void_type_node);
-
- /* If we are not to produce a result, we have no target. Otherwise,
- if a target was specified use it; it will not be used as an
- intermediate target unless it is safe. If no target, use a
- temporary. */
-
- if (modifier != EXPAND_STACK_PARM
- && original_target
- && safe_from_p (original_target, treeop0, 1)
- && GET_MODE (original_target) == mode
-#ifdef HAVE_conditional_move
- && (! can_conditionally_move_p (mode)
- || REG_P (original_target))
-#endif
- && !MEM_P (original_target))
- temp = original_target;
- else
- temp = assign_temp (type, 0, 0, 1);
-
- do_pending_stack_adjust ();
- NO_DEFER_POP;
- op0 = gen_label_rtx ();
- op1 = gen_label_rtx ();
- jumpifnot (treeop0, op0, -1);
- store_expr (treeop1, temp,
- modifier == EXPAND_STACK_PARM,
- false);
-
- emit_jump_insn (gen_jump (op1));
- emit_barrier ();
- emit_label (op0);
- store_expr (treeop2, temp,
- modifier == EXPAND_STACK_PARM,
- false);
-
- emit_label (op1);
- OK_DEFER_POP;
- return temp;
-
- case VEC_COND_EXPR:
- target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
- return target;
-
case MODIFY_EXPR:
{
tree lhs = treeop0;
case POSTDECREMENT_EXPR:
case LOOP_EXPR:
case EXIT_EXPR:
+ case COMPOUND_LITERAL_EXPR:
/* Lowered by gimplify.c. */
gcc_unreachable ();
return expand_expr_real (treeop0, original_target, tmode,
modifier, alt_rtl);
- case COMPOUND_LITERAL_EXPR:
- {
- /* Initialize the anonymous variable declared in the compound
- literal, then return the variable. */
- tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
-
- /* Create RTL for this variable. */
- if (!DECL_RTL_SET_P (decl))
- {
- if (DECL_HARD_REGISTER (decl))
- /* The user specified an assembler name for this variable.
- Set that up now. */
- rest_of_decl_compilation (decl, 0, 0);
- else
- expand_decl (decl);
- }
-
- return expand_expr_real (decl, original_target, tmode,
- modifier, alt_rtl);
- }
-
default:
return expand_expr_real_2 (&ops, target, tmode, modifier);
}
fold_convert (sizetype, lower_bound));
}
}
+ else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
+ {
+ array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
+ if (TREE_CODE (array) != ADDR_EXPR)
+ return 0;
+ array = TREE_OPERAND (array, 0);
+ if (TREE_CODE (array) != STRING_CST
+ && TREE_CODE (array) != VAR_DECL)
+ return 0;
+ }
else
return 0;
}
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
+
+ /* For vector typed comparisons emit code to generate the desired
+ all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
+ expander for this. */
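+ /* (E.g., a V4SI comparison A < B is expanded as
+ VEC_COND_EXPR <A < B, {-1,-1,-1,-1}, {0,0,0,0}>.) */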
+ if (TREE_CODE (ops->type) == VECTOR_TYPE)
+ {
+ tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
+ tree if_true = constant_boolean_node (true, ops->type);
+ tree if_false = constant_boolean_node (false, ops->type);
+ return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
+ }
+
/* Get the rtx comparison code to use. We know that EXP is a comparison
operation of some type. Some comparisons against 1 and -1 can be
so we just call into the folder and expand its result. */
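+ /* (E.g., a test like (X & 8) != 0 may be rewritten as a single-bit
+ test such as (X >> 3) & 1; the exact form is chosen by
+ fold_single_bit_test.) */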
if ((code == NE || code == EQ)
- && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
- && integer_pow2p (TREE_OPERAND (arg0, 1))
+ && integer_zerop (arg1)
&& (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
{
- tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
- return expand_expr (fold_single_bit_test (loc,
- code == NE ? NE_EXPR : EQ_EXPR,
- arg0, arg1, type),
- target, VOIDmode, EXPAND_NORMAL);
+ gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
+ if (srcstmt
+ && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
+ {
+ enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
+ tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
+ tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
+ gimple_assign_rhs1 (srcstmt),
+ gimple_assign_rhs2 (srcstmt));
+ temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
+ if (temp)
+ return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
+ }
}
if (! get_subtarget (target)
{
struct expand_operand ops[5];
enum machine_mode index_mode = SImode;
- int index_bits = GET_MODE_BITSIZE (index_mode);
rtx op1, op2, index;
if (! HAVE_casesi)
{
if (TYPE_MODE (index_type) != index_mode)
{
- index_type = lang_hooks.types.type_for_size (index_bits, 0);
+ index_type = lang_hooks.types.type_for_mode (index_mode, 0);
index_expr = fold_convert (index_type, index_expr);
}
const_vector_from_tree (tree exp)
{
rtvec v;
- int units, i;
- tree link, elt;
+ unsigned i;
+ int units;
+ tree elt;
enum machine_mode inner, mode;
mode = TYPE_MODE (TREE_TYPE (exp));
v = rtvec_alloc (units);
- link = TREE_VECTOR_CST_ELTS (exp);
- for (i = 0; link; link = TREE_CHAIN (link), ++i)
+ for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
{
- elt = TREE_VALUE (link);
+ elt = VECTOR_CST_ELT (exp, i);
if (TREE_CODE (elt) == REAL_CST)
RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
inner);
}
- /* Initialize remaining elements to 0. */
- for (; i < units; ++i)
- RTVEC_ELT (v, i) = CONST0_RTX (inner);
-
return gen_rtx_CONST_VECTOR (mode, v);
}