the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
-
#include "config.h"
#include "system.h"
#include "machmode.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
-/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
+/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#ifdef PUSH_ROUNDING
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
-#define PUSH_ARGS_REVERSED /* If it's last to first */
+#define PUSH_ARGS_REVERSED /* If it's last to first. */
#endif
#endif
#define CASE_VECTOR_PC_RELATIVE 0
#endif
+/* Hook called by safe_from_p for language-specific tree codes.  It is
+   up to the language front-end to install a hook if it has any such
+   codes that safe_from_p needs to know about.  Since safe_from_p will
+   recursively explore the TREE_OPERANDs of an expression, this hook
+   should not reexamine those pieces.  This routine may recursively
+   call safe_from_p; it should always pass `0' as the TOP_P
+   parameter.  */
+int (*lang_safe_from_p) PARAMS ((rtx, tree));
+
/* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject
rtx to_addr;
int autinc_to;
int explicit_inc_to;
- int to_struct;
- int to_readonly;
rtx from;
rtx from_addr;
int autinc_from;
int explicit_inc_from;
- int from_struct;
- int from_readonly;
- int len;
- int offset;
+ unsigned HOST_WIDE_INT len;
+ HOST_WIDE_INT offset;
int reverse;
};
rtx to_addr;
int autinc_to;
int explicit_inc_to;
- int to_struct;
- int len;
- int offset;
+ unsigned HOST_WIDE_INT len;
+ HOST_WIDE_INT offset;
int reverse;
};
static rtx get_push_address PARAMS ((int));
static rtx enqueue_insn PARAMS ((rtx, rtx));
-static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
+static unsigned HOST_WIDE_INT move_by_pieces_ninsns
+ PARAMS ((unsigned HOST_WIDE_INT,
+ unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *));
-static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
+static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
+ unsigned int));
static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
enum machine_mode,
struct clear_by_pieces *));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
- tree, tree, unsigned int, int));
+ tree, tree, unsigned int, int,
+ int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
-static int safe_from_p PARAMS ((rtx, tree, int));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static int readonly_fields_p PARAMS ((tree));
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
-/* If we are optimizing for space (-Os), cut down the default move ratio */
+/* If we are optimizing for space (-Os), cut down the default move ratio. */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
- to perform a structure copy. */
+ to perform a structure copy. */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
(move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
return;
}
+ if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
+ {
+ if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
+ abort ();
+
+ if (VECTOR_MODE_P (to_mode))
+ from = gen_rtx_SUBREG (to_mode, from, 0);
+ else
+ to = gen_rtx_SUBREG (from_mode, to, 0);
+
+ emit_move_insn (to, from);
+ return;
+ }
+
+ if (to_real != from_real)
+ abort ();
+
if (to_real)
{
- rtx value;
+ rtx value, insns;
if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
{
return;
}
}
-
+
#ifdef HAVE_trunchfqf2
if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
{
case TFmode:
libcall = extendsftf2_libfunc;
break;
-
+
default:
break;
}
case TFmode:
libcall = extenddftf2_libfunc;
break;
-
+
default:
break;
}
case DFmode:
libcall = truncxfdf2_libfunc;
break;
-
+
default:
break;
}
case DFmode:
libcall = trunctfdf2_libfunc;
break;
-
+
default:
break;
}
break;
-
+
default:
break;
}
/* This conversion is not implemented yet. */
abort ();
- value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
+ start_sequence ();
+ value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
1, from, from_mode);
- emit_move_insn (to, value);
+ insns = get_insns ();
+ end_sequence ();
+ emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
+ from));
return;
}
return;
}
- /* Handle pointer conversion */ /* SPEE 900220 */
+ /* Handle pointer conversion. */ /* SPEE 900220. */
if (to_mode == PQImode)
{
if (from_mode != QImode)
else
{
#ifdef HAVE_extendpsisi2
- if (HAVE_extendpsisi2)
+ if (! unsignedp && HAVE_extendpsisi2)
{
emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
return;
}
#endif /* HAVE_extendpsisi2 */
+#ifdef HAVE_zero_extendpsisi2
+ if (unsignedp && HAVE_zero_extendpsisi2)
+ {
+ emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
+ return;
+ }
+#endif /* HAVE_zero_extendpsisi2 */
abort ();
}
}
}
/* No suitable intermediate mode.
- Generate what we need with shifts. */
+ Generate what we need with shifts. */
shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
- GET_MODE_BITSIZE (from_mode), 0);
from = gen_lowpart (to_mode, force_reg (from_mode, from));
tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
to, unsignedp);
- tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
+ tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
to, unsignedp);
if (tmp != to)
emit_move_insn (to, tmp);
}
}
- /* Support special truncate insns for certain modes. */
+ /* Support special truncate insns for certain modes. */
if (from_mode == DImode && to_mode == SImode)
{
if (GET_MODE (x) != VOIDmode)
oldmode = GET_MODE (x);
-
+
if (mode == oldmode)
return x;
return temp;
}
\f
-
/* This macro is used to determine what the largest unit size that
- move_by_pieces can use is. */
+ move_by_pieces can use is. */
/* MOVE_MAX_PIECES is the number of bytes at a time which we can
move efficiently, as opposed to MOVE_MAX which is the maximum
- number of bytes we can move with a single instruction. */
+ number of bytes we can move with a single instruction. */
#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
void
move_by_pieces (to, from, len, align)
rtx to, from;
- int len;
+ unsigned HOST_WIDE_INT len;
unsigned int align;
{
struct move_by_pieces data;
if (data.reverse) data.offset = len;
data.len = len;
- data.to_struct = MEM_IN_STRUCT_P (to);
- data.from_struct = MEM_IN_STRUCT_P (from);
- data.to_readonly = RTX_UNCHANGING_P (to);
- data.from_readonly = RTX_UNCHANGING_P (from);
-
/* If copying requires more than two move insns,
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
if (!(data.autinc_from && data.autinc_to)
&& move_by_pieces_ninsns (len, align) > 2)
{
- /* Find the mode of the largest move... */
+ /* Find the mode of the largest move... */
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
if (GET_MODE_SIZE (tmode) < max_size)
/* Return number of insns required to move L bytes by pieces.
ALIGN (in bytes) is maximum alignment we can assume. */
-static int
+static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
- unsigned int l;
+ unsigned HOST_WIDE_INT l;
unsigned int align;
{
- register int n_insns = 0;
- unsigned int max_size = MOVE_MAX + 1;
+ unsigned HOST_WIDE_INT n_insns = 0;
+ unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
enum machine_mode mode;
struct move_by_pieces *data;
{
- register int size = GET_MODE_SIZE (mode);
- register rtx to1, from1;
+ unsigned int size = GET_MODE_SIZE (mode);
+ rtx to1, from1;
while (data->len >= size)
{
- if (data->reverse) data->offset -= size;
-
- to1 = (data->autinc_to
- ? gen_rtx_MEM (mode, data->to_addr)
- : copy_rtx (change_address (data->to, mode,
- plus_constant (data->to_addr,
- data->offset))));
- MEM_IN_STRUCT_P (to1) = data->to_struct;
- RTX_UNCHANGING_P (to1) = data->to_readonly;
-
- from1
- = (data->autinc_from
- ? gen_rtx_MEM (mode, data->from_addr)
- : copy_rtx (change_address (data->from, mode,
- plus_constant (data->from_addr,
- data->offset))));
- MEM_IN_STRUCT_P (from1) = data->from_struct;
- RTX_UNCHANGING_P (from1) = data->from_readonly;
+ if (data->reverse)
+ data->offset -= size;
+
+ if (data->autinc_to)
+ {
+ to1 = gen_rtx_MEM (mode, data->to_addr);
+ MEM_COPY_ATTRIBUTES (to1, data->to);
+ }
+ else
+ to1 = change_address (data->to, mode,
+ plus_constant (data->to_addr, data->offset));
+
+ if (data->autinc_from)
+ {
+ from1 = gen_rtx_MEM (mode, data->from_addr);
+ MEM_COPY_ATTRIBUTES (from1, data->from);
+ }
+ else
+ from1 = change_address (data->from, mode,
+ plus_constant (data->from_addr, data->offset));
if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
emit_insn ((*genfun) (to1, from1));
+
if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
- if (! data->reverse) data->offset += size;
+ if (! data->reverse)
+ data->offset += size;
data->len -= size;
}
To avoid this problem we go ahead and emit code to copy X, Y &
SIZE into new pseudos. We can then place those new pseudos
into an RTL_EXPR and use them later, even after a call to
- emit_queue.
+ emit_queue.
Note this is not strictly needed for library calls since they
do not call emit_queue before loading their arguments. However,
examine the return value from memcpy.
For targets where libcalls and normal calls have different conventions
- for returning pointers, we could end up generating incorrect code.
+ for returning pointers, we could end up generating incorrect code.
So instead of using a libcall sequence we build up a suitable
CALL_EXPR and expand the call in the normal fashion. */
fntype = build_pointer_type (void_type_node);
fntype = build_function_type (fntype, NULL_TREE);
fn = build_decl (FUNCTION_DECL, fn, fntype);
- ggc_add_tree_root (&fn, 1);
+ ggc_add_tree_root (&fn, 1);
DECL_EXTERNAL (fn) = 1;
TREE_PUBLIC (fn) = 1;
DECL_ARTIFICIAL (fn) = 1;
pop_obstacks ();
}
- /* We need to make an argument list for the function call.
+ /* We need to make an argument list for the function call.
memcpy has three arguments, the first two are void * addresses and
the last is a size_t byte count for the copy. */
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, y, Pmode, x, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node), size,
TREE_UNSIGNED (integer_type_node)),
{
int i;
#ifdef HAVE_load_multiple
- rtx pat;
+ rtx pat;
rtx last;
#endif
The number of registers to be filled is NREGS. SIZE indicates the number
of bytes in the object X. */
-
void
move_block_from_reg (regno, x, nregs, size)
int regno;
{
int i;
#ifdef HAVE_store_multiple
- rtx pat;
+ rtx pat;
rtx last;
#endif
enum machine_mode mode;
gen_rtx_REG (mode, regno));
return;
}
-
+
/* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
to the left before storing to memory. Note that the previous test
doesn't handle all cases (e.g. SIZE == 3). */
else
start = 1;
- tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
+ tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
/* If we won't be loading directly from memory, protect the real source
from strange tricks we might play. */
src = orig_src;
- if (GET_CODE (src) != MEM)
+ if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
{
- if (GET_CODE (src) == VOIDmode)
+ if (GET_MODE (src) == VOIDmode)
src = gen_reg_rtx (GET_MODE (dst));
else
src = gen_reg_rtx (GET_MODE (orig_src));
else
abort ();
}
+ else if ((CONSTANT_P (src)
+ && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
+ || (GET_CODE (src) == REG && GET_MODE (src) == mode))
+ tmps[i] = src;
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
bytepos * BITS_PER_UNIT, 1, NULL_RTX,
tmps[i], 0, OPTAB_WIDEN);
}
- emit_queue();
+ emit_queue ();
/* Copy the extracted pieces into the proper (probable) hard regs. */
for (i = start; i < XVECLEN (dst, 0); i++)
else
start = 1;
- tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
+ tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
/* Copy the (probable) hard regs into pseudos. */
for (i = start; i < XVECLEN (src, 0); i++)
tmps[i] = gen_reg_rtx (GET_MODE (reg));
emit_move_insn (tmps[i], reg);
}
- emit_queue();
+ emit_queue ();
/* If we won't be storing directly into memory, protect the real destination
from strange tricks we might play. */
/* Make life a bit easier for combine. */
emit_move_insn (dst, const0_rtx);
}
- else if (! MEM_IN_STRUCT_P (dst))
- {
- /* store_bit_field requires that memory operations have
- mem_in_struct_p set; we might not. */
-
- dst = copy_rtx (orig_dst);
- MEM_SET_IN_STRUCT_P (dst, 1);
- }
/* Process the pieces. */
for (i = start; i < XVECLEN (src, 0); i++)
mode, tmps[i], align, ssize);
}
- emit_queue();
+ emit_queue ();
/* Copy from the pseudo into the (probable) hard reg. */
if (GET_CODE (dst) == REG)
The primary purpose of this routine is to handle functions
that return BLKmode structures in registers. Some machines
(the PA for example) want to return all small structures
- in registers regardless of the structure's alignment. */
+ in registers regardless of the structure's alignment. */
rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
preserve_temp_slots (tgtblk);
}
-
+
/* This code assumes srcreg is at least a full word. If it isn't,
copy it into a new pseudo which is a full word. */
if (GET_MODE (srcreg) != BLKmode
= (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
/* Copy the structure BITSIZE bites at a time.
-
+
We could probably emit more efficient code for machines which do not use
strict alignment, but it doesn't seem worth the effort at the current
time. */
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
{
- /* We need a new source operand each time xbitpos is on a
+ /* We need a new source operand each time xbitpos is on a
word boundary and when xbitpos == big_endian_correction
(the first time through). */
if (xbitpos % BITS_PER_WORD == 0
a word boundary. */
if (bitpos % BITS_PER_WORD == 0)
dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
-
+
/* Use xbitpos for the source extraction (right justified) and
xbitpos for the destination store (left justified). */
store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
{
if (GET_CODE (reg) != REG
|| REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort();
+ abort ();
*call_fusage
= gen_rtx_EXPR_LIST (VOIDmode,
static void
clear_by_pieces (to, len, align)
rtx to;
- int len;
+ unsigned HOST_WIDE_INT len;
unsigned int align;
{
struct clear_by_pieces data;
rtx to_addr = XEXP (to, 0);
- unsigned int max_size = MOVE_MAX_PIECES + 1;
+ unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
enum machine_mode mode = VOIDmode, tmode;
enum insn_code icode;
data.explicit_inc_to = 0;
data.reverse
= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
- if (data.reverse) data.offset = len;
+ if (data.reverse)
+ data.offset = len;
data.len = len;
- data.to_struct = MEM_IN_STRUCT_P (to);
-
/* If copying requires more than two move insns,
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
if (!data.autinc_to
&& move_by_pieces_ninsns (len, align) > 2)
{
- /* Determine the main mode we'll be using */
+ /* Determine the main mode we'll be using. */
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
if (GET_MODE_SIZE (tmode) < max_size)
data.autinc_to = 1;
data.explicit_inc_to = -1;
}
- if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
+
+ if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
+ && ! data.autinc_to)
{
data.to_addr = copy_addr_to_reg (to_addr);
data.autinc_to = 1;
data.explicit_inc_to = 1;
}
- if (!data.autinc_to && CONSTANT_P (to_addr))
+
+ if ( !data.autinc_to && CONSTANT_P (to_addr))
data.to_addr = copy_addr_to_reg (to_addr);
}
enum machine_mode mode;
struct clear_by_pieces *data;
{
- register int size = GET_MODE_SIZE (mode);
- register rtx to1;
+ unsigned int size = GET_MODE_SIZE (mode);
+ rtx to1;
while (data->len >= size)
{
- if (data->reverse) data->offset -= size;
+ if (data->reverse)
+ data->offset -= size;
- to1 = (data->autinc_to
- ? gen_rtx_MEM (mode, data->to_addr)
- : copy_rtx (change_address (data->to, mode,
- plus_constant (data->to_addr,
- data->offset))));
- MEM_IN_STRUCT_P (to1) = data->to_struct;
+ if (data->autinc_to)
+ {
+ to1 = gen_rtx_MEM (mode, data->to_addr);
+ MEM_COPY_ATTRIBUTES (to1, data->to);
+ }
+ else
+ to1 = change_address (data->to, mode,
+ plus_constant (data->to_addr, data->offset));
if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
emit_insn ((*genfun) (to1, const0_rtx));
+
if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
- if (! data->reverse) data->offset += size;
+ if (! data->reverse)
+ data->offset += size;
data->len -= size;
}
#endif
rtx retval = 0;
- if (GET_MODE (object) == BLKmode)
+ /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
+ just move a zero. Otherwise, do this a piece at a time. */
+ if (GET_MODE (object) != BLKmode
+ && GET_CODE (size) == CONST_INT
+ && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
+ emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
+ else
{
object = protect_from_queue (object, 1);
size = protect_from_queue (size, 0);
size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
-
#ifdef TARGET_MEM_FUNCTIONS
/* It is incorrect to use the libcall calling conventions to call
memset in this context.
For targets where libcalls and normal calls have different
conventions for returning pointers, we could end up generating
- incorrect code.
+ incorrect code.
So instead of using a libcall sequence we build up a suitable
CALL_EXPR and expand the call in the normal fashion. */
pop_obstacks ();
}
- /* We need to make an argument list for the function call.
+ /* We need to make an argument list for the function call.
memset has three arguments, the first is a void * addresses, the
second a integer with the initialization value, the last is a
object));
TREE_CHAIN (arg_list)
= build_tree_list (NULL_TREE,
- make_tree (integer_type_node, const0_rtx));
+ make_tree (integer_type_node, const0_rtx));
TREE_CHAIN (TREE_CHAIN (arg_list))
= build_tree_list (NULL_TREE, make_tree (sizetype, size));
TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
- emit_library_call (bzero_libfunc, 0,
+ emit_library_call (bzero_libfunc, LCT_NORMAL,
VOIDmode, 2, object, Pmode, size,
TYPE_MODE (integer_type_node));
#endif
}
}
- else
- emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
return retval;
}
unsigned int i;
if (mode >= MAX_MACHINE_MODE)
- abort ();
+ abort ();
if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
return
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_imagpart (submode, y)));
#endif
}
memory and reload. FIXME, we should see about using extract and
insert on integer registers, but complex short and complex char
variables should be rarely used. */
- if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
+ if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
&& (reload_in_progress | reload_completed) == 0)
{
int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
? MODE_FLOAT : MODE_INT);
- enum machine_mode reg_mode =
+ enum machine_mode reg_mode =
mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
if (reg_mode != BLKmode)
rtx last_insn = 0;
rtx seq, inner;
int need_clobber;
-
+
#ifdef PUSH_ROUNDING
/* If X is a push on the stack, do the push now and replace
x = change_address (x, VOIDmode, stack_pointer_rtx);
}
#endif
-
+
/* If we are in reload, see if either operand is a MEM whose address
is scheduled for replacement. */
if (reload_in_progress && GET_CODE (x) == MEM
need_clobber = 0;
for (i = 0;
- i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+ i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
i++)
{
rtx xpart = operand_subword (x, i, 1, mode);
{
if (GET_CODE (size) == CONST_INT)
temp = plus_constant (virtual_outgoing_args_rtx,
- - INTVAL (size) - (below ? 0 : extra));
+ -INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- negate_rtx (Pmode, plus_constant (size, extra)));
+ negate_rtx (Pmode, plus_constant (size, extra)));
else
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
static rtx
get_push_address (size)
- int size;
+ int size;
{
register rtx temp;
int used = partial * UNITS_PER_WORD;
int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
int skip;
-
+
if (size == 0)
abort ();
if (current_function_check_memory_usage && ! in_check_memory_usage)
{
rtx temp;
-
+
in_check_memory_usage = 1;
- temp = get_push_address (INTVAL(size) - used);
+ temp = get_push_address (INTVAL (size) - used);
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- temp, Pmode,
- XEXP (xinner, 0), Pmode,
- GEN_INT (INTVAL(size) - used),
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
+ Pmode, XEXP (xinner, 0), Pmode,
+ GEN_INT (INTVAL (size) - used),
TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
- temp, Pmode,
- GEN_INT (INTVAL(size) - used),
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
+ Pmode, GEN_INT (INTVAL (size) - used),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
TYPE_MODE (integer_type_node));
}
}
else
-#endif /* PUSH_ROUNDING */
+#endif /* PUSH_ROUNDING */
{
+ rtx target;
+
/* Otherwise make space on the stack and copy the data
to the address of that space. */
skip));
if (current_function_check_memory_usage && ! in_check_memory_usage)
{
- rtx target;
-
in_check_memory_usage = 1;
target = copy_to_reg (temp);
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
target, Pmode,
XEXP (xinner, 0), Pmode,
size, TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
target, Pmode,
size, TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
in_check_memory_usage = 0;
}
+ target = gen_rtx_MEM (BLKmode, temp);
+
+ if (type != 0)
+ {
+ set_mem_attributes (target, type, 1);
+ /* Function incoming arguments may overlap with sibling call
+ outgoing arguments and we cannot allow reordering of reads
+ from function arguments with stores to outgoing arguments
+ of sibling calls. */
+ MEM_ALIAS_SET (target) = 0;
+ }
+
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
&& MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
{
- move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
- INTVAL (size), align);
+ move_by_pieces (target, xinner, INTVAL (size), align);
goto ret;
}
else
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
- rtx target = gen_rtx_MEM (BLKmode, temp);
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
to force it to pop the bcopy-arguments right away. */
NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memcpy_libfunc, 0,
+ emit_library_call (memcpy_libfunc, LCT_NORMAL,
VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
size,
{
rtx addr;
rtx target = NULL_RTX;
+ rtx dest;
/* Push padding now if padding above and stack grows down,
or if padding below and stack grows up.
if (GET_CODE (args_so_far) == CONST_INT)
addr
= memory_address (mode,
- plus_constant (args_addr,
+ plus_constant (args_addr,
INTVAL (args_so_far)));
- else
+ else
addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
args_so_far));
target = addr;
}
- emit_move_insn (gen_rtx_MEM (mode, addr), x);
+ dest = gen_rtx_MEM (mode, addr);
+ if (type != 0)
+ {
+ set_mem_attributes (dest, type, 1);
+ /* Function incoming arguments may overlap with sibling call
+ outgoing arguments and we cannot allow reordering of reads
+ from function arguments with stores to outgoing arguments
+ of sibling calls. */
+ MEM_ALIAS_SET (dest) = 0;
+ }
+
+ emit_move_insn (dest, x);
if (current_function_check_memory_usage && ! in_check_memory_usage)
{
target = get_push_address (GET_MODE_SIZE (mode));
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- target, Pmode,
- XEXP (x, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
+ Pmode, XEXP (x, 0), Pmode,
GEN_INT (GET_MODE_SIZE (mode)),
TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
- target, Pmode,
- GEN_INT (GET_MODE_SIZE (mode)),
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
+ Pmode, GEN_INT (GET_MODE_SIZE (mode)),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
TYPE_MODE (integer_type_node));
if (extra && args_addr == 0 && where_pad == stack_direction)
anti_adjust_stack (GEN_INT (extra));
-
+
if (alignment_pad && args_addr == 0)
anti_adjust_stack (alignment_pad);
}
&& GET_MODE (to_rtx) == BLKmode
&& GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
&& bitsize
- && (bitpos % bitsize) == 0
+ && (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
&& alignment == GET_MODE_ALIGNMENT (mode1))
{
size *= GET_MODE_SIZE (best_mode);
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- to_addr, Pmode,
+ emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, to_addr, Pmode,
GEN_INT (size), TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* If this is a varying-length object, we must get the address of
/* Copy the rights of the bitmap. */
if (current_function_check_memory_usage)
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- XEXP (to_rtx, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memcpy_libfunc, 0,
+ emit_library_call (memcpy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
SUBREG_PROMOTED_UNSIGNED_P (target)),
exp);
}
-
+
temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* If TEMP is a volatile MEM and we want a result value, make
&& GET_CODE (target) == MEM
&& AGGREGATE_TYPE_P (TREE_TYPE (exp)))
{
+ in_check_memory_usage = 1;
if (GET_CODE (temp) == MEM)
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- XEXP (target, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (target, 0), Pmode,
XEXP (temp, 0), Pmode,
expr_size (exp), TYPE_MODE (sizetype));
else
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- XEXP (target, 0), Pmode,
+ emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (target, 0), Pmode,
expr_size (exp), TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO),
+ GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* If value was not generated in the target, store it there.
if (GET_CODE (copy_size_rtx) == CONST_INT)
{
addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
- size = plus_constant (size, - TREE_STRING_LENGTH (exp));
+ size = plus_constant (size, -TREE_STRING_LENGTH (exp));
align = MIN (align, (BITS_PER_UNIT
* (INTVAL (copy_size_rtx)
& - INTVAL (copy_size_rtx))));
if (size != const0_rtx)
{
+ rtx dest = gen_rtx_MEM (BLKmode, addr);
+
+ MEM_COPY_ATTRIBUTES (dest, target);
+
/* Be sure we can write on ADDR. */
+ in_check_memory_usage = 1;
if (current_function_check_memory_usage)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
addr, Pmode,
size, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO),
+ GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
- clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
+ in_check_memory_usage = 0;
+ clear_storage (dest, size, align);
}
if (label)
&& ! (GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
return copy_to_reg (target);
-
+
else
return target;
}
return 0;
return 1;
-
+
default:
return 0;
}
TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
TYPE is the type of the CONSTRUCTOR, not the element type.
ALIGN and CLEARED are as for store_constructor.
+ ALIAS_SET is the alias set to use for any stores.
This provides a recursive shortcut back to store_constructor when it isn't
necessary to go through store_field. This is so that we can pass through
static void
store_constructor_field (target, bitsize, bitpos,
- mode, exp, type, align, cleared)
+ mode, exp, type, align, cleared, alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
tree exp, type;
unsigned int align;
int cleared;
+ int alias_set;
{
if (TREE_CODE (exp) == CONSTRUCTOR
&& bitpos % BITS_PER_UNIT == 0
? BLKmode : VOIDmode,
plus_constant (XEXP (target, 0),
bitpos / BITS_PER_UNIT));
+
+ MEM_ALIAS_SET (target) = alias_set;
store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
}
else
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
- int_size_in_bytes (type), 0);
+ int_size_in_bytes (type), alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
/* If the constructor has fewer fields than the structure
or if we are initializing the structure to mostly zeros,
- clear the whole structure first. */
+		     clear the whole structure first.  Don't do this if TARGET is a
+		     register whose mode size isn't equal to SIZE, since clear_storage
+		     can't handle this case.  */
else if (size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
- || mostly_zeros_p (exp)))
+ || mostly_zeros_p (exp))
+ && (GET_CODE (target) != REG
+ || GET_MODE_SIZE (GET_MODE (target)) == size))
{
if (! cleared)
clear_storage (target, GEN_INT (size), align);
}
else
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
-
+
if (offset)
{
rtx offset_rtx;
if (GET_CODE (to_rtx) != MEM)
abort ();
- if (GET_MODE (offset_rtx) != ptr_mode)
- {
+ if (GET_MODE (offset_rtx) != ptr_mode)
+ {
#ifdef POINTERS_EXTEND_UNSIGNED
offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
}
#endif
store_constructor_field (to_rtx, bitsize, bitpos, mode,
- TREE_VALUE (elt), type, align, cleared);
+ TREE_VALUE (elt), type, align, cleared,
+ DECL_NONADDRESSABLE_P (field)
+ ? MEM_ALIAS_SET (to_rtx)
+ : get_alias_set (TREE_TYPE (field)));
}
}
else if (TREE_CODE (type) == ARRAY_TYPE)
register int i;
int need_to_clear;
tree domain = TYPE_DOMAIN (type);
- HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
- HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
tree elttype = TREE_TYPE (type);
+ int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
+ && host_integerp (TYPE_MAX_VALUE (domain), 0));
+ HOST_WIDE_INT minelt;
+ HOST_WIDE_INT maxelt;
+
+ /* If we have constant bounds for the range of the type, get them. */
+ if (const_bounds_p)
+ {
+ minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
+ maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
+ }
/* If the constructor has fewer elements than the array,
clear the whole array first. Similarly if this is
else
{
HOST_WIDE_INT count = 0, zero_count = 0;
- need_to_clear = 0;
+ need_to_clear = ! const_bounds_p;
+
/* This loop is a more accurate version of the loop in
mostly_zeros_p (it handles RANGE_EXPR in an index).
It is also needed to check for missing elements. */
for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE;
+ elt != NULL_TREE && ! need_to_clear;
elt = TREE_CHAIN (elt))
{
tree index = TREE_PURPOSE (elt);
}
else
this_node_count = 1;
+
count += this_node_count;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += this_node_count;
}
+
/* Clear the entire array first if there are any missing elements,
or if the incidence of zero elements is >= 75%. */
- if (count < maxelt - minelt + 1
- || 4 * zero_count >= 3 * count)
+ if (! need_to_clear
+ && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
+
if (need_to_clear && size > 0)
{
if (! cleared)
tree position;
/* If the range is constant and "small", unroll the loop. */
- if (host_integerp (lo_index, 0)
+ if (const_bounds_p
+ && host_integerp (lo_index, 0)
&& host_integerp (hi_index, 0)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
- store_constructor_field (target, bitsize, bitpos, mode,
- value, type, align, cleared);
+ store_constructor_field
+ (target, bitsize, bitpos, mode, value, type, align,
+ cleared,
+ TYPE_NONALIASED_COMPONENT (type)
+ ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
}
}
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
store_constructor_field (target, bitsize, bitpos, mode, value,
- type, align, cleared);
+ type, align, cleared,
+ TYPE_NONALIASED_COMPONENT (type)
+ ? MEM_ALIAS_SET (target) :
+ get_alias_set (elttype));
+
}
}
}
- /* Set constructor assignments */
+ /* Set constructor assignments. */
else if (TREE_CODE (type) == SET_TYPE)
{
tree elt = CONSTRUCTOR_ELTS (exp);
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */
-
+
/* Check for all zeros. */
if (elt == NULL_TREE && size > 0)
{
/* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
are "complicated" (more than one range), initialize (the
- constant parts) by copying from a constant. */
+ constant parts) by copying from a constant. */
if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
|| (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
{
to_rtx = plus_constant (XEXP (target, 0), offset);
to_rtx = change_address (target, mode, to_rtx);
}
- else if (offset == 0)
+ else if (offset == 0)
to_rtx = target;
else
abort ();
- tree_low_cst (TREE_PURPOSE (elt), 0) + 1
!= (HOST_WIDE_INT) nbits))))
clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
-
+
for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
{
- /* start of range of element or NULL */
+ /* Start of range of element or NULL. */
tree startbit = TREE_PURPOSE (elt);
- /* end of range of element, or element value */
+ /* End of range of element, or element value. */
tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
HOST_WIDE_INT startb, endb;
bitlength_rtx = expand_expr (bitlength,
NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
- /* handle non-range tuple element like [ expr ] */
+ /* Handle non-range tuple element like [ expr ]. */
if (startbit == NULL_TREE)
{
startbit = save_expr (endbit);
startbit = size_binop (MINUS_EXPR, startbit, domain_min);
endbit = size_binop (MINUS_EXPR, endbit, domain_min);
}
- startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
+ startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS);
- endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
+ endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS);
if (REG_P (target))
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
&& (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
{
- emit_library_call (memset_libfunc, 0,
+ emit_library_call (memset_libfunc, LCT_NORMAL,
VOIDmode, 3,
plus_constant (XEXP (targetx, 0),
startb / BITS_PER_UNIT),
else
#endif
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
- 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
- bitlength_rtx, TYPE_MODE (sizetype),
+ LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
+ Pmode, bitlength_rtx, TYPE_MODE (sizetype),
startbit_rtx, TYPE_MODE (sizetype),
endbit_rtx, TYPE_MODE (sizetype));
In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
ALIGN is the alignment that TARGET is known to have.
- TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
+ TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
ALIAS_SET is the alias set for the destination. This value will
(in general) be different from that for TARGET, since TARGET is a
align >>= 1;
emit_block_move (target, temp,
- GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT),
+ bitsize == -1 ? expr_size (exp)
+ : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT),
align);
return value_mode == VOIDmode ? const0_rtx : target;
else
*pbitsize = GET_MODE_BITSIZE (mode);
}
-
+
if (size_tree != 0)
{
if (! host_integerp (size_tree, 1))
force_operand (XEXP (XEXP (value, 0), 1), 0),
target, 0, OPTAB_LIB_WIDEN);
}
-
+
tmp = force_operand (XEXP (value, 0), subtarget);
return expand_binop (GET_MODE (value), binoptab, tmp,
force_operand (op2, NULL_RTX),
It is always safe for this routine to return zero since it merely
searches for optimization opportunities. */
-static int
+int
safe_from_p (x, exp, top_p)
rtx x;
tree exp;
int rtn;
save_expr_count = 0;
- save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
+ save_expr_size = ARRAY_SIZE (save_expr_trees);
save_expr_rewritten = &save_expr_trees[0];
rtn = safe_from_p (x, exp, 1);
where it is so we can turn it back in the top-level safe_from_p()
when we're done. */
- /* For now, don't bother re-sizing the array. */
+ /* For now, don't bother re-sizing the array. */
if (save_expr_count >= save_expr_size)
return 0;
save_expr_rewritten[save_expr_count++] = exp;
- nops = tree_code_length[(int) SAVE_EXPR];
+ nops = TREE_CODE_LENGTH (SAVE_EXPR);
for (i = 0; i < nops; i++)
{
tree operand = TREE_OPERAND (exp, i);
case METHOD_CALL_EXPR:
/* This takes a rtx argument, but shouldn't appear here. */
abort ();
-
+
default:
break;
}
if (exp_rtl)
break;
- nops = tree_code_length[(int) TREE_CODE (exp)];
+ nops = first_rtl_op (TREE_CODE (exp));
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
return 0;
+
+ /* If this is a language-specific tree code, it may require
+ special handling. */
+ if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
+ && lang_safe_from_p
+ && !(*lang_safe_from_p) (x, exp))
+ return 0;
}
/* If we have an rtl, find any enclosed object. Then see if we conflict
#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
- tree exp;
+ tree exp;
{
enum tree_code code;
enum machine_mode mode;
&& mode > MAX_INTEGER_COMPUTATION_MODE)
fatal ("unsupported wide integer operation");
}
-
+
/* Check operands of a binary/comparison op. */
if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
{
}
}
#endif
-
\f
/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
has any readonly fields. If any of the fields have types that
tree field;
for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
- if (TREE_CODE (field) == FIELD_DECL
+ if (TREE_CODE (field) == FIELD_DECL
&& (TREE_READONLY (field)
|| (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
&& readonly_fields_p (TREE_TYPE (field)))))
/* Used by check-memory-usage to make modifier read only. */
enum expand_modifier ro_modifier;
- /* Handle ERROR_MARK before anybody tries to access its type. */
- if (TREE_CODE (exp) == ERROR_MARK)
+ /* Handle ERROR_MARK before anybody tries to access its type. */
+ if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
{
op0 = CONST0_RTX (tmode);
if (op0 != 0)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
VOIDmode, ro_modifier);
else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
- || code == ARRAY_REF)
+ || code == ARRAY_REF)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
return const0_rtx;
}
-;
+ ;
target = 0;
}
enum memory_use_mode memory_usage;
memory_usage = get_memory_usage_from_modifier (modifier);
+ in_check_memory_usage = 1;
if (memory_usage != MEMORY_USE_DONT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
XEXP (DECL_RTL (exp), 0), Pmode,
GEN_INT (int_size_in_bytes (type)),
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* ... fall through ... */
abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
- addr = gen_rtx_MEM (Pmode,
- fix_lexical_addr (XEXP (addr, 0), exp));
+ addr = change_address (addr, Pmode,
+ fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
+
temp = change_address (DECL_RTL (exp), mode, addr);
}
case CONST_DECL:
return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ EXPAND_MEMORY_USE_BAD);
case REAL_CST:
/* If optimized, generate immediate CONST_DOUBLE
- which will be turned into memory by reload if necessary.
-
+ which will be turned into memory by reload if necessary.
+
We used to force a register so that loop.c could see it. But
this does not allow gen_* patterns to perform optimizations with
the constants. It also produces two insns in cases like "x = 1.0;".
lineno = EXPR_WFL_LINENO (exp);
if (EXPR_WFL_EMIT_LINE_NOTE (exp))
emit_line_note (input_filename, lineno);
- /* Possibly avoid switching back and force here */
+      /* Possibly avoid switching back and forth here.  */
to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
input_filename = saved_input_filename;
lineno = saved_lineno;
tree elt;
/* Find the outermost reference that is of the type we want.
- If none, see if any object has a type that is a pointer to
+ If none, see if any object has a type that is a pointer to
the type we want. */
for (elt = TREE_PURPOSE (placeholder_expr);
elt != 0 && object == 0;
&& ! (target != 0 && safe_from_p (target, exp, 1)))
|| TREE_ADDRESSABLE (exp)
|| (host_integerp (TYPE_SIZE_UNIT (type), 1)
- && (! MOVE_BY_PIECES_P
+ && (! MOVE_BY_PIECES_P
(tree_low_cst (TYPE_SIZE_UNIT (type), 1),
TYPE_ALIGN (type)))
&& ! mostly_zeros_p (exp))))
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
- tree exp2;
tree index;
- tree string = string_constant (exp1, &index);
-
+ tree string = string_constant (exp1, &index);
+
/* Try to optimize reads from const strings. */
if (string
&& TREE_CODE (string) == STRING_CST
if (memory_usage != MEMORY_USE_DONT)
{
in_check_memory_usage = 1;
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- op0, Pmode,
- GEN_INT (int_size_in_bytes (type)),
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
+ Pmode, GEN_INT (int_size_in_bytes (type)),
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
}
temp = gen_rtx_MEM (mode, op0);
- /* If address was computed by addition,
- mark this as an element of an aggregate. */
- if (TREE_CODE (exp1) == PLUS_EXPR
- || (TREE_CODE (exp1) == SAVE_EXPR
- && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
- || AGGREGATE_TYPE_P (TREE_TYPE (exp))
- || (TREE_CODE (exp1) == ADDR_EXPR
- && (exp2 = TREE_OPERAND (exp1, 0))
- && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
- MEM_SET_IN_STRUCT_P (temp, 1);
-
- MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
- MEM_ALIAS_SET (temp) = get_alias_set (exp);
+ set_mem_attributes (temp, exp, 0);
/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
here, because, in C and C++, the fact that a location is accessed
if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
&& TREE_CODE (index) == INTEGER_CST
- && 0 > compare_tree_int (index,
+ && 0 > compare_tree_int (index,
list_length (CONSTRUCTOR_ELTS
(TREE_OPERAND (exp, 0)))))
{
return expand_expr (fold (TREE_VALUE (elem)), target,
tmode, ro_modifier);
}
-
+
else if (optimize >= 1
&& TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
&& TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
else if (TREE_CODE (init) == STRING_CST
&& 0 > compare_tree_int (index,
TREE_STRING_LENGTH (init)))
- return (GEN_INT
- (TREE_STRING_POINTER
- (init)[TREE_INT_CST_LOW (index)]));
+ {
+ tree type = TREE_TYPE (TREE_TYPE (init));
+ enum machine_mode mode = TYPE_MODE (type);
+
+ if (GET_MODE_CLASS (mode) == MODE_INT
+ && GET_MODE_SIZE (mode) == 1)
+ return (GEN_INT
+ (TREE_STRING_POINTER
+ (init)[TREE_INT_CST_LOW (index)]));
+ }
}
}
}
-
- /* ... fall through ... */
+ /* Fall through. */
case COMPONENT_REF:
case BIT_FIELD_REF:
&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
<= HOST_BITS_PER_WIDE_INT))))
{
- op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
+ op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
{
HOST_WIDE_INT bitsize
/* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
to have to do. This occurs in unchecked conversion in Ada. */
-
+
op0 = expand_expr (tem,
(TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
&& GET_MODE (op0) == BLKmode
&& GET_MODE (XEXP (op0, 0)) != VOIDmode
&& bitsize != 0
- && (bitpos % bitsize) == 0
+ && (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
&& alignment == GET_MODE_ALIGNMENT (mode1))
{
bitpos = 0;
}
-
op0 = change_address (op0, VOIDmode,
gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
force_reg (ptr_mode,
/* Check the access. */
if (cfun != 0 && current_function_check_memory_usage
&& GET_CODE (op0) == MEM)
- {
+ {
enum memory_use_mode memory_usage;
memory_usage = get_memory_usage_from_modifier (modifier);
size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- to, Pmode,
- GEN_INT (size / BITS_PER_UNIT),
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
+ Pmode, GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
- GEN_INT (memory_usage),
+ GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
}
target = assign_temp (type, 0, 1, 1);
emit_block_move (target, op0,
- GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT),
+ bitsize == -1 ? expr_size (exp)
+ : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT),
BITS_PER_UNIT);
-
+
return target;
}
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ {
+ rtx new = gen_rtx_MEM (mode1,
+ plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
+
+ MEM_COPY_ATTRIBUTES (new, op0);
+ op0 = new;
+ }
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
(bitpos / BITS_PER_UNIT)));
- if (GET_CODE (op0) == MEM)
- MEM_ALIAS_SET (op0) = get_alias_set (exp);
-
+ set_mem_attributes (op0, exp, 0);
if (GET_CODE (XEXP (op0, 0)) == REG)
mark_reg_pointer (XEXP (op0, 0), alignment);
- MEM_SET_IN_STRUCT_P (op0, 1);
MEM_VOLATILE_P (op0) |= volatilep;
if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
|| modifier == EXPAND_CONST_ADDRESS
of the set. */
if (GET_CODE (lo_r) == CONST_INT)
rlow = GEN_INT (INTVAL (lo_r)
- & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
+ & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
else
rlow = expand_binop (index_mode, and_optab, lo_r,
GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
setaddr, NULL_RTX, iunsignedp,
OPTAB_LIB_WIDEN));
- /* Extract the bit we want to examine */
+ /* Extract the bit we want to examine. */
bit = expand_shift (RSHIFT_EXPR, byte_mode,
gen_rtx_MEM (byte_mode, addr),
make_tree (TREE_TYPE (index), rem),
&& (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== FUNCTION_DECL)
&& DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
- return expand_builtin (exp, target, subtarget, tmode, ignore);
+ {
+ if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ == BUILT_IN_FRONTEND)
+ return (*lang_expand_expr) (exp, original_target, tmode, modifier);
+ else
+ return expand_builtin (exp, target, subtarget, tmode, ignore);
+ }
/* If this call was expanded already by preexpand_calls,
just return the result we got. */
case NOP_EXPR:
case CONVERT_EXPR:
case REFERENCE_EXPR:
+ if (TREE_OPERAND (exp, 0) == error_mark_node)
+ return const0_rtx;
+
if (TREE_CODE (type) == UNION_TYPE)
{
tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
op0 = eliminate_constant_term (op0, &constant_term);
/* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
- their sum should be a constant. Form it into OP1, since the
+ their sum should be a constant. Form it into OP1, since the
result we want will then be OP0 + OP1. */
temp = simplify_binary_operation (PLUS, mode, constant_term,
op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
NULL_RTX, VOIDmode, 0);
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
- VOIDmode, 0);
+ op1 = convert_modes (innermode, mode,
+ expand_expr (TREE_OPERAND (exp, 1),
+ NULL_RTX, VOIDmode, 0),
+ unsignedp);
else
op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
NULL_RTX, VOIDmode, 0);
/* At this point, a MEM target is no longer useful; we will get better
code without it. */
-
+
if (GET_CODE (target) == MEM)
target = gen_reg_rtx (mode);
/* If we are not to produce a result, we have no target. Otherwise,
if a target was specified use it; it will not be used as an
- intermediate target unless it is safe. If no target, use a
+ intermediate target unless it is safe. If no target, use a
temporary. */
if (ignore)
TREE_OPERAND (exp, 0)
= invert_truthvalue (TREE_OPERAND (exp, 0));
}
-
+
do_pending_stack_adjust ();
NO_DEFER_POP;
op0 = gen_label_rtx ();
|| TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
{
- if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
+ if (GET_CODE (temp) == REG
+ && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp, 0);
jumpif (TREE_OPERAND (exp, 0), op0);
|| TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
{
- if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
+ if (GET_CODE (temp) == REG
+ && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp, 0);
jumpifnot (TREE_OPERAND (exp, 0), op0);
jumpifnot (TREE_OPERAND (exp, 0), op0);
start_cleanup_deferral ();
-
+
/* One branch of the cond can be void, if it never returns. For
- example A ? throw : E */
+ example A ? throw : E */
if (temp != 0
- && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
+ && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 1),
emit_label (op0);
start_cleanup_deferral ();
if (temp != 0
- && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
+ && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 2),
/* Set this here so that if we get a target that refers to a
register variable that's already been used, put_reg_into_stack
- knows that it should fix up those uses. */
+ knows that it should fix up those uses. */
TREE_USED (slot) = 1;
if (target == 0)
preserve_temp_slots (target);
DECL_RTL (slot) = target;
if (TREE_ADDRESSABLE (slot))
- {
- TREE_ADDRESSABLE (slot) = 0;
- mark_addressable (slot);
- }
+ put_var_into_stack (slot);
/* Since SLOT is not known to the called function
to belong to its stack frame, we must build an explicit
not target that we were passed in, as our target
parameter is only a hint. */
if (DECL_RTL (slot) != 0)
- {
- target = DECL_RTL (slot);
- /* If we have already expanded the slot, so don't do
+ {
+ target = DECL_RTL (slot);
+ /* If we have already expanded the slot, so don't do
it again. (mrs) */
- if (TREE_OPERAND (exp, 1) == NULL_TREE)
- return target;
+ if (TREE_OPERAND (exp, 1) == NULL_TREE)
+ return target;
}
else
{
/* If we must have an addressable slot, then make sure that
the RTL that we just stored in slot is OK. */
if (TREE_ADDRESSABLE (slot))
- {
- TREE_ADDRESSABLE (slot) = 0;
- mark_addressable (slot);
- }
+ put_var_into_stack (slot);
}
}
store_expr (exp1, target, 0);
expand_decl_cleanup (NULL_TREE, cleanups);
-
+
return target;
}
if (GET_CODE (op0) != MEM)
abort ();
-
+
if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
{
temp = XEXP (op0, 0);
case REALPART_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
return gen_realpart (mode, op0);
-
+
case IMAGPART_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
return gen_imagpart (mode, op0);
enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx imag_t;
rtx insns;
-
- op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
if (! target)
target = gen_reg_rtx (mode);
-
+
start_sequence ();
/* Store the realpart and the negated imagpart to target. */
imag_t = gen_imagpart (partmode, target);
temp = expand_unop (partmode, neg_optab,
- gen_imagpart (partmode, op0), imag_t, 0);
+ gen_imagpart (partmode, op0), imag_t, 0);
if (temp != imag_t)
emit_move_insn (imag_t, temp);
insns = get_insns ();
end_sequence ();
- /* Conjugate should appear as a single unit
+ /* Conjugate should appear as a single unit
If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
each with a separate pseudo as destination.
It's not correct for flow to treat them as a unit. */
return op0;
}
- case GOTO_SUBROUTINE_EXPR:
+ case GOTO_SUBROUTINE_EXPR:
{
rtx subr = (rtx) TREE_OPERAND (exp, 0);
rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
rtx return_address = gen_label_rtx ();
- emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
+ emit_move_insn (return_link,
+ gen_rtx_LABEL_REF (Pmode, return_address));
emit_jump (subr);
emit_label (return_address);
return const0_rtx;
that was declared const. */
if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
- && 0 > compare_tree_int (index,
+ && 0 > compare_tree_int (index,
list_length (CONSTRUCTOR_ELTS
(TREE_OPERAND (exp, 0)))))
{
if (elem)
return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
}
-
+
else if (optimize >= 1
&& TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
&& TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
}
}
}
-
- /* ... fall through ... */
+ /* Fall through. */
case COMPONENT_REF:
case BIT_FIELD_REF:
/* Check the access. */
if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
- {
+ {
rtx to;
int size;
size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RO),
+ GEN_INT (MEMORY_USE_RO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* In cases where an aligned union has an unaligned object
- bitsize),
op0, 1);
-
emit_move_insn (new, op0);
op0 = copy_rtx (new);
PUT_MODE (op0, BLKmode);
else
/* Get a reference to just this component. */
op0 = change_address (op0, mode1,
- plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
MEM_ALIAS_SET (op0) = get_alias_set (exp);
if (this_optab == sub_optab
&& GET_CODE (op1) == CONST_INT)
{
- op1 = GEN_INT (- INTVAL (op1));
+ op1 = GEN_INT (-INTVAL (op1));
this_optab = add_optab;
}
/* Increment however we can. */
op1 = expand_binop (mode, this_optab, value, op1,
- current_function_check_memory_usage ? NULL_RTX : op0,
+ current_function_check_memory_usage ? NULL_RTX : op0,
TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
/* Make sure the value is stored into OP0. */
if (op1 != op0)
tree exp;
{
register int nops, i;
- int type = TREE_CODE_CLASS (TREE_CODE (exp));
+ int class = TREE_CODE_CLASS (TREE_CODE (exp));
if (! do_preexpand_calls)
return;
/* Only expressions and references can contain calls. */
- if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
+ if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
return;
switch (TREE_CODE (exp))
case SAVE_EXPR:
if (SAVE_EXPR_RTL (exp) != 0)
return;
-
+
default:
break;
}
- nops = tree_code_length[(int) TREE_CODE (exp)];
+ nops = TREE_CODE_LENGTH (TREE_CODE (exp));
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0)
{
;
else
{
- type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
- if (type == 'e' || type == '<' || type == '1' || type == '2'
- || type == 'r')
+ class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
+ if (IS_EXPR_CODE_CLASS (class) || class == 'r')
preexpand_calls (TREE_OPERAND (exp, i));
}
}
cmp = UNORDERED, rcmp = ORDERED;
else
cmp = ORDERED, rcmp = UNORDERED;
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
do_rev = 0;
if (! can_compare_p (cmp, mode, ccp_jump)
tree op1 = save_expr (TREE_OPERAND (exp, 1));
tree cmp0, cmp1;
- /* If the target doesn't support combined unordered
+ /* If the target doesn't support combined unordered
compares, decompose into UNORDERED + comparison. */
cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
unsignedp = 1;
}
#endif
-
+
emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
else
return 0;
}
-
+
preexpand_calls (exp);
if (! get_subtarget (target)
|| GET_MODE (subtarget) != operand_mode
emit_barrier ();
}
-#endif /* HAVE_tablejump */
+#endif /* HAVE_tablejump */