+2004-07-14  Paolo Bonzini  <bonzini@gnu.org>
+
+ * expr.c (enqueue_insn, finish_expr_for_function,
+ protect_from_queue, queued_subexp_p, mark_queue,
+ emit_insns_enqueued_after_mark, emit_queue,
+ expand_increment): Remove.
+ (store_constructor): Expand increment as an assignment.
+ (expand_expr_real_1 <case PREINCREMENT_EXPR,
+ case PREDECREMENT_EXPR, case POSTINCREMENT_EXPR,
+ case POSTDECREMENT_EXPR>): Abort.
+ * expr.h (QUEUED_VAR, QUEUED_INSN, QUEUED_COPY,
+ QUEUED_BODY, QUEUED_NEXT, finish_expr_for_function,
+ protect_from_queue, emit_queue, queued_subexp_p): Remove.
+ * function.h (pending_chain, x_pending_chain): Remove.
+ * rtl.def (QUEUED): Remove.
+
+ * emit-rtl.c (copy_insn_1, copy_most_rtx,
+ set_used_flags, verify_rtx_sharing): Remove references to QUEUED.
+ * genattrtab.c (attr_copy_rtx, clear_struct_flag,
+ encode_units_mask): Likewise.
+ * local-alloc.c (equiv_init_varies_p): Likewise.
+ * rtl.c (copy_rtx): Likewise.
+ * rtlanal.c (rtx_unstable_p, rtx_varies_p): Likewise.
+ * simplify-rtx.c (simplify_gen_subreg): Likewise.
+ * config/mn10300/mn10300.c (legitimate_pic_operand_p): Likewise.
+
+ * builtins.c (expand_builtin, expand_builtin_apply,
+ expand_builtin_mathfn, expand_builtin_mathfn_2,
+ expand_builtin_mathfn_3, expand_builtin_prefetch,
+ expand_builtin_setjmp_setup): Remove calls to
+ emit_queue and protect_from_queue.
+ * calls.c (emit_library_call_value_1, expand_call,
+ precompute_arguments, precompute_register_parameters,
+ prepare_call_address, rtx_for_function_call,
+ store_one_arg): Likewise.
+ * dojump.c (do_compare_and_jump, do_jump): Likewise.
+ * explow.c (adjust_stack, anti_adjust_stack,
+ memory_address): Likewise.
+ * expmed.c (emit_store_flag, expand_mult,
+ extract_bit_field, store_bit_field,
+ store_fixed_bit_field): Likewise.
+ * expr.c (clear_by_pieces, clear_storage,
+ clear_storage_via_libcall, convert_modes,
+ convert_move, convert_to_mode, do_store_flag,
+ emit_block_move, emit_block_move_via_libcall,
+ emit_group_load, emit_group_store, emit_move_insn,
+ emit_push_insn, expand_expr_real_1, move_by_pieces,
+ store_by_pieces, store_by_pieces_1, store_constructor,
+ store_expr, try_casesi, try_tablejump): Likewise.
+ * function.c (expand_pending_sizes): Likewise.
+ (expand_function_end): Remove call to
+ finish_expr_for_function.
+ * optabs.c (emit_cmp_and_jump_insns,
+ emit_conditional_add, emit_conditional_move,
+ emit_unop_insn, expand_abs, expand_binop,
+ expand_fix, expand_float, expand_twoval_binop,
+ expand_unop, gen_cond_trap, prepare_cmp_insn,
+ prepare_float_lib_cmp, prepare_operand): Likewise.
+ * stmt.c (emit_case_bit_tests,
+ expand_asm_expr, expand_asm_operands,
+ expand_computed_goto, expand_decl_init,
+ expand_end_case_type, expand_end_stmt_expr,
+ expand_expr_stmt_value, expand_return,
+ expand_start_case, optimize_tail_recursion): Likewise.
+ * config/c4x/c4x.c (c4x_expand_builtin): Likewise.
+ * config/s390/s390.c (s390_expand_cmpmem): Likewise.
+
2004-07-14  Ben Elliston  <bje@au.ibm.com>

* vec.h: Comment fix.
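For context: QUEUED rtxen existed so that the side effect of a
post-increment could be emitted after the use of its old value.  By
this point in the 4.0 cycle the gimplifier lowers increments before
RTL expansion ever runs, splitting a post-increment into a use of the
old value followed by a plain assignment, which is what makes the
removal safe.  A minimal source-level sketch of that guarantee
(hypothetical example, not GCC code):

#include <assert.h>

/* What the gimplifier now guarantees before RTL expansion: a
   post-increment used as a value is split into a copy of the old
   value followed by an ordinary assignment, so no RTL-level queue
   of pending increments is needed.  */
int
main (void)
{
  int i = 5, v;

  /* Source form: v = i++;  Lowered form: */
  v = i;        /* use the old value ...             */
  i = i + 1;    /* ... then perform the side effect. */

  assert (v == 5 && i == 6);
  return 0;
}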
buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
- emit_queue ();
-
/* We store the frame pointer and the address of receiver_label in
the buffer and use the rest of it for the stack save area, which
is machine-dependent. */
}
emit_insn (gen_prefetch (op0, op1, op2));
}
- else
#endif
- op0 = protect_from_queue (op0, 0);
+
/* Don't do anything with direct references to volatile memory, but
generate code to handle other side effects. */
if (!MEM_P (op0) && side_effects_p (op0))
incoming_args, 0, OPTAB_LIB_WIDEN);
#endif
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
-
/* Push a new argument block and copy the arguments. Do not allow
the (potential) memcpy call below to interfere with our stack
manipulations. */
op0 = expand_expr (arg, subtarget, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
op1 = expand_expr (arg1, 0, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
op0 = expand_expr (arg, subtarget, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
- /* Perform postincrements before expanding builtin functions. */
- emit_queue ();
-
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
prepare_call_address (rtx funexp, rtx static_chain_value,
rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
- funexp = protect_from_queue (funexp, 0);
-
/* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
VOIDmode, 0);
preserve_temp_slots (args[i].value);
pop_temp_slots ();
-
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
}
/* If the value is a non-legitimate constant, force it into a
if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
abort ();
- args[i].value
- = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
-
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
-
args[i].initial_value = args[i].value
- = protect_from_queue (args[i].value, 0);
+ = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
if (mode != args[i].mode)
push_temp_slots ();
funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
pop_temp_slots (); /* FUNEXP can't be BLKmode. */
- emit_queue ();
}
return funexp;
}
if (pass == 0)
{
- /* Emit any queued insns now; otherwise they would end up in
- only one of the alternates. */
- emit_queue ();
-
/* State variables we need to save and restore between
iterations. */
save_pending_stack_adjust = pending_stack_adjust;
load_register_parameters (args, num_actuals, &call_fusage, flags,
pass == 0, &sibcall_failure);
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
-
/* Save a pointer to the last insn before the call, so that we can
later safely search backwards to find the CALL_INSN. */
before_call = get_last_insn ();
|| (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
abort ();
- /* There's no need to call protect_from_queue, because
- either emit_move_insn or emit_push_insn will do that. */
-
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
&& ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
for a value of mode OUTMODE,
with NARGS different arguments, passed as alternating rtx values
and machine_modes to convert them to.
- The rtx values should have been passed through protect_from_queue already.
FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
be deferred during the rest of the arguments. */
NO_DEFER_POP;
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
-
/* Free any temporary slots made in processing this argument. Show
that we might have taken the address of something and pushed that
as an operand. */
case C4X_BUILTIN_FIX:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fixqfqi_clobber (target, r0));
case C4X_BUILTIN_FIX_ANSI:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fix_truncqfqi2 (target, r0));
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
- r0 = protect_from_queue (r0, 0);
- r1 = protect_from_queue (r1, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_toieee (target, r0));
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (register_operand (r0, QFmode))
{
r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_rcpfqf_clobber (target, r0));
|| XINT (x, 1) == UNSPEC_PLT))
return 1;
- if (GET_CODE (x) == QUEUED)
- return legitimate_pic_operand_p (QUEUED_VAR (x));
-
fmt = GET_RTX_FORMAT (GET_CODE (x));
for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
{
rtx (*gen_result) (rtx) =
GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
- len = protect_from_queue (len, 0);
-
if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
{
if (INTVAL (len) > 0)
tree type;
enum machine_mode mode;
- emit_queue ();
-
switch (code)
{
case ERROR_MARK:
preserve_temp_slots (NULL_RTX);
free_temp_slots ();
pop_temp_slots ();
- emit_queue ();
do_pending_stack_adjust ();
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
temp = copy_to_reg (temp);
#endif
do_pending_stack_adjust ();
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
if (GET_CODE (temp) == CONST_INT
|| (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
}
#endif
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
-
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)
? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
x = force_reg (Pmode, x);
- /* Accept a QUEUED that refers to a REG
- even though that isn't a valid address.
- On attempting to put this in an insn we will call protect_from_queue
- which will turn it into a REG, which is valid. */
- else if (GET_CODE (x) == QUEUED
- && REG_P (QUEUED_VAR (x)))
- ;
-
/* We get better cse by rejecting indirect addressing at this stage.
Let the combiner create indirect addresses where appropriate.
For now, generate the code so that the subexpressions useful to share
adjust_stack (rtx adjust)
{
rtx temp;
- adjust = protect_from_queue (adjust, 0);
if (adjust == const0_rtx)
return;
anti_adjust_stack (rtx adjust)
{
rtx temp;
- adjust = protect_from_queue (adjust, 0);
if (adjust == const0_rtx)
return;
op0 = SUBREG_REG (op0);
}
- value = protect_from_queue (value, 0);
-
/* Use vec_set patterns for inserting parts of vectors whenever
available. */
if (VECTOR_MODE_P (GET_MODE (op0))
}
offset = 0;
}
- else
- op0 = protect_from_queue (op0, 1);
/* If VALUE is a floating-point mode, access it as an integer of the
corresponding size. This can occur on a machine with 64 bit registers
The field starts at position BITPOS within the byte.
(If OP0 is a register, it may be a full word or a narrower mode,
but BITPOS still counts within a full word,
- which is significant on bigendian machines.)
-
- Note that protect_from_queue has already been done on OP0 and VALUE. */
+ which is significant on bigendian machines.) */
static void
store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
}
offset = 0;
}
- else
- op0 = protect_from_queue (str_rtx, 1);
/* Now OFFSET is nonzero only for memory operands. */
bitsize_rtx = GEN_INT (bitsize);
bitpos_rtx = GEN_INT (xbitpos);
- pat = gen_extzv (protect_from_queue (xtarget, 1),
- xop0, bitsize_rtx, bitpos_rtx);
+ pat = gen_extzv (xtarget, xop0, bitsize_rtx, bitpos_rtx);
if (pat)
{
emit_insn (pat);
bitsize_rtx = GEN_INT (bitsize);
bitpos_rtx = GEN_INT (xbitpos);
- pat = gen_extv (protect_from_queue (xtarget, 1),
- xop0, bitsize_rtx, bitpos_rtx);
+ pat = gen_extv (xtarget, xop0, bitsize_rtx, bitpos_rtx);
if (pat)
{
emit_insn (pat);
int opno;
enum machine_mode nmode;
- /* op0 must be register to make mult_cost match the precomputed
- shiftadd_cost array. */
- op0 = protect_from_queue (op0, 0);
-
/* Avoid referencing memory over and over.
For speed, but also for correctness when mem is volatile. */
if (MEM_P (op0))
rtx last = get_last_insn ();
rtx pattern, comparison;
- /* ??? Ok to do this and then fail? */
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
-
if (unsignedp)
code = unsigned_condition (code);
first. */
if (GET_MODE_SIZE (target_mode) > GET_MODE_SIZE (mode))
{
- op0 = protect_from_queue (op0, 0);
op0 = convert_modes (target_mode, mode, op0, 0);
mode = target_mode;
}
insn_operand_predicate_fn pred;
/* We think we may be able to do this with a scc insn. Emit the
- comparison and then the scc insn.
-
- compare_from_rtx may call emit_queue, which would be deleted below
- if the scc insn fails. So call it ourselves before setting LAST.
- Likewise for do_pending_stack_adjust. */
- emit_queue ();
+ comparison and then the scc insn. */
do_pending_stack_adjust ();
last = get_last_insn ();
tem = expand_unop (mode, ffs_optab, op0, subtarget, 1);
else if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
{
- op0 = protect_from_queue (op0, 0);
tem = convert_modes (word_mode, mode, op0, 1);
mode = word_mode;
}
int reverse;
};
-static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
-static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
{
cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
-
-/* Small sanity check that the queue is empty at the end of a function. */
-
-void
-finish_expr_for_function (void)
-{
- if (pending_chain)
- abort ();
-}
-\f
-/* Manage the queue of increment instructions to be output
- for POSTINCREMENT_EXPR expressions, etc. */
-
-/* Queue up to increment (or change) VAR later. BODY says how:
- BODY should be the same thing you would pass to emit_insn
- to increment right away. It will go to emit_insn later on.
-
- The value is a QUEUED expression to be used in place of VAR
- where you want to guarantee the pre-incrementation value of VAR. */
-
-static rtx
-enqueue_insn (rtx var, rtx body)
-{
- pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
- body, pending_chain);
- return pending_chain;
-}
-
-/* Use protect_from_queue to convert a QUEUED expression
- into something that you can put immediately into an instruction.
- If the queued incrementation has not happened yet,
- protect_from_queue returns the variable itself.
- If the incrementation has happened, protect_from_queue returns a temp
- that contains a copy of the old value of the variable.
-
- Any time an rtx which might possibly be a QUEUED is to be put
- into an instruction, it must be passed through protect_from_queue first.
- QUEUED expressions are not meaningful in instructions.
-
- Do not pass a value through protect_from_queue and then hold
- on to it for a while before putting it in an instruction!
- If the queue is flushed in between, incorrect code will result. */
-
-rtx
-protect_from_queue (rtx x, int modify)
-{
- RTX_CODE code = GET_CODE (x);
-
-#if 0 /* A QUEUED can hang around after the queue is forced out. */
- /* Shortcut for most common case. */
- if (pending_chain == 0)
- return x;
-#endif
-
- if (code != QUEUED)
- {
- /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
- use of autoincrement. Make a copy of the contents of the memory
- location rather than a copy of the address, but not if the value is
- of mode BLKmode. Don't modify X in place since it might be
- shared. */
- if (code == MEM && GET_MODE (x) != BLKmode
- && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
- {
- rtx y = XEXP (x, 0);
- rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
-
- if (QUEUED_INSN (y))
- {
- rtx temp = gen_reg_rtx (GET_MODE (x));
-
- emit_insn_before (gen_move_insn (temp, new),
- QUEUED_INSN (y));
- return temp;
- }
-
- /* Copy the address into a pseudo, so that the returned value
- remains correct across calls to emit_queue. */
- return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
- }
-
- /* Otherwise, recursively protect the subexpressions of all
- the kinds of rtx's that can contain a QUEUED. */
- if (code == MEM)
- {
- rtx tem = protect_from_queue (XEXP (x, 0), 0);
- if (tem != XEXP (x, 0))
- {
- x = copy_rtx (x);
- XEXP (x, 0) = tem;
- }
- }
- else if (code == PLUS || code == MULT)
- {
- rtx new0 = protect_from_queue (XEXP (x, 0), 0);
- rtx new1 = protect_from_queue (XEXP (x, 1), 0);
- if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
- {
- x = copy_rtx (x);
- XEXP (x, 0) = new0;
- XEXP (x, 1) = new1;
- }
- }
- return x;
- }
- /* If the increment has not happened, use the variable itself. Copy it
- into a new pseudo so that the value remains correct across calls to
- emit_queue. */
- if (QUEUED_INSN (x) == 0)
- return copy_to_reg (QUEUED_VAR (x));
- /* If the increment has happened and a pre-increment copy exists,
- use that copy. */
- if (QUEUED_COPY (x) != 0)
- return QUEUED_COPY (x);
- /* The increment has happened but we haven't set up a pre-increment copy.
- Set one up now, and use it. */
- QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
- emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
- QUEUED_INSN (x));
- return QUEUED_COPY (x);
-}
-
-/* Return nonzero if X contains a QUEUED expression:
- if it contains anything that will be altered by a queued increment.
- We handle only combinations of MEM, PLUS, MINUS and MULT operators
- since memory addresses generally contain only those. */
-
-int
-queued_subexp_p (rtx x)
-{
- enum rtx_code code = GET_CODE (x);
- switch (code)
- {
- case QUEUED:
- return 1;
- case MEM:
- return queued_subexp_p (XEXP (x, 0));
- case MULT:
- case PLUS:
- case MINUS:
- return (queued_subexp_p (XEXP (x, 0))
- || queued_subexp_p (XEXP (x, 1)));
- default:
- return 0;
- }
-}
-
-/* Retrieve a mark on the queue. */
-
-static rtx
-mark_queue (void)
-{
- return pending_chain;
-}
-
-/* Perform all the pending incrementations that have been enqueued
- after MARK was retrieved. If MARK is null, perform all the
- pending incrementations. */
-
-static void
-emit_insns_enqueued_after_mark (rtx mark)
-{
- rtx p;
-
- /* The marked incrementation may have been emitted in the meantime
- through a call to emit_queue. In this case, the mark is not valid
- anymore so do nothing. */
- if (mark && ! QUEUED_BODY (mark))
- return;
-
- while ((p = pending_chain) != mark)
- {
- rtx body = QUEUED_BODY (p);
-
- switch (GET_CODE (body))
- {
- case INSN:
- case JUMP_INSN:
- case CALL_INSN:
- case CODE_LABEL:
- case BARRIER:
- case NOTE:
- QUEUED_INSN (p) = body;
- emit_insn (body);
- break;
-
-#ifdef ENABLE_CHECKING
- case SEQUENCE:
- abort ();
- break;
-#endif
-
- default:
- QUEUED_INSN (p) = emit_insn (body);
- break;
- }
-
- QUEUED_BODY (p) = 0;
- pending_chain = QUEUED_NEXT (p);
- }
-}
-
-/* Perform all the pending incrementations. */
-
-void
-emit_queue (void)
-{
- emit_insns_enqueued_after_mark (NULL_RTX);
-}
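For readers tracing the deletion above: the queue was a LIFO chain of
QUEUED nodes (their operands are documented in the rtl.def hunk later
in this patch), flushed newest-first, and protect_from_queue handed
back either the variable itself (increment still pending) or a saved
copy of its pre-increment value (increment already emitted).  A toy
model of that contract in plain C, with made-up types standing in for
the rtl ones (a sketch, not the real implementation):

#include <stdio.h>
#include <stdlib.h>

/* Toy model of the deleted queue; fields mirror the QUEUED_* macros. */
struct queued
{
  int *var;             /* QUEUED_VAR: variable to increment        */
  int emitted;          /* QUEUED_INSN: has the increment happened? */
  int copy;             /* QUEUED_COPY: saved pre-increment value   */
  int amount;           /* QUEUED_BODY: the pending "insn"          */
  struct queued *next;  /* QUEUED_NEXT: rest of the chain           */
};

static struct queued *pending_chain;

/* Like enqueue_insn: push a pending increment, newest first.  */
static struct queued *
enqueue (int *var, int amount)
{
  struct queued *q = calloc (1, sizeof *q);
  q->var = var;
  q->amount = amount;
  q->next = pending_chain;
  return pending_chain = q;
}

/* Like protect_from_queue: yield the pre-increment value.  */
static int
protect (struct queued *q)
{
  if (!q->emitted)
    return *q->var;  /* increment pending: current value == old value */
  return q->copy;    /* increment done: use the saved old value       */
}

/* Like emit_queue: flush pending increments, newest first.
   (Nodes are intentionally leaked in this toy.)  */
static void
flush_queue (void)
{
  for (; pending_chain; pending_chain = pending_chain->next)
    {
      pending_chain->copy = *pending_chain->var;
      pending_chain->emitted = 1;
      *pending_chain->var += pending_chain->amount;
    }
}

int
main (void)
{
  int i = 5;
  struct queued *q = enqueue (&i, 1);  /* "i++" was seen */
  int use = protect (q);               /* old value of i */
  flush_queue ();
  printf ("use=%d i=%d\n", use, i);    /* prints "use=5 i=6" */
  return 0;
}

The hazard the deleted comments warn about falls out of the model: a
value obtained from protect () is only meaningful relative to the
state of the queue, so callers could not cache it across a flush.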
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
Both modes may be integer, or both may be floating.
enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
if (to_real != from_real)
abort ();
Both X and MODE may be floating, or both integer.
UNSIGNEDP is nonzero if X is an unsigned value.
This can be done by referring to a part of X in place
- or by copying to a new temporary with conversion.
-
- This function *must not* call protect_from_queue
- except when putting X into an insn (in which case convert_move does it). */
+ or by copying to a new temporary with conversion. */
rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
This can be done by referring to a part of X in place
or by copying to a new temporary with conversion.
- You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
-
- This function *must not* call protect_from_queue
- except when putting X into an insn (in which case convert_move does it). */
+ You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
}
/* Generate several move instructions to copy LEN bytes from block FROM to
- block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
- and TO through protect_from_queue before calling.
+ block TO. (These are MEM rtx's with BLKmode).
If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
used to push FROM to the stack.
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
- x = protect_from_queue (x, 1);
- y = protect_from_queue (y, 0);
- size = protect_from_queue (size, 0);
-
if (!MEM_P (x))
abort ();
if (!MEM_P (y))
enum machine_mode size_mode;
rtx retval;
- /* DST, SRC, or SIZE may have been passed through protect_from_queue.
-
- It is unsafe to save the value generated by protect_from_queue and reuse
- it later. Consider what happens if emit_queue is called before the
- return value from protect_from_queue is used.
-
- Expansion of the CALL_EXPR below will call emit_queue before we are
- finished emitting RTL for argument setup. So if we are not careful we
- could get the wrong value for an argument.
-
- To avoid this problem we go ahead and emit code to copy the addresses of
- DST and SRC and SIZE into new pseudos.
-
- Note this is not strictly needed for library calls since they do not call
- emit_queue before loading their arguments. However, we may need to have
- library calls call emit_queue in the future since failing to do so could
- cause problems for targets which define SMALL_REGISTER_CLASSES and pass
- arguments in registers. */
+ /* Emit code to copy the addresses of DST and SRC and SIZE into new
+ pseudos. We can then place those new pseudos into a VAR_DECL and
+ use them later. */
dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
build_int_2 (shift, 0), tmps[i], 0);
}
- emit_queue ();
-
/* Copy the extracted pieces into the proper (probable) hard regs. */
for (i = start; i < XVECLEN (dst, 0); i++)
emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
tmps[i] = gen_reg_rtx (GET_MODE (reg));
emit_move_insn (tmps[i], reg);
}
- emit_queue ();
/* If we won't be storing directly into memory, protect the real destination
from strange tricks we might play. */
mode, tmps[i]);
}
- emit_queue ();
-
/* Copy from the pseudo into the (probable) hard reg. */
if (orig_dst != dst)
emit_move_insn (orig_dst, dst);
if (! STORE_BY_PIECES_P (len, align))
abort ();
- to = protect_from_queue (to, 1);
data.constfun = constfun;
data.constfundata = constfundata;
data.len = len;
}
/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
- rtx with BLKmode). The caller must pass TO through protect_from_queue
- before calling. ALIGN is maximum alignment we can assume. */
+ rtx with BLKmode). ALIGN is maximum alignment we can assume. */
static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
/* Subroutine of clear_by_pieces and store_by_pieces.
Generate several move instructions to store LEN bytes of block TO. (A MEM
- rtx with BLKmode). The caller must pass TO through protect_from_queue
- before calling. ALIGN is maximum alignment we can assume. */
+ rtx with BLKmode). ALIGN is maximum alignment we can assume. */
static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
else
{
- object = protect_from_queue (object, 1);
- size = protect_from_queue (size, 0);
-
if (size == const0_rtx)
;
else if (GET_CODE (size) == CONST_INT
enum machine_mode size_mode;
rtx retval;
- /* OBJECT or SIZE may have been passed through protect_from_queue.
-
- It is unsafe to save the value generated by protect_from_queue
- and reuse it later. Consider what happens if emit_queue is
- called before the return value from protect_from_queue is used.
-
- Expansion of the CALL_EXPR below will call emit_queue before
- we are finished emitting RTL for argument setup. So if we are
- not careful we could get the wrong value for an argument.
-
- To avoid this problem we go ahead and emit code to copy OBJECT
- and SIZE into new pseudos.
-
- Note this is not strictly needed for library calls since they
- do not call emit_queue before loading their arguments. However,
- we may need to have library calls call emit_queue in the future
- since failing to do so could cause problems for targets which
- define SMALL_REGISTER_CLASSES and pass arguments in registers. */
+ /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
+ place those new pseudos into a VAR_DECL and use them later. */
object = copy_to_mode_reg (Pmode, XEXP (object, 0));
rtx y_cst = NULL_RTX;
rtx last_insn, set;
- x = protect_from_queue (x, 1);
- y = protect_from_queue (y, 0);
-
if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
abort ();
if (where_pad != none)
where_pad = (where_pad == downward ? upward : downward);
- xinner = x = protect_from_queue (x, 0);
+ xinner = x;
if (mode == BLKmode)
{
&& (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
break;
value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
- value = protect_from_queue (value, 0);
- to_rtx = protect_from_queue (to_rtx, 1);
binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
if (bitsize == 1
&& count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
{
rtx temp;
rtx alt_rtl = NULL_RTX;
- rtx mark = mark_queue ();
int dont_return_target = 0;
int dont_store_target = 0;
part. */
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- emit_queue ();
return store_expr (TREE_OPERAND (exp, 1), target, want_value);
}
else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- emit_queue ();
- target = protect_from_queue (target, 1);
-
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
- emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
- emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
return want_value & 1 ? target : NULL_RTX;
}
- else if (queued_subexp_p (target))
- /* If target contains a postincrement, let's not risk
- using it as the place to generate the rhs. */
- {
- if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
- {
- /* Expand EXP into a new pseudo. */
- temp = gen_reg_rtx (GET_MODE (target));
- temp = expand_expr (exp, temp, GET_MODE (target),
- (want_value & 2
- ? EXPAND_STACK_PARM : EXPAND_NORMAL));
- }
- else
- temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
- (want_value & 2
- ? EXPAND_STACK_PARM : EXPAND_NORMAL));
-
- /* If target is volatile, ANSI requires accessing the value
- *from* the target, if it is accessed. So make that happen.
- In no case return the target itself. */
- if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
- dont_return_target = 1;
- }
else if ((want_value & 1) != 0
&& MEM_P (target)
&& ! MEM_VOLATILE_P (target)
bit-initialized. */
&& expr_size (exp) != const0_rtx)
{
- emit_insns_enqueued_after_mark (mark);
- target = protect_from_queue (target, 1);
- temp = protect_from_queue (temp, 0);
if (GET_MODE (temp) != GET_MODE (target)
&& GET_MODE (temp) != VOIDmode)
{
/* Build the head of the loop. */
do_pending_stack_adjust ();
- emit_queue ();
emit_label (loop_start);
/* Assign value to element index. */
/* Update the loop counter, and jump to the head of
the loop. */
- expand_increment (build (PREINCREMENT_EXPR,
- TREE_TYPE (index),
- index, integer_one_node), 0, 0);
+ expand_assignment (index,
+ build2 (PLUS_EXPR, TREE_TYPE (index),
+ index, integer_one_node), 0);
+
emit_jump (loop_start);
/* Build the end of the loop. */
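In source terms, the constructor loop now updates its counter with the
statement a C programmer would write.  A trivial runnable illustration
of the loop shape being generated (hypothetical example, not GCC code):

#include <assert.h>

int
main (void)
{
  int a[8];

  /* The generated RTL for a range-initialized constructor amounts to
     a counted loop whose counter is updated by a plain assignment
     (index = index + 1), not by a queued post-increment.  */
  for (int index = 0; index < 8; index = index + 1)
    a[index] = 42;

  assert (a[0] == 42 && a[7] == 42);
  return 0;
}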
case COMPOUND_EXPR:
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
- emit_queue ();
return expand_expr_real (TREE_OPERAND (exp, 1),
(ignore ? const0_rtx : target),
VOIDmode, modifier, alt_rtl);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
}
- emit_queue ();
emit_label (op1);
OK_DEFER_POP;
expand_return (TREE_OPERAND (exp, 0));
return const0_rtx;
- case PREINCREMENT_EXPR:
- case PREDECREMENT_EXPR:
- return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
-
- case POSTINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- /* Faster to treat as pre-increment if result is not used. */
- return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
-
case ADDR_EXPR:
if (modifier == EXPAND_STACK_PARM)
target = 0;
if (ignore)
return op0;
- /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
- clever and returns a REG when given a MEM. */
- op0 = protect_from_queue (op0, 1);
-
/* We would like the object in memory. If it is a constant, we can
have it be statically allocated into memory. For a non-constant,
we need to allocate some memory and store the value into it. */
case FILTER_EXPR:
return get_exception_filter (cfun);
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
case FDESC_EXPR:
/* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */
return 0;
}
\f
-/* Expand code for a post- or pre- increment or decrement
- and return the RTX for the result.
- POST is 1 for postinc/decrements and 0 for preinc/decrements. */
-
-static rtx
-expand_increment (tree exp, int post, int ignore)
-{
- rtx op0, op1;
- rtx temp, value;
- tree incremented = TREE_OPERAND (exp, 0);
- optab this_optab = add_optab;
- int icode;
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- int op0_is_copy = 0;
- int single_insn = 0;
- /* 1 means we can't store into OP0 directly,
- because it is a subreg narrower than a word,
- and we don't dare clobber the rest of the word. */
- int bad_subreg = 0;
-
- /* Stabilize any component ref that might need to be
- evaluated more than once below. */
- if (!post
- || TREE_CODE (incremented) == BIT_FIELD_REF
- || (TREE_CODE (incremented) == COMPONENT_REF
- && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
- || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
- incremented = stabilize_reference (incremented);
- /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
- ones into save exprs so that they don't accidentally get evaluated
- more than once by the code below. */
- if (TREE_CODE (incremented) == PREINCREMENT_EXPR
- || TREE_CODE (incremented) == PREDECREMENT_EXPR)
- incremented = save_expr (incremented);
-
- /* Compute the operands as RTX.
- Note whether OP0 is the actual lvalue or a copy of it:
- I believe it is a copy iff it is a register or subreg
- and insns were generated in computing it. */
-
- temp = get_last_insn ();
- op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
-
- /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
- in place but instead must do sign- or zero-extension during assignment,
- so we copy it into a new register and let the code below use it as
- a copy.
-
- Note that we can safely modify this SUBREG since it is know not to be
- shared (it was made by the expand_expr call above). */
-
- if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
- {
- if (post)
- SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
- else
- bad_subreg = 1;
- }
- else if (GET_CODE (op0) == SUBREG
- && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
- {
- /* We cannot increment this SUBREG in place. If we are
- post-incrementing, get a copy of the old value. Otherwise,
- just mark that we cannot increment in place. */
- if (post)
- op0 = copy_to_reg (op0);
- else
- bad_subreg = 1;
- }
-
- op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
- && temp != get_last_insn ());
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
-
- /* Decide whether incrementing or decrementing. */
- if (TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- this_optab = sub_optab;
-
- /* Convert decrement by a constant into a negative increment. */
- if (this_optab == sub_optab
- && GET_CODE (op1) == CONST_INT)
- {
- op1 = GEN_INT (-INTVAL (op1));
- this_optab = add_optab;
- }
-
- if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
- this_optab = this_optab == add_optab ? addv_optab : subv_optab;
-
- /* For a preincrement, see if we can do this with a single instruction. */
- if (!post)
- {
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_data[icode].operand[0].predicate) (op0, mode)
- && (*insn_data[icode].operand[1].predicate) (op0, mode)
- && (*insn_data[icode].operand[2].predicate) (op1, mode))
- single_insn = 1;
- }
-
- /* If OP0 is not the actual lvalue, but rather a copy in a register,
- then we cannot just increment OP0. We must therefore contrive to
- increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, expand here
- unless we can do it with a single insn.
-
- Likewise if storing directly into OP0 would clobber high bits
- we need to preserve (bad_subreg). */
- if (op0_is_copy || (!post && !single_insn) || bad_subreg)
- {
- /* This is the easiest way to increment the value wherever it is.
- Problems with multiple evaluation of INCREMENTED are prevented
- because either (1) it is a component_ref or preincrement,
- in which case it was stabilized above, or (2) it is an array_ref
- with constant index in an array in a register, which is
- safe to reevaluate. */
- tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- ? MINUS_EXPR : PLUS_EXPR),
- TREE_TYPE (exp),
- incremented,
- TREE_OPERAND (exp, 1));
-
- while (TREE_CODE (incremented) == NOP_EXPR
- || TREE_CODE (incremented) == CONVERT_EXPR)
- {
- newexp = convert (TREE_TYPE (incremented), newexp);
- incremented = TREE_OPERAND (incremented, 0);
- }
-
- temp = expand_assignment (incremented, newexp, ! post && ! ignore);
- return post ? op0 : temp;
- }
-
- if (post)
- {
- /* We have a true reference to the value in OP0.
- If there is an insn to add or subtract in this mode, queue it.
- Queuing the increment insn avoids the register shuffling
- that often results if we must increment now and first save
- the old value for subsequent use. */
-
-#if 0 /* Turned off to avoid making extra insn for indexed memref. */
- op0 = stabilize (op0);
-#endif
-
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_data[icode].operand[0].predicate) (op0, mode)
- && (*insn_data[icode].operand[1].predicate) (op0, mode))
- {
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
- op1 = force_reg (mode, op1);
-
- return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
- }
- if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
- {
- rtx addr = (general_operand (XEXP (op0, 0), mode)
- ? force_reg (Pmode, XEXP (op0, 0))
- : copy_to_reg (XEXP (op0, 0)));
- rtx temp, result;
-
- op0 = replace_equiv_address (op0, addr);
- temp = force_reg (GET_MODE (op0), op0);
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
- op1 = force_reg (mode, op1);
-
- /* The increment queue is LIFO, thus we have to `queue'
- the instructions in reverse order. */
- enqueue_insn (op0, gen_move_insn (op0, temp));
- result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
- return result;
- }
- }
-
- /* Preincrement, or we can't increment with one simple insn. */
- if (post)
- /* Save a copy of the value before inc or dec, to return it later. */
- temp = value = copy_to_reg (op0);
- else
- /* Arrange to return the incremented value. */
- /* Copy the rtx because expand_binop will protect from the queue,
- and the results of that would be invalid for us to return
- if our caller does emit_queue before using our result. */
- temp = copy_rtx (value = op0);
-
- /* Increment however we can. */
- op1 = expand_binop (mode, this_optab, value, op1, op0,
- TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
-
- /* Make sure the value is stored into OP0. */
- if (op1 != op0)
- emit_move_insn (op0, op1);
-
- return temp;
-}
-\f
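With expand_increment gone, nothing in the expander needs to defer a
side effect: an increment can be expanded eagerly, saving a copy of
the old value when the old value is what the context consumes.  A
compact sketch of that strategy in plain C, with ordinary execution
standing in for emitted instructions (an illustration, not the actual
replacement code, which relies on increments being lowered before RTL
expansion):

#include <assert.h>

/* Eager expansion of a post-increment: copy first, add at once, hand
   the copy to the consumer (the role QUEUED_COPY used to play, but
   without any queue).  */
static int
expand_post_inc (int *var)
{
  int copy = *var;   /* save the old value up front         */
  *var = *var + 1;   /* "emit" the add immediately          */
  return copy;       /* post-increment yields the old value */
}

/* Pre-increment needs no copy at all: the new value is the result.  */
static int
expand_pre_inc (int *var)
{
  *var = *var + 1;
  return *var;
}

int
main (void)
{
  int i = 5;
  assert (expand_post_inc (&i) == 5 && i == 6);
  assert (expand_pre_inc (&i) == 7 && i == 7);
  return 0;
}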
/* Generate code to calculate EXP using a store-flag instruction
and return an rtx for the result. EXP is either a comparison
or a TRUTH_NOT_EXPR whose operand is a comparison.
because, if the emit_store_flag does anything it will succeed and
OP0 and OP1 will not be used subsequently. */
- result = emit_store_flag (target, code,
- queued_subexp_p (op0) ? copy_rtx (op0) : op0,
- queued_subexp_p (op1) ? copy_rtx (op1) : op1,
+ result = emit_store_flag (target, code, op0, op1,
operand_mode, unsignedp, 1);
if (result)
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
}
- emit_queue ();
- index = protect_from_queue (index, 0);
+
do_pending_stack_adjust ();
op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
convert (index_type, index_expr),
convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_queue ();
- index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
do_tablejump (index, TYPE_MODE (index_type),
#define BRANCH_COST 1
#endif
-/* Macros to access the slots of a QUEUED rtx.
- Here rather than in rtl.h because only the expansion pass
- should ever encounter a QUEUED. */
-
-/* The variable for which an increment is queued. */
-#define QUEUED_VAR(P) XEXP (P, 0)
-/* If the increment has been emitted, this is the insn
- that does the increment. It is zero before the increment is emitted.
- If more than one insn is emitted, this is the first insn. */
-#define QUEUED_INSN(P) XEXP (P, 1)
-/* If a pre-increment copy has been generated, this is the copy
- (it is a temporary reg). Zero if no copy made yet. */
-#define QUEUED_COPY(P) XEXP (P, 2)
-/* This is the body to use for the insn to do the increment.
- It is used to emit the increment. */
-#define QUEUED_BODY(P) XEXP (P, 3)
-/* Next QUEUED in the queue. */
-#define QUEUED_NEXT(P) XEXP (P, 4)
-
/* This is the 4th arg to `expand_expr'.
EXPAND_STACK_PARM means we are possibly expanding a call param onto
the stack. Choosing a value of 2 isn't special; It just allows
/* Create but don't emit one rtl instruction to perform certain operations.
Modes must match; operands must meet the operation's predicates.
- Likewise for subtraction and for just copying.
- These do not call protect_from_queue; caller must do so. */
+ Likewise for subtraction and for just copying. */
extern rtx gen_add2_insn (rtx, rtx);
extern rtx gen_add3_insn (rtx, rtx, rtx);
extern rtx gen_sub2_insn (rtx, rtx);
/* This is run at the start of compiling a function. */
extern void init_expr (void);
-/* This is run at the end of compiling a function. */
-extern void finish_expr_for_function (void);
-
-/* Use protect_from_queue to convert a QUEUED expression
- into something that you can put immediately into an instruction. */
-extern rtx protect_from_queue (rtx, int);
-
-/* Perform all the pending incrementations. */
-extern void emit_queue (void);
-
-/* Tell if something has a queued subexpression. */
-extern int queued_subexp_p (rtx);
-
/* Emit some rtl insns to move data between rtx's, converting machine modes.
Both modes must be floating or both fixed. */
extern void convert_move (rtx, rtx, int);
/* Evaluate now the sizes of any types declared among the arguments. */
for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
- {
- expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
- /* Flush the queue in case this parameter declaration has
- side-effects. */
- emit_queue ();
- }
+ expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
}
/* Start the RTL for a new function, and set variables used for
{
rtx clobber_after;
- finish_expr_for_function ();
-
/* If arg_pointer_save_area was referenced only from a nested
function, we will not have initialized it yet. Do that now. */
if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
/* List of labels that must never be deleted. */
rtx x_forced_labels;
-
- /* Postincrements that still need to be expanded. */
- rtx x_pending_chain;
};
#define pending_stack_adjust (cfun->expr->x_pending_stack_adjust)
#define saveregs_value (cfun->expr->x_saveregs_value)
#define apply_args_value (cfun->expr->x_apply_args_value)
#define forced_labels (cfun->expr->x_forced_labels)
-#define pending_chain (cfun->expr->x_pending_chain)
#define stack_pointer_delta (cfun->expr->x_stack_pointer_delta)
/* This structure can save all the important global and static variables
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
return attr_rtx (CONST_STRING, attr_printf (MAX_DIGITS, "%d", j));
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
case MEM:
return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
- if (target)
- target = protect_from_queue (target, 1);
-
if (flag_force_mem)
{
/* Load duplicate non-volatile operands once. */
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
+ op0 = force_not_mem (op0);
- if (targ0)
- targ0 = protect_from_queue (targ0, 1);
- else
+ if (!targ0)
targ0 = gen_reg_rtx (mode);
- if (targ1)
- targ1 = protect_from_queue (targ1, 1);
- else
+ if (!targ1)
targ1 = gen_reg_rtx (mode);
/* Record where to go back to if we fail. */
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
-
if (flag_force_mem)
{
op0 = force_not_mem (op0);
&& rtx_cost (op1, binoptab->code) > COSTS_N_INSNS (1))
op1 = force_reg (mode, op1);
- if (targ0)
- targ0 = protect_from_queue (targ0, 1);
- else
+ if (!targ0)
targ0 = gen_reg_rtx (mode);
- if (targ1)
- targ1 = protect_from_queue (targ1, 1);
- else
+ if (!targ1)
targ1 = gen_reg_rtx (mode);
/* Record where to go back to if we fail. */
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
-
- if (target)
- target = protect_from_queue (target, 1);
+ op0 = force_not_mem (op0);
if (unoptab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
if (submode == BLKmode)
abort ();
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
+ op0 = force_not_mem (op0);
last = get_last_insn ();
- if (target)
- target = protect_from_queue (target, 1);
-
this_abs_optab = ! unsignedp && flag_trapv
&& (GET_MODE_CLASS(mode) == MODE_INT)
? absv_optab : abs_optab;
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
rtx pat;
- temp = target = protect_from_queue (target, 1);
-
- op0 = protect_from_queue (op0, 0);
+ temp = target;
/* Sign and zero extension from memory is often done specially on
RISC machines, so forcing into a register here can pessimize
if (size == 0)
abort ();
- emit_queue ();
- x = protect_from_queue (x, 0);
- y = protect_from_queue (y, 0);
- size = protect_from_queue (size, 0);
-
/* Try to use a memory block compare insn - either cmpstr
or cmpmem will do. */
for (cmp_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
prepare_operand (int icode, rtx x, int opnum, enum machine_mode mode,
enum machine_mode wider_mode, int unsignedp)
{
- x = protect_from_queue (x, 0);
-
if (mode != wider_mode)
x = convert_modes (wider_mode, mode, x, unsignedp);
op0 = force_reg (mode, op0);
#endif
- emit_queue ();
if (unsignedp)
comparison = unsigned_condition (comparison);
{
enum rtx_code comparison = *pcomparison;
enum rtx_code swapped = swap_condition (comparison);
- rtx x = protect_from_queue (*px, 0);
- rtx y = protect_from_queue (*py, 0);
+ rtx x = *px;
+ rtx y = *py;
enum machine_mode orig_mode = GET_MODE (x);
enum machine_mode mode;
rtx value, target, insns, equiv;
op3 = force_not_mem (op3);
}
- if (target)
- target = protect_from_queue (target, 1);
- else
+ if (!target)
target = gen_reg_rtx (mode);
subtarget = target;
- emit_queue ();
-
- op2 = protect_from_queue (op2, 0);
- op3 = protect_from_queue (op3, 0);
-
/* If the insn doesn't accept these operands, put them in pseudos. */
if (! (*insn_data[icode].operand[0].predicate)
op3 = force_not_mem (op3);
}
- if (target)
- target = protect_from_queue (target, 1);
- else
+ if (!target)
target = gen_reg_rtx (mode);
- subtarget = target;
-
- emit_queue ();
-
- op2 = protect_from_queue (op2, 0);
- op3 = protect_from_queue (op3, 0);
-
/* If the insn doesn't accept these operands, put them in pseudos. */
if (! (*insn_data[icode].operand[0].predicate)
- (subtarget, insn_data[icode].operand[0].mode))
+ (target, insn_data[icode].operand[0].mode))
subtarget = gen_reg_rtx (insn_data[icode].operand[0].mode);
+ else
+ subtarget = target;
if (! (*insn_data[icode].operand[2].predicate)
(op2, insn_data[icode].operand[2].mode))
\f
/* These functions attempt to generate an insn body, rather than
emitting the insn, but if the gen function already emits them, we
- make no attempt to turn them back into naked patterns.
-
- They do not protect from queued increments,
- because they may be used 1) in protect_from_queue itself
- and 2) in other passes where there is no queue. */
+ make no attempt to turn them back into naked patterns. */
/* Generate and return an insn body to add Y to X. */
if (icode != CODE_FOR_nothing)
{
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (imode != GET_MODE (from))
from = convert_to_mode (imode, from, unsignedp);
rtx temp;
REAL_VALUE_TYPE offset;
- emit_queue ();
-
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
rtx value;
convert_optab tab = unsignedp ? ufloat_optab : sfloat_optab;
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (GET_MODE_SIZE (GET_MODE (from)) < GET_MODE_SIZE (SImode))
from = convert_to_mode (SImode, from, unsignedp);
if (icode != CODE_FOR_nothing)
{
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (fmode != GET_MODE (from))
from = convert_to_mode (fmode, from, 0);
lab1 = gen_label_rtx ();
lab2 = gen_label_rtx ();
- emit_queue ();
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
if (!libfunc)
abort ();
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
pretend to be looking at the entire value and comparing it. */
DEF_RTL_EXPR(CC0, "cc0", "", RTX_OBJ)
-/* =====================================================================
- A QUEUED expression really points to a member of the queue of instructions
- to be output later for postincrement/postdecrement.
- QUEUED expressions never become part of instructions.
- When a QUEUED expression would be put into an instruction,
- instead either the incremented variable or a copy of its previous
- value is used.
-
- Operands are:
- 0. the variable to be incremented (a REG rtx).
- 1. the incrementing instruction, or 0 if it hasn't been output yet.
- 2. A REG rtx for a copy of the old value of the variable, or 0 if none yet.
- 3. the body to use for the incrementing instruction
- 4. the next QUEUED expression in the queue.
- ====================================================================== */
-
-DEF_RTL_EXPR(QUEUED, "queued", "eeeee", RTX_EXTRA)
-
/* ----------------------------------------------------------------------
Expressions for operators in an rtl pattern
---------------------------------------------------------------------- */
case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
|| byte >= GET_MODE_SIZE (innermode))
abort ();
- if (GET_CODE (op) == QUEUED)
- return NULL_RTX;
-
new = simplify_subreg (outermode, op, innermode, byte);
if (new)
return new;
x = convert_memory_address (Pmode, x);
- emit_queue ();
do_pending_stack_adjust ();
emit_indirect_jump (x);
}
if ((! allows_mem && MEM_P (op))
|| GET_CODE (op) == CONCAT)
{
- real_output_rtx[i] = protect_from_queue (op, 1);
+ real_output_rtx[i] = op;
op = gen_reg_rtx (GET_MODE (op));
if (is_inout)
emit_move_insn (op, real_output_rtx[i]);
generating_concat_p = 0;
- for (i = 0; i < ninputs - ninout; i++)
- ASM_OPERANDS_INPUT (body, i)
- = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
-
- for (i = 0; i < noutputs; i++)
- output_rtx[i] = protect_from_queue (output_rtx[i], 1);
-
/* For in-out operands, copy output rtx to input rtx. */
for (i = 0; i < ninout; i++)
{
TREE_VALUE (tail) = o[i];
}
}
-
- /* Those MODIFY_EXPRs could do autoincrements. */
- emit_queue ();
}
/* A subroutine of expand_asm_operands. Check that all operands have
/* Free any temporaries used to evaluate this expression. */
free_temp_slots ();
-
- emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
{
expand_expr (retval, NULL_RTX, VOIDmode, 0);
- emit_queue ();
expand_null_return ();
return;
}
result_reg_mode = tmpmode;
result_reg = gen_reg_rtx (result_reg_mode);
- emit_queue ();
for (i = 0; i < n_regs; i++)
emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
result_pseudos[i]);
val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
- emit_queue ();
/* Return the calculated value. */
expand_value_return (shift_return_value (val));
}
{
/* No hard reg used; calculate value into hard return reg. */
expand_expr (retval, const0_rtx, VOIDmode, 0);
- emit_queue ();
expand_value_return (result_rtl);
}
}
|| code == POINTER_TYPE || code == REFERENCE_TYPE)
expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
0);
- emit_queue ();
}
else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
{
emit_line_note (DECL_SOURCE_LOCATION (decl));
expand_assignment (decl, DECL_INITIAL (decl), 0);
- emit_queue ();
}
/* Don't let the initialization count as "using" the variable. */
nesting_stack = thiscase;
do_pending_stack_adjust ();
- emit_queue ();
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
convert (index_type, index_expr),
convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_queue ();
- index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
mode = TYPE_MODE (index_type);
if (count == 0)
{
expand_expr (index_expr, const0_rtx, VOIDmode, 0);
- emit_queue ();
emit_jump (default_label);
}
}
}
- emit_queue ();
do_pending_stack_adjust ();
- index = protect_from_queue (index, 0);
if (MEM_P (index))
index = copy_to_reg (index);
if (GET_CODE (index) == CONST_INT