+2017-08-30 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * rtl.h (partial_subreg_p): New function.
+ * caller-save.c (save_call_clobbered_regs): Use it.
+ * calls.c (expand_call): Likewise.
+ * combine.c (combinable_i3pat): Likewise.
+ (simplify_set): Likewise.
+ (make_extraction): Likewise.
+ (make_compound_operation_int): Likewise.
+ (gen_lowpart_or_truncate): Likewise.
+ (force_to_mode): Likewise.
+ (make_field_assignment): Likewise.
+ (reg_truncated_to_mode): Likewise.
+ (record_truncated_value): Likewise.
+ (move_deaths): Likewise.
+ * cse.c (record_jump_cond): Likewise.
+ (cse_insn): Likewise.
+ * cselib.c (cselib_lookup_1): Likewise.
+ * expmed.c (extract_bit_field_using_extv): Likewise.
+ * function.c (assign_parm_setup_reg): Likewise.
+ * ifcvt.c (noce_convert_multiple_sets): Likewise.
+ * ira-build.c (create_insn_allocnos): Likewise.
+ * lra-coalesce.c (merge_pseudos): Likewise.
+ * lra-constraints.c (match_reload): Likewise.
+ (simplify_operand_subreg): Likewise.
+ (curr_insn_transform): Likewise.
+ * lra-lives.c (process_bb_lives): Likewise.
+ * lra.c (new_insn_reg): Likewise.
+ (lra_substitute_pseudo): Likewise.
+ * regcprop.c (mode_change_ok): Likewise.
+ (maybe_mode_change): Likewise.
+ (copyprop_hardreg_forward_1): Likewise.
+ * reload.c (push_reload): Likewise.
+ (find_reloads): Likewise.
+ (find_reloads_subreg_address): Likewise.
+ * reload1.c (alter_reg): Likewise.
+ (eliminate_regs_1): Likewise.
+ * simplify-rtx.c (simplify_unary_operation_1): Likewise.
+
2017-08-30 David Edelsohn <dje.gcc@gmail.com>
* config/rs6000/rs6000.c (rs6000_expand_binop_builtin): Revert
nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
mode = HARD_REGNO_CALLER_SAVE_MODE
(r, nregs, PSEUDO_REGNO_MODE (regno));
- if (GET_MODE_BITSIZE (mode)
- > GET_MODE_BITSIZE (save_mode[r]))
+ if (partial_subreg_p (save_mode[r], mode))
save_mode[r] = mode;
while (nregs-- > 0)
SET_HARD_REG_BIT (hard_regs_to_save, r + nregs);
|| ((caller_mode != caller_promoted_mode
|| callee_mode != callee_promoted_mode)
&& (caller_unsignedp != callee_unsignedp
- || GET_MODE_BITSIZE (caller_mode)
- < GET_MODE_BITSIZE (callee_mode)))))
+ || partial_subreg_p (caller_mode, callee_mode)))))
{
try_tail_call = 0;
maybe_complain_about_tail_call (exp,
STACK_POINTER_REGNUM, since these are always considered to be
live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
subdest = dest;
- if (GET_CODE (subdest) == SUBREG
- && (GET_MODE_SIZE (GET_MODE (subdest))
- >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
+ if (GET_CODE (subdest) == SUBREG && !partial_subreg_p (subdest))
subdest = SUBREG_REG (subdest);
if (pi3dest_killed
&& REG_P (subdest)
/* If we have (set (cc0) (subreg ...)), we try to remove the subreg
in SRC. */
if (dest == cc0_rtx
- && GET_CODE (src) == SUBREG
- && subreg_lowpart_p (src)
- && (GET_MODE_PRECISION (GET_MODE (src))
- < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (src)))))
+ && partial_subreg_p (src)
+ && subreg_lowpart_p (src))
{
rtx inner = SUBREG_REG (src);
machine_mode inner_mode = GET_MODE (inner);
/* Never narrow an object, since that might not be safe. */
if (mode != VOIDmode
- && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
+ && partial_subreg_p (extraction_mode, mode))
extraction_mode = mode;
if (!MEM_P (inner))
if (wanted_inner_mode != VOIDmode
&& inner_mode != wanted_inner_mode
&& ! pos_rtx
- && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
+ && partial_subreg_p (wanted_inner_mode, is_mode)
&& MEM_P (inner)
&& ! mode_dependent_address_p (XEXP (inner, 0), MEM_ADDR_SPACE (inner))
&& ! MEM_VOLATILE_P (inner))
to (subreg:QI (lshiftrt:SI (reg:SI) (const_int 7)) 0). */
|| (GET_CODE (inner) == AND
&& CONST_INT_P (XEXP (inner, 1))
- && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (inner))
+ && partial_subreg_p (x)
&& exact_log2 (UINTVAL (XEXP (inner, 1)))
>= GET_MODE_BITSIZE (mode) - 1)))
subreg_code = SET;
tem = simplified;
if (GET_CODE (tem) != GET_CODE (inner)
- && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (inner))
+ && partial_subreg_p (x)
&& subreg_lowpart_p (x))
{
rtx newer
if (GET_CODE (newer) != SUBREG)
newer = make_compound_operation (newer, in_code);
- /* force_to_mode can expand compounds. If it just re-expanded the
- compound, use gen_lowpart to convert to the desired mode. */
+ /* force_to_mode can expand compounds. If it just re-expanded
+ the compound, use gen_lowpart to convert to the desired
+ mode. */
if (rtx_equal_p (newer, x)
/* Likewise if it re-expanded the compound only partially.
This happens for SUBREG of ZERO_EXTRACT if they extract
gen_lowpart_or_truncate (machine_mode mode, rtx x)
{
if (!CONST_INT_P (x)
- && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
+ && partial_subreg_p (mode, GET_MODE (x))
&& !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
&& !(REG_P (x) && reg_truncated_to_mode (mode, x)))
{
/* It is not valid to do a right-shift in a narrower mode
than the one it came in with. */
if ((code == LSHIFTRT || code == ASHIFTRT)
- && GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (x)))
+ && partial_subreg_p (mode, GET_MODE (x)))
op_mode = GET_MODE (x);
/* Truncate MASK to fit OP_MODE. */
if the constant masks to zero all the bits the mode doesn't have. */
if (GET_CODE (x) == SUBREG
&& subreg_lowpart_p (x)
- && ((GET_MODE_SIZE (GET_MODE (x))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
+ && (partial_subreg_p (x)
|| (0 == (mask
& GET_MODE_MASK (GET_MODE (x))
& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
&& subreg_lowpart_p (XEXP (src, 0))
- && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
+ && partial_subreg_p (XEXP (src, 0))
&& GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
&& CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
&& INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
if (truncated == 0
|| rsp->truncation_label < label_tick_ebb_start)
return false;
- if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
+ if (!partial_subreg_p (mode, truncated))
return true;
if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
return true;
machine_mode original_mode = GET_MODE (SUBREG_REG (x));
truncated_mode = GET_MODE (x);
- if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
+ if (!partial_subreg_p (truncated_mode, original_mode))
return true;
if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
return true;
rsp = &reg_stat[REGNO (x)];
if (rsp->truncated_to_mode == 0
|| rsp->truncation_label < label_tick_ebb_start
- || (GET_MODE_SIZE (truncated_mode)
- < GET_MODE_SIZE (rsp->truncated_to_mode)))
+ || partial_subreg_p (truncated_mode, rsp->truncated_to_mode))
{
rsp->truncated_to_mode = truncated_mode;
rsp->truncation_label = label_tick;
the remaining registers in place of NOTE. */
if (note != 0 && regno < FIRST_PSEUDO_REGISTER
- && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
- > GET_MODE_SIZE (GET_MODE (x))))
+ && partial_subreg_p (GET_MODE (x), GET_MODE (XEXP (note, 0))))
{
unsigned int deadregno = REGNO (XEXP (note, 0));
unsigned int deadend = END_REGNO (XEXP (note, 0));
their own REG_DEAD notes lying around. */
else if ((note == 0
|| (note != 0
- && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
- < GET_MODE_SIZE (GET_MODE (x)))))
+ && partial_subreg_p (GET_MODE (XEXP (note, 0)),
+ GET_MODE (x))))
&& regno < FIRST_PSEUDO_REGISTER
&& REG_NREGS (x) > 1)
{
if we test MODE instead, we can get an infinite recursion
alternating between two modes each wider than MODE. */
- if (code == NE && GET_CODE (op0) == SUBREG
- && subreg_lowpart_p (op0)
- && (GET_MODE_SIZE (GET_MODE (op0))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
+ if (code == NE
+ && partial_subreg_p (op0)
+ && subreg_lowpart_p (op0))
{
machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
rtx tem = record_jump_cond_subreg (inner_mode, op1);
reversed_nonequality);
}
- if (code == NE && GET_CODE (op1) == SUBREG
- && subreg_lowpart_p (op1)
- && (GET_MODE_SIZE (GET_MODE (op1))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
+ if (code == NE
+ && partial_subreg_p (op1)
+ && subreg_lowpart_p (op1))
{
machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
rtx tem = record_jump_cond_subreg (inner_mode, op0);
&& ! (src != 0
&& GET_CODE (src) == SUBREG
&& GET_MODE (src) == GET_MODE (p->exp)
- && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
+ && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
+ GET_MODE (SUBREG_REG (p->exp)))))
continue;
if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
&& ! (src != 0
&& GET_CODE (src) == SUBREG
&& GET_MODE (src) == GET_MODE (elt->exp)
- && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
+ && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
+ GET_MODE (SUBREG_REG (elt->exp)))))
{
elt = elt->next_same_value;
continue;
&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
/ UNITS_PER_WORD)
== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
- && (GET_MODE_SIZE (GET_MODE (dest))
- >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
+ && !partial_subreg_p (dest)
&& sets[i].src_elt != 0)
{
machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
&& (lwider == NULL
- || GET_MODE_SIZE (lmode)
- < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
+ || partial_subreg_p (lmode,
+ GET_MODE (lwider->elt->val_rtx))))
{
struct elt_loc_list *el;
if (i < FIRST_PSEUDO_REGISTER
&& TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (target), ext_mode))
{
target = gen_lowpart (ext_mode, target);
- if (GET_MODE_PRECISION (ext_mode)
- > GET_MODE_PRECISION (GET_MODE (spec_target)))
+ if (partial_subreg_p (GET_MODE (spec_target), ext_mode))
spec_target_subreg = target;
}
else
push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
- if (GET_CODE (tempreg) == SUBREG
+ if (partial_subreg_p (tempreg)
&& GET_MODE (tempreg) == data->nominal_mode
&& REG_P (SUBREG_REG (tempreg))
&& data->nominal_mode == data->passed_mode
- && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
- && GET_MODE_SIZE (GET_MODE (tempreg))
- < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
+ && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
{
/* The argument is already sign/zero extended, so note it
into the subreg. */
{
machine_mode src_mode = GET_MODE (new_val);
machine_mode dst_mode = GET_MODE (temp);
- if (GET_MODE_SIZE (src_mode) <= GET_MODE_SIZE (dst_mode))
+ if (!partial_subreg_p (dst_mode, src_mode))
{
end_sequence ();
return FALSE;
{
machine_mode src_mode = GET_MODE (old_val);
machine_mode dst_mode = GET_MODE (temp);
- if (GET_MODE_SIZE (src_mode) <= GET_MODE_SIZE (dst_mode))
+ if (!partial_subreg_p (dst_mode, src_mode))
{
end_sequence ();
return FALSE;
if (outer != NULL && GET_CODE (outer) == SUBREG)
{
machine_mode wmode = GET_MODE (outer);
- if (GET_MODE_SIZE (wmode) > GET_MODE_SIZE (ALLOCNO_WMODE (a)))
+ if (partial_subreg_p (ALLOCNO_WMODE (a), wmode))
ALLOCNO_WMODE (a) = wmode;
}
}
= (lra_merge_live_ranges
(lra_reg_info[first].live_ranges,
lra_copy_live_range_list (lra_reg_info[first2].live_ranges)));
- if (GET_MODE_SIZE (lra_reg_info[first].biggest_mode)
- < GET_MODE_SIZE (lra_reg_info[first2].biggest_mode))
+ if (partial_subreg_p (lra_reg_info[first].biggest_mode,
+ lra_reg_info[first2].biggest_mode))
lra_reg_info[first].biggest_mode = lra_reg_info[first2].biggest_mode;
}
push_to_sequence (*before);
if (inmode != outmode)
{
- if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
+ if (partial_subreg_p (outmode, inmode))
{
reg = new_in_reg
= lra_create_new_reg_with_unique_value (inmode, in_rtx,
bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
insert_before = (type != OP_OUT
- || GET_MODE_SIZE (innermode)
- > GET_MODE_SIZE (mode));
+ || partial_subreg_p (mode, innermode));
insert_after = type != OP_IN;
insert_move_for_subreg (insert_before ? &before : NULL,
insert_after ? &after : NULL,
lra_assert (out >= 0 && in >= 0
&& curr_static_id->operand[out].type == OP_OUT
&& curr_static_id->operand[in].type == OP_IN);
- rld = (GET_MODE_SIZE (GET_MODE (dest)) <= GET_MODE_SIZE (GET_MODE (src))
- ? dest : src);
+ rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
rld_mode = GET_MODE (rld);
#ifdef SECONDARY_MEMORY_NEEDED_MODE
sec_mode = SECONDARY_MEMORY_NEEDED_MODE (rld_mode);
new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
NO_REGS, "secondary");
/* If the mode is changed, it should be wider. */
- lra_assert (GET_MODE_SIZE (sec_mode) >= GET_MODE_SIZE (rld_mode));
+ lra_assert (!partial_subreg_p (sec_mode, rld_mode));
if (sec_mode != rld_mode)
{
/* If the target says specifically to use another mode for
for (reg = curr_id->regs; reg != NULL; reg = reg->next)
{
int i, regno = reg->regno;
-
- if (GET_MODE_SIZE (reg->biggest_mode)
- > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode))
+
+ if (partial_subreg_p (lra_reg_info[regno].biggest_mode,
+ reg->biggest_mode))
lra_reg_info[regno].biggest_mode = reg->biggest_mode;
if (regno < FIRST_PSEUDO_REGISTER)
{
part of multi-register group. Process this case
here. */
for (i = 1; i < hard_regno_nregs[regno][reg->biggest_mode]; i++)
- if (GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno + i]))
- > GET_MODE_SIZE (lra_reg_info[regno + i].biggest_mode))
+ if (partial_subreg_p (lra_reg_info[regno + i].biggest_mode,
+ GET_MODE (regno_reg_rtx[regno + i])))
lra_reg_info[regno + i].biggest_mode
= GET_MODE (regno_reg_rtx[regno + i]);
}
lra_insn_reg *ir = lra_insn_reg_pool.allocate ();
ir->type = type;
ir->biggest_mode = mode;
- if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode)
- && NONDEBUG_INSN_P (insn))
+ if (NONDEBUG_INSN_P (insn)
+ && partial_subreg_p (lra_reg_info[regno].biggest_mode, mode))
lra_reg_info[regno].biggest_mode = mode;
ir->subreg_p = subreg_p;
ir->early_clobber = early_clobber;
if (mode != inner_mode
&& ! (CONST_INT_P (new_reg) && SCALAR_INT_MODE_P (mode)))
{
- if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (inner_mode)
+ if (!partial_subreg_p (mode, inner_mode)
|| ! SCALAR_INT_MODE_P (inner_mode))
new_reg = gen_rtx_SUBREG (mode, new_reg, 0);
else
mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
unsigned int regno ATTRIBUTE_UNUSED)
{
- if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
+ if (partial_subreg_p (orig_mode, new_mode))
return false;
#ifdef CANNOT_CHANGE_MODE_CLASS
machine_mode new_mode, unsigned int regno,
unsigned int copy_regno ATTRIBUTE_UNUSED)
{
- if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
- && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
+ if (partial_subreg_p (copy_mode, orig_mode)
+ && partial_subreg_p (copy_mode, new_mode))
return NULL_RTX;
/* Avoid creating multiple copies of the stack pointer. Some ports
/* If a noop move is using narrower mode than we have recorded,
we need to either remove the noop move, or kill_set_value. */
if (noop_p
- && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
- < GET_MODE_BITSIZE (vd->e[REGNO (SET_DEST (set))].mode)))
+ && partial_subreg_p (GET_MODE (SET_DEST (set)),
+ vd->e[REGNO (SET_DEST (set))].mode))
{
if (noop_move_p (insn))
{
&& paradoxical_subreg_p (inmode, inner_mode)
&& LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
|| (WORD_REGISTER_OPERATIONS
- && (GET_MODE_PRECISION (inmode)
- < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
+ && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
&& ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
/ UNITS_PER_WORD)))))
|| MEM_P (SUBREG_REG (out)))
&& (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
|| (WORD_REGISTER_OPERATIONS
- && (GET_MODE_PRECISION (outmode)
- < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
+ && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
&& ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
/ UNITS_PER_WORD)))))
/* The modes can be different. If they are, we want to reload in
the larger mode, so that the value is valid for both modes. */
if (inmode != VOIDmode
- && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
+ && partial_subreg_p (rld[i].inmode, inmode))
rld[i].inmode = inmode;
if (outmode != VOIDmode
- && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
+ && partial_subreg_p (rld[i].outmode, outmode))
rld[i].outmode = outmode;
if (in != 0)
{
overwrite the operands only when the new mode is larger.
See also PR33613. */
if (!rld[i].in
- || GET_MODE_SIZE (GET_MODE (in))
- > GET_MODE_SIZE (GET_MODE (rld[i].in)))
+ || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
rld[i].in = in;
if (!rld[i].in_reg
|| (in_reg
- && GET_MODE_SIZE (GET_MODE (in_reg))
- > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
+ && partial_subreg_p (GET_MODE (rld[i].in_reg),
+ GET_MODE (in_reg))))
rld[i].in_reg = in_reg;
}
if (out != 0)
{
if (!rld[i].out
|| (out
- && GET_MODE_SIZE (GET_MODE (out))
- > GET_MODE_SIZE (GET_MODE (rld[i].out))))
+ && partial_subreg_p (GET_MODE (rld[i].out),
+ GET_MODE (out))))
rld[i].out = out;
if (outloc
&& (!rld[i].out_reg
- || GET_MODE_SIZE (GET_MODE (*outloc))
- > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
+ || partial_subreg_p (GET_MODE (rld[i].out_reg),
+ GET_MODE (*outloc))))
rld[i].out_reg = *outloc;
}
if (reg_class_subset_p (rclass, rld[i].rclass))
int regno;
machine_mode rel_mode = inmode;
- if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
+ if (out && partial_subreg_p (rel_mode, outmode))
rel_mode = outmode;
for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
/* Compute reload_mode and reload_nregs. */
for (i = 0; i < n_reloads; i++)
{
- rld[i].mode
- = (rld[i].inmode == VOIDmode
- || (GET_MODE_SIZE (rld[i].outmode)
- > GET_MODE_SIZE (rld[i].inmode)))
- ? rld[i].outmode : rld[i].inmode;
+ rld[i].mode = rld[i].inmode;
+ if (rld[i].mode == VOIDmode
+ || partial_subreg_p (rld[i].mode, rld[i].outmode))
+ rld[i].mode = rld[i].outmode;
rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
}
return NULL;
if (WORD_REGISTER_OPERATIONS
- && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
+ && partial_subreg_p (outer_mode, inner_mode)
&& ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
== (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
return NULL;
if (spill_stack_slot[from_reg])
{
- if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
- > inherent_size)
+ if (partial_subreg_p (mode,
+ GET_MODE (spill_stack_slot[from_reg])))
mode = GET_MODE (spill_stack_slot[from_reg]);
if (spill_stack_slot_width[from_reg] > total_size)
total_size = spill_stack_slot_width[from_reg];
int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
if (MEM_P (new_rtx)
- && ((x_size < new_size
+ && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
/* On RISC machines, combine can create rtl of the form
(set (subreg:m1 (reg:m2 R) 0) ...)
where m1 < m2, and expects something interesting to
extern int subreg_lowpart_p (const_rtx);
extern unsigned int subreg_size_lowpart_offset (unsigned int, unsigned int);
+/* Return true if a subreg of mode OUTERMODE would only access part of
+ an inner register with mode INNERMODE. The other bits of the inner
+ register would then be "don't care" on read. The behavior for writes
+ depends on REGMODE_NATURAL_SIZE; bits in the same REGMODE_NATURAL_SIZE-d
+ chunk would be clobbered but other bits would be preserved. */
+
+inline bool
+partial_subreg_p (machine_mode outermode, machine_mode innermode)
+{
+ return GET_MODE_PRECISION (outermode) < GET_MODE_PRECISION (innermode);
+}
+
+/* Likewise return true if X is a subreg that is smaller than the inner
+ register. Use df_read_modify_subreg_p to test whether writing to such
+ a subreg preserves any part of the inner register. */
+
+inline bool
+partial_subreg_p (const_rtx x)
+{
+ if (GET_CODE (x) != SUBREG)
+ return false;
+ return partial_subreg_p (GET_MODE (x), GET_MODE (SUBREG_REG (x)));
+}
+
/* Return true if a subreg with the given outer and inner modes is
paradoxical. */
XEXP (op, 0), const0_rtx);
- if (GET_CODE (op) == SUBREG
+ if (partial_subreg_p (op)
&& subreg_lowpart_p (op)
- && (GET_MODE_SIZE (GET_MODE (op))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
&& GET_CODE (SUBREG_REG (op)) == ASHIFT
&& XEXP (SUBREG_REG (op), 0) == const1_rtx)
{
of mode N. E.g.
(zero_extend:SI (subreg:QI (and:SI (reg:SI) (const_int 63)) 0)) is
(and:SI (reg:SI) (const_int 63)). */
- if (GET_CODE (op) == SUBREG
+ if (partial_subreg_p (op)
&& is_a <scalar_int_mode> (mode, &int_mode)
&& is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op)), &op0_mode)
- && GET_MODE_PRECISION (GET_MODE (op)) < GET_MODE_PRECISION (op0_mode)
&& GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT
&& GET_MODE_PRECISION (int_mode) >= GET_MODE_PRECISION (op0_mode)
&& subreg_lowpart_p (op)