}
else
{
- scalar_mode intermediate;
rtx tmp;
int shift_amount;
static bool
block_move_libcall_safe_for_call_parm (void)
{
-#if defined (REG_PARM_STACK_SPACE)
tree fn;
-#endif
/* If arguments are pushed on the stack, then they're safe. */
if (PUSH_ARGS)
{
CUMULATIVE_ARGS args_so_far_v;
cumulative_args_t args_so_far;
- tree fn, arg;
+ tree arg;
fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
/* Get the address of the stack space.
In this case, we do not deal with EXTRA separately.
A single stack adjust will do. */
- poly_int64 offset;
+ poly_int64 const_args_so_far;
if (! args_addr)
{
temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
extra = 0;
}
- else if (poly_int_rtx_p (args_so_far, &offset))
+ else if (poly_int_rtx_p (args_so_far, &const_args_so_far))
temp = memory_address (BLKmode,
plus_constant (Pmode, args_addr,
- skip + offset));
+ skip + const_args_so_far));
else
temp = memory_address (BLKmode,
plus_constant (Pmode,
{
/* Scalar partly in registers. This case is only supported
for fixed-width modes. */
- int size = GET_MODE_SIZE (mode).to_constant ();
- size /= UNITS_PER_WORD;
+ int num_words = GET_MODE_SIZE (mode).to_constant ();
+ num_words /= UNITS_PER_WORD;
int i;
int not_stack;
/* # bytes of start of argument
that we must make space for but need not store. */
/* Loop over all the words allocated on the stack for this arg. */
/* We can do it by words, because any scalar bigger than a word
has a size a multiple of a word. */
- for (i = size - 1; i >= not_stack; i--)
+ for (i = num_words - 1; i >= not_stack; i--)
if (i >= not_stack + offset)
if (!emit_push_insn (operand_subword_force (x, i, mode),
word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
}
else
{
- machine_mode to_mode
- = GET_MODE_INNER (GET_MODE (to_rtx));
+ to_mode = GET_MODE_INNER (to_mode);
rtx from_real
= simplify_gen_subreg (to_mode, result,
TYPE_MODE (TREE_TYPE (from)),
else
{
concat_store_slow:;
- rtx temp = assign_stack_temp (to_mode,
+ rtx temp = assign_stack_temp (GET_MODE (to_rtx),
GET_MODE_SIZE (GET_MODE (to_rtx)));
write_complex_part (temp, XEXP (to_rtx, 0), false);
write_complex_part (temp, XEXP (to_rtx, 1), true);
&& exp_size >= 0
&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
{
- tree type = TREE_TYPE (value);
+ type = TREE_TYPE (value);
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
else
{
unsigned HOST_WIDE_INT idx;
- tree index, value;
HOST_WIDE_INT count = 0, zero_count = 0;
need_to_clear = ! const_bounds_p;
icode = convert_optab_handler (vec_init_optab, mode, emode);
if (icode != CODE_FOR_nothing)
{
- unsigned int i, n = const_n_elts;
+ unsigned int n = const_n_elts;
if (emode != eltmode)
{
vec_vec_init_p = true;
}
vector = rtvec_alloc (n);
- for (i = 0; i < n; i++)
- RTVEC_ELT (vector, i) = CONST0_RTX (emode);
+ for (unsigned int k = 0; k < n; k++)
+ RTVEC_ELT (vector, k) = CONST0_RTX (emode);
}
}
word size, we need to load the value (see again store_bit_field). */
if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
{
- scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
+ temp_mode = smallest_int_mode_for_size (bitsize);
temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
temp_mode, false, NULL);
}
if (TREE_CODE (treeop0) == SSA_NAME
&& (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
{
- tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
+ type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
}
else if (COMPARISON_CLASS_P (treeop0))
{
- tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
+ type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
enum tree_code cmpcode = TREE_CODE (treeop0);
op00 = expand_normal (TREE_OPERAND (treeop0, 0));
op01 = expand_normal (TREE_OPERAND (treeop0, 1));
tree oprnd0 = treeop0;
tree oprnd1 = treeop1;
tree oprnd2 = treeop2;
- rtx op2;
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
op2 = expand_normal (oprnd2);
tree oprnd0 = treeop0;
tree oprnd1 = treeop1;
tree oprnd2 = treeop2;
- rtx op2;
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
op2 = expand_normal (oprnd2);
tree oprnd0 = treeop0;
tree oprnd1 = treeop1;
tree oprnd2 = treeop2;
- rtx op2;
this_optab = optab_for_tree_code (code, type, optab_default);
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
else
bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
- rtx op0 = expand_normal (treeop0);
- rtx op1 = expand_normal (treeop1);
+ op0 = expand_normal (treeop0);
+ op1 = expand_normal (treeop1);
rtx dst = gen_reg_rtx (mode);
emit_move_insn (dst, op0);
store_bit_field (dst, bitsize, bitpos, 0, 0,
{
rtx r;
location_t saved_loc = curr_insn_location ();
- location_t loc = gimple_location (g);
+ loc = gimple_location (g);
if (loc != UNKNOWN_LOCATION)
set_curr_insn_location (loc);
ops.code = gimple_assign_rhs_code (g);
GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
the former to the latter according to the signedness of the
type. */
- scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
+ scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (type);
temp = immed_wide_int_const
- (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
+ (wi::to_wide (exp, GET_MODE_PRECISION (int_mode)), int_mode);
return temp;
}
/* Handle evaluating a complex constant in a CONCAT target. */
if (original_target && GET_CODE (original_target) == CONCAT)
{
- machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx rtarg, itarg;
+ mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtarg = XEXP (original_target, 0);
itarg = XEXP (original_target, 1);
if (tree_fits_uhwi_p (index1)
&& compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
{
- tree type = TREE_TYPE (TREE_TYPE (init));
- scalar_int_mode mode;
+ tree char_type = TREE_TYPE (TREE_TYPE (init));
+ scalar_int_mode char_mode;
- if (is_int_mode (TYPE_MODE (type), &mode)
- && GET_MODE_SIZE (mode) == 1)
+ if (is_int_mode (TYPE_MODE (char_type), &char_mode)
+ && GET_MODE_SIZE (char_mode) == 1)
return gen_int_mode (TREE_STRING_POINTER (init)
[TREE_INT_CST_LOW (index1)],
- mode);
+ char_mode);
}
}
}
rtx op = read_complex_part (op0, i != 0);
if (GET_CODE (op) == SUBREG)
op = force_reg (GET_MODE (op), op);
- rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
- op);
+ temp = gen_lowpart_common (GET_MODE_INNER (mode1), op);
if (temp)
op = temp;
else
&& TREE_CODE (arg0) == SSA_NAME
&& TREE_CODE (arg1) == INTEGER_CST)
{
- enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
- if (code != ops->code)
+ enum tree_code new_code = maybe_optimize_mod_cmp (ops->code,
+ &arg0, &arg1);
+ if (new_code != ops->code)
{
struct separate_ops nops = *ops;
- nops.code = ops->code = code;
+ nops.code = ops->code = new_code;
nops.op0 = arg0;
nops.op1 = arg1;
nops.type = TREE_TYPE (arg0);
&& integer_pow2p (gimple_assign_rhs2 (srcstmt)))
{
enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
- tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
+ type = lang_hooks.types.type_for_mode (mode, unsignedp);
tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
gimple_assign_rhs1 (srcstmt),
gimple_assign_rhs2 (srcstmt));