num_insns_constant (rtx op, enum machine_mode mode)
{
HOST_WIDE_INT low, high;
-
+
switch (GET_CODE (op))
{
case CONST_INT:
{
long l;
REAL_VALUE_TYPE rv;
-
+
REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
REAL_VALUE_TO_TARGET_SINGLE (rv, l);
return num_insns_constant_wide ((HOST_WIDE_INT) l);
{
long l[2];
REAL_VALUE_TYPE rv;
-
+
REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
high = l[WORDS_BIG_ENDIAN == 0];
low = l[WORDS_BIG_ENDIAN != 0];
if ((high == 0 && low >= 0)
|| (high == -1 && low < 0))
return num_insns_constant_wide (low);
-
+
else if (mask_operand (op, mode))
return 2;
-
+
else if (low == 0)
return num_insns_constant_wide (high) + 1;
-
+
else
return (num_insns_constant_wide (high)
+ num_insns_constant_wide (low) + 1);
}
-
+
default:
gcc_unreachable ();
}
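num_insns_constant_wide () itself is not shown in this excerpt.  As a
rough guide to what it computes, here is a hedged sketch (illustrative,
not the GCC implementation): on PowerPC a signed 16-bit constant loads
in one li, a constant whose low halfword is zero loads in one lis, and
anything else takes a lis/ori pair.

/* Hedged sketch, not GCC source: instructions needed to load a
   32-bit constant into a GPR on PowerPC.  */
static int
insns_for_constant_32 (long value)
{
  if (value >= -0x8000 && value < 0x8000)
    return 1;			/* li  rD,value */
  else if ((value & 0xffff) == 0)
    return 1;			/* lis rD,value@h */
  else
    return 2;			/* lis rD,hi; ori rD,rD,lo */
}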
return "vxor %0,%0,%0";
gcc_assert (easy_vector_constant (vec, mode));
-
+
operands[1] = GEN_INT (cst);
switch (mode)
{
}
else if (EASY_VECTOR_15_ADD_SELF (cst))
return "#";
-
+
default:
gcc_unreachable ();
}
}
gcc_assert (TARGET_SPE);
-
+
/* Vector constant 0 is handled as a splitter of V2SI, and in the
patterns of V1DI, V4HI, and V2SF.
{
HOST_WIDE_INT c, lsb;
bool one_ok;
-
+
c = INTVAL (op);
/* Disallow all zeros. */
AND there are zero, one or two transitions in the _whole_ of
C. */
one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
-
+
/* We don't change the number of transitions by inverting,
so make sure we start with the LS bit zero. */
if (c & 1)
{
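The transition-counting idea in the comment above can be made concrete
with a hedged sketch (illustrative, not this file's code): a contiguous
run of mask bits yields at most two positions where adjacent bits
differ, and inverting C does not change that count.

#include <stdint.h>

/* Hedged sketch: count 0<->1 transitions across the low 32 bits of C,
   starting from the implicit 0 below the least significant bit.  */
static int
count_transitions (uint32_t c)
{
  /* Each set bit of c ^ (c << 1) marks a position where bit i
     differs from bit i-1.  */
  return __builtin_popcount (c ^ (c << 1));
}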
tree field = TYPE_FIELDS (type);
- /* Skip all non field decls */
+ /* Skip all non field decls */
while (field != NULL && TREE_CODE (field) != FIELD_DECL)
field = TREE_CHAIN (field);
dest = gen_reg_rtx (mode);
emit_insn (gen_rtx_SET (VOIDmode, dest, source));
return dest;
-
+
case SImode:
result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
-
+
emit_insn (gen_rtx_SET (VOIDmode, result,
GEN_INT (INTVAL (source)
& (~ (HOST_WIDE_INT) 0xffff))));
c0 = INTVAL (source);
c1 = -(c0 < 0);
break;
-
+
case CONST_DOUBLE:
#if HOST_BITS_PER_WIDE_INT >= 64
c0 = CONST_DOUBLE_LOW (source);
|| CONST_DOUBLE_LOW (operands[1]) < 0)
&& (CONST_DOUBLE_HIGH (operands[1]) != -1
|| CONST_DOUBLE_LOW (operands[1]) >= 0)));
-
+
/* Check if GCC is setting up a block move that will end up using FP
registers as temporaries. We must make sure this is acceptable. */
if (GET_CODE (operands[0]) == MEM
grovel through the fields for these too. */
cum->intoffset = 0;
rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
- rs6000_darwin64_record_arg_advance_flush (cum,
+ rs6000_darwin64_record_arg_advance_flush (cum,
size * BITS_PER_UNIT);
}
}
structure between cum->intoffset and bitpos to integer registers. */
static void
-rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
+rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
enum machine_mode mode;
if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
cum->use_stack = 1;
-
+
intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
if (intregs <= 0)
return;
/* Recursive workhorse for the following. */
static void
-rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
+rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
HOST_WIDE_INT startbitpos, rtx rvec[],
int *k)
{
#endif
rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
rvec[(*k)++]
- = gen_rtx_EXPR_LIST (VOIDmode,
+ = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_REG (mode, cum->fregno++),
GEN_INT (bitpos / BITS_PER_UNIT));
if (mode == TFmode)
{
rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
rvec[(*k)++]
- = gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_REG (mode, cum->vregno++),
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_REG (mode, cum->vregno++),
GEN_INT (bitpos / BITS_PER_UNIT));
}
else if (cum->intoffset == -1)
being passed by value, along with the offset of where the
register's value may be found in the block. FP fields go in FP
registers, vector fields go in vector registers, and everything
- else goes in int registers, packed as in memory.
+ else goes in int registers, packed as in memory.
This code is also used for function return values. RETVAL indicates
whether this is the case.
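As an illustration of the mapping just described (hypothetical struct,
not taken from the source):

/* Illustrative only.  For a by-value argument such as

       struct s { double d; int i; };

   D would be assigned to an FP register and I packed into a GP
   register, each rtx in RVEC paired with the field's byte offset
   into the block.  */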
/* If we are passing this arg in the fixed parameter save area
(gprs or memory) as well as fprs, then this function should
return the number of bytes passed in the parameter save area
- rather than bytes passed in fprs. */
+ rather than bytes passed in fprs. */
&& !(type
&& (cum->nargs_prototype <= 0
|| (DEFAULT_ABI == ABI_AIX
}
gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
-
+
/* Handle simple unary operations. */
d = (struct builtin_description *) bdesc_1arg;
for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
if (d->code == fcode)
return rs6000_expand_unop_builtin (d->icode, arglist, target);
-
+
/* Handle simple binary operations. */
d = (struct builtin_description *) bdesc_2arg;
for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
if (d->code == fcode)
return rs6000_expand_ternop_builtin (d->icode, arglist, target);
-
+
gcc_unreachable ();
}
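Each lookup above is a linear scan of a static description table; a
hedged sketch of the same shape, using illustrative names rather than
GCC's, is:

#include <stddef.h>

struct builtin_desc
{
  int code;	/* builtin function code */
  int icode;	/* matching insn code */
};

/* Hedged sketch: return the insn code registered for FCODE in TABLE,
   or -1 if no entry matches.  */
static int
lookup_icode (const struct builtin_desc *table, size_t n, int fcode)
{
  size_t i;

  for (i = 0; i < n; i++)
    if (table[i].code == fcode)
      return table[i].icode;
  return -1;
}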
mode2 = insn_data[d->icode].operand[2].mode;
mode3 = insn_data[d->icode].operand[3].mode;
}
-
+
/* When all four are of the same mode. */
if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
{
mode2 = VOIDmode;
}
else
- {
+ {
if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
{
if (d->name == 0 || d->icode == CODE_FOR_nothing)
continue;
-
+
mode0 = insn_data[d->icode].operand[0].mode;
mode1 = insn_data[d->icode].operand[1].mode;
}
mems_ok_for_quad_peep (rtx mem1, rtx mem2)
{
rtx addr1, addr2;
- unsigned int reg1;
- int offset1;
+ unsigned int reg1, reg2;
+ int offset1, offset2;
/* The mems cannot be volatile. */
if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
offset1 = 0;
}
- /* Make sure the second address is a (mem (plus (reg) (const_int)))
- or if it is (mem (reg)) then make sure that offset1 is -8 and the same
- register as addr1. */
- if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
- return 1;
- if (GET_CODE (addr2) != PLUS)
- return 0;
-
- if (GET_CODE (XEXP (addr2, 0)) != REG
- || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
+ /* And now for the second addr. */
+ if (GET_CODE (addr2) == PLUS)
+ {
+ /* If not a REG, return zero. */
+ if (GET_CODE (XEXP (addr2, 0)) != REG)
+ return 0;
+ else
+ {
+ reg2 = REGNO (XEXP (addr2, 0));
+ /* The offset must be constant. */
+ if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
+ return 0;
+ offset2 = INTVAL (XEXP (addr2, 1));
+ }
+ }
+ else if (GET_CODE (addr2) != REG)
return 0;
+ else
+ {
+ reg2 = REGNO (addr2);
+ /* This was a simple (mem (reg)) expression. Offset is 0. */
+ offset2 = 0;
+ }
- if (reg1 != REGNO (XEXP (addr2, 0)))
+ /* Both of these must have the same base register. */
+ if (reg1 != reg2)
return 0;
/* The offset for the second addr must be 8 more than the first addr. */
- if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
+ if (offset2 != offset1 + 8)
return 0;
/* All the tests passed. addr1 and addr2 are valid for lfq or stfq
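The rewrite derives reg2 and offset2 uniformly for both address shapes
and then applies one pair of checks, where the deleted code
special-cased a bare (mem (reg)) second address.  Illustrative address
pairs that pass (assuming a common base register 9):

  mem1: (mem (plus (reg 9) (const_int 0)))	offset1 = 0
  mem2: (mem (plus (reg 9) (const_int 8)))	offset2 = 8: accepted

  mem1: (mem (reg 9))				offset1 = 0
  mem2: (mem (plus (reg 9) (const_int 8)))	offset2 = 8: accepted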
{
gcc_assert (GET_CODE (tmp) == PLUS
&& GET_CODE (XEXP (tmp, 1)) == REG);
-
+
if (REGNO (XEXP (tmp, 0)) == 0)
fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
reg_names[ REGNO (XEXP (tmp, 0)) ]);
gcc_unreachable ();
}
break;
-
+
case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
switch (op_mode)
{
: gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
rs6000_compare_op1);
break;
-
+
case DFmode:
cmp = flag_unsafe_math_optimizations
? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
gcc_unreachable ();
}
break;
-
+
case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
switch (op_mode)
{
: gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
rs6000_compare_op1);
break;
-
+
case DFmode:
cmp = flag_unsafe_math_optimizations
? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
temp = gen_reg_rtx (dest_mode);
- /* For each vector element, select op1 when mask is 1 otherwise
+ /* For each vector element, select op1 when mask is 1 otherwise
select op2. */
t = gen_rtx_SET (VOIDmode, temp,
gen_rtx_UNSPEC (dest_mode,
(set M (CODE:MODE M OP))
If not NULL, BEFORE is atomically set to M before the operation, and
AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
- If SYNC_P then a memory barrier is emitted before the operation.
+ If SYNC_P then a memory barrier is emitted before the operation.
Either OP or M may be wrapped in a NOT operation. */
void
rtvec vec;
HOST_WIDE_INT imask = GET_MODE_MASK (mode);
rtx shift = NULL_RTX;
-
+
if (sync_p)
emit_insn (gen_memory_barrier ());
-
+
if (GET_CODE (m) == NOT)
used_m = XEXP (m, 0);
else
int ishift = 0;
if (BYTES_BIG_ENDIAN)
ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
-
+
shift = GEN_INT (ishift);
}
else
{
rtx addrSI, aligned_addr;
int shift_mask = mode == QImode ? 0x18 : 0x10;
-
+
addrSI = force_reg (SImode, gen_lowpart_common (SImode,
XEXP (used_m, 0)));
shift = gen_reg_rtx (SImode);
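The 0x18/0x10 shift_mask values select the bit offset of the byte or
halfword within its aligned SImode word.  A hedged plain-C illustration
of the little-endian numbering (big-endian numbering differs, which the
BYTES_BIG_ENDIAN code earlier in this function accounts for):

/* Hedged illustration, not the RTL built here: bit offset of a byte
   within its aligned 32-bit word; (addr << 3) & 0x18 == (addr & 3) * 8.  */
static int
byte_bit_shift (unsigned long addr)
{
  return (int) ((addr << 3) & 0x18);	/* 0, 8, 16 or 24 */
}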
case MINUS:
{
rtx mask;
-
+
newop = expand_binop (SImode, and_optab,
oldop, GEN_INT (imask), NULL_RTX,
1, OPTAB_LIB_WIDEN);
if (after == NULL_RTX)
after = gen_reg_rtx (used_mode);
}
-
+
if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
&& used_mode != mode)
the_op = op; /* Computed above. */
}
/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
- to perform. MEM is the memory on which to operate. VAL is the second
+ to perform. MEM is the memory on which to operate. VAL is the second
operand of the binary operator. BEFORE and AFTER are optional locations to
return the value of MEM either before or after the operation. SCRATCH is
a scratch register. */
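For reference, the expanders in this area ultimately emit a
load-reserved/store-conditional retry loop; a hedged pseudocode sketch
of its shape (not the actual RTL sequence):

/* Hedged sketch of the lwarx/stwcx. loop generated for an atomic
   fetch-and-op on PowerPC:

     retry:
       old = lwarx (mem);	   load word and reserve
       new = old CODE val;	   the requested operation
       if (!stwcx (mem, new))	   store only if still reserved
	 goto retry;

   BEFORE, if supplied, receives old; AFTER receives new.  */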
else
{
gcc_assert (DEFAULT_ABI == ABI_AIX);
-
+
if (TARGET_32BIT)
insn = emit_insn (gen_load_toc_aix_si (dest));
else
return orig;
gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
-
+
/* Use a different reg for the intermediate value, as
it will be marked UNCHANGING. */
reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
offset =
rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
Pmode, reg);
-
+
if (GET_CODE (offset) == CONST_INT)
{
if (SMALL_INT (offset))
return false;
}
-/* Target hook for invalid_arg_for_unprototyped_fn. */
-static const char *
+/* Target hook for invalid_arg_for_unprototyped_fn. */
+static const char *
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
{
return (!rs6000_darwin64_abi