+2009-05-21 Shujing Zhao <pearly.zhao@oracle.com>
+
+ * config/i386/i386.c: Use REG_P, MEM_P, CONST_INT_P, LABEL_P and
+ JUMP_TABLE_DATA_P predicates where applicable.
+ * config/i386/predicates.md: Ditto.
+ * config/i386/sse.md: Ditto.
+
2009-05-21 Jakub Jelinek <jakub@redhat.com>

	* config/i386/i386.md (adddi_4_rex64, addsi_4, addhi_4): For
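
(Note, not part of the patch: the predicates named in the ChangeLog entry are
thin convenience macros over GET_CODE in gcc/rtl.h.  The sketch below shows
approximately how they are defined; it is illustrative only and assumes
rtl.h's GET_CODE, PATTERN and JUMP_P macros.)

/* Approximate rtl.h definitions -- an illustrative sketch, not part of this
   patch.  Each predicate hides an explicit GET_CODE comparison.  */
#define REG_P(X) (GET_CODE (X) == REG)
#define MEM_P(X) (GET_CODE (X) == MEM)
#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
#define LABEL_P(X) (GET_CODE (X) == CODE_LABEL)

/* A jump table is a JUMP_INSN whose body is an ADDR_VEC or ADDR_DIFF_VEC;
   JUMP_TABLE_DATA_P folds that three-part test into one call.  */
#define JUMP_TABLE_DATA_P(INSN) \
  (JUMP_P (INSN) && (GET_CODE (PATTERN (INSN)) == ADDR_VEC \
                     || GET_CODE (PATTERN (INSN)) == ADDR_DIFF_VEC))

So, for example, "!CONST_INT_P (count_exp)" in the first i386.c hunk below is
equivalent to the old "GET_CODE (count_exp) != CONST_INT", and the
JUMP_TABLE_DATA_P hunk replaces the open-coded JUMP_P/ADDR_VEC/ADDR_DIFF_VEC
test with the single predicate.
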
{
if (GET_MODE (count_exp) != VOIDmode)
return GET_MODE (count_exp);
- if (GET_CODE (count_exp) != CONST_INT)
+ if (!CONST_INT_P (count_exp))
return Pmode;
if (TARGET_64BIT && (INTVAL (count_exp) & ~0xffffffff))
return DImode;
}
if (ix86_cmodel == CM_LARGE_PIC
- && GET_CODE (fnaddr) == MEM
+ && MEM_P (fnaddr)
&& GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF
&& !local_symbolic_operand (XEXP (fnaddr, 0), VOIDmode))
fnaddr = gen_rtx_MEM (QImode, construct_plt_address (XEXP (fnaddr, 0)));
if (last_arg_constant && i == nargs-1)
{
- if (GET_CODE (op) != CONST_INT)
+ if (!CONST_INT_P (op))
{
error ("last argument must be an immediate");
return gen_reg_rtx (tmode);
if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
&& XINT (PATTERN (insn), 1) == UNSPECV_ALIGN)
return 0;
- if (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
return 0;
/* Important case - calls are always 5 bytes.
{
int min_size;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
int align = label_to_alignment (insn);
int max_skip = label_to_max_skip (insn);
;; Return true if op is not xmm0 register.
(define_predicate "reg_not_xmm0_operand"
(and (match_operand 0 "register_operand")
- (match_test "GET_CODE (op) != REG
+ (match_test "!REG_P (op)
|| REGNO (op) != FIRST_SSE_REG")))
;; As above, but allow nonimmediate operands.
(define_predicate "nonimm_not_xmm0_operand"
(and (match_operand 0 "nonimmediate_operand")
- (match_test "GET_CODE (op) != REG
+ (match_test "!REG_P (op)
|| REGNO (op) != FIRST_SSE_REG")))
;; Return 1 if VALUE can be stored in a sign extended immediate field.
int ok;
/* Registers and immediate operands are always "aligned". */
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return 1;
/* All patterns using aligned_operand on memory operands ends up
rtx par = gen_rtx_PARALLEL (V16QImode, vs);
rtx reg = gen_reg_rtx (V16QImode);
int i;
- rtx ele = ((GET_CODE (operands[2]) == CONST_INT)
+ rtx ele = ((CONST_INT_P (operands[2]))
? GEN_INT (- INTVAL (operands[2]))
: operands[2]);
emit_insn (gen_vec_initv16qi (reg, par));
- if (GET_CODE (operands[2]) != CONST_INT)
+ if (!CONST_INT_P (operands[2]))
{
rtx neg = gen_reg_rtx (V16QImode);
emit_insn (gen_negv16qi2 (neg, reg));
rtx reg = gen_reg_rtx (V2DImode);
rtx ele;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
ele = GEN_INT (- INTVAL (operands[2]));
else if (GET_MODE (operands[2]) != DImode)
{