+2013-03-29 Steven Bosscher <steven@gcc.gnu.org>
+
+ * cfgbuild.c (inside_basic_block_p): Use JUMP_TABLE_DATA_P in lieu
+ of tests for JUMP_P and an ADDR_DIFF_VEC or ADDR_VEC pattern.
+ (control_flow_insn_p): Likewise.
+ * cfgrtl.c (duplicate_insn_chain): Likewise.
+ * final.c (get_attr_length_1): Likewise.
+ (shorten_branches): Likewise.
+ (final_scan_insn): Likewise.
+ * function.c (instantiate_virtual_regs): Likewise.
+ * gcse.c (insert_insn_end_basic_block): Likewise.
+ * ira-costs.c (scan_one_insn): Likewise.
+ * lra-eliminations.c (eliminate_regs_in_insn): Likewise.
+ * lra.c (check_rtl): Likewise.
+ * reload1.c (elimination_costs_in_insn): Likewise.
+ * reorg.c (follow_jumps): Likewise.
+
+ * config/arm/arm.c (is_jump_table): Use JUMP_TABLE_DATA_P in lieu
+ of tests for JUMP_P and an ADDR_DIFF_VEC or ADDR_VEC pattern.
+ (thumb_far_jump_used_p): Likewise.
+ * config/bfin/bfin.c (workaround_rts_anomaly): Likewise.
+ (workaround_speculation): Likewise.
+ (add_sched_insns_for_speculation): Likewise.
+ * config/c6x/c6x.c (reorg_emit_nops): Likewise.
+ * config/frv/frv.c (frv_function_contains_far_jump): Likewise.
+ (frv_for_each_packet): Likewise.
+ * config/i386/i386.c (ix86_avoid_jump_mispredicts): Likewise.
+ * config/ia64/ia64.c (emit_all_insn_group_barriers): Likewise.
+ (final_emit_insn_group_barriers): Likewise.
+ * config/m32r/m32r.c (m32r_is_insn): Likewise.
+ * config/mips/mips.c (USEFUL_INSN_P): Likewise.
+ (mips16_insn_length): Likewise.
+ * config/pa/pa.c (pa_reorg): Likewise.
+ (pa_combine_instructions): Likewise.
+ * config/rs6000/rs6000.c (rs6000_invalid_within_doloop): Likewise.
+ * config/sh/sh.c (fixup_addr_diff_vecs): Likewise.
+ (sh_reorg): Likewise.
+ (split_branches): Likewise.
+ * config/spu/spu.c (get_branch_target): Likewise.
+
+ * config/s390/s390.c (s390_chunkify_start): Simplify logic using
+ JUMP_TABLE_DATA_P.
+
2013-03-29 Kirill Yukhin <kirill.yukhin@intel.com>

* gcc/config/i386/avx2intrin.h (_mm256_broadcastsi128_si256):
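
For reference: every hunk below substitutes the rtl.h predicate for the
open-coded pattern tests.  At the time of this patch the macro was defined
along the following lines (a sketch; consult the rtl.h of this era for the
exact text):

/* Predicate yielding nonzero iff INSN is the data for a jump table.  */
#define JUMP_TABLE_DATA_P(INSN) \
  (JUMP_P (INSN) \
   && (GET_CODE (PATTERN (INSN)) == ADDR_VEC \
       || GET_CODE (PATTERN (INSN)) == ADDR_DIFF_VEC))

Since the macro itself checks JUMP_P, call sites that previously tested
JUMP_P explicitly can drop that test, and switch arms reached only for
JUMP_INSNs get an exactly equivalent pattern test.
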
case CODE_LABEL:
/* Avoid creating of basic block for jumptables. */
return (NEXT_INSN (insn) == 0
- || !JUMP_P (NEXT_INSN (insn))
- || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
- && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
+ || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));
case JUMP_INSN:
- return (GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
+ return (! JUMP_TABLE_DATA_P (insn));
case CALL_INSN:
case INSN:
case JUMP_INSN:
/* Jump insn always causes control transfer except for tablejumps. */
- return (GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
+ return (! JUMP_TABLE_DATA_P (insn));
case CALL_INSN:
/* Noreturn and sibling call instructions terminate the basic blocks
/* Avoid copying of dispatch tables. We never duplicate
tablejumps, so this can hit only in case the table got
moved far from original jump. */
- if (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (insn))
{
/* Avoid copying following barrier as well if any
(and debug insns in between). */
&& ((table = next_real_insn (JUMP_LABEL (insn)))
== next_real_insn (insn))
&& table != NULL
- && JUMP_P (table)
- && (GET_CODE (PATTERN (table)) == ADDR_VEC
- || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
+ && JUMP_TABLE_DATA_P (table))
return table;
return NULL_RTX;
{
if (JUMP_P (insn)
/* Ignore tablejump patterns. */
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
+ && ! JUMP_TABLE_DATA_P (insn)
&& get_attr_far_jump (insn) == FAR_JUMP_YES
)
{
if (NOTE_P (insn) || LABEL_P (insn))
continue;
+ if (JUMP_TABLE_DATA_P (insn))
+ continue;
+
if (first_insn == NULL_RTX)
first_insn = insn;
pat = PATTERN (insn);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
- || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
- || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
+ || GET_CODE (pat) == ASM_INPUT
+ || asm_noperands (pat) >= 0)
continue;
if (CALL_P (insn))
if (NOTE_P (insn) || BARRIER_P (insn))
continue;
+ if (JUMP_TABLE_DATA_P (insn))
+ continue;
if (LABEL_P (insn))
{
}
pat = PATTERN (insn);
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
- || GET_CODE (pat) == ADDR_VEC || GET_CODE (pat) == ADDR_DIFF_VEC)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
continue;
+ if (JUMP_TABLE_DATA_P (target))
+ continue;
+
pat = PATTERN (target);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
- || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
- || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
+ || GET_CODE (pat) == ASM_INPUT
+ || asm_noperands (pat) >= 0)
continue;
if (NONDEBUG_INSN_P (target))
if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
continue;
+ if (JUMP_TABLE_DATA_P (insn))
+ continue;
pat = PATTERN (insn);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
- || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
- || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
+ || GET_CODE (pat) == ASM_INPUT
+ || asm_noperands (pat) >= 0)
continue;
if (JUMP_P (insn))
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
|| shadow_or_blockage_p (insn)
- || (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_VEC)))
+ || JUMP_TABLE_DATA_P (insn))
goto next_insn;
if (!c6x_flag_schedule_insns2)
while (insn != NULL
&& !(JUMP_P (insn)
/* Ignore tablejump patterns. */
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
+ && ! JUMP_TABLE_DATA_P (insn)
&& get_attr_far_jump (insn) == FAR_JUMP_YES))
insn = NEXT_INSN (insn);
return (insn != NULL);
frv_start_packet_block ();
}
- if (INSN_P (insn))
+ if (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
switch (GET_CODE (PATTERN (insn)))
{
case USE:
case CLOBBER:
- case ADDR_VEC:
- case ADDR_DIFF_VEC:
break;
default:
{
start = NEXT_INSN (start);
if ((JUMP_P (start)
- && GET_CODE (PATTERN (start)) != ADDR_VEC
- && GET_CODE (PATTERN (start)) != ADDR_DIFF_VEC)
+ && ! JUMP_TABLE_DATA_P (start))
|| CALL_P (start))
njumps--, isjump = 1;
else
fprintf (dump_file, "Insn %i estimated to %i bytes\n",
INSN_UID (insn), min_size);
if ((JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
+ && ! JUMP_TABLE_DATA_P (insn))
|| CALL_P (insn))
njumps++;
else
{
start = NEXT_INSN (start);
if ((JUMP_P (start)
- && GET_CODE (PATTERN (start)) != ADDR_VEC
- && GET_CODE (PATTERN (start)) != ADDR_DIFF_VEC)
+ && ! JUMP_TABLE_DATA_P (start))
|| CALL_P (start))
njumps--, isjump = 1;
else
if (! last)
continue;
- if (JUMP_P (last)
- && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (last))
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
emit_insn_after (gen_insn_group_barrier (GEN_INT (3)), last);
if (! last)
continue;
- if (JUMP_P (last)
- && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (last))
last = prev_active_insn (last);
if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
emit_insn_after (gen_insn_group_barrier (GEN_INT (3)), last);
m32r_is_insn (rtx insn)
{
return (NONDEBUG_INSN_P (insn)
+ && ! JUMP_TABLE_DATA_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
- && GET_CODE (PATTERN (insn)) != CLOBBER
- && GET_CODE (PATTERN (insn)) != ADDR_VEC);
+ && GET_CODE (PATTERN (insn)) != CLOBBER);
}
/* Increase the priority of long instructions so that the
: TARGET_64BIT ? 0x100 : 0x400)
/* True if INSN is a mips.md pattern or asm statement. */
+/* ??? This test exists throughout the compiler, perhaps it should be
+ moved to rtl.h. */
#define USEFUL_INSN_P(INSN) \
(NONDEBUG_INSN_P (INSN) \
+ && ! JUMP_TABLE_DATA_P (INSN) \
&& GET_CODE (PATTERN (INSN)) != USE \
- && GET_CODE (PATTERN (INSN)) != CLOBBER \
- && GET_CODE (PATTERN (INSN)) != ADDR_VEC \
- && GET_CODE (PATTERN (INSN)) != ADDR_DIFF_VEC)
+ && GET_CODE (PATTERN (INSN)) != CLOBBER)
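
The ??? note above suggests hoisting this predicate; a hypothetical sketch of
what a shared rtl.h form might look like (the macro name is an illustrative
assumption, not something this patch adds):

/* Hypothetical shared form of mips.c's USEFUL_INSN_P; the name
   REAL_NONTABLE_INSN_P is illustrative only.  */
#define REAL_NONTABLE_INSN_P(INSN) \
  (NONDEBUG_INSN_P (INSN) \
   && ! JUMP_TABLE_DATA_P (INSN) \
   && GET_CODE (PATTERN (INSN)) != USE \
   && GET_CODE (PATTERN (INSN)) != CLOBBER)

m32r.c's m32r_is_insn above tests the same conditions, which is presumably
what the note alludes to.
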
/* If INSN is a delayed branch sequence, return the first instruction
in the sequence, otherwise return INSN itself. */
static int
mips16_insn_length (rtx insn)
{
- if (JUMP_P (insn))
+ if (JUMP_TABLE_DATA_P (insn))
{
rtx body = PATTERN (insn);
if (GET_CODE (body) == ADDR_VEC)
unsigned int length, i;
/* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode. */
- if (! JUMP_P (insn)
- || (GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
+ if (! JUMP_TABLE_DATA_P (insn))
continue;
/* Emit marker for the beginning of the branch table. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
/* Find an ADDR_VEC insn. */
- if (! JUMP_P (insn)
- || (GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
+ if (! JUMP_TABLE_DATA_P (insn))
continue;
/* Now generate markers for the beginning and end of the
/* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
Also ignore any special USE insns. */
if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
+ || JUMP_TABLE_DATA_P (anchor)
|| GET_CODE (PATTERN (anchor)) == USE
- || GET_CODE (PATTERN (anchor)) == CLOBBER
- || GET_CODE (PATTERN (anchor)) == ADDR_VEC
- || GET_CODE (PATTERN (anchor)) == ADDR_DIFF_VEC)
+ || GET_CODE (PATTERN (anchor)) == CLOBBER)
continue;
anchor_attr = get_attr_pa_combine_type (anchor);
/* Anything except a regular INSN will stop our search. */
if (! NONJUMP_INSN_P (floater)
- || GET_CODE (PATTERN (floater)) == ADDR_VEC
- || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
+ || JUMP_TABLE_DATA_P (floater))
{
floater = NULL_RTX;
break;
/* Anything except a regular INSN will stop our search. */
if (! NONJUMP_INSN_P (floater)
- || GET_CODE (PATTERN (floater)) == ADDR_VEC
- || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
+ || JUMP_TABLE_DATA_P (floater))
{
floater = NULL_RTX;
break;
if (CALL_P (insn))
return "Function call in the loop.";
- if (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
return "Computed branch in the loop.";
return NULL;
rtx target, pat;
if (NONJUMP_INSN_P (dep_rtx))
- dep_rtx = PATTERN (dep_rtx);
+ dep_rtx = PATTERN (dep_rtx);
if (GET_CODE (dep_rtx) == SET)
{
&& (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
{
rtx vec_insn = next_real_insn (insn);
- rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
- PATTERN (vec_insn) : NULL_RTX;
- if (!vec_pat
- || !(GET_CODE (vec_pat) == ADDR_VEC
- || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
+ if (! vec_insn || ! JUMP_TABLE_DATA_P (vec_insn))
bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
}
/* Find the jump table used by this casesi jump. */
rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
rtx vec_insn = next_real_insn (vec_label);
- rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
- PATTERN (vec_insn) : NULL_RTX;
- if (vec_pat
- && (GET_CODE (vec_pat) == ADDR_VEC
- || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
+ if (vec_insn && JUMP_TABLE_DATA_P (vec_insn))
{
+ rtx vec_pat = PATTERN (vec_insn);
int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
{
rtx vec_lab, pat, prev, prevpat, x, braf_label;
- if (!JUMP_P (insn)
+ if (! JUMP_TABLE_DATA_P (insn)
|| GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
continue;
pat = PATTERN (insn);
num_mova = 0;
}
}
- else if (JUMP_P (insn)
+ else if (JUMP_TABLE_DATA_P (insn)
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
&& num_mova
/* ??? loop invariant motion can also move a mova out of a
}
else if (JUMP_P (insn)
/* Don't mess with ADDR_DIFF_VEC */
- && (GET_CODE (PATTERN (insn)) == SET
- || GET_CODE (PATTERN (insn)) == RETURN))
+ && ! JUMP_TABLE_DATA_P (insn))
{
enum attr_type type = get_attr_type (insn);
if (type == TYPE_CBRANCH)
return gen_rtx_REG (SImode, LINK_REGISTER_REGNUM);
/* jump table */
- if (GET_CODE (PATTERN (branch)) == ADDR_VEC
- || GET_CODE (PATTERN (branch)) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (branch))
return 0;
/* ASM GOTOs. */
case JUMP_INSN:
body = PATTERN (insn);
- if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (insn))
{
/* Alignment is machine-dependent and should be handled by
ADDR_VEC_ALIGN. */
int min_align;
addr_diff_vec_flags flags;
- if (!JUMP_P (insn)
+ if (! JUMP_TABLE_DATA_P (insn)
|| GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
continue;
pat = PATTERN (insn);
continue;
body = PATTERN (insn);
- if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (insn))
{
/* This only takes room if read-only data goes into the text
section. */
INSN_ADDRESSES (uid) = insn_current_address;
#ifdef CASE_VECTOR_SHORTEN_MODE
- if (optimize && JUMP_P (insn)
+ if (optimize
+ && JUMP_TABLE_DATA_P (insn)
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
{
rtx body = PATTERN (insn);
/* Detect insns that are really jump-tables
and output them as such. */
- if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (insn))
{
#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
int vlen, idx;
{
/* These patterns in the instruction stream can never be recognized.
Fortunately, they shouldn't contain virtual registers either. */
- if (GET_CODE (PATTERN (insn)) == USE
+ if (JUMP_TABLE_DATA_P (insn)
+ || GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
- || GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ASM_INPUT)
continue;
else if (DEBUG_INSN_P (insn))
/* If this is a jump table, then we can't insert stuff here. Since
we know the previous real insn must be the tablejump, we insert
the new instruction just before the tablejump. */
- if (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ if (JUMP_TABLE_DATA_P (insn))
insn = prev_active_insn (insn);
#ifdef HAVE_cc0
int i, k;
bool counted_mem;
- if (!NONDEBUG_INSN_P (insn))
+ if (!NONDEBUG_INSN_P (insn)
+ || JUMP_TABLE_DATA_P (insn))
return insn;
pat_code = GET_CODE (PATTERN (insn));
- if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT
- || pat_code == ADDR_VEC || pat_code == ADDR_DIFF_VEC)
+ if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT)
return insn;
counted_mem = false;
if (icode < 0 && asm_noperands (PATTERN (insn)) < 0 && ! DEBUG_INSN_P (insn))
{
- lra_assert (GET_CODE (PATTERN (insn)) == USE
+ lra_assert (JUMP_TABLE_DATA_P (insn)
+ || GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
- || GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ASM_INPUT);
return;
}
FOR_EACH_BB (bb)
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn)
+ && ! JUMP_TABLE_DATA_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
&& GET_CODE (PATTERN (insn)) != ASM_INPUT)
{
if (final_p)
if (! insn_is_asm && icode < 0)
{
- gcc_assert (GET_CODE (PATTERN (insn)) == USE
+ gcc_assert (JUMP_TABLE_DATA_P (insn)
+ || GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
- || GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ASM_INPUT
|| DEBUG_INSN_P (insn));
if (DEBUG_INSN_P (insn))
if (! insn_is_asm && icode < 0)
{
- gcc_assert (GET_CODE (PATTERN (insn)) == USE
+ gcc_assert (JUMP_TABLE_DATA_P (insn)
+ || GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
- || GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ASM_INPUT
|| DEBUG_INSN_P (insn));
return;
if (ANY_RETURN_P (this_label))
return this_label;
tem = next_active_insn (this_label);
- if (tem
- && (GET_CODE (PATTERN (tem)) == ADDR_VEC
- || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
+ if (tem && JUMP_TABLE_DATA_P (tem))
break;
if (!targetm.can_follow_jump (jump, insn))
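
Taken together, the converted insn-stream walkers share a common shape.  A
minimal standalone illustration (a hypothetical helper written against the
rtl.h definitions above, not part of this patch):

/* Hypothetical helper, for illustration only: advance to the next insn
   that is neither a debug insn nor jump table data, the way the
   converted walkers above skip table data.  */
static rtx
next_real_non_table_insn (rtx insn)
{
  for (insn = NEXT_INSN (insn); insn != NULL_RTX; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
      return insn;
  return NULL_RTX;
}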