if ((GET_CODE (addr) == SYMBOL_REF && !CONSTANT_POOL_ADDRESS_P (addr))
|| (GET_CODE (addr) == UNSPEC
&& (XINT (addr, 1) == UNSPEC_GOTENT
- || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
+ || XINT (addr, 1) == UNSPEC_PLT)))
{
if (symref)
*symref = addr;
/* mulsidi case: mr, m */
*total = s390_cost->m;
else if (GET_CODE (left) == ZERO_EXTEND
- && GET_CODE (right) == ZERO_EXTEND
- && TARGET_CPU_ZARCH)
+ && GET_CODE (right) == ZERO_EXTEND)
/* umulsidi case: ml, mlr */
*total = s390_cost->ml;
else
return 1;
/* Accept immediate LARL operands. */
- if (TARGET_CPU_ZARCH && larl_operand (op, mode))
+ if (larl_operand (op, mode))
return 1;
/* Thread-local symbols are never legal constants. This is
/* If the literal pool shares the code section, be put
execute template placeholders into the pool as well. */
case UNSPEC_INSN:
- return TARGET_CPU_ZARCH;
-
default:
return true;
}
return true;
/* Accept larl operands. */
- if (TARGET_CPU_ZARCH
- && larl_operand (op, VOIDmode))
+ if (larl_operand (op, VOIDmode))
return true;
/* Accept floating-point zero operands that fit into a single GPR. */
handled via secondary reload but this does not happen if
they are used as literal pool slot replacement in reload
inheritance (see emit_input_reload_insns). */
- if (TARGET_CPU_ZARCH
- && GET_CODE (XEXP (op, 0)) == PLUS
+ if (GET_CODE (XEXP (op, 0)) == PLUS
&& GET_CODE (XEXP (XEXP(op, 0), 0)) == SYMBOL_REF
&& GET_CODE (XEXP (XEXP(op, 0), 1)) == CONST_INT)
{
|| (SYMBOL_REF_P (addr) && s390_rel_address_ok_p (addr))
|| (GET_CODE (addr) == UNSPEC &&
(XINT (addr, 1) == UNSPEC_GOTENT
- || (TARGET_CPU_ZARCH && XINT (addr, 1) == UNSPEC_PLT))))
+ || XINT (addr, 1) == UNSPEC_PLT)))
&& GET_CODE (addend) == CONST_INT)
{
/* This can be locally addressed. */
rtx const_addr = (GET_CODE (addr) == UNSPEC ?
gen_rtx_CONST (Pmode, addr) : addr);
- if (TARGET_CPU_ZARCH
- && larl_operand (const_addr, VOIDmode)
+ if (larl_operand (const_addr, VOIDmode)
&& INTVAL (addend) < HOST_WIDE_INT_1 << 31
&& INTVAL (addend) >= -(HOST_WIDE_INT_1 << 31))
{
new_rtx = gen_rtx_CONST (Pmode, new_rtx);
new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
new_rtx = gen_const_mem (Pmode, new_rtx);
- emit_move_insn (reg, new_rtx);
- new_rtx = reg;
- }
- else if (TARGET_CPU_ZARCH)
- {
- /* If the GOT offset might be >= 4k, we determine the position
- of the GOT entry via a PC-relative LARL (@GOTENT).
+ emit_move_insn (reg, new_rtx);
+ new_rtx = reg;
+ }
+ else
+ {
+ /* If the GOT offset might be >= 4k, we determine the position
+ of the GOT entry via a PC-relative LARL (@GOTENT).
larl temp, sym@GOTENT
lg <target>, 0(temp) */
new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
new_rtx = gen_rtx_CONST (Pmode, new_rtx);
emit_move_insn (temp, new_rtx);
-
new_rtx = gen_const_mem (Pmode, temp);
- emit_move_insn (reg, new_rtx);
-
- new_rtx = reg;
- }
- else
- {
- /* If the GOT offset might be >= 4k, we have to load it
- from the literal pool (@GOT).
-
- lg temp, lit-litbase(r13)
- lg <target>, 0(temp)
- lit: .long sym@GOT */
-
- rtx temp = reg ? reg : gen_reg_rtx (Pmode);
-
- gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
- || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
-
- if (reload_in_progress || reload_completed)
- df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
-
- addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
- addr = gen_rtx_CONST (Pmode, addr);
- addr = force_const_mem (Pmode, addr);
- emit_move_insn (temp, addr);
+ emit_move_insn (reg, new_rtx);
- new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
- new_rtx = gen_const_mem (Pmode, new_rtx);
- emit_move_insn (reg, new_rtx);
- new_rtx = reg;
- }
+ new_rtx = reg;
+ }
}
else if (GET_CODE (addr) == UNSPEC && GET_CODE (addend) == CONST_INT)
{
gcc_unreachable ();
break;
- /* @PLT is OK as is on 64-bit, must be converted to
- GOT-relative @PLTOFF on 31-bit. */
+ /* For @PLT larl is used. This is handled like local
+ symbol refs. */
case UNSPEC_PLT:
- if (!TARGET_CPU_ZARCH)
- {
- rtx temp = reg? reg : gen_reg_rtx (Pmode);
-
- if (reload_in_progress || reload_completed)
- df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
-
- addr = XVECEXP (addr, 0, 0);
- addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
- UNSPEC_PLTOFF);
- if (addend != const0_rtx)
- addr = gen_rtx_PLUS (Pmode, addr, addend);
- addr = gen_rtx_CONST (Pmode, addr);
- addr = force_const_mem (Pmode, addr);
- emit_move_insn (temp, addr);
-
- new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
- if (reg != 0)
- {
- s390_load_address (reg, new_rtx);
- new_rtx = reg;
- }
- }
- else
- /* On 64 bit larl can be used. This case is handled like
- local symbol refs. */
- gcc_unreachable ();
+ gcc_unreachable ();
break;
/* Everything else cannot happen. */
temp = gen_reg_rtx (Pmode);
emit_move_insn (temp, new_rtx);
}
- else if (TARGET_CPU_ZARCH)
+ else
{
/* If the GOT offset might be >= 4k, we determine the position
of the GOT entry via a PC-relative LARL. */
temp = gen_reg_rtx (Pmode);
emit_move_insn (temp, new_rtx);
}
- else if (flag_pic)
- {
- /* If the GOT offset might be >= 4k, we have to load it
- from the literal pool. */
-
- if (reload_in_progress || reload_completed)
- df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
-
- new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
- new_rtx = gen_rtx_CONST (Pmode, new_rtx);
- new_rtx = force_const_mem (Pmode, new_rtx);
- temp = gen_reg_rtx (Pmode);
- emit_move_insn (temp, new_rtx);
-
- new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
- new_rtx = gen_const_mem (Pmode, new_rtx);
-
- new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
- temp = gen_reg_rtx (Pmode);
- emit_insn (gen_rtx_SET (temp, new_rtx));
- }
- else
- {
- /* In position-dependent code, load the absolute address of
- the GOT entry from the literal pool. */
-
- new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
- new_rtx = gen_rtx_CONST (Pmode, new_rtx);
- new_rtx = force_const_mem (Pmode, new_rtx);
- temp = gen_reg_rtx (Pmode);
- emit_move_insn (temp, new_rtx);
-
- new_rtx = temp;
- new_rtx = gen_const_mem (Pmode, new_rtx);
- new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
- temp = gen_reg_rtx (Pmode);
- emit_insn (gen_rtx_SET (temp, new_rtx));
- }
new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
if (reg != 0)
switch (XINT (XEXP (addr, 0), 1))
{
case UNSPEC_INDNTPOFF:
- gcc_assert (TARGET_CPU_ZARCH);
new_rtx = addr;
break;
}
}
-/* Split all branches that exceed the maximum distance.
- Returns true if this created a new literal pool entry. */
-
-static int
-s390_split_branches (void)
-{
- rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
- int new_literal = 0, ret;
- rtx_insn *insn;
- rtx pat, target;
- rtx *label;
-
- /* We need correct insn addresses. */
-
- shorten_branches (get_insns ());
-
- /* Find all branches that exceed 64KB, and split them. */
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- if (! JUMP_P (insn) || tablejump_p (insn, NULL, NULL))
- continue;
-
- pat = PATTERN (insn);
- if (GET_CODE (pat) == PARALLEL)
- pat = XVECEXP (pat, 0, 0);
- if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
- continue;
-
- if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
- {
- label = &SET_SRC (pat);
- }
- else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
- {
- if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
- label = &XEXP (SET_SRC (pat), 1);
- else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
- label = &XEXP (SET_SRC (pat), 2);
- else
- continue;
- }
- else
- continue;
-
- if (get_attr_length (insn) <= 4)
- continue;
-
- /* We are going to use the return register as scratch register,
- make sure it will be saved/restored by the prologue/epilogue. */
- cfun_frame_layout.save_return_addr_p = 1;
-
- if (!flag_pic)
- {
- new_literal = 1;
- rtx mem = force_const_mem (Pmode, *label);
- rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, mem),
- insn);
- INSN_ADDRESSES_NEW (set_insn, -1);
- annotate_constant_pool_refs (&PATTERN (set_insn));
-
- target = temp_reg;
- }
- else
- {
- new_literal = 1;
- target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
- UNSPEC_LTREL_OFFSET);
- target = gen_rtx_CONST (Pmode, target);
- target = force_const_mem (Pmode, target);
- rtx_insn *set_insn = emit_insn_before (gen_rtx_SET (temp_reg, target),
- insn);
- INSN_ADDRESSES_NEW (set_insn, -1);
- annotate_constant_pool_refs (&PATTERN (set_insn));
-
- target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
- cfun->machine->base_reg),
- UNSPEC_LTREL_BASE);
- target = gen_rtx_PLUS (Pmode, temp_reg, target);
- }
-
- ret = validate_change (insn, label, target, 0);
- gcc_assert (ret);
- }
-
- return new_literal;
-}
-
-
/* Find an annotated literal pool symbol referenced in RTX X,
and store it at REF. Will abort if X contains references to
more than one such pool symbol; multiple references to the same
return NULL_RTX;
}
-/* Add execute target for INSN to the constant pool POOL. */
-
-static void
-s390_add_execute (struct constant_pool *pool, rtx insn)
-{
- struct constant *c;
-
- for (c = pool->execute; c != NULL; c = c->next)
- if (INSN_UID (insn) == INSN_UID (c->value))
- break;
-
- if (c == NULL)
- {
- c = (struct constant *) xmalloc (sizeof *c);
- c->value = insn;
- c->label = gen_label_rtx ();
- c->next = pool->execute;
- pool->execute = c;
- pool->size += 6;
- }
-}
-
/* Find execute target for INSN in the constant pool POOL.
Return an RTX describing the distance from the start of
the pool to the location of the execute target. */
int i;
/* Switch to rodata section. */
- if (TARGET_CPU_ZARCH)
- {
- insn = emit_insn_after (gen_pool_section_start (), insn);
- INSN_ADDRESSES_NEW (insn, -1);
- }
+ insn = emit_insn_after (gen_pool_section_start (), insn);
+ INSN_ADDRESSES_NEW (insn, -1);
/* Ensure minimum pool alignment. */
- if (TARGET_CPU_ZARCH)
- insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
- else
- insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
+ insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
INSN_ADDRESSES_NEW (insn, -1);
/* Emit pool base label. */
}
/* Switch back to previous section. */
- if (TARGET_CPU_ZARCH)
- {
- insn = emit_insn_after (gen_pool_section_end (), insn);
- INSN_ADDRESSES_NEW (insn, -1);
- }
+ insn = emit_insn_after (gen_pool_section_end (), insn);
+ INSN_ADDRESSES_NEW (insn, -1);
insn = emit_barrier_after (insn);
INSN_ADDRESSES_NEW (insn, -1);
pool->pool_insn = insn;
}
- if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
- {
- s390_add_execute (pool, insn);
- }
- else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
s390_mainpool_finish (struct constant_pool *pool)
{
rtx base_reg = cfun->machine->base_reg;
+ rtx set;
+ rtx_insn *insn;
/* If the pool is empty, we're done. */
if (pool->size == 0)
/* We need correct insn addresses. */
shorten_branches (get_insns ());
- /* On zSeries, we use a LARL to load the pool register. The pool is
+ /* Use a LARL to load the pool register. The pool is
located in the .rodata section, so we emit it after the function. */
- if (TARGET_CPU_ZARCH)
- {
- rtx set = gen_main_base_64 (base_reg, pool->label);
- rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
- INSN_ADDRESSES_NEW (insn, -1);
- remove_insn (pool->pool_insn);
-
- insn = get_last_insn ();
- pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
- INSN_ADDRESSES_NEW (pool->pool_insn, -1);
-
- s390_dump_pool (pool, 0);
- }
-
- /* On S/390, if the total size of the function's code plus literal pool
- does not exceed 4096 bytes, we use BASR to set up a function base
- pointer, and emit the literal pool at the end of the function. */
- else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
- + pool->size + 8 /* alignment slop */ < 4096)
- {
- rtx set = gen_main_base_31_small (base_reg, pool->label);
- rtx_insn *insn = emit_insn_after (set, pool->pool_insn);
- INSN_ADDRESSES_NEW (insn, -1);
- remove_insn (pool->pool_insn);
-
- insn = emit_label_after (pool->label, insn);
- INSN_ADDRESSES_NEW (insn, -1);
-
- /* emit_pool_after will be set by s390_mainpool_start to the
- last insn of the section where the literal pool should be
- emitted. */
- insn = pool->emit_pool_after;
-
- pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
- INSN_ADDRESSES_NEW (pool->pool_insn, -1);
-
- s390_dump_pool (pool, 1);
- }
-
- /* Otherwise, we emit an inline literal pool and use BASR to branch
- over it, setting up the pool register at the same time. */
- else
- {
- rtx_code_label *pool_end = gen_label_rtx ();
-
- rtx pat = gen_main_base_31_large (base_reg, pool->label, pool_end);
- rtx_insn *insn = emit_jump_insn_after (pat, pool->pool_insn);
- JUMP_LABEL (insn) = pool_end;
- INSN_ADDRESSES_NEW (insn, -1);
- remove_insn (pool->pool_insn);
-
- insn = emit_label_after (pool->label, insn);
- INSN_ADDRESSES_NEW (insn, -1);
-
- pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
- INSN_ADDRESSES_NEW (pool->pool_insn, -1);
-
- insn = emit_label_after (pool_end, pool->pool_insn);
- INSN_ADDRESSES_NEW (insn, -1);
+ set = gen_main_base_64 (base_reg, pool->label);
+ insn = emit_insn_after (set, pool->pool_insn);
+ INSN_ADDRESSES_NEW (insn, -1);
+ remove_insn (pool->pool_insn);
- s390_dump_pool (pool, 1);
- }
+ insn = get_last_insn ();
+ pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
+ INSN_ADDRESSES_NEW (pool->pool_insn, -1);
+ s390_dump_pool (pool, 0);
/* Replace all literal pool references. */
s390_free_pool (pool);
}
-/* POOL holds the main literal pool as collected by s390_mainpool_start.
- We have decided we cannot use this pool, so revert all changes
- to the current function that were done by s390_mainpool_start. */
-static void
-s390_mainpool_cancel (struct constant_pool *pool)
-{
- /* We didn't actually change the instruction stream, so simply
- free the pool memory. */
- s390_free_pool (pool);
-}
-
-
/* Chunkify the literal pool. */
#define S390_POOL_CHUNK_MIN 0xc00
s390_chunkify_start (void)
{
struct constant_pool *curr_pool = NULL, *pool_list = NULL;
- int extra_size = 0;
bitmap far_labels;
rtx pending_ltrel = NULL_RTX;
rtx_insn *insn;
- rtx (*gen_reload_base) (rtx, rtx) =
- TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
-
-
/* We need correct insn addresses. */
shorten_branches (get_insns ());
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- bool section_switch_p = false;
-
/* Check for pending LTREL_BASE. */
if (INSN_P (insn))
{
}
}
- if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
- {
- if (!curr_pool)
- curr_pool = s390_start_pool (&pool_list, insn);
-
- s390_add_execute (curr_pool, insn);
- s390_add_pool_insn (curr_pool, insn);
- }
- else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx pool_ref = NULL_RTX;
find_constant_pool_ref (PATTERN (insn), &pool_ref);
gcc_assert (!pending_ltrel);
}
- if (NOTE_P (insn))
- switch (NOTE_KIND (insn))
- {
- case NOTE_INSN_SWITCH_TEXT_SECTIONS:
- section_switch_p = true;
- break;
- case NOTE_INSN_VAR_LOCATION:
- continue;
- default:
- break;
- }
+ if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
+ continue;
if (!curr_pool
|| INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
- || INSN_ADDRESSES (INSN_UID (insn)) == -1)
+ || INSN_ADDRESSES (INSN_UID (insn)) == -1)
continue;
- if (TARGET_CPU_ZARCH)
- {
- if (curr_pool->size < S390_POOL_CHUNK_MAX)
- continue;
-
- s390_end_pool (curr_pool, NULL);
- curr_pool = NULL;
- }
- else
- {
- int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
- - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
- + extra_size;
-
- /* We will later have to insert base register reload insns.
- Those will have an effect on code size, which we need to
- consider here. This calculation makes rather pessimistic
- worst-case assumptions. */
- if (LABEL_P (insn))
- extra_size += 6;
-
- if (chunk_size < S390_POOL_CHUNK_MIN
- && curr_pool->size < S390_POOL_CHUNK_MIN
- && !section_switch_p)
- continue;
-
- /* Pool chunks can only be inserted after BARRIERs ... */
- if (BARRIER_P (insn))
- {
- s390_end_pool (curr_pool, insn);
- curr_pool = NULL;
- extra_size = 0;
- }
-
- /* ... so if we don't find one in time, create one. */
- else if (chunk_size > S390_POOL_CHUNK_MAX
- || curr_pool->size > S390_POOL_CHUNK_MAX
- || section_switch_p)
- {
- rtx_insn *label, *jump, *barrier, *next, *prev;
-
- if (!section_switch_p)
- {
- /* We can insert the barrier only after a 'real' insn. */
- if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
- continue;
- if (get_attr_length (insn) == 0)
- continue;
- /* Don't separate LTREL_BASE from the corresponding
- LTREL_OFFSET load. */
- if (pending_ltrel)
- continue;
- next = insn;
- do
- {
- insn = next;
- next = NEXT_INSN (insn);
- }
- while (next
- && NOTE_P (next)
- && NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION);
- }
- else
- {
- gcc_assert (!pending_ltrel);
-
- /* The old pool has to end before the section switch
- note in order to make it part of the current
- section. */
- insn = PREV_INSN (insn);
- }
+ if (curr_pool->size < S390_POOL_CHUNK_MAX)
+ continue;
- label = gen_label_rtx ();
- prev = insn;
- if (prev && NOTE_P (prev))
- prev = prev_nonnote_insn (prev);
- if (prev)
- jump = emit_jump_insn_after_setloc (gen_jump (label), insn,
- INSN_LOCATION (prev));
- else
- jump = emit_jump_insn_after_noloc (gen_jump (label), insn);
- barrier = emit_barrier_after (jump);
- insn = emit_label_after (label, barrier);
- JUMP_LABEL (jump) = label;
- LABEL_NUSES (label) = 1;
-
- INSN_ADDRESSES_NEW (jump, -1);
- INSN_ADDRESSES_NEW (barrier, -1);
- INSN_ADDRESSES_NEW (insn, -1);
-
- s390_end_pool (curr_pool, barrier);
- curr_pool = NULL;
- extra_size = 0;
- }
- }
+ s390_end_pool (curr_pool, NULL);
+ curr_pool = NULL;
}
if (curr_pool)
for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
{
- rtx new_insn = gen_reload_base (cfun->machine->base_reg,
- curr_pool->label);
+ rtx new_insn = gen_reload_base_64 (cfun->machine->base_reg,
+ curr_pool->label);
rtx_insn *insn = curr_pool->first_insn;
INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
}
struct constant_pool *pool = s390_find_pool (pool_list, insn);
if (pool)
{
- rtx new_insn = gen_reload_base (cfun->machine->base_reg,
- pool->label);
+ rtx new_insn = gen_reload_base_64 (cfun->machine->base_reg,
+ pool->label);
INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
}
}
}
}
-/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
- We have decided we cannot use this list, so revert all changes
- to the current function that were done by s390_chunkify_start. */
-
-static void
-s390_chunkify_cancel (struct constant_pool *pool_list)
-{
- struct constant_pool *curr_pool = NULL;
- rtx_insn *insn;
-
- /* Remove all pool placeholder insns. */
-
- for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
- {
- /* Did we insert an extra barrier? Remove it. */
- rtx_insn *barrier = PREV_INSN (curr_pool->pool_insn);
- rtx_insn *jump = barrier? PREV_INSN (barrier) : NULL;
- rtx_insn *label = NEXT_INSN (curr_pool->pool_insn);
-
- if (jump && JUMP_P (jump)
- && barrier && BARRIER_P (barrier)
- && label && LABEL_P (label)
- && GET_CODE (PATTERN (jump)) == SET
- && SET_DEST (PATTERN (jump)) == pc_rtx
- && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
- && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
- {
- remove_insn (jump);
- remove_insn (barrier);
- remove_insn (label);
- }
-
- remove_insn (curr_pool->pool_insn);
- }
-
- /* Remove all base register reload insns. */
-
- for (insn = get_insns (); insn; )
- {
- rtx_insn *next_insn = NEXT_INSN (insn);
-
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SET
- && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
- && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
- remove_insn (insn);
-
- insn = next_insn;
- }
-
- /* Free pool list. */
-
- while (pool_list)
- {
- struct constant_pool *next = pool_list->next;
- s390_free_pool (pool_list);
- pool_list = next;
- }
-}
-
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
void
value of RETURN_REGNUM is actually saved. */
if (count == 0)
- {
- /* On non-z architectures branch splitting could overwrite r14. */
- if (TARGET_CPU_ZARCH)
- return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
- else
- {
- cfun_frame_layout.save_return_addr_p = true;
- return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
- }
- }
+ return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
if (TARGET_PACKED_STACK)
offset = -2 * UNITS_PER_LONG;
clobbered_regs[RETURN_REGNUM]
|= (!crtl->is_leaf
|| TARGET_TPF_PROFILING
- || cfun->machine->split_branches_pending_p
|| cfun_frame_layout.save_return_addr_p
|| crtl->calls_eh_return);
int i;
gcc_assert (epilogue_completed);
- gcc_assert (!cfun->machine->split_branches_pending_p);
s390_regs_ever_clobbered (clobbered_regs);
if (reload_completed)
return;
- /* On S/390 machines, we may need to perform branch splitting, which
- will require both base and return address register. We have no
- choice but to assume we're going to need them until right at the
- end of the machine dependent reorg phase. */
- if (!TARGET_CPU_ZARCH)
- cfun->machine->split_branches_pending_p = true;
-
do
{
frame_size = cfun_frame_layout.frame_size;
/* Try to predict whether we'll need the base register. */
- base_used = cfun->machine->split_branches_pending_p
- || crtl->uses_const_pool
+ base_used = crtl->uses_const_pool
|| (!DISP_IN_RANGE (frame_size)
&& !CONST_OK_FOR_K (frame_size));
static bool
s390_can_eliminate (const int from, const int to)
{
- /* On zSeries machines, we have not marked the base register as fixed.
+ /* We have not marked the base register as fixed.
Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
If a function requires the base register, we say here that this
elimination cannot be performed. This will cause reload to free
to allocate the base register for any other purpose. */
if (from == BASE_REGNUM && to == BASE_REGNUM)
{
- if (TARGET_CPU_ZARCH)
- {
- s390_init_frame_layout ();
- return cfun->machine->base_reg == NULL_RTX;
- }
-
- return false;
+ s390_init_frame_layout ();
+ return cfun->machine->base_reg == NULL_RTX;
}
/* Everything else must point into the stack frame. */
start_sequence ();
- if (TARGET_CPU_ZARCH)
- {
- emit_move_insn (got_rtx, s390_got_symbol ());
- }
- else
- {
- rtx offset;
-
- offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, s390_got_symbol ()),
- UNSPEC_LTREL_OFFSET);
- offset = gen_rtx_CONST (Pmode, offset);
- offset = force_const_mem (Pmode, offset);
-
- emit_move_insn (got_rtx, offset);
-
- offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
- UNSPEC_LTREL_BASE);
- offset = gen_rtx_PLUS (Pmode, got_rtx, offset);
-
- emit_move_insn (got_rtx, offset);
- }
+ emit_move_insn (got_rtx, s390_got_symbol ());
insns = get_insns ();
end_sequence ();
rtx tmp;
gcc_assert (flag_split_stack && reload_completed);
- if (!TARGET_CPU_ZARCH)
- {
- sorry ("CPUs older than z900 are not supported for -fsplit-stack");
- return;
- }
r1 = gen_rtx_REG (Pmode, 1);
asm_fprintf (asm_out_file, "\t# NOPs for %s (%d halfwords)\n", user, hw);
while (hw > 0)
{
- if (TARGET_CPU_ZARCH && hw >= 3)
- {
- output_asm_insn ("brcl\t0,0", NULL);
- hw -= 3;
+ if (hw >= 3)
+ {
+ output_asm_insn ("brcl\t0,0", NULL);
+ hw -= 3;
}
else if (hw >= 2)
{
output_asm_insn (".cfi_restore\t%0", op);
}
}
- else if (TARGET_CPU_ZARCH)
+ else
{
if (flag_nop_mcount)
output_asm_nops ("-mnop-mcount", /* st */ 2 + /* larl */ 3 +
output_asm_insn ("brasl\t%0,%4", op);
output_asm_insn ("l\t%0,%1", op);
if (flag_dwarf2_cfi_asm)
- output_asm_insn (".cfi_restore\t%0", op);
- }
- }
- else if (!flag_pic)
- {
- op[6] = gen_label_rtx ();
-
- if (flag_nop_mcount)
- output_asm_nops ("-mnop-mcount", /* st */ 2 + /* bras */ 2 +
- /* .long */ 2 + /* .long */ 2 + /* l */ 2 +
- /* l */ 2 + /* basr */ 1 + /* l */ 2);
- else
- {
- output_asm_insn ("st\t%0,%1", op);
- if (flag_dwarf2_cfi_asm)
- output_asm_insn (".cfi_rel_offset\t%0,%7", op);
- output_asm_insn ("bras\t%2,%l6", op);
- output_asm_insn (".long\t%4", op);
- output_asm_insn (".long\t%3", op);
- targetm.asm_out.internal_label (file, "L",
- CODE_LABEL_NUMBER (op[6]));
- output_asm_insn ("l\t%0,0(%2)", op);
- output_asm_insn ("l\t%2,4(%2)", op);
- output_asm_insn ("basr\t%0,%0", op);
- output_asm_insn ("l\t%0,%1", op);
- if (flag_dwarf2_cfi_asm)
- output_asm_insn (".cfi_restore\t%0", op);
- }
- }
- else
- {
- op[5] = gen_label_rtx ();
- op[6] = gen_label_rtx ();
-
- if (flag_nop_mcount)
- output_asm_nops ("-mnop-mcount", /* st */ 2 + /* bras */ 2 +
- /* .long */ 2 + /* .long */ 2 + /* lr */ 1 +
- /* a */ 2 + /* a */ 2 + /* basr */ 1 + /* l */ 2);
- else
- {
- output_asm_insn ("st\t%0,%1", op);
- if (flag_dwarf2_cfi_asm)
- output_asm_insn (".cfi_rel_offset\t%0,%7", op);
- output_asm_insn ("bras\t%2,%l6", op);
- targetm.asm_out.internal_label (file, "L",
- CODE_LABEL_NUMBER (op[5]));
- output_asm_insn (".long\t%4-%l5", op);
- output_asm_insn (".long\t%3-%l5", op);
- targetm.asm_out.internal_label (file, "L",
- CODE_LABEL_NUMBER (op[6]));
- output_asm_insn ("lr\t%0,%2", op);
- output_asm_insn ("a\t%0,0(%2)", op);
- output_asm_insn ("a\t%2,4(%2)", op);
- output_asm_insn ("basr\t%0,%0", op);
- output_asm_insn ("l\t%0,%1", op);
- if (flag_dwarf2_cfi_asm)
- output_asm_insn (".cfi_restore\t%0", op);
- }
+ output_asm_insn (".cfi_restore\t%0", op);
+ }
}
if (flag_record_mcount)
optimization is illegal for S/390 so we turn the direct
call into a indirect call again. */
addr_location = force_reg (Pmode, addr_location);
- }
-
- /* Unless we can use the bras(l) insn, force the
- routine address into a register. */
- if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
- {
- if (flag_pic)
- addr_location = legitimize_pic_address (addr_location, 0);
- else
- addr_location = force_reg (Pmode, addr_location);
}
}
fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
- if (TARGET_CPU_ZARCH)
- {
- fixed_regs[BASE_REGNUM] = 0;
- call_used_regs[BASE_REGNUM] = 0;
- fixed_regs[RETURN_REGNUM] = 0;
- call_used_regs[RETURN_REGNUM] = 0;
- }
+ fixed_regs[BASE_REGNUM] = 0;
+ call_used_regs[BASE_REGNUM] = 0;
+ fixed_regs[RETURN_REGNUM] = 0;
+ call_used_regs[RETURN_REGNUM] = 0;
if (TARGET_64BIT)
{
for (i = FPR8_REGNUM; i <= FPR15_REGNUM; i++)
can do, so no point in walking the insn list. */
if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
- && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
- && (TARGET_CPU_ZARCH
- || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
- && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
+ && cfun_frame_layout.last_save_gpr >= BASE_REGNUM)
return;
/* Search for prologue/epilogue insns and replace them. */
-
for (insn = get_insns (); insn; insn = next_insn)
{
int first, last, off;
s390_reorg (void)
{
bool pool_overflow = false;
+ rtx_insn *insn;
int hw_before, hw_after;
if (s390_tune == PROCESSOR_2964_Z13)
/* If literal pool overflowed, start to chunkify it. */
if (pool_overflow)
- pool = s390_chunkify_start ();
-
- /* Split out-of-range branches. If this has created new
- literal pool entries, cancel current chunk list and
- recompute it. zSeries machines have large branch
- instructions, so we never need to split a branch. */
- if (!TARGET_CPU_ZARCH && s390_split_branches ())
- {
- if (pool_overflow)
- s390_chunkify_cancel (pool);
- else
- s390_mainpool_cancel (pool);
-
- continue;
- }
+ pool = s390_chunkify_start ();
/* If we made it up to here, both conditions are satisfied.
Finish up literal pool related changes. */
else
s390_mainpool_finish (pool);
- /* We're done splitting branches. */
- cfun->machine->split_branches_pending_p = false;
break;
}
/* Generate out-of-pool execute target insns. */
- if (TARGET_CPU_ZARCH)
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- rtx_insn *insn, *target;
rtx label;
+ rtx_insn *target;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- label = s390_execute_label (insn);
- if (!label)
- continue;
+ label = s390_execute_label (insn);
+ if (!label)
+ continue;
- gcc_assert (label != const0_rtx);
+ gcc_assert (label != const0_rtx);
- target = emit_label (XEXP (label, 0));
- INSN_ADDRESSES_NEW (target, -1);
+ target = emit_label (XEXP (label, 0));
+ INSN_ADDRESSES_NEW (target, -1);
- if (JUMP_P (insn))
- {
- target = emit_jump_insn (s390_execute_target (insn));
- /* This is important in order to keep a table jump
- pointing at the jump table label. Only this makes it
- being recognized as table jump. */
- JUMP_LABEL (target) = JUMP_LABEL (insn);
- }
- else
- target = emit_insn (s390_execute_target (insn));
- INSN_ADDRESSES_NEW (target, -1);
+ if (JUMP_P (insn))
+ {
+ target = emit_jump_insn (s390_execute_target (insn));
+ /* This is important in order to keep a table jump
+ pointing at the jump table label. Only this makes it
+ being recognized as table jump. */
+ JUMP_LABEL (target) = JUMP_LABEL (insn);
}
+ else
+ target = emit_insn (s390_execute_target (insn));
+ INSN_ADDRESSES_NEW (target, -1);
}
/* Try to optimize prologue and epilogue further. */
/* Output a series of NOPs before the first active insn. */
while (insn && hw_after > 0)
{
- if (hw_after >= 3 && TARGET_CPU_ZARCH)
+ if (hw_after >= 3)
{
emit_insn_before (gen_nop_6_byte (), insn);
hw_after -= 3;
if (opts->x_s390_arch == PROCESSOR_NATIVE
|| opts->x_s390_tune == PROCESSOR_NATIVE)
gcc_unreachable ();
- if (TARGET_ZARCH_P (opts->x_target_flags) && !TARGET_CPU_ZARCH_P (opts))
- error ("z/Architecture mode not supported on %s",
- processor_table[(int)opts->x_s390_arch].name);
if (TARGET_64BIT && !TARGET_ZARCH_P (opts->x_target_flags))
error ("64-bit ABI not supported in ESA/390 mode");
opts->x_s390_function_return_mem = opts->x_s390_function_return;
}
- if (!TARGET_CPU_ZARCH)
- {
- if (opts->x_s390_indirect_branch_call != indirect_branch_keep
- || opts->x_s390_indirect_branch_jump != indirect_branch_keep)
- error ("-mindirect-branch* options require -march=z900 or higher");
- if (opts->x_s390_function_return_reg != indirect_branch_keep
- || opts->x_s390_function_return_mem != indirect_branch_keep)
- error ("-mfunction-return* options require -march=z900 or higher");
- }
-
-
/* Enable hardware transactions if available and not explicitly
disabled by user. E.g. with -m31 -march=zEC12 -mzarch */
if (!TARGET_OPT_HTM_P (opts_set->x_target_flags))
if (!TARGET_CPU_Z10)
fputs ("\t.machine pop\n", asm_out_file);
}
- else if (TARGET_CPU_ZARCH)
+ else
{
/* larl %r1,1f */
fprintf (asm_out_file, "\tlarl\t%%r%d,1f\n",
fprintf (asm_out_file, "\tex\t0,0(%%r%d)\n",
INDIRECT_BRANCH_THUNK_REGNUM);
}
- else
- gcc_unreachable ();
/* 0: j 0b */
fputs ("0:\tj\t0b\n", asm_out_file);
; mnemonics which only get defined through if_then_else currently
; don't get added to the list values automatically and hence need to
; be listed here.
-(define_attr "mnemonic" "b,bas,bc,bcr_flush,unknown" (const_string "unknown"))
+(define_attr "mnemonic" "b,bas,basr,bc,bcr_flush,unknown" (const_string "unknown"))
;; Length in bytes.
(match_test "TARGET_DFP"))
(const_int 1)
- (and (eq_attr "cpu_facility" "cpu_zarch")
- (match_test "TARGET_CPU_ZARCH"))
+ (eq_attr "cpu_facility" "cpu_zarch")
(const_int 1)
(and (eq_attr "cpu_facility" "z10")
(define_insn "*movsi_larl"
[(set (match_operand:SI 0 "register_operand" "=d")
(match_operand:SI 1 "larl_operand" "X"))]
- "!TARGET_64BIT && TARGET_CPU_ZARCH
+ "!TARGET_64BIT
&& !FP_REG_P (operands[0])"
"larl\t%0,%1"
[(set_attr "op_type" "RIL")
(use (match_operand 2 "register_operand" ""))
(use (const:BLK (unspec:BLK [(const_int 0)] UNSPEC_INSN)))
(clobber (match_operand 3 "register_operand" ""))]
- "reload_completed && TARGET_CPU_ZARCH"
+ "reload_completed"
[(set (match_dup 3) (label_ref (match_dup 4)))
(parallel
[(unspec [(match_dup 2) (mem:BLK (match_dup 3))
(use (const:BLK (unspec:BLK [(const_int 0)] UNSPEC_INSN)))
(clobber (match_operand 2 "register_operand" ""))
(clobber (reg:CC CC_REGNUM))]
- "reload_completed && TARGET_CPU_ZARCH"
+ "reload_completed"
[(set (match_dup 2) (label_ref (match_dup 3)))
(parallel
[(unspec [(match_dup 1) (mem:BLK (match_dup 2))
(use (match_operand 2 "register_operand" ""))
(use (const:BLK (unspec:BLK [(const_int 0)] UNSPEC_INSN)))
(clobber (match_operand 3 "register_operand" ""))]
- "reload_completed && TARGET_CPU_ZARCH"
+ "reload_completed"
[(set (match_dup 3) (label_ref (match_dup 4)))
(parallel
[(unspec [(match_dup 2) (mem:BLK (match_dup 3))
(plus:DI (match_operand:DI 1 "nonimmediate_operand" "%0")
(match_operand:DI 2 "general_operand" "do") ) )
(clobber (reg:CC CC_REGNUM))]
- "!TARGET_ZARCH && TARGET_CPU_ZARCH"
+ "!TARGET_ZARCH"
"#"
"&& reload_completed"
[(parallel
operands[7] = operand_subword (operands[1], 1, 0, DImode);
operands[8] = operand_subword (operands[2], 1, 0, DImode);")
-(define_insn_and_split "*adddi3_31"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=&d")
- (plus:DI (match_operand:DI 1 "nonimmediate_operand" "%0")
- (match_operand:DI 2 "general_operand" "do") ) )
- (clobber (reg:CC CC_REGNUM))]
- "!TARGET_CPU_ZARCH"
- "#"
- "&& reload_completed"
- [(parallel
- [(set (match_dup 3) (plus:SI (match_dup 4) (match_dup 5)))
- (clobber (reg:CC CC_REGNUM))])
- (parallel
- [(set (reg:CCL1 CC_REGNUM)
- (compare:CCL1 (plus:SI (match_dup 7) (match_dup 8))
- (match_dup 7)))
- (set (match_dup 6) (plus:SI (match_dup 7) (match_dup 8)))])
- (set (pc)
- (if_then_else (ltu (reg:CCL1 CC_REGNUM) (const_int 0))
- (pc)
- (label_ref (match_dup 9))))
- (parallel
- [(set (match_dup 3) (plus:SI (match_dup 3) (const_int 1)))
- (clobber (reg:CC CC_REGNUM))])
- (match_dup 9)]
- "operands[3] = operand_subword (operands[0], 0, 0, DImode);
- operands[4] = operand_subword (operands[1], 0, 0, DImode);
- operands[5] = operand_subword (operands[2], 0, 0, DImode);
- operands[6] = operand_subword (operands[0], 1, 0, DImode);
- operands[7] = operand_subword (operands[1], 1, 0, DImode);
- operands[8] = operand_subword (operands[2], 1, 0, DImode);
- operands[9] = gen_label_rtx ();")
-
;
; addsi3 instruction pattern(s).
;
(minus:DI (match_operand:DI 1 "register_operand" "0")
(match_operand:DI 2 "general_operand" "do") ) )
(clobber (reg:CC CC_REGNUM))]
- "!TARGET_ZARCH && TARGET_CPU_ZARCH"
+ "!TARGET_ZARCH"
"#"
"&& reload_completed"
[(parallel
operands[7] = operand_subword (operands[1], 1, 0, DImode);
operands[8] = operand_subword (operands[2], 1, 0, DImode);")
-(define_insn_and_split "*subdi3_31"
- [(set (match_operand:DI 0 "register_operand" "=&d")
- (minus:DI (match_operand:DI 1 "register_operand" "0")
- (match_operand:DI 2 "general_operand" "do") ) )
- (clobber (reg:CC CC_REGNUM))]
- "!TARGET_CPU_ZARCH"
- "#"
- "&& reload_completed"
- [(parallel
- [(set (match_dup 3) (minus:SI (match_dup 4) (match_dup 5)))
- (clobber (reg:CC CC_REGNUM))])
- (parallel
- [(set (reg:CCL2 CC_REGNUM)
- (compare:CCL2 (minus:SI (match_dup 7) (match_dup 8))
- (match_dup 7)))
- (set (match_dup 6) (minus:SI (match_dup 7) (match_dup 8)))])
- (set (pc)
- (if_then_else (gtu (reg:CCL2 CC_REGNUM) (const_int 0))
- (pc)
- (label_ref (match_dup 9))))
- (parallel
- [(set (match_dup 3) (plus:SI (match_dup 3) (const_int -1)))
- (clobber (reg:CC CC_REGNUM))])
- (match_dup 9)]
- "operands[3] = operand_subword (operands[0], 0, 0, DImode);
- operands[4] = operand_subword (operands[1], 0, 0, DImode);
- operands[5] = operand_subword (operands[2], 0, 0, DImode);
- operands[6] = operand_subword (operands[0], 1, 0, DImode);
- operands[7] = operand_subword (operands[1], 1, 0, DImode);
- operands[8] = operand_subword (operands[2], 1, 0, DImode);
- operands[9] = gen_label_rtx ();")
-
;
; subsi3 instruction pattern(s).
;
(match_dup 1)))
(set (match_operand:GPR 0 "register_operand" "=d,d")
(plus:GPR (plus:GPR (match_dup 3) (match_dup 1)) (match_dup 2)))]
- "s390_match_ccmode (insn, CCL1mode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCL1mode)"
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(match_operand:GPR 2 "general_operand" "d,T"))
(match_dup 1)))
(clobber (match_scratch:GPR 0 "=d,d"))]
- "s390_match_ccmode (insn, CCL1mode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCL1mode)"
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(match_dup 2)))
(set (match_operand:GPR 0 "register_operand" "=d,d")
(plus:GPR (plus:GPR (match_dup 3) (match_dup 1)) (match_dup 2)))]
- "s390_match_ccmode (insn, CCL1mode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCL1mode)"
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(match_operand:GPR 2 "general_operand" "d,T"))
(match_dup 2)))
(clobber (match_scratch:GPR 0 "=d,d"))]
- "s390_match_ccmode (insn, CCL1mode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCL1mode)"
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(const_int 0)))
(set (match_operand:GPR 0 "register_operand" "=d,d")
(plus:GPR (plus:GPR (match_dup 3) (match_dup 1)) (match_dup 2)))]
- "s390_match_ccmode (insn, CCLmode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCLmode)"
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(match_operand:GPR 1 "nonimmediate_operand" "%0,0"))
(match_operand:GPR 2 "general_operand" "d,T")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_CPU_ZARCH"
+ ""
"@
alc<g>r\t%0,%2
alc<g>\t%0,%2"
(const_int 0)))
(set (match_operand:GPR 0 "register_operand" "=d,d")
(minus:GPR (minus:GPR (match_dup 1) (match_dup 2)) (match_dup 3)))]
- "s390_match_ccmode (insn, CCLmode) && TARGET_CPU_ZARCH"
+ "s390_match_ccmode (insn, CCLmode)"
"@
slb<g>r\t%0,%2
slb<g>\t%0,%2"
(match_operand:GPR 2 "general_operand" "d,T"))
(match_operand:GPR 3 "s390_slb_comparison" "")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_CPU_ZARCH"
+ ""
"@
slb<g>r\t%0,%2
slb<g>\t%0,%2"
(match_operand 1 "comparison_operator" "")
(match_operand:GPR 2 "register_operand" "")
(match_operand:GPR 3 "const_int_operand" "")]
- "TARGET_CPU_ZARCH"
+ ""
"if (!s390_expand_addcc (GET_CODE (operands[1]),
XEXP (operands[1], 0), XEXP (operands[1], 1),
operands[0], operands[2],
[(set (match_operand:GPR 0 "register_operand" "=&d")
(match_operand:GPR 1 "s390_alc_comparison" ""))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_CPU_ZARCH"
+ ""
"#"
"&& reload_completed"
[(set (match_dup 0) (const_int 0))
[(set (match_operand:GPR 0 "register_operand" "=&d")
(match_operand:GPR 1 "s390_slb_comparison" ""))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_CPU_ZARCH"
+ ""
"#"
"&& reload_completed"
[(set (match_dup 0) (const_int 0))
(match_operator:SI 1 "s390_scond_operator"
[(match_operand:GPR 2 "register_operand" "")
(match_operand:GPR 3 "general_operand" "")]))]
- "TARGET_CPU_ZARCH"
+ ""
"if (!s390_expand_addcc (GET_CODE (operands[1]), operands[2], operands[3],
operands[0], const0_rtx, const1_rtx)) FAIL; DONE;")
(match_operand:<DWH> 1 "register_operand" "%0,0"))
(zero_extend:DW
(match_operand:<DWH> 2 "nonimmediate_operand" " d,T"))))]
- "TARGET_CPU_ZARCH"
+ ""
"@
ml<tg>r\t%0,%2
ml<tg>\t%0,%2"
(set (match_operand:SI 3 "general_operand" "")
(umod:SI (match_dup 1) (match_dup 2)))])
(clobber (match_dup 4))]
- "!TARGET_ZARCH && TARGET_CPU_ZARCH"
+ "!TARGET_ZARCH"
{
rtx div_equal, mod_equal, equal;
rtx_insn *insn;
(zero_extend:DI
(truncate:SI
(udiv:DI (match_dup 1) (zero_extend:DI (match_dup 2)))))))]
- "!TARGET_ZARCH && TARGET_CPU_ZARCH"
+ "!TARGET_ZARCH"
"@
dlr\t%0,%2
dl\t%0,%2"
[(set_attr "op_type" "RRE,RXY")
(set_attr "type" "idiv")])
-(define_expand "udivsi3"
- [(set (match_operand:SI 0 "register_operand" "=d")
- (udiv:SI (match_operand:SI 1 "general_operand" "")
- (match_operand:SI 2 "general_operand" "")))
- (clobber (match_dup 3))]
- "!TARGET_ZARCH && !TARGET_CPU_ZARCH"
-{
- rtx udiv_equal, umod_equal, equal;
- rtx_insn *insn;
-
- udiv_equal = gen_rtx_UDIV (SImode, operands[1], operands[2]);
- umod_equal = gen_rtx_UMOD (SImode, operands[1], operands[2]);
- equal = gen_rtx_IOR (DImode,
- gen_rtx_ASHIFT (DImode,
- gen_rtx_ZERO_EXTEND (DImode, umod_equal),
- GEN_INT (32)),
- gen_rtx_ZERO_EXTEND (DImode, udiv_equal));
-
- operands[3] = gen_reg_rtx (DImode);
-
- if (CONSTANT_P (operands[2]))
- {
- if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0)
- {
- rtx_code_label *label1 = gen_label_rtx ();
-
- operands[1] = make_safe_from (operands[1], operands[0]);
- emit_move_insn (operands[0], const0_rtx);
- emit_cmp_and_jump_insns (operands[1], operands[2], LT, NULL_RTX,
- SImode, 1, label1);
- emit_move_insn (operands[0], const1_rtx);
- emit_label (label1);
- }
- else
- {
- operands[2] = force_reg (SImode, operands[2]);
- operands[2] = make_safe_from (operands[2], operands[0]);
-
- emit_insn (gen_zero_extendsidi2 (operands[3], operands[1]));
- insn = emit_insn (gen_divmoddisi3 (operands[3], operands[3],
- operands[2]));
- set_unique_reg_note (insn, REG_EQUAL, equal);
-
- insn = emit_move_insn (operands[0],
- gen_lowpart (SImode, operands[3]));
- set_unique_reg_note (insn, REG_EQUAL, udiv_equal);
- }
- }
- else
- {
- rtx_code_label *label1 = gen_label_rtx ();
- rtx_code_label *label2 = gen_label_rtx ();
- rtx_code_label *label3 = gen_label_rtx ();
-
- operands[1] = force_reg (SImode, operands[1]);
- operands[1] = make_safe_from (operands[1], operands[0]);
- operands[2] = force_reg (SImode, operands[2]);
- operands[2] = make_safe_from (operands[2], operands[0]);
-
- emit_move_insn (operands[0], const0_rtx);
- emit_cmp_and_jump_insns (operands[2], operands[1], GT, NULL_RTX,
- SImode, 1, label3);
- emit_cmp_and_jump_insns (operands[2], const0_rtx, LT, NULL_RTX,
- SImode, 0, label2);
- emit_cmp_and_jump_insns (operands[2], const1_rtx, EQ, NULL_RTX,
- SImode, 0, label1);
- emit_insn (gen_zero_extendsidi2 (operands[3], operands[1]));
- insn = emit_insn (gen_divmoddisi3 (operands[3], operands[3],
- operands[2]));
- set_unique_reg_note (insn, REG_EQUAL, equal);
-
- insn = emit_move_insn (operands[0],
- gen_lowpart (SImode, operands[3]));
- set_unique_reg_note (insn, REG_EQUAL, udiv_equal);
-
- emit_jump (label3);
- emit_label (label1);
- emit_move_insn (operands[0], operands[1]);
- emit_jump (label3);
- emit_label (label2);
- emit_move_insn (operands[0], const1_rtx);
- emit_label (label3);
- }
- emit_move_insn (operands[0], operands[0]);
- DONE;
-})
-
-(define_expand "umodsi3"
- [(set (match_operand:SI 0 "register_operand" "=d")
- (umod:SI (match_operand:SI 1 "nonimmediate_operand" "")
- (match_operand:SI 2 "nonimmediate_operand" "")))
- (clobber (match_dup 3))]
- "!TARGET_ZARCH && !TARGET_CPU_ZARCH"
-{
- rtx udiv_equal, umod_equal, equal;
- rtx_insn *insn;
-
- udiv_equal = gen_rtx_UDIV (SImode, operands[1], operands[2]);
- umod_equal = gen_rtx_UMOD (SImode, operands[1], operands[2]);
- equal = gen_rtx_IOR (DImode,
- gen_rtx_ASHIFT (DImode,
- gen_rtx_ZERO_EXTEND (DImode, umod_equal),
- GEN_INT (32)),
- gen_rtx_ZERO_EXTEND (DImode, udiv_equal));
-
- operands[3] = gen_reg_rtx (DImode);
-
- if (CONSTANT_P (operands[2]))
- {
- if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) <= 0)
- {
- rtx_code_label *label1 = gen_label_rtx ();
-
- operands[1] = make_safe_from (operands[1], operands[0]);
- emit_move_insn (operands[0], operands[1]);
- emit_cmp_and_jump_insns (operands[0], operands[2], LT, NULL_RTX,
- SImode, 1, label1);
- emit_insn (gen_abssi2 (operands[0], operands[2]));
- emit_insn (gen_addsi3 (operands[0], operands[0], operands[1]));
- emit_label (label1);
- }
- else
- {
- operands[2] = force_reg (SImode, operands[2]);
- operands[2] = make_safe_from (operands[2], operands[0]);
-
- emit_insn (gen_zero_extendsidi2 (operands[3], operands[1]));
- insn = emit_insn (gen_divmoddisi3 (operands[3], operands[3],
- operands[2]));
- set_unique_reg_note (insn, REG_EQUAL, equal);
-
- insn = emit_move_insn (operands[0],
- gen_highpart (SImode, operands[3]));
- set_unique_reg_note (insn, REG_EQUAL, umod_equal);
- }
- }
- else
- {
- rtx_code_label *label1 = gen_label_rtx ();
- rtx_code_label *label2 = gen_label_rtx ();
- rtx_code_label *label3 = gen_label_rtx ();
-
- operands[1] = force_reg (SImode, operands[1]);
- operands[1] = make_safe_from (operands[1], operands[0]);
- operands[2] = force_reg (SImode, operands[2]);
- operands[2] = make_safe_from (operands[2], operands[0]);
-
- emit_move_insn(operands[0], operands[1]);
- emit_cmp_and_jump_insns (operands[2], operands[1], GT, NULL_RTX,
- SImode, 1, label3);
- emit_cmp_and_jump_insns (operands[2], const0_rtx, LT, NULL_RTX,
- SImode, 0, label2);
- emit_cmp_and_jump_insns (operands[2], const1_rtx, EQ, NULL_RTX,
- SImode, 0, label1);
- emit_insn (gen_zero_extendsidi2 (operands[3], operands[1]));
- insn = emit_insn (gen_divmoddisi3 (operands[3], operands[3],
- operands[2]));
- set_unique_reg_note (insn, REG_EQUAL, equal);
-
- insn = emit_move_insn (operands[0],
- gen_highpart (SImode, operands[3]));
- set_unique_reg_note (insn, REG_EQUAL, umod_equal);
-
- emit_jump (label3);
- emit_label (label1);
- emit_move_insn (operands[0], const0_rtx);
- emit_jump (label3);
- emit_label (label2);
- emit_insn (gen_subsi3 (operands[0], operands[0], operands[2]));
- emit_label (label3);
- }
- DONE;
-})
-
;
; div(df|sf)3 instruction pattern(s).
;
[(set (match_operand:GPR 0 "register_operand" "")
(rotate:GPR (match_operand:GPR 1 "register_operand" "")
(match_operand:SI 2 "nonmemory_operand" "")))]
- "TARGET_CPU_ZARCH"
+ ""
"")
; rll, rllg
[(set (match_operand:GPR 0 "register_operand" "=d")
(rotate:GPR (match_operand:GPR 1 "register_operand" "d")
(match_operand:SI 2 "nonmemory_operand" "an")))]
- "TARGET_CPU_ZARCH"
+ ""
"rll<g>\t%0,%1,<addr_style_op_ops>"
[(set_attr "op_type" "RSE")
(set_attr "atype" "reg")
(match_operand 2 "const_int_operand" "")])
(label_ref (match_operand 0 "" ""))
(pc)))]
- "TARGET_CPU_ZARCH"
+ ""
{
if (get_attr_length (insn) == 4)
return "j%C1\t%l0";
(if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
(const_int 4) (const_int 6)))])
-(define_insn "*cjump_31"
- [(set (pc)
- (if_then_else
- (match_operator 1 "s390_comparison" [(reg CC_REGNUM)
- (match_operand 2 "const_int_operand" "")])
- (label_ref (match_operand 0 "" ""))
- (pc)))]
- "!TARGET_CPU_ZARCH"
-{
- gcc_assert (get_attr_length (insn) == 4);
- return "j%C1\t%l0";
-}
- [(set_attr "op_type" "RI")
- (set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (not (match_test "flag_pic"))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 6))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 8))))])
-
(define_insn "*cjump_long"
[(set (pc)
(if_then_else
(match_operator 1 "s390_comparison" [(reg CC_REGNUM) (const_int 0)])
(pc)
(label_ref (match_operand 0 "" ""))))]
- "TARGET_CPU_ZARCH"
+ ""
{
if (get_attr_length (insn) == 4)
return "j%D1\t%l0";
(if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
(const_int 4) (const_int 6)))])
-(define_insn "*icjump_31"
- [(set (pc)
- (if_then_else
- (match_operator 1 "s390_comparison" [(reg CC_REGNUM) (const_int 0)])
- (pc)
- (label_ref (match_operand 0 "" ""))))]
- "!TARGET_CPU_ZARCH"
-{
- gcc_assert (get_attr_length (insn) == 4);
- return "j%D1\t%l0";
-}
- [(set_attr "op_type" "RI")
- (set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (not (match_test "flag_pic"))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 6))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 8))))])
-
(define_insn "*icjump_long"
[(set (pc)
(if_then_else
(set (match_operand:GPR 4 "nonimmediate_operand" "")
(plus:GPR (match_dup 1) (match_dup 2)))
(clobber (match_scratch:GPR 5 ""))]
- "TARGET_CPU_ZARCH"
+ ""
"#"
"!reload_completed && !reload_in_progress"
[(set (match_dup 7) (match_dup 2)) ; the increment
(subreg:SI (match_dup 2) 0)))
(clobber (match_scratch:SI 4 "=X,&1,&?d"))
(clobber (reg:CC CC_REGNUM))]
- "!TARGET_ZARCH && TARGET_CPU_ZARCH"
+ "!TARGET_ZARCH"
{
if (which_alternative != 0)
return "#";
(use (match_operand 1 "" ""))] ; label
""
{
- if (GET_MODE (operands[0]) == SImode && !TARGET_CPU_ZARCH)
- emit_jump_insn (gen_doloop_si31 (operands[1], operands[0], operands[0]));
- else if (GET_MODE (operands[0]) == SImode && TARGET_CPU_ZARCH)
+ if (GET_MODE (operands[0]) == SImode)
emit_jump_insn (gen_doloop_si64 (operands[1], operands[0], operands[0]));
else if (GET_MODE (operands[0]) == DImode && TARGET_ZARCH)
emit_jump_insn (gen_doloop_di (operands[1], operands[0], operands[0]));
(plus:SI (match_dup 1) (const_int -1)))
(clobber (match_scratch:SI 3 "=X,&1,&?d"))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_CPU_ZARCH"
+ ""
{
if (which_alternative != 0)
return "#";
(if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
(const_int 4) (const_int 10)))])
-(define_insn_and_split "doloop_si31"
- [(set (pc)
- (if_then_else
- (ne (match_operand:SI 1 "register_operand" "d,d,d")
- (const_int 1))
- (label_ref (match_operand 0 "" ""))
- (pc)))
- (set (match_operand:SI 2 "nonimmediate_operand" "=1,?X,?X")
- (plus:SI (match_dup 1) (const_int -1)))
- (clobber (match_scratch:SI 3 "=X,&1,&?d"))
- (clobber (reg:CC CC_REGNUM))]
- "!TARGET_CPU_ZARCH"
-{
- if (which_alternative != 0)
- return "#";
- else if (get_attr_length (insn) == 4)
- return "brct\t%1,%l0";
- else
- gcc_unreachable ();
-}
- "&& reload_completed
- && (! REG_P (operands[2])
- || ! rtx_equal_p (operands[1], operands[2]))"
- [(set (match_dup 3) (match_dup 1))
- (parallel [(set (reg:CCAN CC_REGNUM)
- (compare:CCAN (plus:SI (match_dup 3) (const_int -1))
- (const_int 0)))
- (set (match_dup 3) (plus:SI (match_dup 3) (const_int -1)))])
- (set (match_dup 2) (match_dup 3))
- (set (pc) (if_then_else (ne (reg:CCAN CC_REGNUM) (const_int 0))
- (label_ref (match_dup 0))
- (pc)))]
- ""
- [(set_attr "op_type" "RI")
- ; Strictly speaking, the z10 properties are valid for brct only, however, it does not
- ; hurt us in the (rare) case of ahi.
- (set_attr "z10prop" "z10_super_E1")
- (set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (not (match_test "flag_pic"))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 6))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 8))))])
-
-(define_insn "*doloop_si_long"
- [(set (pc)
- (if_then_else
- (ne (match_operand:SI 1 "register_operand" "d")
- (const_int 1))
- (match_operand 0 "address_operand" "ZR")
- (pc)))
- (set (match_operand:SI 2 "register_operand" "=1")
- (plus:SI (match_dup 1) (const_int -1)))
- (clobber (match_scratch:SI 3 "=X"))
- (clobber (reg:CC CC_REGNUM))]
- "!TARGET_CPU_ZARCH"
-{
- if (get_attr_op_type (insn) == OP_TYPE_RR)
- return "bctr\t%1,%0";
- else
- return "bct\t%1,%a0";
-}
- [(set (attr "op_type")
- (if_then_else (match_operand 0 "register_operand" "")
- (const_string "RR") (const_string "RX")))
- (set_attr "type" "branch")
- (set_attr "atype" "agen")
- (set_attr "z10prop" "z10_c")
- (set_attr "z196prop" "z196_cracked")])
-
(define_insn_and_split "doloop_di"
[(set (pc)
(if_then_else
(define_insn "*jump64"
[(set (pc) (label_ref (match_operand 0 "" "")))]
- "TARGET_CPU_ZARCH"
+ ""
{
if (get_attr_length (insn) == 4)
return "j\t%l0";
(if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
(const_int 4) (const_int 6)))])
-(define_insn "*jump31"
- [(set (pc) (label_ref (match_operand 0 "" "")))]
- "!TARGET_CPU_ZARCH"
-{
- gcc_assert (get_attr_length (insn) == 4);
- return "j\t%l0";
-}
- [(set_attr "op_type" "RI")
- (set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (not (match_test "flag_pic"))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 6))
- (if_then_else (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
- (const_int 4) (const_int 8))))])
-
;
; indirect-jump instruction pattern(s).
;
(define_insn "*sibcall_brcl"
[(call (mem:QI (match_operand 0 "bras_sym_operand" "X"))
(match_operand 1 "const_int_operand" "n"))]
- "SIBLING_CALL_P (insn) && TARGET_CPU_ZARCH"
+ "SIBLING_CALL_P (insn)"
"jg\t%0"
[(set_attr "op_type" "RIL")
(set_attr "type" "branch")])
[(set (match_operand 0 "" "")
(call (mem:QI (match_operand 1 "bras_sym_operand" "X"))
(match_operand 2 "const_int_operand" "n")))]
- "SIBLING_CALL_P (insn) && TARGET_CPU_ZARCH"
+ "SIBLING_CALL_P (insn)"
"jg\t%1"
[(set_attr "op_type" "RIL")
(set_attr "type" "branch")])
(match_operand 1 "const_int_operand" "n"))
(clobber (match_operand 2 "register_operand" "=r"))]
"!SIBLING_CALL_P (insn)
- && TARGET_CPU_ZARCH
&& GET_MODE (operands[2]) == Pmode"
"brasl\t%2,%0"
[(set_attr "op_type" "RIL")
(match_operand 2 "const_int_operand" "n")))
(clobber (match_operand 3 "register_operand" "=r"))]
"!SIBLING_CALL_P (insn)
- && TARGET_CPU_ZARCH
&& GET_MODE (operands[3]) == Pmode"
"brasl\t%3,%1"
[(set_attr "op_type" "RIL")
(clobber (match_operand 3 "register_operand" "=r"))
(use (match_operand 4 "" ""))]
"!SIBLING_CALL_P (insn)
- && TARGET_CPU_ZARCH
&& GET_MODE (operands[3]) == Pmode"
"brasl\t%3,%1%J4"
[(set_attr "op_type" "RIL")
(define_insn "nop_6_byte"
[(unspec_volatile [(const_int 0)] UNSPECV_NOP_6_BYTE)]
- "TARGET_CPU_ZARCH"
+ ""
"brcl\t0, 0"
[(set_attr "op_type" "RIL")])
}
[(set_attr "length" "0")])
-(define_insn "main_base_31_small"
- [(set (match_operand 0 "register_operand" "=a")
- (unspec [(label_ref (match_operand 1 "" ""))] UNSPEC_MAIN_BASE))]
- "!TARGET_CPU_ZARCH && GET_MODE (operands[0]) == Pmode"
- "basr\t%0,0"
- [(set_attr "op_type" "RR")
- (set_attr "type" "la")
- (set_attr "z196prop" "z196_cracked")])
-
-(define_insn "main_base_31_large"
- [(set (match_operand 0 "register_operand" "=a")
- (unspec [(label_ref (match_operand 1 "" ""))] UNSPEC_MAIN_BASE))
- (set (pc) (label_ref (match_operand 2 "" "")))]
- "!TARGET_CPU_ZARCH && GET_MODE (operands[0]) == Pmode"
- "bras\t%0,%2"
- [(set_attr "op_type" "RI")
- (set_attr "z196prop" "z196_cracked")])
-
(define_insn "main_base_64"
[(set (match_operand 0 "register_operand" "=a")
(unspec [(label_ref (match_operand 1 "" ""))] UNSPEC_MAIN_BASE))]
- "TARGET_CPU_ZARCH && GET_MODE (operands[0]) == Pmode"
+ "GET_MODE (operands[0]) == Pmode"
"larl\t%0,%1"
[(set_attr "op_type" "RIL")
(set_attr "type" "larl")
gcc_unreachable ();
}
[(set (attr "type")
- (if_then_else (match_test "TARGET_CPU_ZARCH")
- (const_string "larl") (const_string "la")))])
-
-(define_insn "reload_base_31"
- [(set (match_operand 0 "register_operand" "=a")
- (unspec [(label_ref (match_operand 1 "" ""))] UNSPEC_RELOAD_BASE))]
- "!TARGET_CPU_ZARCH && GET_MODE (operands[0]) == Pmode"
- "basr\t%0,0\;la\t%0,%1-.(%0)"
- [(set_attr "length" "6")
- (set_attr "type" "la")
- (set_attr "z196prop" "z196_cracked")])
+ (const_string "larl"))])
(define_insn "reload_base_64"
[(set (match_operand 0 "register_operand" "=a")
(unspec [(label_ref (match_operand 1 "" ""))] UNSPEC_RELOAD_BASE))]
- "TARGET_CPU_ZARCH && GET_MODE (operands[0]) == Pmode"
+ "GET_MODE (operands[0]) == Pmode"
"larl\t%0,%1"
[(set_attr "op_type" "RIL")
(set_attr "type" "larl")
(define_insn "bswap<mode>2"
[(set (match_operand:GPR 0 "nonimmediate_operand" "=d,d,T")
(bswap:GPR (match_operand:GPR 1 "nonimmediate_operand" " d,T,d")))]
- "TARGET_CPU_ZARCH"
+ ""
"@
lrv<g>r\t%0,%1
lrv<g>\t%0,%1
(define_insn "bswaphi2"
[(set (match_operand:HI 0 "nonimmediate_operand" "=d,d,T")
(bswap:HI (match_operand:HI 1 "nonimmediate_operand" " d,T,d")))]
- "TARGET_CPU_ZARCH"
+ ""
"@
#
lrvh\t%0,%1
(define_split
[(set (match_operand:HI 0 "register_operand" "")
(bswap:HI (match_operand:HI 1 "register_operand" "")))]
- "TARGET_CPU_ZARCH"
+ ""
[(set (match_dup 2) (bswap:SI (match_dup 3)))
(set (match_dup 2) (lshiftrt:SI (match_dup 2) (const_int 16)))]
{
(match_operand 2 "const_int_operand" "X")
(match_operand 3 "const_int_operand" "X")]
UNSPECV_SPLIT_STACK_DATA)]
- "TARGET_CPU_ZARCH"
+ ""
{
switch_to_section (targetm.asm_out.function_rodata_section
(current_function_decl));
(define_expand "split_stack_call"
[(match_operand 0 "bras_sym_operand" "X")
(match_operand 1 "" "")]
- "TARGET_CPU_ZARCH"
+ ""
{
if (TARGET_64BIT)
emit_jump_insn (gen_split_stack_call_di (operands[0], operands[1]));
(set (reg:P 1) (unspec_volatile [(match_operand 0 "bras_sym_operand" "X")
(reg:P 1)]
UNSPECV_SPLIT_STACK_CALL))]
- "TARGET_CPU_ZARCH"
+ ""
"jg\t%0"
[(set_attr "op_type" "RIL")
(set_attr "type" "branch")])
[(match_operand 0 "bras_sym_operand" "X")
(match_operand 1 "" "")
(match_operand 2 "" "")]
- "TARGET_CPU_ZARCH"
+ ""
{
if (TARGET_64BIT)
emit_jump_insn (gen_split_stack_cond_call_di (operands[0], operands[1], operands[2]));
(set (reg:P 1) (unspec_volatile [(match_operand 0 "bras_sym_operand" "X")
(reg:P 1)]
UNSPECV_SPLIT_STACK_CALL))]
- "TARGET_CPU_ZARCH"
+ ""
"jg%C1\t%0"
[(set_attr "op_type" "RIL")
(set_attr "type" "branch")])