--- /dev/null
+/* Subroutines used for code generation on IBM S/390 and zSeries
+ Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
+ Contributed by Hartmut Penner (hpenner@de.ibm.com) and
+ Ulrich Weigand (weigand@de.ibm.com).
+
+This file is part of GNU CC.
+
+GNU CC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GNU CC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GNU CC; see the file COPYING. If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
+
+#include "config.h"
+#include <setjmp.h>
+#include "system.h"
+#include "rtl.h"
+#include "tree.h"
+#include "tm_p.h"
+#include "regs.h"
+#include "hard-reg-set.h"
+#include "real.h"
+#include "insn-config.h"
+#include "conditions.h"
+#include "output.h"
+#include "insn-attr.h"
+#include "flags.h"
+#include "except.h"
+#include "function.h"
+#include "recog.h"
+#include "expr.h"
+#include "toplev.h"
+#include "basic-block.h"
+#include "ggc.h"
+#include "target.h"
+#include "target-def.h"
+
+
+
+
+/* Install the S/390-specific prologue/epilogue emitters into the
+   target hook vector.  NOTE(review): s390_function_prologue and
+   s390_function_epilogue are not visible in this chunk -- presumably
+   defined later in this file; confirm.  */
+#undef TARGET_ASM_FUNCTION_PROLOGUE
+#define TARGET_ASM_FUNCTION_PROLOGUE s390_function_prologue
+
+#undef TARGET_ASM_FUNCTION_EPILOGUE
+#define TARGET_ASM_FUNCTION_EPILOGUE s390_function_epilogue
+
+/* The structure describing this backend to the target-independent
+   parts of the compiler.  */
+struct gcc_target targetm = TARGET_INITIALIZER;
+
+/* Nonzero once reload has finished; owned by the middle end and
+   re-declared here for this file's use.  */
+extern int reload_completed;
+
+/* Function count for creating unique internal labels in a compile unit. */
+int s390_function_count = 0;
+
+/* Save information from a "cmpxx" operation until the branch or scc is
+ emitted. */
+rtx s390_compare_op0, s390_compare_op1;
+
+
+/* Decomposed parts of a legitimate S/390 address: base register,
+   index register and displacement.  Any field may be NULL_RTX when
+   that component is absent (filled in by s390_decompose_address).  */
+struct s390_address
+{
+ rtx base;
+ rtx indx;
+ rtx disp;
+};
+
+/* Forward declarations for file-local helpers.  (The original text
+   declared check_mode twice; the duplicate is removed here.)  */
+static int s390_match_ccmode_set
+ PARAMS ((rtx set, int req_mode));
+static int base_n_index_p
+ PARAMS ((rtx op));
+static int check_mode
+ PARAMS ((register rtx op, enum machine_mode *mode));
+static int s390_decompose_address
+ PARAMS ((register rtx addr, struct s390_address *out, int strict));
+
+/* Return TRUE or FALSE depending on whether every SET in INSN that
+ set the CC register has source and destination with matching CC modes,
+ and that the CC mode is at least as constrained as REQ_MODE. */
+
+static int
+s390_match_ccmode_set (set, req_mode)
+ rtx set;
+ int req_mode;
+{
+ /* Mode of the SET destination.  Declared with its real type
+ (was 'unsigned int', which defeated enum type checking). */
+ enum machine_mode set_mode;
+
+ if (GET_CODE (set) != SET)
+ abort();
+
+ /* SETs of anything other than the CC register are trivially OK. */
+ if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
+ return 1;
+
+ set_mode = GET_MODE (SET_DEST (set));
+ switch (set_mode)
+ {
+ case CCmode:
+ return 0;
+
+ case CCSmode:
+ if (req_mode != CCSmode)
+ return 0;
+ break;
+ case CCUmode:
+ if (req_mode != CCUmode)
+ return 0;
+ break;
+ case CCZmode:
+ /* CCZ satisfies any of the more specific requirements. */
+ if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode)
+ return 0;
+ break;
+
+ default:
+ abort ();
+ }
+
+ /* Finally, source and destination CC modes must agree. */
+ return (GET_MODE (SET_SRC (set)) == set_mode);
+}
+
+/* Check whether every CC-setting SET inside INSN uses a CC mode
+   compatible with REQ_MODE.  Returns nonzero on success.  */
+
+int
+s390_match_ccmode (insn, req_mode)
+ rtx insn;
+ int req_mode;
+{
+ rtx pat = PATTERN (insn);
+
+ if (GET_CODE (pat) == SET)
+ return s390_match_ccmode_set (pat, req_mode);
+
+ if (GET_CODE (pat) == PARALLEL)
+ {
+ int len = XVECLEN (pat, 0);
+ int idx;
+
+ for (idx = 0; idx < len; idx++)
+ {
+ rtx elem = XVECEXP (pat, 0, idx);
+
+ if (GET_CODE (elem) == SET
+ && ! s390_match_ccmode_set (elem, req_mode))
+ return 0;
+ }
+ }
+
+ return 1;
+}
+
+
+/* Adjust default optimization flags for the requested optimization
+   LEVEL / SIZE setting.  Turns on branch-on-count when optimizing.  */
+
+void
+optimization_options (level, size)
+ int level;
+ int size ATTRIBUTE_UNUSED;
+{
+#ifdef HAVE_decrement_and_branch_on_count
+ /* At -O1 and above, allow use of the BRCT instruction. */
+ if (level > 0)
+ flag_branch_on_count_reg = 1;
+#endif
+}
+
+
+/* Map for smallest class containing reg regno. */
+
+enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
+/* Entries 0-15: general purpose registers.  Entry 0 is GENERAL_REGS
+ only -- presumably because r0 cannot serve as an address base or
+ index; TODO(review) confirm against s390.h.  */
+{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
+ ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
+ ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
+ ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
+/* Entries 16-31: floating point registers. */
+ FP_REGS, FP_REGS, FP_REGS, FP_REGS,
+ FP_REGS, FP_REGS, FP_REGS, FP_REGS,
+ FP_REGS, FP_REGS, FP_REGS, FP_REGS,
+ FP_REGS, FP_REGS, FP_REGS, FP_REGS,
+/* Entries 32-33: an address register and the condition-code
+ register; exact identity of reg 32 not visible here. */
+ ADDR_REGS, CC_REGS
+};
+
+
+/* Predicate: OP is the canonical zero constant of MODE.  */
+
+int
+const0_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ rtx zero = CONST0_RTX (mode);
+
+ return op == zero;
+}
+
+/* Predicate: OP is the canonical one constant of MODE.  */
+
+int
+const1_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ rtx one = CONST1_RTX (mode);
+
+ return op == one;
+}
+
+
+/* Return 1 if address expression OP needs both a base and an index
+   register: OP is a PLUS whose first arm is itself a PLUS, or whose
+   second arm is a PLUS or a REG.  */
+
+static int
+base_n_index_p (rtx op)
+{
+ if (GET_CODE (op) != PLUS)
+ return 0;
+
+ if (GET_CODE (XEXP (op, 0)) == PLUS)
+ return 1;
+
+ return (GET_CODE (XEXP (op, 1)) == PLUS
+ || GET_CODE (XEXP (op, 1)) == REG);
+}
+
+/* Reconcile *MODE with the mode of OP: a VOIDmode *MODE is replaced
+   by OP's mode; otherwise the two must match (or OP be VOIDmode).
+   Returns 0 on mismatch, 1 otherwise.  */
+
+static int
+check_mode (op, mode)
+ register rtx op;
+ enum machine_mode *mode;
+{
+ enum machine_mode op_mode = GET_MODE (op);
+
+ if (*mode == VOIDmode)
+ {
+ *mode = op_mode;
+ return 1;
+ }
+
+ return (op_mode == VOIDmode || op_mode == *mode);
+}
+
+
+/* Return 1 if OP a valid operand for the LARL instruction.
+ OP is the current operation.
+ MODE is the current operation mode.
+ (The original declared locals 'sym' and 'code' that were never
+ used; they are removed here.) */
+
+int
+larl_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! check_mode (op, &mode))
+ return 0;
+
+ /* Allow labels and local symbols. */
+ if (GET_CODE (op) == LABEL_REF)
+ return 1;
+ if (GET_CODE (op) == SYMBOL_REF
+ && (!flag_pic || SYMBOL_REF_FLAG (op)
+ || CONSTANT_POOL_ADDRESS_P (op)))
+ return 1;
+
+ /* Everything else must have a CONST, so strip it. */
+ if (GET_CODE (op) != CONST)
+ return 0;
+ op = XEXP (op, 0);
+
+ /* Allow adding *even* constants.  (LARL produces even addresses,
+ so an odd offset could not be represented.) */
+ if (GET_CODE (op) == PLUS)
+ {
+ if (GET_CODE (XEXP (op, 1)) != CONST_INT
+ || (INTVAL (XEXP (op, 1)) & 1) != 0)
+ return 0;
+ op = XEXP (op, 0);
+ }
+
+ /* Labels and local symbols allowed here as well. */
+ if (GET_CODE (op) == LABEL_REF)
+ return 1;
+ if (GET_CODE (op) == SYMBOL_REF
+ && (!flag_pic || SYMBOL_REF_FLAG (op)
+ || CONSTANT_POOL_ADDRESS_P (op)))
+ return 1;
+
+ /* Now we must have a @GOTENT offset (UNSPEC 111) or
+ @PLT stub (UNSPEC 113). */
+ if (GET_CODE (op) == UNSPEC
+ && XINT (op, 1) == 111)
+ return 1;
+ if (GET_CODE (op) == UNSPEC
+ && XINT (op, 1) == 113)
+ return 1;
+
+ return 0;
+}
+
+/* Return 1 if OP is a hard floating-point register usable in MODE.
+   OP is the current operation.
+   MODE is the current operation mode.  */
+
+int
+fp_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! check_mode (op, &mode))
+ return 0;
+
+ return (GET_CODE (op) == REG
+ && REGNO_OK_FOR_FP_P (REGNO (op)));
+}
+
+/* Return 1 if OP is a valid S operand for an RS, SI or SS type
+   instruction.  Such operands are memory references that must not
+   require both a base and an index register.  */
+
+int
+s_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! check_mode (op, &mode))
+ return 0;
+
+ /* Reject addresses needing base and index at the same time. */
+ if (GET_CODE (op) == MEM && base_n_index_p (XEXP (op, 0)))
+ return 0;
+
+ return memory_operand (op, mode);
+}
+
+/* Return 1 if OP is a valid R or S operand for an RS, SI or SS type
+ instruction. */
+
+int
+r_or_s_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! general_operand (op, mode))
+ return 0;
+
+ /* Memory operands must not need base and index at once. */
+ if (GET_CODE (op) == MEM)
+ return (base_n_index_p (XEXP (op, 0))
+ ? 0 : memory_operand (op, mode));
+
+ return register_operand (op, mode);
+}
+
+/* Return 1 if OP is a valid R or S or immediate operand for
+ RS, SI or SS type instruction. */
+
+int
+r_or_s_or_im8_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! general_operand (op, mode))
+ return 0;
+
+ /* Memory operands must not need base and index at once. */
+ if (GET_CODE (op) == MEM)
+ return (base_n_index_p (XEXP (op, 0))
+ ? 0 : memory_operand (op, mode));
+
+ return (register_operand (op, mode)
+ || immediate_operand (op, mode));
+}
+
+/* Return 1 if OP is a valid R or X or 16 bit immediate operand for
+ RX, RR or RI type instruction. */
+
+int
+r_or_x_or_im16_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ if (! general_operand (op, mode))
+ return 0;
+
+ /* Integer constants must satisfy the 'K' constraint. */
+ if (GET_CODE (op) == CONST_INT)
+ return CONST_OK_FOR_LETTER_P (INTVAL (op), 'K');
+
+ return (register_operand (op, mode)
+ || memory_operand (op, mode));
+}
+
+/* Return 1 if OP is a valid R operand or an immediate satisfying the
+ 'J' constraint.  NOTE(review): the original header comment left the
+ instruction type as a "!!!!!!!" placeholder; confirm which
+ instruction format this predicate serves. */
+
+int
+r_or_im8_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+
+ if (!general_operand (op, mode))
+ return 0;
+
+ if (GET_CODE (op) == CONST_INT)
+ return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'));
+ return register_operand (op, mode) || memory_operand (op, mode);
+}
+
+/* Return 1 if OP is a valid operand for the 'test under mask'
+ instruction with 16 bit immediate.
+ The value should only have set bits in one halfword.
+ (MODE is unused; marked ATTRIBUTE_UNUSED per file convention.) */
+
+int
+tmxx_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ rtx con;
+ if (GET_CODE (op) == CONST_INT)
+ return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
+ /* Also accept a literal-pool reference whose constant qualifies. */
+ if (GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) == SYMBOL_REF &&
+ CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
+ {
+ con = get_pool_constant (XEXP (op, 0));
+
+ if (GET_CODE (con) == CONST_INT)
+ {
+ unsigned HOST_WIDEST_INT c;
+
+ c = (unsigned HOST_WIDEST_INT) INTVAL (con);
+
+ /* Accept when at most one of the four halfwords is nonzero
+ (zero itself is accepted by the final arm). */
+ return ((c & 0xffff) ? ((c & 0xffffffffffff0000ULL)==0) :
+ (c & 0xffff0000) ? ((c & 0xffffffff0000ffffULL)==0) :
+ (c & 0xffff00000000ULL) ? ((c & 0xffff0000ffffffffULL)==0) :
+ (c & 0xffff000000000000ULL) ? ((c & 0xffffffffffffULL)==0) : 1);
+
+ }
+ }
+ return 0;
+}
+
+
+/* Return 1 if valid operand for BRAS.
+ OP is the current operation.
+ MODE is the current operation mode (unused; marked
+ ATTRIBUTE_UNUSED per file convention). */
+
+int
+bras_sym_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ register enum rtx_code code = GET_CODE (op);
+
+ /* Allow SYMBOL_REFs. */
+ if (code == SYMBOL_REF)
+ return 1;
+
+ /* Allow @PLT stubs (UNSPEC 113). */
+ if (code == CONST
+ && GET_CODE (XEXP (op, 0)) == UNSPEC
+ && XINT (XEXP (op, 0), 1) == 113)
+ return 1;
+ return 0;
+}
+
+\f
+/* Return 1 if OP is a load multiple operation. It is known to be a
+ PARALLEL and the first section will be tested. */
+
+int
+load_multiple_operation (op, mode)
+ rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ int count = XVECLEN (op, 0);
+ unsigned int dest_regno;
+ rtx src_addr;
+ int i;
+
+
+ /* Perform a quick check so we don't blow up below. */
+ if (count <= 1
+ || GET_CODE (XVECEXP (op, 0, 0)) != SET
+ || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
+ || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
+ return 0;
+
+ dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
+ src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
+
+ /* Every further element must load the next consecutive register
+ from the next consecutive word: (set (reg dest+i)
+ (mem (plus src_addr (const_int i*4)))). */
+ for (i = 1; i < count; i++)
+ {
+ rtx elt = XVECEXP (op, 0, i);
+
+ if (GET_CODE (elt) != SET
+ || GET_CODE (SET_DEST (elt)) != REG
+ || GET_MODE (SET_DEST (elt)) != Pmode
+ || REGNO (SET_DEST (elt)) != dest_regno + i
+ || GET_CODE (SET_SRC (elt)) != MEM
+ || GET_MODE (SET_SRC (elt)) != Pmode
+ || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
+ || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
+ || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
+ || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
+ return 0;
+ }
+
+ return 1;
+}
+
+/* Similar, but tests for store multiple.
+ NOTE(review): COUNT here is XVECLEN - 1, unlike
+ load_multiple_operation -- presumably the PARALLEL's last element
+ is something other than a SET (e.g. a clobber) and is deliberately
+ skipped; confirm against the store-multiple pattern in s390.md.  */
+
+int
+store_multiple_operation (op, mode)
+ rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ int count = XVECLEN (op, 0) - 1;
+ unsigned int src_regno;
+ rtx dest_addr;
+ int i;
+
+ /* Perform a quick check so we don't blow up below. */
+ if (count <= 1
+ || GET_CODE (XVECEXP (op, 0, 0)) != SET
+ || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
+ || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
+ return 0;
+
+ src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
+ dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
+
+ /* Every further element must store the next consecutive register
+ to the next consecutive word at DEST_ADDR + i*4. */
+ for (i = 1; i < count; i++)
+ {
+ rtx elt = XVECEXP (op, 0, i);
+
+ if (GET_CODE (elt) != SET
+ || GET_CODE (SET_SRC (elt)) != REG
+ || GET_MODE (SET_SRC (elt)) != Pmode
+ || REGNO (SET_SRC (elt)) != src_regno + i
+ || GET_CODE (SET_DEST (elt)) != MEM
+ || GET_MODE (SET_DEST (elt)) != Pmode
+ || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
+ || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
+ || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
+ || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
+ return 0;
+ }
+ return 1;
+}
+
+
+/* Returns 1 if OP contains a symbol reference */
+
+int
+symbolic_reference_mentioned_p (op)
+ rtx op;
+{
+ /* RTX format strings are read-only tables; declare the cursor
+ const (the original plain 'char *' invited accidental writes
+ and discards qualifiers under a const GET_RTX_FORMAT). */
+ register const char *fmt;
+ register int i;
+
+ if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
+ return 1;
+
+ /* Walk all sub-rtxes recursively. */
+ fmt = GET_RTX_FORMAT (GET_CODE (op));
+ for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
+ {
+ if (fmt[i] == 'E')
+ {
+ register int j;
+
+ for (j = XVECLEN (op, i) - 1; j >= 0; j--)
+ if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
+ return 1;
+ }
+
+ else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
+ return 1;
+ }
+
+ return 0;
+}
+
+
+/* Return nonzero if OP may appear as an operand in PIC code without
+   any fixup by emit_pic_move.  */
+
+int
+legitimate_pic_operand_p (op)
+ register rtx op;
+{
+ /* Non-symbolic constants that reached this point are always OK. */
+ if (! SYMBOLIC_CONST (op))
+ return 1;
+
+ /* In 64-bit mode, a symbolic operand is acceptable exactly when
+ LARL can address it directly; everything else must go through
+ emit_pic_move. */
+ return TARGET_64BIT ? larl_operand (op, VOIDmode) : 0;
+}
+
+/* Return nonzero if constant OP may be used directly (rather than
+   being forced into the literal pool).  */
+
+int
+legitimate_constant_p (op)
+ register rtx op;
+{
+ /* Doubles are never immediate; integers only within the
+ signed 16-bit range. */
+ if (GET_CODE (op) == CONST_DOUBLE)
+ return 0;
+ if (GET_CODE (op) == CONST_INT
+ && (INTVAL (op) < -32768 || INTVAL (op) > 32767))
+ return 0;
+
+ /* Any remaining non-symbolic constant is fine. */
+ if (! SYMBOLIC_CONST (op))
+ return 1;
+
+ /* Under PIC, symbolic constants must *not* be forced into the
+ literal pool; accepting them here routes them to emit_pic_move. */
+ if (flag_pic)
+ return 1;
+
+ /* Non-PIC 64-bit code can still use immediate LARL operands. */
+ if (TARGET_64BIT)
+ return larl_operand (op, VOIDmode);
+
+ /* Everything else goes to the literal pool. */
+ return 0;
+}
+
+
+/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
+ that is a valid memory address for an instruction.
+ The MODE argument is the machine mode for the MEM expression
+ that wants to use this address.
+
+ On S/390, legitimate addresses are:
+ base l reg,(base)
+ displacement l reg,disp
+ base + displacement l reg,disp(base)
+ index + base l reg,(base,index),reg
+ (index + base) + displacement l reg,disp(base,index)
+
+ It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
+ convert common non-canonical forms to canonical form so that they will
+ be recognized. */
+
+
+/* Split ADDR into base register, index register and displacement;
+ return TRUE iff ADDR is a legitimate S/390 address.  If OUT is
+ non-NULL, fill it with the decomposed parts.  STRICT selects
+ strict/non-strict register checking (reload vs. earlier). */
+
+static int
+s390_decompose_address (addr, out, strict)
+ register rtx addr;
+ struct s390_address *out;
+ int strict;
+{
+ rtx base = NULL_RTX;
+ rtx indx = NULL_RTX;
+ rtx disp = NULL_RTX;
+
+ /* Decompose address into base + index + displacement. */
+
+ if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
+ base = addr;
+
+ else if (GET_CODE (addr) == PLUS)
+ {
+ rtx op0 = XEXP (addr, 0);
+ rtx op1 = XEXP (addr, 1);
+ enum rtx_code code0 = GET_CODE (op0);
+ enum rtx_code code1 = GET_CODE (op1);
+
+ if (code0 == REG || code0 == UNSPEC)
+ {
+ if (code1 == REG || code1 == UNSPEC)
+ {
+ indx = op0; /* index + base */
+ base = op1;
+ }
+
+ else
+ {
+ base = op0; /* base + displacement */
+ disp = op1;
+ }
+ }
+
+ else if (code0 == PLUS)
+ {
+ indx = XEXP (op0, 0); /* index + base + disp */
+ base = XEXP (op0, 1);
+ disp = op1;
+ }
+
+ else
+ {
+ return FALSE;
+ }
+ }
+
+ else
+ disp = addr; /* displacement */
+
+
+ /* Validate base register. */
+ if (base)
+ {
+ /* UNSPEC 101 wraps the literal-pool base register
+ (see legitimize_pic_address); unwrap it first. */
+ if (GET_CODE (base) == UNSPEC)
+ {
+ if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
+ return FALSE;
+ base = XVECEXP (base, 0, 0);
+ }
+
+ if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
+ return FALSE;
+
+ if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
+ || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
+ return FALSE;
+ }
+
+ /* Validate index register. */
+ if (indx)
+ {
+ if (GET_CODE (indx) == UNSPEC)
+ {
+ if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
+ return FALSE;
+ indx = XVECEXP (indx, 0, 0);
+ }
+
+ if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
+ return FALSE;
+
+ if ((strict && ! REG_OK_FOR_BASE_STRICT_P (indx))
+ || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (indx)))
+ return FALSE;
+ }
+
+ /* Validate displacement. */
+ if (disp)
+ {
+ /* Allow integer constant in range.  (Unsigned 12-bit
+ displacement field.) */
+ if (GET_CODE (disp) == CONST_INT)
+ {
+ if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
+ return FALSE;
+ }
+
+ /* In the small-PIC case, the linker converts @GOT12
+ offsets to possible displacements.  (UNSPEC 110.) */
+ else if (GET_CODE (disp) == CONST
+ && GET_CODE (XEXP (disp, 0)) == UNSPEC
+ && XINT (XEXP (disp, 0), 1) == 110)
+ {
+ if (flag_pic != 1)
+ return FALSE;
+ }
+
+ /* We can convert literal pool addresses to
+ displacements by basing them off the base register. */
+ else
+ {
+ /* In some cases, we can accept an additional
+ small constant offset. Split these off here. */
+
+ int offset = 0;
+
+ if (GET_CODE (disp) == CONST
+ && GET_CODE (XEXP (disp, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
+ {
+ offset = INTVAL (XEXP (XEXP (disp, 0), 1));
+ disp = XEXP (XEXP (disp, 0), 0);
+ }
+
+ /* Now we must have a literal pool address. */
+ if (GET_CODE (disp) != SYMBOL_REF
+ || !CONSTANT_POOL_ADDRESS_P (disp))
+ return FALSE;
+
+ /* In 64-bit PIC mode we cannot accept symbolic
+ constants in the constant pool. */
+ if (TARGET_64BIT && flag_pic
+ && SYMBOLIC_CONST (get_pool_constant (disp)))
+ return FALSE;
+
+ /* If we have an offset, make sure it does not
+ exceed the size of the constant pool entry. */
+ if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
+ return FALSE;
+
+ /* Either base or index must be free to
+ hold the base register. */
+ if (base && indx)
+ return FALSE;
+
+ /* Convert the address.  (UNSPEC 100 marks a
+ literal-pool-relative offset.) */
+ if (base)
+ indx = gen_rtx_REG (Pmode, BASE_REGISTER);
+ else
+ base = gen_rtx_REG (Pmode, BASE_REGISTER);
+
+ disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
+ disp = gen_rtx_CONST (Pmode, disp);
+
+ if (offset)
+ disp = plus_constant (disp, offset);
+ }
+ }
+
+ if (out)
+ {
+ out->base = base;
+ out->indx = indx;
+ out->disp = disp;
+ }
+
+ return TRUE;
+}
+
+/* Target hook worker for GO_IF_LEGITIMATE_ADDRESS: decide whether
+   ADDR is a valid address, with STRICT register checking.  MODE is
+   not needed for address validation on S/390.  */
+
+int
+legitimate_address_p (mode, addr, strict)
+ enum machine_mode mode;
+ register rtx addr;
+ int strict;
+{
+ return s390_decompose_address (addr, (struct s390_address *) 0, strict);
+}
+
+/* Return a legitimate reference for ORIG (an address) using the
+ register REG. If REG is 0, a new pseudo is generated.
+
+ There are two types of references that must be handled:
+
+ 1. Global data references must load the address from the GOT, via
+ the PIC reg. An insn is emitted to do this load, and the reg is
+ returned.
+
+ 2. Static data references, constant pool addresses, and code labels
+ compute the address as an offset from the GOT, whose base is in
+ the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
+ differentiate them from global data objects. The returned
+ address is the PIC reg + an unspec constant.
+
+ GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
+ reg also appears in the address. */
+
+/* See block comment above.  UNSPEC numbers used below (cf.
+ s390_output_symbolic_const): 100 = literal-pool-relative,
+ 110 = @GOT12, 111 = @GOTENT, 112 = @GOT, 113 = @PLT,
+ 114 = literal-pool-relative @PLT; 101 wraps the pool base reg. */
+rtx
+legitimize_pic_address (orig, reg)
+ rtx orig;
+ rtx reg;
+{
+ rtx addr = orig;
+ rtx new = orig;
+ rtx base;
+
+ if (GET_CODE (addr) == LABEL_REF
+ || (GET_CODE (addr) == SYMBOL_REF
+ && (SYMBOL_REF_FLAG (addr)
+ || CONSTANT_POOL_ADDRESS_P (addr))))
+ {
+ /* This is a local symbol. */
+ if (TARGET_64BIT)
+ {
+ /* Access local symbols PC-relative via LARL.
+ This is the same as in the non-PIC case, so it is
+ handled automatically ... */
+ }
+ else
+ {
+ /* Access local symbols relative to the literal pool. */
+
+ rtx temp = reg? reg : gen_reg_rtx (Pmode);
+
+ addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
+ addr = gen_rtx_CONST (SImode, addr);
+ addr = force_const_mem (SImode, addr);
+ emit_move_insn (temp, addr);
+
+ /* Pool base register, wrapped in UNSPEC 101 so the
+ address decomposer can recognize it. */
+ base = gen_rtx_REG (Pmode, BASE_REGISTER);
+ base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
+ new = gen_rtx_PLUS (Pmode, base, temp);
+
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+ }
+ else if (GET_CODE (addr) == SYMBOL_REF)
+ {
+ if (reg == 0)
+ reg = gen_reg_rtx (Pmode);
+
+ if (flag_pic == 1)
+ {
+ /* Assume GOT offset < 4k. This is handled the same way
+ in both 31- and 64-bit code (@GOT12). */
+
+ current_function_uses_pic_offset_table = 1;
+
+ new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
+ new = gen_rtx_CONST (Pmode, new);
+ new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
+ new = gen_rtx_MEM (Pmode, new);
+ RTX_UNCHANGING_P (new) = 1;
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ else if (TARGET_64BIT)
+ {
+ /* If the GOT offset might be >= 4k, we determine the position
+ of the GOT entry via a PC-relative LARL (@GOTENT). */
+
+ rtx temp = gen_reg_rtx (Pmode);
+
+ new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
+ new = gen_rtx_CONST (Pmode, new);
+ emit_move_insn (temp, new);
+
+ new = gen_rtx_MEM (Pmode, temp);
+ RTX_UNCHANGING_P (new) = 1;
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ else
+ {
+ /* If the GOT offset might be >= 4k, we have to load it
+ from the literal pool (@GOT). */
+
+ rtx temp = gen_reg_rtx (Pmode);
+
+ current_function_uses_pic_offset_table = 1;
+
+ addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
+ addr = gen_rtx_CONST (SImode, addr);
+ addr = force_const_mem (SImode, addr);
+ emit_move_insn (temp, addr);
+
+ new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
+ new = gen_rtx_MEM (Pmode, new);
+ RTX_UNCHANGING_P (new) = 1;
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+ else
+ {
+ if (GET_CODE (addr) == CONST)
+ {
+ addr = XEXP (addr, 0);
+ if (GET_CODE (addr) == UNSPEC)
+ {
+ if (XVECLEN (addr, 0) != 1)
+ abort();
+ switch (XINT (addr, 1))
+ {
+ /* If someone moved an @GOT or lt-relative UNSPEC
+ out of the literal pool, force them back in. */
+ case 100:
+ case 112:
+ case 114:
+ new = force_const_mem (SImode, orig);
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ break;
+
+ /* @GOTENT is OK as is. */
+ case 111:
+ break;
+
+ /* @PLT is OK as is on 64-bit, must be converted to
+ lt-relative PLT on 31-bit. */
+ case 113:
+ if (!TARGET_64BIT)
+ {
+ rtx temp = reg? reg : gen_reg_rtx (Pmode);
+
+ addr = XVECEXP (addr, 0, 0);
+ addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
+ addr = gen_rtx_CONST (SImode, addr);
+ addr = force_const_mem (SImode, addr);
+ emit_move_insn (temp, addr);
+
+ base = gen_rtx_REG (Pmode, BASE_REGISTER);
+ base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
+ new = gen_rtx_PLUS (Pmode, base, temp);
+
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+ break;
+
+ /* Everything else cannot happen. */
+ default:
+ abort ();
+ }
+ }
+ else if (GET_CODE (addr) != PLUS)
+ abort();
+ }
+ if (GET_CODE (addr) == PLUS)
+ {
+ rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
+ /* Check first to see if this is a constant offset
+ from a local symbol reference. */
+ if ((GET_CODE (op0) == LABEL_REF
+ || (GET_CODE (op0) == SYMBOL_REF
+ && (SYMBOL_REF_FLAG (op0)
+ || CONSTANT_POOL_ADDRESS_P (op0))))
+ && GET_CODE (op1) == CONST_INT)
+ {
+ if (TARGET_64BIT)
+ {
+ if (INTVAL (op1) & 1)
+ {
+ /* LARL can't handle odd offsets, so emit a
+ pair of LARL and LA.
+ NOTE(review): odd offsets already within
+ [0,4096) keep OP1 unchanged -- presumably LA
+ can absorb them directly; confirm. */
+ rtx temp = reg? reg : gen_reg_rtx (Pmode);
+
+ if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
+ {
+ int even = INTVAL (op1) - 1;
+ op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
+ op1 = GEN_INT (1);
+ }
+
+ emit_move_insn (temp, op0);
+ new = gen_rtx_PLUS (Pmode, temp, op1);
+
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+ else
+ {
+ /* If the offset is even, we can just use LARL.
+ This will happen automatically. */
+ }
+ }
+ else
+ {
+ /* Access local symbols relative to the literal pool. */
+
+ rtx temp = reg? reg : gen_reg_rtx (Pmode);
+
+ addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
+ addr = gen_rtx_PLUS (SImode, addr, op1);
+ addr = gen_rtx_CONST (SImode, addr);
+ addr = force_const_mem (SImode, addr);
+ emit_move_insn (temp, addr);
+
+ base = gen_rtx_REG (Pmode, BASE_REGISTER);
+ base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
+ new = gen_rtx_PLUS (Pmode, base, temp);
+
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+ }
+
+ /* Now, check whether it is an LT-relative symbol plus offset
+ that was pulled out of the literal pool. Force it back in. */
+
+ else if (GET_CODE (op0) == UNSPEC
+ && GET_CODE (op1) == CONST_INT)
+ {
+ if (XVECLEN (op0, 0) != 1)
+ abort();
+ if (XINT (op0, 1) != 100)
+ abort();
+
+ new = force_const_mem (SImode, orig);
+ if (reg != 0)
+ {
+ emit_move_insn (reg, new);
+ new = reg;
+ }
+ }
+
+ /* Otherwise, compute the sum. */
+ else
+ {
+ base = legitimize_pic_address (XEXP (addr, 0), reg);
+ new = legitimize_pic_address (XEXP (addr, 1),
+ base == reg ? NULL_RTX : reg);
+ if (GET_CODE (new) == CONST_INT)
+ new = plus_constant (base, INTVAL (new));
+ else
+ {
+ /* Re-associate so the constant part ends up
+ outermost. */
+ if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
+ {
+ base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
+ new = XEXP (new, 1);
+ }
+ new = gen_rtx_PLUS (Pmode, base, new);
+ }
+
+ if (GET_CODE (new) == CONST)
+ new = XEXP (new, 0);
+ new = force_operand (new, 0);
+ }
+ }
+ }
+ return new;
+}
+
+/* Emit insns to move operands[1] into operands[0]. */
+
+void
+emit_pic_move (operands, mode)
+ rtx *operands;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ /* No new pseudos may be created during reload; reuse the
+ destination as scratch register instead. */
+ rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
+
+ /* Storing a symbolic constant to memory: force the source into a
+ register first (TEMP is unused on this path). */
+ if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
+ operands[1] = force_reg (Pmode, operands[1]);
+ else
+ operands[1] = legitimize_pic_address (operands[1], temp);
+}
+
+/* Try machine-dependent ways of modifying an illegitimate address
+ to be legitimate. If we find one, return the new, valid address.
+ This macro is used in only one place: `memory_address' in explow.c.
+
+ OLDX is the address as it was before break_out_memory_refs was called.
+ In some cases it is useful to look at this to decide what needs to be done.
+
+ MODE and WIN are passed so that this macro can use
+ GO_IF_LEGITIMATE_ADDRESS.
+
+ It is always safe for this macro to do nothing. It exists to recognize
+ opportunities to optimize the output.
+
+ When -fpic is used, special handling is needed for symbolic references.
+ See comments by legitimize_pic_address for details. */
+
+/* Machine-dependent address legitimization (see block comment above).
+   Only PIC symbolic references need work; everything else is
+   returned untouched.  */
+
+rtx
+legitimize_address (x, oldx, mode)
+ register rtx x;
+ register rtx oldx ATTRIBUTE_UNUSED;
+ enum machine_mode mode;
+{
+ if (! flag_pic || ! SYMBOLIC_CONST (x))
+ return x;
+
+ return legitimize_pic_address (x, NULL_RTX);
+}
+
+
+/* Print the branch mnemonic suffix for condition CODE to FILE.
+   Signed and unsigned comparisons share one suffix; the CC mode
+   chosen earlier already encodes the distinction.  */
+
+static void
+output_branch_condition (FILE *file, rtx code)
+{
+ const char *suffix = "";
+
+ switch (GET_CODE (code))
+ {
+ case EQ: suffix = "e"; break;
+ case NE: suffix = "ne"; break;
+ case GT: case GTU: suffix = "h"; break;
+ case LT: case LTU: suffix = "l"; break;
+ case GE: case GEU: suffix = "he"; break;
+ case LE: case LEU: suffix = "le"; break;
+ default:
+ fatal_insn ("Unknown CC code", code);
+ }
+
+ fprintf (file, "%s", suffix);
+}
+
+/* Print the branch mnemonic suffix for the *inverse* of condition
+   CODE to FILE.  */
+
+static void
+output_inverse_branch_condition (FILE *file, rtx code)
+{
+ const char *suffix = "";
+
+ switch (GET_CODE (code))
+ {
+ case EQ: suffix = "ne"; break;
+ case NE: suffix = "e"; break;
+ case GT: case GTU: suffix = "nh"; break;
+ case LT: case LTU: suffix = "nl"; break;
+ case GE: case GEU: suffix = "nhe"; break;
+ case LE: case LEU: suffix = "nle"; break;
+ default:
+ fatal_insn ("Unknown CC code", code);
+ }
+
+ fprintf (file, "%s", suffix);
+}
+
+/* Output a symbolic constant.  UNSPEC numbers used by this backend:
+ 100 = literal-pool-relative, 110 = @GOT12, 111 = @GOTENT,
+ 112 = @GOT, 113 = @PLT, 114 = literal-pool-relative @PLT. */
+
+void
+s390_output_symbolic_const (FILE *file, rtx x)
+{
+ switch (GET_CODE (x))
+ {
+ case CONST:
+ case ZERO_EXTEND:
+ case SIGN_EXTEND:
+ /* Strip wrappers and print the inner expression. */
+ s390_output_symbolic_const (file, XEXP (x, 0));
+ break;
+
+ case PLUS:
+ s390_output_symbolic_const (file, XEXP (x, 0));
+ fprintf (file, "+");
+ s390_output_symbolic_const (file, XEXP (x, 1));
+ break;
+
+ case MINUS:
+ s390_output_symbolic_const (file, XEXP (x, 0));
+ fprintf (file, "-");
+ s390_output_symbolic_const (file, XEXP (x, 1));
+ break;
+
+ case CONST_INT:
+ output_addr_const (file, x);
+ break;
+
+ case LABEL_REF:
+ case CODE_LABEL:
+ output_addr_const (file, x);
+ break;
+
+ case SYMBOL_REF:
+ output_addr_const (file, x);
+ /* Literal-pool symbols get a per-pool suffix after the first
+ pool of the function. */
+ if (CONSTANT_POOL_ADDRESS_P (x) && s390_pool_count != 0)
+ fprintf (file, "_%X", s390_pool_count);
+ break;
+
+ case UNSPEC:
+ if (XVECLEN (x, 0) != 1)
+ output_operand_lossage ("invalid UNSPEC as operand (1)");
+ switch (XINT (x, 1))
+ {
+ case 100:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "-.LT%X_%X",
+ s390_function_count, s390_pool_count);
+ break;
+ case 110:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "@GOT12");
+ break;
+ case 111:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "@GOTENT");
+ break;
+ case 112:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "@GOT");
+ break;
+ case 113:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "@PLT");
+ break;
+ case 114:
+ s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
+ fprintf (file, "@PLT-.LT%X_%X",
+ s390_function_count, s390_pool_count);
+ break;
+ default:
+ output_operand_lossage ("invalid UNSPEC as operand (2)");
+ break;
+ }
+ break;
+
+ default:
+ fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
+ break;
+ }
+}
+
+/* Output an address operand. */
+
+void
+print_operand_address (FILE *file, rtx addr)
+{
+ struct s390_address ad;
+
+ /* NOTE(review): on failure AD is still used below; this relies on
+ output_operand_lossage not returning -- confirm. */
+ if (!s390_decompose_address (addr, &ad, TRUE))
+ output_operand_lossage ("Cannot decompose address.\n");
+
+ if (ad.disp)
+ s390_output_symbolic_const (file, ad.disp);
+ else
+ fprintf (file, "0");
+
+ /* Assembler syntax is disp(index,base) resp. disp(base). */
+ if (ad.base && ad.indx)
+ fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
+ reg_names[REGNO (ad.base)]);
+ else if (ad.base)
+ fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
+}
+
+/* Output an operand.  CODE selects a variant:
+ 'C'/'D' condition (resp. inverse) mnemonic suffix,
+ 'Y'/'y' literal-pool label expressions,
+ 'O'/'R' displacement resp. base part of a MEM without index,
+ 'N'/'M' the word at offset +4 resp. +8 (or the next register),
+ 'b'/'x'/'X'/'h' integer formatting variants. */
+
+void
+print_operand (FILE *file, rtx x, char code)
+{
+ switch (code)
+ {
+ case 'C':
+ output_branch_condition (file, x);
+ return;
+
+ case 'D':
+ output_inverse_branch_condition (file, x);
+ return;
+
+ case 'Y':
+ fprintf (file, ".LT%X_%X-.", s390_function_count, s390_pool_count);
+ return;
+
+ case 'y':
+ fprintf (file, ".LT%X_%X", s390_function_count, s390_pool_count);
+ return;
+
+ case 'O':
+ {
+ struct s390_address ad;
+
+ /* Operand must be an indexless memory reference. */
+ if (GET_CODE (x) != MEM
+ || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
+ || ad.indx)
+ abort();
+
+ if (ad.disp)
+ s390_output_symbolic_const (file, ad.disp);
+ else
+ fprintf (file, "0");
+ }
+ return;
+
+ case 'R':
+ {
+ struct s390_address ad;
+
+ if (GET_CODE (x) != MEM
+ || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
+ || ad.indx)
+ abort();
+
+ if (ad.base)
+ fprintf (file, "%s", reg_names[REGNO (ad.base)]);
+ else
+ fprintf (file, "0");
+ }
+ return;
+
+ case 'N':
+ /* Next register resp. the word 4 bytes further on. */
+ if (GET_CODE (x) == REG)
+ x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
+ else if (GET_CODE (x) == MEM)
+ x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
+ else
+ abort();
+ break;
+
+ case 'M':
+ /* Next register resp. the word 8 bytes further on. */
+ if (GET_CODE (x) == REG)
+ x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
+ else if (GET_CODE (x) == MEM)
+ x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
+ else
+ abort();
+ break;
+ }
+
+ switch (GET_CODE (x))
+ {
+ case REG:
+ fprintf (file, "%s", reg_names[REGNO (x)]);
+ break;
+
+ case MEM:
+ output_address (XEXP (x, 0));
+ break;
+
+ case CONST:
+ case CODE_LABEL:
+ case LABEL_REF:
+ case SYMBOL_REF:
+ s390_output_symbolic_const (file, x);
+ break;
+
+ case CONST_INT:
+ /* NOTE(review): 'b' and 'X' produce identical output here --
+ confirm 'X' was not meant to print hex.  "%d" with INTVAL
+ assumes int-sized HOST_WIDE_INT; confirm for 64-bit hosts. */
+ if (code == 'b')
+ fprintf (file, "%d", INTVAL (x) & 0xff);
+ else if (code == 'X')
+ fprintf (file, "%d", INTVAL (x) & 0xff);
+ else if (code == 'x')
+ fprintf (file, "0x%x", INTVAL (x) & 0xffff);
+ else if (code == 'h')
+ fprintf (file, "%d", (INTVAL (x) << 16) >> 16);
+ else
+ fprintf (file, "%d", INTVAL (x));
+ break;
+
+ default:
+ fatal_insn ("UNKNOWN in print_operand !?", x);
+ break;
+ }
+}
+
+#define DEBUG_SCHED 0
+
+/* Return nonzero if register REGNO is used to form a memory address
+ anywhere within the rtl expression X. */
+
+static int
+reg_used_in_mem_p (int regno, rtx x)
+{
+ enum rtx_code code = GET_CODE (x);
+ const char *fmt = GET_RTX_FORMAT (code);
+ int i, j;
+
+ /* A MEM whose address mentions REGNO is what we are looking for. */
+ if (code == MEM
+ && refers_to_regno_p (regno, regno+1, XEXP (x, 0), 0))
+ return 1;
+
+ /* Otherwise recurse into all sub-expressions and vectors. */
+ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+ switch (fmt[i])
+ {
+ case 'e':
+ if (reg_used_in_mem_p (regno, XEXP (x, i)))
+ return 1;
+ break;
+
+ case 'E':
+ for (j = XVECLEN (x, i) - 1; j >= 0; j--)
+ if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
+ return 1;
+ break;
+
+ default:
+ break;
+ }
+
+ return 0;
+}
+
+/* Return nonzero if the SET expression DEP_RTX modifies a register
+ that insn INSN uses to address memory. */
+
+static int
+addr_generation_dependency_p (rtx dep_rtx, rtx insn)
+{
+ rtx dest;
+ int regno;
+
+ if (GET_CODE (dep_rtx) != SET)
+ return 0;
+
+ dest = SET_DEST (dep_rtx);
+ if (GET_CODE (dest) != REG)
+ return 0;
+
+ regno = REGNO (dest);
+
+ /* A load-address insn computes an address in its source. */
+ if (get_attr_type (insn) == TYPE_LA)
+ return refers_to_regno_p (regno, regno+1,
+ SET_SRC (PATTERN (insn)), 0);
+
+ /* Memory-type insns may use the register inside a MEM. */
+ if (get_attr_atype (insn) == ATYPE_MEM)
+ return reg_used_in_mem_p (regno, PATTERN (insn));
+
+ return 0;
+}
+
+
+/* Data dependencies are all handled without delay. However, if a
+ register is set and later used to form a memory address, at least
+ 4 cycles need to pass between the set and the use. Because of
+ that, the delays specified in the .md file need to be checked and
+ adjusted to the right cost here.
+
+ INSN depends on DEP_INSN via dependence LINK; COST is the cost
+ from the .md file. Return the adjusted cost. */
+
+int
+s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost )
+{
+ rtx dep_rtx;
+ int i;
+
+ /* If the dependence is an anti-dependence, there is no cost. For an
+ output dependence, there is sometimes a cost, but it doesn't seem
+ worth handling those few cases. */
+
+ if (REG_NOTE_KIND (link) != 0)
+ return 0;
+
+ /* If we can't recognize the insns, we can't really do anything. */
+ if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
+ return cost;
+
+ /* If cost equals 1, nothing needs to be checked. */
+
+ if (cost == 1)
+ {
+ return cost;
+ }
+
+ dep_rtx = PATTERN (dep_insn);
+
+ /* An address-generation dependency keeps the full .md cost;
+ everything else collapses to the default cost of 1 below. */
+ if (GET_CODE (dep_rtx) == SET)
+ {
+ if (addr_generation_dependency_p (dep_rtx, insn))
+ {
+ if (DEBUG_SCHED)
+ {
+ fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
+ cost);
+ debug_rtx (dep_insn);
+ debug_rtx (insn);
+ }
+ return cost;
+ }
+ }
+
+ else if (GET_CODE (dep_rtx) == PARALLEL)
+ {
+ for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
+ {
+ if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
+ insn))
+ {
+ if (DEBUG_SCHED)
+ {
+ fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
+ ,cost);
+ debug_rtx (dep_insn);
+ debug_rtx (insn);
+ }
+ return cost;
+ }
+ }
+ }
+
+ /* Default cost. */
+ return 1;
+}
+
+/* Pool concept for Linux 390:
+ - The function prologue saves the used registers.
+ - The literal pool is dumped in the prologue and jumped across with bras.
+ - If the function has more than 4 k of literals, a literal pool is
+ dumped at about every S390_CHUNK_MAX offset within the function.
+ - In this case, a branch from one chunk to another chunk needs
+ a reload of the base register at the code label branched to. */
+
+
+
+/* First insn of the literal pool chunk currently being dumped,
+ NULL_RTX when no pool is active. */
+rtx s390_pool_start_insn = NULL_RTX;
+
+/* Count of actual pool in function (-1 -> before function). */
+
+int s390_pool_count = -1;
+
+
+/* Uid of the insn at which literal pool dumping stops. */
+static int pool_stop_uid;
+
+
+/* Emit assembly at the start of a literal pool chunk. FILE is the
+ assembler output; FNAME and FNDECL identify the current function;
+ SIZE is unused here. For chunks after the first (s390_pool_count
+ > 0), 64-bit code addresses the pool in the readonly section via
+ larl, while 31-bit code branches across the inline pool with bras
+ (which also loads the base register). */
+
+void
+s390_asm_output_pool_prologue (FILE *file, char *fname, tree fndecl, int size)
+{
+
+ if (s390_pool_count>0) {
+ /*
+ * We are in an internal pool, branch over
+ */
+ if (TARGET_64BIT)
+ {
+ fprintf (file, "\tlarl\t%s,.LT%X_%X\n",
+ reg_names[BASE_REGISTER],
+ s390_function_count, s390_pool_count);
+ readonly_data_section();
+ ASM_OUTPUT_ALIGN (file, floor_log2 (3));
+ fprintf (file, ".LT%X_%X:\t# Pool %d\n",
+ s390_function_count, s390_pool_count, s390_pool_count);
+ }
+ else
+ fprintf (file,"\t.align 4\n\tbras\t%s,0f\n.LT%X_%X:\t# Pool %d \n",
+ reg_names[BASE_REGISTER],
+ s390_function_count, s390_pool_count, s390_pool_count);
+ }
+ /* In 31-bit mode the pool lives in the text section, so switch
+ back to the function's section afterwards. */
+ if (!TARGET_64BIT)
+ function_section (fndecl);
+}
+
+/* Return nonzero if OTHER_ADDR lies in a different literal pool chunk
+ than MY_ADDR. LTORG is the zero-terminated list of insn uids that
+ terminate the chunks, in ascending address order. */
+
+
+static int
+other_chunk (int *ltorg, int my_addr, int other_addr)
+{
+ int uid, i = 0, j = 0;
+
+ /* Find the chunk containing MY_ADDR ... */
+ while ((uid = ltorg[i++]) != 0
+ && INSN_ADDRESSES (uid) < my_addr)
+ ;
+
+ /* ... and the chunk containing OTHER_ADDR. */
+ while ((uid = ltorg[j++]) != 0
+ && INSN_ADDRESSES (uid) <= other_addr)
+ ;
+
+ return i != j;
+}
+
+/* Return nonzero if OTHER_ADDR is too far away from MY_ADDR to be
+ reached with a relative branch. */
+
+static int
+far_away (int my_addr, int other_addr)
+{
+ /* In 64 bit mode we can jump +- 4GB, which is always enough. */
+ if (TARGET_64BIT)
+ return 0;
+
+ return abs (my_addr - other_addr) > S390_REL_MAX;
+}
+
+
+
+/* If jump insn INSN branches into another literal pool chunk, mark
+ the target label so a base-register reload gets emitted there; if
+ the target is too far away for a relative branch, rewrite INSN as
+ an indirect jump through a pool constant. LTORG_UIDS is the
+ zero-terminated list of chunk-terminating insn uids. Return INSN,
+ or its replacement if INSN was rewritten. */
+
+static rtx
+check_and_change_labels (rtx insn, int *ltorg_uids)
+{
+ rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
+ rtx target, jump;
+ rtx pattern, tmp, body, label1;
+ int addr0, addr1;
+
+ if (GET_CODE (insn) != JUMP_INSN)
+ return insn;
+
+ pattern = PATTERN (insn);
+
+ addr0 = INSN_ADDRESSES (INSN_UID (insn));
+ if (GET_CODE (pattern) == SET)
+ {
+ body = XEXP (pattern, 1);
+ /* Case 1: unconditional jump to a label. */
+ if (GET_CODE (body) == LABEL_REF)
+ {
+ addr1 = INSN_ADDRESSES (INSN_UID (XEXP (body, 0)));
+
+ /* Crossing a chunk boundary: mark the target label for a
+ base register reload. */
+ if (other_chunk (ltorg_uids, addr0, addr1))
+ {
+ SYMBOL_REF_USED (XEXP (body, 0)) = 1;
+ }
+ /* Out of relative branch range: load the target address
+ from the pool and jump indirectly. */
+ if (far_away (addr0, addr1))
+ {
+ if (flag_pic)
+ {
+ /* PIC: pool holds a base-relative offset (unspec 100);
+ add the base register back in. */
+ target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, body), 100);
+ target = gen_rtx_CONST (SImode, target);
+ target = force_const_mem (SImode, target);
+ jump = gen_rtx_REG (Pmode, BASE_REGISTER);
+ jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
+ }
+ else
+ {
+ target = force_const_mem (Pmode, body);
+ jump = temp_reg;
+ }
+
+ emit_insn_before (gen_movsi (temp_reg, target), insn);
+ tmp = emit_jump_insn_before (gen_jump_long (jump), insn);
+ remove_insn (insn);
+ INSN_ADDRESSES_NEW (tmp, -1);
+ return tmp;
+ }
+ }
+ /* Case 2: conditional jump. The label may be in either arm. */
+ else if (GET_CODE (body) == IF_THEN_ELSE)
+ {
+ if (GET_CODE (XEXP (body, 1)) == LABEL_REF)
+ {
+ addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XEXP (body, 1), 0)));
+
+ if (other_chunk (ltorg_uids, addr0, addr1))
+ {
+ SYMBOL_REF_USED (XEXP (XEXP (body, 1), 0)) = 1;
+ }
+
+ /* Replace "if (cond) goto far" by an inverted conditional
+ branch around an indirect long jump. */
+ if (far_away (addr0, addr1))
+ {
+ if (flag_pic)
+ {
+ target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, XEXP (body, 1)), 100);
+ target = gen_rtx_CONST (SImode, target);
+ target = force_const_mem (SImode, target);
+ jump = gen_rtx_REG (Pmode, BASE_REGISTER);
+ jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
+ }
+ else
+ {
+ target = force_const_mem (Pmode, XEXP (body, 1));
+ jump = temp_reg;
+ }
+
+ label1 = gen_label_rtx ();
+ emit_jump_insn_before (gen_icjump (label1, XEXP (body, 0)), insn);
+ emit_insn_before (gen_movsi (temp_reg, target), insn);
+ tmp = emit_jump_insn_before (gen_jump_long (jump), insn);
+ INSN_ADDRESSES_NEW (emit_label_before (label1, insn), -1);
+ remove_insn (insn);
+ return tmp;
+ }
+ }
+ else if (GET_CODE (XEXP (body, 2)) == LABEL_REF)
+ {
+ addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XEXP (body, 2), 0)));
+
+ if (other_chunk (ltorg_uids, addr0, addr1))
+ {
+ SYMBOL_REF_USED (XEXP (XEXP (body, 2), 0)) = 1;
+ }
+
+ /* Same as above, but the label is in the "else" arm, so
+ use the non-inverted conditional branch. */
+ if (far_away (addr0, addr1))
+ {
+ if (flag_pic)
+ {
+ target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, XEXP (body, 2)), 100);
+ target = gen_rtx_CONST (SImode, target);
+ target = force_const_mem (SImode, target);
+ jump = gen_rtx_REG (Pmode, BASE_REGISTER);
+ jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
+ }
+ else
+ {
+ target = force_const_mem (Pmode, XEXP (body, 2));
+ jump = temp_reg;
+ }
+
+ label1 = gen_label_rtx ();
+ emit_jump_insn_before (gen_cjump (label1, XEXP (body, 0)), insn);
+ emit_insn_before (gen_movsi (temp_reg, target), insn);
+ tmp = emit_jump_insn_before (gen_jump_long (jump), insn);
+ INSN_ADDRESSES_NEW (emit_label_before (label1, insn), -1);
+ remove_insn (insn);
+ return tmp;
+ }
+ }
+ }
+ }
+ /* Case 3: jump table -- only mark cross-chunk targets; table
+ entries are absolute (or relative) addresses, never too far. */
+ else if (GET_CODE (pattern) == ADDR_VEC ||
+ GET_CODE (pattern) == ADDR_DIFF_VEC)
+ {
+ int i, diff_vec_p = GET_CODE (pattern) == ADDR_DIFF_VEC;
+ int len = XVECLEN (pattern, diff_vec_p);
+
+ for (i = 0; i < len; i++)
+ {
+ addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XVECEXP (pattern, diff_vec_p, i), 0)));
+ if (other_chunk (ltorg_uids, addr0, addr1))
+ {
+ SYMBOL_REF_USED (XEXP (XVECEXP (pattern, diff_vec_p, i), 0)) = 1;
+ }
+ }
+ }
+ return insn;
+}
+
+static int chunk_max=0;
+
+/* Split the function's literal pool into chunks (if CHUNKIFY is
+ nonzero), insert "ltorg" insns at the chunk boundaries, rewrite
+ branches that cross chunks or exceed the relative branch range
+ (see check_and_change_labels), and emit base-register reloads at
+ labels targeted from other chunks. */
+
+void
+s390_final_chunkify (int chunkify)
+{
+ rtx insn, ninsn, tmp;
+ int addr, naddr, uids;
+
+ const char *asms;
+
+ int size = insn_current_address;
+
+ int *ltorg_uids;
+ int max_ltorg=0;
+
+ ltorg_uids = alloca (size / 1024 + 1024);
+ memset (ltorg_uids, 0, size / 1024 + 1024);
+
+ /* Spread the pool evenly over the function, but never let a chunk
+ exceed S390_CHUNK_MAX. */
+ if (chunkify == 1)
+ {
+ chunk_max = size * 2048 / get_pool_size ();
+ chunk_max = chunk_max > S390_CHUNK_MAX
+ ? S390_CHUNK_MAX : chunk_max;
+ }
+
+ /* First pass: record the insn uids at which a pool chunk ends. */
+ for (insn=get_insns (); insn;insn = next_real_insn (insn))
+ {
+ if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
+ continue;
+
+ addr = INSN_ADDRESSES (INSN_UID (insn));
+ if ((ninsn = next_real_insn (insn)))
+ {
+ naddr = INSN_ADDRESSES (INSN_UID (ninsn));
+ }
+ else
+ {
+ /* Fix: NADDR was previously left uninitialized (or stale)
+ for the last real insn. There is no chunk boundary after
+ the last insn, so treat it as being in the same chunk. */
+ naddr = addr;
+ }
+
+ if (chunkify && (addr / chunk_max != naddr / chunk_max))
+ {
+ /* Find a safe place to end the chunk: a code label that is
+ not a jump target table head, or a call insn. */
+ for (tmp = insn; tmp; tmp = NEXT_INSN (tmp))
+ {
+ if (GET_CODE (tmp) == CODE_LABEL &&
+ GET_CODE (NEXT_INSN (tmp)) != JUMP_INSN)
+ {
+ ltorg_uids[max_ltorg++] = INSN_UID (prev_real_insn (tmp));
+ break;
+ }
+ if (GET_CODE (tmp) == CALL_INSN)
+ {
+ ltorg_uids[max_ltorg++] = INSN_UID (tmp);
+ break;
+ }
+ if (INSN_ADDRESSES (INSN_UID (tmp)) - naddr > S390_CHUNK_OV)
+ {
+ debug_rtx (insn);
+ debug_rtx (tmp);
+ fprintf (stderr, "s390 multiple literalpool support:"
+ "\n No code label between this insn %X %X",
+ naddr, INSN_ADDRESSES (INSN_UID (tmp)));
+ abort();
+ }
+ }
+ if (tmp == NULL)
+ {
+ warning ("no code label found");
+ }
+ }
+ else if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
+ {
+ /* Inline asm that switches sections also ends a chunk. */
+ asms = XSTR (PATTERN (insn),0);
+
+ if ((memcmp (asms,".section",8) == 0) ||
+ (memcmp (asms,".text",5) == 0) ||
+ (memcmp (asms,"\t.section",9) == 0) ||
+ (memcmp (asms,"\t.text",6) == 0)) {
+ ltorg_uids[max_ltorg++] = INSN_UID (insn);
+ INSN_ADDRESSES_NEW (emit_insn_before (gen_rtx_ASM_INPUT (VOIDmode,
+ ".align 4"), insn), -1);
+ }
+ }
+ }
+ ltorg_uids[max_ltorg] = 0;
+
+ /* Second pass: emit the ltorg insns and fix up the jumps. */
+ for (insn=get_insns (),uids=0; insn;insn = next_real_insn (insn))
+ {
+ if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
+ continue;
+ if (INSN_UID (insn) == ltorg_uids[uids])
+ {
+ INSN_ADDRESSES_NEW (emit_insn_after (gen_ltorg (
+ gen_rtx_CONST_INT (Pmode, ltorg_uids[++uids])),
+ insn), -1);
+ }
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ insn = check_and_change_labels (insn, ltorg_uids);
+ }
+ }
+
+ /* Third pass: reload the base register at labels that are branched
+ to from a different chunk. */
+ if (chunkify)
+ {
+ for (insn=get_insns (); insn;insn = next_insn (insn))
+ {
+ if (GET_CODE (insn) == CODE_LABEL)
+ {
+ if (SYMBOL_REF_USED (insn))
+ {
+ INSN_ADDRESSES_NEW (emit_insn_after (gen_reload_base (
+ gen_rtx_LABEL_REF (Pmode, XEXP (insn, 0))), insn), -1);
+ }
+ }
+ }
+ }
+ pool_stop_uid = ltorg_uids[0];
+}
+
+/* Return 1 if INSN marks the start of the next literal pool chunk
+ (an "ltorg" insn), 0 otherwise. Such an insn is a PARALLEL of
+ (set (reg BASE_REGISTER) pc) and (use (const_int ...)).
+ Maybe this should use an unspec insn instead. */
+
+
+int
+s390_stop_dump_lit_p (rtx insn)
+{
+ rtx body=PATTERN (insn);
+
+ return (GET_CODE (body) == PARALLEL
+ && GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (XVECEXP (body, 0, 1)) == USE
+ && GET_CODE (XEXP ((XVECEXP (body, 0, 1)),0)) == CONST_INT
+ && GET_CODE (SET_DEST (XVECEXP (body, 0, 0))) == REG
+ && REGNO (SET_DEST (XVECEXP (body, 0, 0))) == BASE_REGISTER
+ && SET_SRC (XVECEXP (body, 0, 0)) == pc_rtx) ? 1 : 0;
+}
+
+/* Dump the current literal pool chunk at insn ACT_INSN and switch
+ back to the function's text section. STOP is a CONST_INT holding
+ the uid of the insn at which the next chunk ends. */
+
+void
+s390_dump_literal_pool (rtx act_insn, rtx stop)
+{
+ s390_pool_start_insn = act_insn;
+ pool_stop_uid = INTVAL (stop);
+ s390_pool_count++;
+ output_constant_pool (current_function_name, current_function_decl);
+ function_section (current_function_decl);
+}
+
+
+#ifdef DWARF2_DEBUGGING_INFO
+extern char *dwarf2out_cfi_label PARAMS ((void));
+#endif
+
+/* Flag set in prologue, used in epilog to know
+ if stack is allocated or not. */
+
+static int leaf_function_flag;
+/* Literal-pool entry for _GLOBAL_OFFSET_TABLE_ (31-bit PIC only). */
+rtx s390_got_label;
+/* Literal-pool entries needed by the profiling code (-p, -a, -ax). */
+rtx s390_profile[10];
+/* Number of constants forced into the literal pool late. */
+int s390_nr_constants;
+
+/* Return 1 if any of the call-saved floating point registers
+ (hard regs 24 .. 31) is live and must be saved, 0 otherwise.
+ Only relevant in 64-bit mode. */
+
+static int save_fprs_p()
+{
+ int regno;
+
+ if (!TARGET_64BIT)
+ return 0;
+
+ for (regno = 24; regno <= 31; regno++)
+ if (regs_ever_live[regno] == 1)
+ return 1;
+
+ return 0;
+}
+
+/* Return 1 if the current function is a leaf function without any
+ local stack usage, alloca, or varargs/stdarg handling. */
+
+static int
+cur_is_leaf_function ()
+{
+ int frame_bytes = get_frame_size () + current_function_outgoing_args_size
+ + save_fprs_p () * 64;
+
+ return (leaf_function_p ()
+ && frame_bytes == 0
+ && !current_function_calls_alloca
+ && !current_function_stdarg
+ && !current_function_varargs) ? 1 : 0;
+}
+
+/* Return the offset between the argument pointer and the frame
+ pointer immediately after the prologue. */
+
+int s390_arg_frame_offset ()
+{
+ int frame_bytes = get_frame_size () + current_function_outgoing_args_size
+ + save_fprs_p () * 64;
+
+ /* Leaf functions allocate no frame of their own. */
+ return cur_is_leaf_function ()
+ ? STACK_POINTER_OFFSET
+ : 2*STACK_POINTER_OFFSET + frame_bytes;
+}
+
+/* Save the live call-saved floating point registers (hard regs
+ 24 .. 31) on the current stack, at OFFSET from register FP.
+ Only needed in 64-bit mode. Always returns 0; no caller uses
+ the value. */
+
+static int save_fprs(FILE *file, long offset, int fp)
+{
+ int i;
+
+ if (!TARGET_64BIT)
+ return 0;
+
+ for (i=24; i<=31; i++)
+ {
+ if (regs_ever_live[i] == 1)
+ {
+ /* Fix: the offset expression is long, so use %ld (was %d). */
+ fprintf (file, "\tstd\t%s,%ld(%s)\n", reg_names[i],
+ (i-24) * 8 + offset, reg_names[fp]);
+ }
+ }
+
+ /* Fix: control previously fell off the end of this non-void
+ function. */
+ return 0;
+}
+
+/* Restore the live call-saved floating point registers (hard regs
+ 24 .. 31) from the current stack, at OFFSET from register FP.
+ A negative OFFSET means the save area cannot be addressed
+ directly; compute its address into register 1 first. Only needed
+ in 64-bit mode. Always returns 0; no caller uses the value. */
+
+static int restore_fprs(FILE *file, long offset, int fp)
+{
+ int i;
+
+ if (!TARGET_64BIT)
+ return 0;
+
+ if (!save_fprs_p())
+ return 0;
+
+ if (offset < 0)
+ {
+ fp = 1;
+ offset = 0;
+ fprintf (file, "\tlgr\t%s,%s\n", reg_names[fp],
+ reg_names[STACK_POINTER_REGNUM]);
+ fprintf (file, "\taghi\t%s,-64\n", reg_names[fp]);
+ }
+
+ for (i=24; i<=31; i++)
+ {
+ if (regs_ever_live[i] == 1)
+ {
+ /* Fix: the offset expression is long, so use %ld (was %d). */
+ fprintf (file, "\tld\t%s,%ld(%s)\n", reg_names[i],
+ (i-24) * 8 + offset, reg_names[fp]);
+ }
+ }
+
+ /* Fix: control previously fell off the end of this non-void
+ function. */
+ return 0;
+}
+
+/* Output the constant pool in the function prologue (31 bit) or in
+ the readonly section (64 bit), and set up the base register to
+ address it. Always returns 0; no caller uses the value. */
+
+static int
+s390_output_constant_pool(FILE* file)
+{
+ /* Output constant pool. */
+ if (s390_nr_constants || regs_ever_live[BASE_REGISTER])
+ {
+ s390_pool_count = 0;
+ if (TARGET_64BIT)
+ {
+ /* 64 bit: pool lives in the readonly section, addressed
+ pc-relatively via larl. */
+ fprintf (file, "\tlarl\t%s,.LT%X_%X\n", reg_names[BASE_REGISTER],
+ s390_function_count, s390_pool_count);
+ readonly_data_section();
+ ASM_OUTPUT_ALIGN (file, floor_log2 (3));
+ }
+ else
+ {
+ /* 31 bit: pool is inline; bras loads the base register and
+ branches across the pool. */
+ fprintf (file, "\tbras\t%s,.LTN%X_%X\n", reg_names[BASE_REGISTER],
+ s390_function_count, s390_pool_count);
+ }
+ fprintf (file, ".LT%X_%X:\n", s390_function_count, s390_pool_count);
+ output_constant_pool (current_function_name, current_function_decl);
+ fprintf (file, ".LTN%X_%X:\n", s390_function_count,
+ s390_pool_count);
+ if (TARGET_64BIT)
+ function_section(current_function_decl);
+
+ regs_ever_live[BASE_REGISTER] = 1;
+ }
+
+ /* Fix: control previously fell off the end of this non-void
+ function. */
+ return 0;
+}
+
+
+/* Force constant CST into the literal pool after the pool has
+ normally been finalized, and emit a USE insn at the start of the
+ function so the optimizer cannot delete the pool entry. Returns
+ a MEM referencing the new pool entry. */
+
+static rtx
+s390_force_const_mem_late (rtx cst)
+{
+ cst = force_const_mem (Pmode, cst);
+
+ s390_nr_constants++;
+ /* The pool is addressed through the base register. */
+ regs_ever_live[BASE_REGISTER] = 1;
+
+ emit_insn_before (gen_rtx (USE, Pmode, cst), get_insns ());
+
+ return cst;
+}
+
+/* Build a SYMBOL_REF for NAME and force it into the literal pool
+ (31-bit only; aborts on 64-bit). FUNC is nonzero for function
+ symbols, GLOBAL for globally visible symbols. For PIC code the
+ symbol is wrapped in the appropriate relocation unspec. */
+
+static rtx
+s390_force_const_mem_symbol (char *name, int func, int global)
+{
+ rtx symbol;
+
+ if (TARGET_64BIT)
+ abort ();
+
+ symbol = gen_rtx (SYMBOL_REF, Pmode, name);
+ SYMBOL_REF_FLAG (symbol) = !global;
+
+ if (flag_pic)
+ {
+ if (global)
+ {
+ /* NOTE(review): unspec numbers 114/112 presumably select the
+ @PLT/@GOT relocations -- confirm against s390.md. */
+ current_function_uses_pic_offset_table = 1;
+ symbol = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, symbol), func? 114 : 112);
+ symbol = gen_rtx_CONST (VOIDmode, symbol);
+ }
+ else
+ {
+ /* Local symbol: base-relative offset (unspec 100). */
+ symbol = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, symbol), 100);
+ symbol = gen_rtx_CONST (VOIDmode, symbol);
+ }
+ }
+
+ return s390_force_const_mem_late (symbol);
+}
+
+/* This function generates the assembly code for function entry:
+ save registers, output the literal pool, allocate the stack frame
+ (LSIZE bytes of locals), set up the backchain, frame pointer and
+ GOT pointer as needed, and save floating point registers. */
+
+void
+s390_function_prologue (FILE *file, HOST_WIDE_INT lsize)
+{
+ extern int profile_label_no;
+ int i, j;
+ long frame_size;
+ rtx stack_label = 0, got_label = 0, tmp;
+ char *l;
+ /* "g" turns 31-bit mnemonics (lr, stm, ...) into their 64-bit
+ counterparts (lgr, stmg, ...). */
+ char b64[2] = " ";
+ b64[0] = TARGET_64BIT ? 'g' : '\0';
+
+ /* Check for too large size of local variables */
+
+ if (lsize > 0x7fff0000)
+ fatal_error ("Total size of local variables exceeds architecture limit.");
+
+ /* Profile code (-p, -a, -ax needs some literals). */
+
+ if (profile_block_flag && !TARGET_64BIT)
+ {
+ s390_profile[0] = s390_force_const_mem_symbol ("__bb_init_func", 1, 1);
+ s390_profile[1] = s390_force_const_mem_symbol ("__bb_init_trace_func", 1, 1);
+ s390_profile[2] = s390_force_const_mem_symbol ("__bb_trace_func", 1, 1);
+ s390_profile[3] = s390_force_const_mem_symbol ("__bb_trace_ret", 1, 1);
+ s390_profile[5] = s390_force_const_mem_symbol ("__bb", 0, 1);
+ s390_profile[6] = s390_force_const_mem_symbol (".LPBX0", 0, 0);
+ s390_profile[7] = s390_force_const_mem_symbol (".LPBX2", 0, 0);
+ }
+
+ if (profile_flag && !TARGET_64BIT)
+ {
+ static char label[128];
+ sprintf (label, "%sP%d", LPREFIX, profile_label_no);
+
+ s390_profile[4] = s390_force_const_mem_symbol ("_mcount", 1, 1);
+ s390_profile[9] = s390_force_const_mem_symbol (label, 0, 0);
+ }
+
+ /* Split the literal pool into chunks if it got too large. */
+ if (get_pool_size () > S390_POOL_MAX)
+ s390_final_chunkify (1);
+ else
+ s390_final_chunkify (0);
+
+ if (current_function_uses_pic_offset_table)
+ regs_ever_live[12] = 1;
+
+ if (!TARGET_64BIT && current_function_uses_pic_offset_table)
+ {
+ got_label = s390_force_const_mem_symbol ("_GLOBAL_OFFSET_TABLE_", 0, 0);
+ }
+
+ /* Frame sizes that don't fit in a signed 16-bit immediate need
+ to come from the literal pool. */
+ if ((frame_size =
+ STARTING_FRAME_OFFSET + lsize + save_fprs_p () * 64) > 0x7fff)
+ {
+ stack_label = s390_force_const_mem_late (GEN_INT (frame_size));
+ }
+
+ if (!optimize)
+ {
+ /* Stupid register allocation is stupid ...
+ It does not always recognize the base register is used. */
+
+ regs_ever_live[BASE_REGISTER] = 1;
+ }
+
+ if (cur_is_leaf_function ())
+ {
+ /* Leaf function: no frame is allocated; only save the needed
+ registers in the caller's save area. */
+ leaf_function_flag = 1;
+ fprintf (file, "%s\tleaf function\n", ASM_COMMENT_START);
+ fprintf (file, "%s\thas varargs %d\n", ASM_COMMENT_START,
+ current_function_stdarg);
+ fprintf (file, "%s\tincoming args (stack) %d\n", ASM_COMMENT_START,
+ current_function_args_size);
+ fprintf (file, "%s\tfunction length %d\n", ASM_COMMENT_START,
+ insn_current_address);
+ fprintf (file, "%s\tregister live ", ASM_COMMENT_START);
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ fprintf (file, "%d", regs_ever_live[i]);
+ fputc ('\n',file);
+
+ /* Save gprs 6 - 15 and fprs 4 and 6. */
+ for (i = 6; i < 13 && (regs_ever_live[i] == 0); i++);
+
+ if (s390_nr_constants || regs_ever_live[13] || i != 13)
+ {
+ fprintf (file, "\tstm%s\t%s,%s,%d(%s)\n",
+ b64, reg_names[i], reg_names[13],
+ i * UNITS_PER_WORD,
+ reg_names[STACK_POINTER_REGNUM]);
+#ifdef INCOMING_RETURN_ADDR_RTX
+ if (dwarf2out_do_frame ())
+ {
+ l = dwarf2out_cfi_label ();
+ dwarf2out_def_cfa (l, STACK_POINTER_REGNUM,
+ STACK_POINTER_OFFSET);
+ for (j = i; j <= 14; j++)
+ dwarf2out_reg_save (l, j, (TARGET_64BIT ? (j-20) : (j-24))
+ * UNITS_PER_WORD);
+ if (regs_ever_live[18])
+ dwarf2out_reg_save (l, 18, -16);
+ if (regs_ever_live[19])
+ dwarf2out_reg_save (l, 19, -8);
+ }
+#endif
+ }
+
+ s390_output_constant_pool (file);
+
+ /* Save fprs. */
+
+ if (!TARGET_64BIT)
+ {
+ if (regs_ever_live[18])
+ fprintf (file, "\tstd\t4,80(%s)\n", reg_names[STACK_POINTER_REGNUM]);
+ if (regs_ever_live[19])
+ fprintf (file, "\tstd\t6,88(%s)\n", reg_names[STACK_POINTER_REGNUM]);
+ }
+ }
+ else
+ { /* No leaf function. */
+ fprintf (file, "%s\tleaf function %d\n", ASM_COMMENT_START,
+ leaf_function_p ());
+ fprintf (file, "%s\tautomatics %d\n", ASM_COMMENT_START,
+ lsize);
+ fprintf (file, "%s\toutgoing args %d\n", ASM_COMMENT_START,
+ current_function_outgoing_args_size);
+ fprintf (file, "%s\tneed frame pointer %d\n", ASM_COMMENT_START,
+ frame_pointer_needed);
+ fprintf (file, "%s\tcall alloca %d\n", ASM_COMMENT_START,
+ current_function_calls_alloca);
+ fprintf (file, "%s\thas varargs %d\n", ASM_COMMENT_START,
+ current_function_stdarg || current_function_varargs);
+ fprintf (file, "%s\tincoming args (stack) %d\n", ASM_COMMENT_START,
+ current_function_args_size);
+ fprintf (file, "%s\tfunction length %d\n", ASM_COMMENT_START,
+ insn_current_address);
+ fprintf (file, "%s\tregister live ", ASM_COMMENT_START);
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ fprintf (file, "%d", regs_ever_live[i]);
+ fputc ('\n',file);
+
+ /* Save gprs 6 - 15 and fprs 4 and 6. */
+
+ /* For varargs, all argument registers must be saved too. */
+ if (current_function_stdarg || current_function_varargs)
+ {
+ i = 2;
+ }
+ else
+ {
+ for (i = 6; i < 13 && (regs_ever_live[i] == 0); i++);
+ }
+
+ fprintf (file, "\tstm%s\t%s,%s,%d(%s)\n",
+ b64, reg_names[i], reg_names[15], i * UNITS_PER_WORD,
+ reg_names[STACK_POINTER_REGNUM]);
+
+#ifdef INCOMING_RETURN_ADDR_RTX
+ if (dwarf2out_do_frame ())
+ {
+ l = dwarf2out_cfi_label ();
+ dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, STACK_POINTER_OFFSET);
+ for (j = i; j <= 15; j++)
+ dwarf2out_reg_save (l, j, (TARGET_64BIT ? (j-20) : (j-24)) *
+ UNITS_PER_WORD);
+ if (regs_ever_live[18])
+ dwarf2out_reg_save (l, 18, -16);
+ if (regs_ever_live[19])
+ dwarf2out_reg_save (l, 19, -8);
+ }
+#endif
+
+ s390_output_constant_pool (file);
+
+ /* Save fprs. */
+
+ if (current_function_stdarg || current_function_varargs)
+ {
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[16],
+ STACK_POINTER_OFFSET-32,
+ reg_names[STACK_POINTER_REGNUM]);
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[17],
+ STACK_POINTER_OFFSET-24,
+ reg_names[STACK_POINTER_REGNUM]);
+ if (TARGET_64BIT)
+ {
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[18],
+ STACK_POINTER_OFFSET-16,
+ reg_names[STACK_POINTER_REGNUM]);
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[19],
+ STACK_POINTER_OFFSET-8,
+ reg_names[STACK_POINTER_REGNUM]);
+ }
+ }
+ if (!TARGET_64BIT)
+ {
+ if (regs_ever_live[18])
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[18],
+ STACK_POINTER_OFFSET-16,
+ reg_names[STACK_POINTER_REGNUM]);
+ if (regs_ever_live[19])
+ fprintf (file, "\tstd\t%s,%d(%s)\n",
+ reg_names[19],
+ STACK_POINTER_OFFSET-8,
+ reg_names[STACK_POINTER_REGNUM]);
+ }
+
+
+ /* A large frame cannot be addressed directly; save the 64-bit
+ fprs below the new stack pointer via register 1 now. */
+ if (save_fprs_p() && frame_size > 4095)
+ {
+ int fp = 1;
+ int offset = 0;
+ fprintf (file, "\tlgr\t%s,%s\n", reg_names[fp],
+ reg_names[STACK_POINTER_REGNUM]);
+ fprintf (file, "\taghi\t%s,-64\n", reg_names[fp]);
+ save_fprs(file, 0, fp);
+ }
+
+ /* Decrement stack. */
+
+ if (TARGET_BACKCHAIN || (STARTING_FRAME_OFFSET +
+ lsize + STACK_POINTER_OFFSET > 4095
+ || frame_pointer_needed
+ || current_function_calls_alloca))
+ {
+
+ fprintf (file, "\tl%sr\t%s,%s\n", b64,
+ reg_names[1], reg_names[STACK_POINTER_REGNUM]);
+ }
+
+ /* Subtract the frame size; large values come from the pool. */
+ if (stack_label)
+ {
+ rtx operands[2];
+
+ operands[0] = stack_pointer_rtx;
+ operands[1] = stack_label;
+ if (TARGET_64BIT)
+ output_asm_insn ("sg\t%0,%1", operands);
+ else
+ output_asm_insn ("s\t%0,%1", operands);
+ }
+ else
+ {
+ fprintf (file, "\ta%shi\t%s,-%d\n",b64,
+ reg_names[STACK_POINTER_REGNUM], frame_size);
+ }
+#ifdef INCOMING_RETURN_ADDR_RTX
+ if (dwarf2out_do_frame ())
+ {
+ if (frame_pointer_needed)
+ dwarf2out_def_cfa ("", HARD_FRAME_POINTER_REGNUM,
+ STACK_POINTER_OFFSET+frame_size);
+ else
+ dwarf2out_def_cfa ("", STACK_POINTER_REGNUM,
+ STACK_POINTER_OFFSET+frame_size);
+ }
+#endif
+
+
+ /* Generate backchain. */
+
+ if (TARGET_BACKCHAIN || (STARTING_FRAME_OFFSET +
+ lsize + STACK_POINTER_OFFSET > 4095
+ || frame_pointer_needed
+ || current_function_calls_alloca))
+ {
+ fprintf (file, "\tst%s\t%s,0(%s)\n",
+ b64, reg_names[1], reg_names[STACK_POINTER_REGNUM]);
+ }
+ }
+
+ if (frame_pointer_needed)
+ {
+ fprintf (file, "\tl%sr\t%s,%s\n", b64,
+ reg_names[FRAME_POINTER_REGNUM],
+ reg_names[STACK_POINTER_REGNUM]);
+ }
+
+ /* Load GOT if used and emit use insn that optimizer does not
+ erase literal pool entry. */
+
+ if (current_function_uses_pic_offset_table)
+ {
+ rtx operands[3];
+ if (TARGET_64BIT)
+ {
+ fprintf (file, "\tlarl\t%s,_GLOBAL_OFFSET_TABLE_\n",
+ reg_names[PIC_OFFSET_TABLE_REGNUM]);
+ }
+ else
+ {
+ operands[0] = gen_rtx (REG, Pmode, PIC_OFFSET_TABLE_REGNUM);
+ operands[1] = got_label;
+ operands[2] = gen_rtx (REG, Pmode, BASE_REGISTER);
+ output_asm_insn ("l\t%0,%1\n\tar\t%0,%2", operands);
+ }
+ }
+ /* Save FPRs below save area. */
+
+ if (frame_size <= 4095)
+ save_fprs (file, frame_size - 64, STACK_POINTER_REGNUM);
+
+ return;
+}
+
+/* This function generates the assembly code for function exit:
+ restore the saved registers, reload the return address, and
+ branch back to the caller. LSIZE is the size of the local
+ variables, as in the prologue. Also resets the per-function
+ state (pool count, leaf flag) for the next function. */
+
+void
+s390_function_epilogue (FILE *file, HOST_WIDE_INT lsize)
+{
+/* Register is call clobbered and not used for eh or return. */
+#define FREE_REG 4
+
+ int i;
+ long frame_size;
+ int return_reg = RETURN_REGNUM;
+ int fp, offset;
+ char b64[2] = " ";
+
+ b64[0] = TARGET_64BIT ? 'g' : '\0';
+ frame_size = STARTING_FRAME_OFFSET + lsize + save_fprs_p () * 64;
+
+ if (current_function_uses_pic_offset_table)
+ regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
+
+ if (leaf_function_flag)
+ {
+ /* Leaf function: registers were saved in the caller's frame at
+ the unchanged stack pointer. */
+ for (i = 6; i < 13 && (regs_ever_live[i] == 0); i++);
+
+ if (s390_nr_constants || regs_ever_live[13] || i != 13)
+ {
+ fprintf (file, "\tlm%s\t%s,%s,%d(%s)\n", b64,
+ reg_names[i], reg_names[13],
+ UNITS_PER_WORD * i,
+ reg_names[STACK_POINTER_REGNUM]);
+ }
+ if (!TARGET_64BIT)
+ {
+ if (regs_ever_live[18])
+ fprintf (file, "\tld\t%s,%d(%s)\n",
+ reg_names[18],
+ STACK_POINTER_OFFSET-16,
+ reg_names[STACK_POINTER_REGNUM]);
+ if (regs_ever_live[19])
+ fprintf (file, "\tld\t%s,%d(%s)\n",
+ reg_names[19],
+ STACK_POINTER_OFFSET-8,
+ reg_names[STACK_POINTER_REGNUM]);
+ }
+ }
+ else
+ {
+ for (i = 6; i < 13 && (regs_ever_live[i] == 0); i++);
+
+ /* Pick the register and offset through which the save area
+ can be addressed. */
+ if (frame_size + STACK_POINTER_OFFSET > 4095)
+ {
+ offset = 0;
+ fp = STACK_POINTER_REGNUM;
+ }
+ else if (frame_pointer_needed || current_function_calls_alloca)
+ {
+ offset = frame_size;
+ fp = FRAME_POINTER_REGNUM;
+ }
+ else
+ {
+ offset = frame_size;
+ fp = STACK_POINTER_REGNUM;
+ }
+
+ /* Restore from offset below save area. */
+
+ /* Large frame: follow the backchain to the caller's frame. */
+ if (offset == 0)
+ fprintf (file, "\tl%s\t%s,0(%s)\n", b64,
+ reg_names[fp], reg_names[fp]);
+ restore_fprs (file, offset-64, fp);
+ /* Load the return address into a scratch register first, so the
+ multi-register reload below may clobber RETURN_REGNUM. */
+ return_reg = FREE_REG;
+ fprintf (file, "\tl%s\t%s,%d(%s)\n", b64, reg_names[return_reg],
+ UNITS_PER_WORD*RETURN_REGNUM+offset, reg_names[fp]);
+ if (!TARGET_64BIT)
+ {
+ if (regs_ever_live[18])
+ fprintf (file, "\tld\t%s,%d(%s)\n",
+ reg_names[18],
+ offset+STACK_POINTER_OFFSET-16, reg_names[fp]);
+ if (regs_ever_live[19])
+ fprintf (file, "\tld\t%s,%d(%s)\n",
+ reg_names[19],
+ offset+STACK_POINTER_OFFSET-8, reg_names[fp]);
+ }
+ fprintf (file, "\tlm%s\t%s,%s,%d(%s)\n", b64,
+ reg_names[i], reg_names[15],
+ (UNITS_PER_WORD * i) + offset, reg_names[fp]);
+ }
+
+ fprintf (file, "\tbr\t%s\n", reg_names[return_reg]);
+
+ /* Reset per-function state for the next function. */
+ current_function_uses_pic_offset_table = 0;
+ leaf_function_flag = 0;
+ s390_pool_start_insn = NULL_RTX;
+ s390_pool_count = -1;
+ s390_function_count++;
+ return;
+}
+
+/* For structs of odd size the address is passed by reference.
+ Complex numbers are also passed on the stack.
+
+ Note: We don't use the mode, since a struct with the following format
+ is BLKmode, but has size 4.
+ struct
+ {
+ char a;
+ char b[3]
+ }.
+ The ABI states that this value has to be passed in a register. */
+
+
+/* Return the size in bytes of an argument of mode MODE and type
+ TYPE (TYPE may be null for libcalls). */
+
+static int
+s390_function_arg_size (enum machine_mode mode, tree type)
+{
+ /* Prefer the type size whenever type information is available. */
+ if (type)
+ return int_size_in_bytes (type);
+
+ /* No type info available for some library calls; fall back to
+ the mode size. */
+ if (mode != BLKmode)
+ return GET_MODE_SIZE (mode);
+
+ /* If we have neither type nor a useful mode, give up. */
+ fatal_error ("no type info available for BLKmode\n");
+}
+
+/* Return nonzero if an argument of mode MODE and type TYPE must be
+ passed by reference: aggregates whose size is not 1, 2, 4 or 8
+ bytes, and complex numbers. */
+
+int
+s390_function_arg_pass_by_reference (enum machine_mode mode, tree type)
+{
+ int size = s390_function_arg_size (mode, type);
+
+ if (!type)
+ return 0;
+
+ if (AGGREGATE_TYPE_P (type)
+ && size != 1 && size != 2 && size != 4 && size != 8)
+ return 1;
+
+ return TREE_CODE (type) == COMPLEX_TYPE;
+}
+
+/* Update the data in CUM to advance over an argument of mode MODE
+ and data type TYPE (TYPE is null for libcalls where that
+ information may not be available). NAMED is ignored. */
+
+void
+s390_function_arg_advance (CUMULATIVE_ARGS * cum,
+ enum machine_mode mode, tree type, int named)
+{
+ /* Floating point arguments each occupy one fp register. */
+ if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
+ cum->fprs++;
+ /* By-reference arguments occupy one gpr for the pointer. */
+ else if (s390_function_arg_pass_by_reference (mode, type))
+ cum->gprs += 1;
+ else
+ {
+ /* Everything else takes as many gprs as it has words. */
+ int words = (s390_function_arg_size (mode, type) + UNITS_PER_WORD-1)
+ / UNITS_PER_WORD;
+
+ cum->gprs += words;
+ }
+}
+
+
+
+/* Define where to put the arguments to a function. Value is zero
+ to push the argument on the stack, or a hard register in which to
+ store the argument. Gprs 2-6 and fprs 0 and 2 are used as
+ argument registers. All integral values go into registers until
+ all are used up; the rest goes onto the stack. The same is valid
+ for floating-point values. */
+
+rtx
+s390_function_arg (CUMULATIVE_ARGS * cum,
+ enum machine_mode mode, tree type, int named)
+{
+ /* The pointer of a by-reference argument is pushed like any other
+ stack argument. */
+ if (s390_function_arg_pass_by_reference (mode, type))
+ return 0;
+
+ if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
+ {
+ /* 4 fp argument registers in 64-bit mode, 2 in 31-bit mode. */
+ if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
+ return 0;
+
+ return gen_rtx (REG, mode, cum->fprs + 16);
+ }
+ else
+ {
+ int words = (s390_function_arg_size (mode, type) + UNITS_PER_WORD-1)
+ / UNITS_PER_WORD;
+
+ /* The argument must fit entirely within gprs 2 .. 6. */
+ if (cum->gprs + words > 5)
+ return 0;
+
+ return gen_rtx (REG, mode, cum->gprs + 2);
+ }
+}
+
+
+/* Builtin va_list stuff
+ va_list is a structure of four elements:
+ __gpr: number of named args passed in general purpose registers
+ __fpr: number of named args passed in floating point registers
+ __overflow_arg_area: address of area, where arguments are passed
+ if they do not fit in gprs 2 to 6 and fpr 0 and 2
+ __reg_save_area: address, where register passed args are saved
+ in prologue. */
+
+tree
+s390_build_va_list ()
+{
+ tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
+
+ record = make_lang_type (RECORD_TYPE);
+
+ type_decl =
+ build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
+
+ /* The four fields described above, in declaration order. */
+ f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
+ long_integer_type_node);
+ f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
+ long_integer_type_node);
+ f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
+ ptr_type_node);
+ f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
+ ptr_type_node);
+
+ DECL_FIELD_CONTEXT (f_gpr) = record;
+ DECL_FIELD_CONTEXT (f_fpr) = record;
+ DECL_FIELD_CONTEXT (f_ovf) = record;
+ DECL_FIELD_CONTEXT (f_sav) = record;
+
+ TREE_CHAIN (record) = type_decl;
+ TYPE_NAME (record) = type_decl;
+ TYPE_FIELDS (record) = f_gpr;
+ TREE_CHAIN (f_gpr) = f_fpr;
+ TREE_CHAIN (f_fpr) = f_ovf;
+ TREE_CHAIN (f_ovf) = f_sav;
+
+ layout_type (record);
+
+ /* The correct type is an array type of one element. */
+ return build_array_type (record, build_index_type (size_zero_node));
+}
+
+/* Builtin va_start
+ The va_list struct is set with the values.
+ gpr: compile time known, taken from current_function_args_info
+ fpr: compile time known, taken from current_function_args_info
+ overflow_arg_area: address passed with register 7 (incoming args register)
+ (setup in prologue)
+ reg_save_area: address of save area where the first 5 gprs and 2 fprs
+ are saved (saved in prologue). */
+
+void
+s390_va_start (int stdarg_p, tree valist, rtx nextarg)
+{
+ HOST_WIDE_INT n_gpr, n_fpr;
+ int off;
+ tree f_gpr, f_fpr, f_ovf, f_sav;
+ tree gpr, fpr, ovf, sav, t;
+
+ /* Walk the field chain built by s390_build_va_list. */
+ f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
+ f_fpr = TREE_CHAIN (f_gpr);
+ f_ovf = TREE_CHAIN (f_fpr);
+ f_sav = TREE_CHAIN (f_ovf);
+
+ valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
+ gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
+ fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
+ ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
+ sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
+
+ /* Count number of gp and fp argument registers used. */
+
+ n_gpr = current_function_args_info.gprs;
+ n_fpr = current_function_args_info.fprs;
+
+ t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+ t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+ /* Find the overflow area. */
+ t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
+
+ off = INTVAL (current_function_arg_offset_rtx);
+ off = off < 0 ? 0 : off;
+ /* NOTE(review): for old-style varargs the last named argument is
+ part of the variable list, hence the 4-byte adjustment --
+ confirm against the varargs.h implementation. */
+ if (! stdarg_p)
+ off = off > 0 ? off - 4 : off;
+ if (TARGET_DEBUG_ARG)
+ fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
+ n_gpr, n_fpr, off);
+
+ t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
+
+ t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+ /* Find the register save area. */
+ t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
+ t = build (PLUS_EXPR, TREE_TYPE (sav), t,
+ build_int_2 (-STACK_POINTER_OFFSET, -1));
+ t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+}
+
+
+/* Builtin va_arg.
+
+ Works like following:
+
+ if (integral value) {
+ if (size <= 4 && args.gpr < 5 ||
+ size > 4 && args.gpr < 4 )
+ ret = args.reg_save_area[args.gpr+8]
+ else
+ ret = *args.overflow_arg_area++;
+ } else if (float value) {
+ if (args.fgpr < 2)
+ ret = args.reg_save_area[args.fpr+64]
+ else
+ ret = *args.overflow_arg_area++;
+ } else if (aggregate value) {
+ if (args.gpr < 5)
+ ret = *args.reg_save_area[args.gpr]
+ else
+ ret = **args.overflow_arg_area++;
+ } */
+
+
+rtx
+s390_va_arg (tree valist, tree type)
+{
+  tree f_gpr, f_fpr, f_ovf, f_sav;
+  tree gpr, fpr, ovf, sav, reg, t, u;
+  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
+  rtx lab_false, lab_over, addr_rtx, r;
+
+  /* Pick apart the va_list fields (layout from s390_build_va_list).  */
+  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
+  f_fpr = TREE_CHAIN (f_gpr);
+  f_ovf = TREE_CHAIN (f_fpr);
+  f_sav = TREE_CHAIN (f_ovf);
+
+  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
+  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
+  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
+  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
+  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
+
+  size = int_size_in_bytes (type);
+
+  /* Classify the argument and set up:
+       reg        - va_list counter field to test and increment
+       n_reg      - number of register slots the argument consumes
+       sav_ofs    - byte offset of the first slot in the save area
+       sav_scale  - size of one register slot in the save area
+       max_reg    - highest counter value still passed in registers
+       indirect_p - nonzero if only the address is passed  */
+  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
+    {
+      if (TARGET_DEBUG_ARG)
+	{
+	  fprintf (stderr, "va_arg: aggregate type");
+	  debug_tree (type);
+	}
+
+      /* Aggregates are passed by reference.  */
+      indirect_p = 1;
+      reg = gpr;
+      n_reg = 1;
+      sav_ofs = 8;
+      sav_scale = UNITS_PER_WORD;
+      size = UNITS_PER_WORD;
+      max_reg = 4;
+    }
+  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
+    {
+      if (TARGET_DEBUG_ARG)
+	{
+	  fprintf (stderr, "va_arg: float type");
+	  debug_tree (type);
+	}
+
+      /* FP args go in FP registers, if present.  */
+      indirect_p = 0;
+      reg = fpr;
+      n_reg = 1;
+      sav_ofs = 16 * UNITS_PER_WORD;
+      sav_scale = 8;
+      /* TARGET_64BIT has up to 4 parameters in fprs.  */
+      max_reg = TARGET_64BIT ? 3 : 1;
+    }
+  else
+    {
+      if (TARGET_DEBUG_ARG)
+	{
+	  fprintf (stderr, "va_arg: other type");
+	  debug_tree (type);
+	}
+
+      /* Otherwise into GP registers.  */
+      indirect_p = 0;
+      reg = gpr;
+      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+      sav_ofs = 2 * UNITS_PER_WORD;
+      /* Sub-word scalars sit right-justified in their save slot;
+	 bias the offset past the padding bytes.  */
+      if (TARGET_64BIT)
+	sav_ofs += TYPE_MODE (type) == SImode ? 4 :
+	  TYPE_MODE (type) == HImode ? 6 :
+	  TYPE_MODE (type) == QImode ? 7 : 0;
+      else
+	sav_ofs += TYPE_MODE (type) == HImode ? 2 :
+	  TYPE_MODE (type) == QImode ? 3 : 0;
+
+      sav_scale = UNITS_PER_WORD;
+      /* A multi-word value must fit entirely into the remaining
+	 argument registers.  */
+      if (n_reg > 1)
+	max_reg = 3;
+      else
+	max_reg = 4;
+    }
+
+  /* Pull the value out of the saved registers ...  */
+
+  lab_false = gen_label_rtx ();
+  lab_over = gen_label_rtx ();
+  addr_rtx = gen_reg_rtx (Pmode);
+
+  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
+			   GEN_INT (max_reg),
+			   GT, const1_rtx, Pmode, 0, 1, lab_false);
+
+  if (sav_ofs)
+    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
+  else
+    t = sav;
+
+  u = build (MULT_EXPR, long_integer_type_node,
+	     reg, build_int_2 (sav_scale, 0));
+  TREE_SIDE_EFFECTS (u) = 1;
+
+  t = build (PLUS_EXPR, ptr_type_node, t, u);
+  TREE_SIDE_EFFECTS (t) = 1;
+
+  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
+  if (r != addr_rtx)
+    emit_move_insn (addr_rtx, r);
+
+  emit_jump_insn (gen_jump (lab_over));
+  emit_barrier ();
+  emit_label (lab_false);
+
+  /* ... Otherwise out of the overflow area.  */
+
+  t = save_expr (ovf);
+
+  /* Each stack argument occupies a full word slot; sub-word values
+     are stored right-justified, so skip the leading padding bytes.  */
+  if (size < UNITS_PER_WORD)
+    {
+      t = build (PLUS_EXPR, TREE_TYPE (t), t,
+		 build_int_2 (UNITS_PER_WORD - size, 0));
+      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
+      TREE_SIDE_EFFECTS (t) = 1;
+      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+      t = save_expr (ovf);
+    }
+
+  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
+  if (r != addr_rtx)
+    emit_move_insn (addr_rtx, r);
+
+  /* Advance the overflow pointer past the argument.  */
+  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
+  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
+  TREE_SIDE_EFFECTS (t) = 1;
+  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+  emit_label (lab_over);
+
+  /* Advance the register counter by the slots consumed; note this
+     runs on both the register and the overflow path.  */
+  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
+	     build_int_2 (n_reg, 0));
+  TREE_SIDE_EFFECTS (u) = 1;
+  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+  if (indirect_p)
+    {
+      /* The computed slot holds only the address of the value;
+	 load that address.  */
+      r = gen_rtx_MEM (Pmode, addr_rtx);
+      MEM_ALIAS_SET (r) = get_varargs_alias_set ();
+      emit_move_insn (addr_rtx, r);
+    }
+
+  return addr_rtx;
+}
+
+/* Implementation of Trampoline
+ Gpr 1 is used as base register and for the jump
+ to the nested function.
+ Gpr 0 is static chain. */
+
+void
+s390_trampoline_template (FILE * file)
+{
+  const char *base = reg_names[1];	/* base / jump register */
+  const char *chain = reg_names[0];	/* static chain register */
+
+  if (!TARGET_64BIT)
+    {
+      /* 31 bit: establish a base with basr, then load the static
+	 chain and target address relative to it.  */
+      fprintf (file, "basr\t%s,0\n", base);
+      fprintf (file, "l\t%s,10(%s)\n", chain, base);
+      fprintf (file, "l\t%s,14(%s)\n", base, base);
+      fprintf (file, "br\t%s\n", base);
+      fprintf (file, ".long\t0\n");
+      fprintf (file, ".long\t0\n");
+    }
+  else
+    {
+      /* 64 bit: address the literal slots PC-relatively via larl.  */
+      fprintf (file, "larl\t%s,0f\n", base);
+      fprintf (file, "lg\t%s,0(%s)\n", chain, base);
+      fprintf (file, "lg\t%s,8(%s)\n", base, base);
+      fprintf (file, "br\t%s\n", base);
+      fprintf (file, "0:\t.quad\t0\n");
+      fprintf (file, ".quad\t0\n");
+    }
+}
+
+/* Initialize the trampoline at ADDR: store the static chain CXT and
+   the target function address FNADDR into the two literal slots left
+   behind the code emitted by s390_trampoline_template (offsets 20/28
+   in 64-bit mode, 12/16 in 31-bit mode).  */
+
+void
+s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
+{
+  emit_move_insn (gen_rtx_MEM (Pmode,
+			       memory_address (Pmode,
+					       plus_constant (addr,
+							      TARGET_64BIT ? 20 : 12))),
+		  cxt);
+  emit_move_insn (gen_rtx_MEM (Pmode,
+			       memory_address (Pmode,
+					       plus_constant (addr,
+							      TARGET_64BIT ? 28 : 16))),
+		  fnaddr);
+}
--- /dev/null
+/* Definitions of target machine for GNU compiler, for IBM S/390
+ Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
+ Contributed by Hartmut Penner (hpenner@de.ibm.com) and
+ Ulrich Weigand (weigand@de.ibm.com).
+This file is part of GNU CC.
+
+GNU CC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GNU CC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GNU CC; see the file COPYING. If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
+
+#ifndef _S390_H
+#define _S390_H
+
+#define TARGET_VERSION fprintf (stderr, " (S/390)");
+
+extern int flag_pic;
+
+/* Run-time compilation parameters selecting different hardware subsets. */
+
+extern int target_flags;
+
+/* Target macros checked at runtime of compiler. */
+
+#define TARGET_HARD_FLOAT (target_flags & 1)
+#define TARGET_BACKCHAIN (target_flags & 2)
+#define TARGET_SMALL_EXEC (target_flags & 4)
+#define TARGET_DEBUG_ARG (target_flags & 8)
+#define TARGET_64BIT (target_flags & 16)
+#define TARGET_MVCLE (target_flags & 32)
+
+#define TARGET_DEFAULT 0x3
+#define TARGET_SOFT_FLOAT (!(target_flags & 1))
+
+/* Macro to define tables used to set the flags. This is a list in braces
+ of pairs in braces, each pair being { "NAME", VALUE }
+ where VALUE is the bits to set or minus the bits to clear.
+ An empty string NAME is used to identify the default VALUE. */
+
+#define TARGET_SWITCHES \
+{ { "hard-float", 1,N_("Use hardware fp")}, \
+  { "soft-float", -1,N_("Don't use hardware fp")}, \
+  { "backchain", 2,N_("Set backchain")}, \
+  { "no-backchain", -2,N_("Don't set backchain (faster, but debug harder)")}, \
+  { "small-exec", 4,N_("Use bras for executable < 64k")}, \
+  { "no-small-exec",-4,N_("Don't use bras")}, \
+  { "debug_arg", 8,N_("Additional debug prints")}, \
+  { "no-debug_arg", -8,N_("Don't print additional debug prints")}, \
+  { "64", 16,N_("64 bit mode")}, \
+  { "31", -16,N_("31 bit mode")}, \
+  { "mvcle", 32,N_("mvcle use")}, \
+  { "no-mvcle", -32,N_("mvc&ex")}, \
+  { "", TARGET_DEFAULT, 0 } }
+
+/* Define this to change the optimizations performed by default. */
+#define OPTIMIZATION_OPTIONS(LEVEL,SIZE) optimization_options(LEVEL,SIZE)
+
+/* The current function count for create unique internal labels. */
+
+extern int s390_function_count;
+
+/* The amount of space used for outgoing arguments. */
+
+extern int current_function_outgoing_args_size;
+
+/* Target machine storage layout. */
+
+/* Define this if most significant bit is lowest numbered in instructions
+ that operate on numbered bit-fields. */
+
+#define BITS_BIG_ENDIAN 1
+
+/* Define this if most significant byte of a word is the lowest numbered. */
+
+#define BYTES_BIG_ENDIAN 1
+
+/* Define this if MS word of a multiword is the lowest numbered. */
+
+#define WORDS_BIG_ENDIAN 1
+
+/* Number of bits in an addressable storage unit. */
+
+#define BITS_PER_UNIT 8
+
+/* Width in bits of a "word", which is the contents of a machine register.
+   MAX_BITS_PER_WORD must be the largest value BITS_PER_WORD can attain,
+   which is 64 when TARGET_64BIT is selected.  */
+
+#define BITS_PER_WORD (TARGET_64BIT ? 64 : 32)
+#define MAX_BITS_PER_WORD 64
+
+/* Width of a word, in units (bytes). */
+
+#define UNITS_PER_WORD (TARGET_64BIT ? 8 : 4)
+#define MIN_UNITS_PER_WORD 4
+
+/* Width in bits of a pointer. See also the macro `Pmode' defined below. */
+
+#define POINTER_SIZE (TARGET_64BIT ? 64 : 32)
+
+/* A C expression for the size in bits of the type `short' on the
+ target machine. If you don't define this, the default is half a
+ word. (If this would be less than one storage unit, it is
+ rounded up to one unit.) */
+#define SHORT_TYPE_SIZE 16
+
+/* A C expression for the size in bits of the type `int' on the
+ target machine. If you don't define this, the default is one
+ word. */
+#define INT_TYPE_SIZE 32
+
+/* A C expression for the size in bits of the type `long' on the
+   target machine.  MAX_LONG_TYPE_SIZE must be the largest value
+   LONG_TYPE_SIZE can attain, which is 64 when TARGET_64BIT is
+   selected.  */
+#define LONG_TYPE_SIZE (TARGET_64BIT ? 64 : 32)
+#define MAX_LONG_TYPE_SIZE 64
+
+/* A C expression for the size in bits of the type `long long' on the
+ target machine. If you don't define this, the default is two
+ words. */
+#define LONG_LONG_TYPE_SIZE 64
+
+/* Right now we only support two floating point formats, the
+ 32 and 64 bit ieee formats. */
+
+#define FLOAT_TYPE_SIZE 32
+#define DOUBLE_TYPE_SIZE 64
+#define LONG_DOUBLE_TYPE_SIZE 64
+
+/* Define this macro if it is advisable to hold scalars in registers
+ in a wider mode than that declared by the program. In such cases,
+ the value is constrained to be within the bounds of the declared
+ type, but kept valid in the wider mode. The signedness of the
+ extension may differ from that of the type. */
+
+#define PROMOTE_MODE(MODE, UNSIGNEDP, TYPE) \
+if (INTEGRAL_MODE_P (MODE) && \
+ GET_MODE_SIZE (MODE) < UNITS_PER_WORD) { \
+ (MODE) = Pmode; \
+ }
+
+/* Defining PROMOTE_FUNCTION_ARGS eliminates some unnecessary zero/sign
+ extensions applied to char/short functions arguments. Defining
+ PROMOTE_FUNCTION_RETURN does the same for function returns. */
+
+#define PROMOTE_FUNCTION_ARGS
+#define PROMOTE_FUNCTION_RETURN
+#define PROMOTE_FOR_CALL_ONLY
+
+/* Allocation boundary (in *bits*) for storing pointers in memory. */
+
+#define POINTER_BOUNDARY 32
+
+/* Allocation boundary (in *bits*) for storing arguments in argument list. */
+
+#define PARM_BOUNDARY (TARGET_64BIT ? 64 : 32)
+
+/* Boundary (in *bits*) on which stack pointer should be aligned. */
+
+#define STACK_BOUNDARY 64
+
+/* Allocation boundary (in *bits*) for the code of a function. */
+
+#define FUNCTION_BOUNDARY 32
+
+/* There is no point aligning anything to a rounder boundary than this. */
+
+#define BIGGEST_ALIGNMENT 64
+
+/* Alignment of field after `int : 0' in a structure. */
+
+#define EMPTY_FIELD_BOUNDARY 32
+
+/* Alignment on even addresses for LARL instruction. */
+
+#define CONSTANT_ALIGNMENT(EXP, ALIGN) (ALIGN) < 16 ? 16 : (ALIGN)
+
+#define DATA_ALIGNMENT(TYPE, ALIGN) (ALIGN) < 16 ? 16 : (ALIGN)
+
+/* Define this if move instructions will actually fail to work when given
+ unaligned data. */
+
+#define STRICT_ALIGNMENT 0
+
+/* real arithmetic */
+
+#define REAL_ARITHMETIC
+
+/* Define target floating point format. */
+
+#undef TARGET_FLOAT_FORMAT
+#ifdef IEEE_FLOAT
+#define TARGET_FLOAT_FORMAT IEEE_FLOAT_FORMAT
+#else
+#define TARGET_FLOAT_FORMAT IBM_FLOAT_FORMAT
+#endif
+
+/* Define if special allocation order desired. */
+
+#define REG_ALLOC_ORDER \
+{ 1, 2, 3, 4, 5, 0, 14, 13, 12, 11, 10, 9, 8, 7, 6, \
+ 16, 17, 18, 19, 20, 21, 22, 23, \
+ 24, 25, 26, 27, 28, 29, 30, 31, \
+ 15, 32, 33 }
+
+/* Standard register usage. */
+
+#define INT_REGNO_P(N) ( (N) >= 0 && (N) < 16 )
+#ifdef IEEE_FLOAT
+#define FLOAT_REGNO_P(N) ( (N) >= 16 && (N) < 32 )
+#else
+#define FLOAT_REGNO_P(N) ( (N) >= 16 && (N) < 20 )
+#endif
+#define CC_REGNO_P(N) ( (N) == 33 )
+
+/* Number of actual hardware registers. The hardware registers are
+ assigned numbers for the compiler from 0 to just below
+ FIRST_PSEUDO_REGISTER.
+ All registers that the compiler knows about must be given numbers,
+ even those that are not normally considered general registers.
+ For the 390, we give the data registers numbers 0-15,
+ and the floating point registers numbers 16-19.
+ G5 and following have 16 IEEE floating point registers,
+ which get numbers 16-31. */
+
+#define FIRST_PSEUDO_REGISTER 34
+
+/* The following register have a fix usage
+ GPR 12: GOT register points to the GOT, setup in prologue,
+ GOT contains pointer to variables in shared libraries
+ GPR 13: Base register setup in prologue to point to the
+ literal table of each function
+ GPR 14: Return registers holds the return address
+ GPR 15: Stack pointer */
+
+#define PIC_OFFSET_TABLE_REGNUM 12
+#define BASE_REGISTER 13
+#define RETURN_REGNUM 14
+#define STACK_POINTER_REGNUM 15
+
+#define FIXED_REGISTERS \
+{ 0, 0, 0, 0, \
+ 0, 0, 0, 0, \
+ 0, 0, 0, 0, \
+ 0, 1, 1, 1, \
+ 0, 0, 0, 0, \
+ 0, 0, 0, 0, \
+ 0, 0, 0, 0, \
+ 0, 0, 0, 0, \
+ 1, 1 }
+
+/* 1 for registers not available across function calls. These must include
+ the FIXED_REGISTERS and also any registers that can be used without being
+ saved.
+ The latter must include the registers where values are returned
+ and the register where structure-value addresses are passed. */
+
+#define CALL_USED_REGISTERS \
+{ 1, 1, 1, 1, \
+ 1, 1, 0, 0, \
+ 0, 0, 0, 0, \
+ 0, 1, 1, 1, \
+ 1, 1, 0, 0, \
+ 1, 1, 1, 1, \
+ 1, 1, 1, 1, \
+ 1, 1, 1, 1, \
+ 1, 1 }
+
+/* If not pic code, gpr 12 can be used. */
+
+#define CONDITIONAL_REGISTER_USAGE \
+do \
+ { \
+ if (flag_pic) \
+ { \
+ fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \
+ call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \
+ } \
+ } while (0)
+
+/* The following register have a special usage
+ GPR 11: Frame pointer if needed to point to automatic variables.
+ GPR 32: In functions with more than 5 args this register
+ points to those arguments; it is always eliminated
+ with stack- or frame-pointer.
+ GPR 33: Condition code 'register' */
+
+#define FRAME_POINTER_REGNUM 11
+
+#define ARG_POINTER_REGNUM 32
+
+#define CC_REGNUM 33
+
+/* We use the register %r0 to pass the static chain to a nested function.
+
+ Note: It is assumed that this register is call-clobbered!
+ We can't use any of the function-argument registers either,
+ and register 1 is needed by the trampoline code, so we have
+ no other choice but using this one ... */
+
+#define STATIC_CHAIN_REGNUM 0
+
+/* Return number of consecutive hard regs needed starting at reg REGNO
+ to hold something of mode MODE.
+ This is ordinarily the length in words of a value of mode MODE
+ but can be less for certain modes in special long registers. */
+
+#define HARD_REGNO_NREGS(REGNO, MODE) \
+ (FLOAT_REGNO_P(REGNO)? \
+ (GET_MODE_CLASS(MODE) == MODE_COMPLEX_FLOAT ? 2 : 1) : \
+ INT_REGNO_P(REGNO)? \
+ ((GET_MODE_SIZE(MODE)+UNITS_PER_WORD-1) / UNITS_PER_WORD) : \
+ 1)
+
+/* Value is 1 if hard register REGNO can hold a value of machine-mode MODE.
+ The gprs can hold QI, HI, SI, SF, DF, SC and DC.
+ Even gprs can hold DI.
+ The floating point registers can hold DF, SF, DC and SC. */
+
+#define HARD_REGNO_MODE_OK(REGNO, MODE) \
+ (FLOAT_REGNO_P(REGNO)? \
+ (GET_MODE_CLASS(MODE) == MODE_FLOAT || \
+ GET_MODE_CLASS(MODE) == MODE_COMPLEX_FLOAT) : \
+ INT_REGNO_P(REGNO)? \
+ (!((TARGET_64BIT && (MODE) == TImode) || \
+ (!TARGET_64BIT && (MODE) == DImode)) || ((REGNO) & 1) == 0 ) : \
+ CC_REGNO_P(REGNO)? \
+ GET_MODE_CLASS (MODE) == MODE_CC : \
+ 0)
+
+/* Value is 1 if it is a good idea to tie two pseudo registers when one has
+ mode MODE1 and one has mode MODE2.
+ If HARD_REGNO_MODE_OK could produce different values for MODE1 and MODE2,
+ for any hard reg, then this must be 0 for correct output. */
+
+#define MODES_TIEABLE_P(MODE1, MODE2) \
+ (((MODE1) == SFmode || (MODE1) == DFmode) \
+ == ((MODE2) == SFmode || (MODE2) == DFmode))
+
+
+/* Define this macro if references to a symbol must be treated
+ differently depending on something about the variable or
+ function named by the symbol (such as what section it is in).
+
+ On s390, if using PIC, mark a SYMBOL_REF for a non-global symbol
+ so that we may access it directly in the GOT. */
+
+#define ENCODE_SECTION_INFO(DECL) \
+do \
+ { \
+ if (flag_pic) \
+ { \
+ rtx rtl = (TREE_CODE_CLASS (TREE_CODE (DECL)) != 'd' \
+ ? TREE_CST_RTL (DECL) : DECL_RTL (DECL)); \
+ \
+ if (GET_CODE (rtl) == MEM) \
+ { \
+ SYMBOL_REF_FLAG (XEXP (rtl, 0)) \
+ = (TREE_CODE_CLASS (TREE_CODE (DECL)) != 'd' \
+ || ! TREE_PUBLIC (DECL)); \
+ } \
+ } \
+ } \
+while (0)
+
+
+/* This is an array of structures. Each structure initializes one pair
+ of eliminable registers. The "from" register number is given first,
+ followed by "to". Eliminations of the same "from" register are listed
+ in order of preference. */
+
+#define ELIMINABLE_REGS \
+{{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}, \
+ { ARG_POINTER_REGNUM, STACK_POINTER_REGNUM}, \
+ { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM}}
+
+#define CAN_ELIMINATE(FROM, TO) (1)
+
+#define INITIAL_ELIMINATION_OFFSET(FROM, TO, OFFSET) \
+{ if ((FROM) == FRAME_POINTER_REGNUM && (TO) == STACK_POINTER_REGNUM) \
+ { (OFFSET) = 0; } \
+ else if ((FROM) == ARG_POINTER_REGNUM && (TO) == FRAME_POINTER_REGNUM) \
+ { (OFFSET) = s390_arg_frame_offset (); } \
+ else if ((FROM) == ARG_POINTER_REGNUM && (TO) == STACK_POINTER_REGNUM) \
+ { (OFFSET) = s390_arg_frame_offset (); } \
+}
+
+#define CAN_DEBUG_WITHOUT_FP
+
+/* Value should be nonzero if functions must have frame pointers.
+ Zero means the frame pointer need not be set up (and parms may be
+ accessed via the stack pointer) in functions that seem suitable.
+ This is computed in `reload', in reload1.c. */
+
+#define FRAME_POINTER_REQUIRED 0
+
+/* Define the classes of registers for register constraints in the
+ machine description. Also define ranges of constants.
+
+ One of the classes must always be named ALL_REGS and include all hard regs.
+ If there is more than one class, another class must be named NO_REGS
+ and contain no registers.
+
+ The name GENERAL_REGS must be the name of a class (or an alias for
+ another name such as ALL_REGS). This is the class of registers
+ that is allowed by "g" or "r" in a register constraint.
+ Also, registers outside this class are allocated only when
+ instructions express preferences for them.
+
+ The classes must be numbered in nondecreasing order; that is,
+ a larger-numbered class must never be contained completely
+ in a smaller-numbered class.
+
+ For any two classes, it is very desirable that there be another
+ class that represents their union. */
+
+/*#define SMALL_REGISTER_CLASSES 1*/
+
+enum reg_class
+{
+ NO_REGS, ADDR_REGS, GENERAL_REGS,
+ FP_REGS, CC_REGS, ALL_REGS, LIM_REG_CLASSES
+};
+
+#define N_REG_CLASSES (int) LIM_REG_CLASSES
+
+/* Give names of register classes as strings for dump file. */
+
+#define REG_CLASS_NAMES \
+{ "NO_REGS","ADDR_REGS", "GENERAL_REGS", "FP_REGS", "CC_REGS", "ALL_REGS" }
+
+/* Define which registers fit in which classes. This is an initializer for
+ a vector of HARD_REG_SET of length N_REG_CLASSES.
+ G5 and later have 16 registers and support IEEE floating point operations. */
+
+#define REG_CLASS_CONTENTS \
+{ \
+ { 0x00000000, 0x00000000 }, /* NO_REGS */ \
+ { 0x0000fffe, 0x00000001 }, /* ADDR_REGS */ \
+ { 0x0000ffff, 0x00000001 }, /* GENERAL_REGS */ \
+ { 0xffff0000, 0x00000000 }, /* FP_REGS */ \
+ { 0x00000000, 0x00000002 }, /* CC_REGS */ \
+ { 0xffffffff, 0x00000003 }, /* ALL_REGS */ \
+}
+
+
+/* The same information, inverted:
+ Return the class number of the smallest class containing
+ reg number REGNO. This could be a conditional expression
+ or could index an array. */
+
+#define REGNO_REG_CLASS(REGNO) (regclass_map[REGNO])
+
+extern enum reg_class regclass_map[]; /* smallest class containing REGNO */
+
+/* The class value for index registers, and the one for base regs. */
+
+#define INDEX_REG_CLASS ADDR_REGS
+#define BASE_REG_CLASS ADDR_REGS
+
+/* Get reg_class from a letter such as appears in the machine description. */
+
+#define REG_CLASS_FROM_LETTER(C) \
+ ((C) == 'a' ? ADDR_REGS : \
+ (C) == 'd' ? GENERAL_REGS : \
+ (C) == 'f' ? FP_REGS : NO_REGS)
+
+/* The letters I, J, K, L and M in a register constraint string can be used
+ to stand for particular ranges of immediate operands.
+ This macro defines what the ranges are.
+ C is the letter, and VALUE is a constant value.
+ Return 1 if VALUE is in the range specified by C. */
+
+#define CONST_OK_FOR_LETTER_P(VALUE, C) \
+ ((C) == 'I' ? (unsigned long) (VALUE) < 256 : \
+ (C) == 'J' ? (unsigned long) (VALUE) < 4096 : \
+ (C) == 'K' ? (VALUE) >= -32768 && (VALUE) < 32768 : \
+ (C) == 'L' ? (unsigned long) (VALUE) < 65536 : 0)
+
+/* Similar, but for floating constants, and defining letters G and H.
+ Here VALUE is the CONST_DOUBLE rtx itself. */
+
+#define CONST_DOUBLE_OK_FOR_LETTER_P(VALUE, C) 1
+
+/* 'Q' means a memory-reference for a S-type operand. */
+
+#define EXTRA_CONSTRAINT(OP, C) \
+ ((C) == 'Q' ? s_operand (OP, GET_MODE (OP)) : \
+ (C) == 'S' ? larl_operand (OP, GET_MODE (OP)) : 0)
+
+/* Given an rtx X being reloaded into a reg required to be in class CLASS,
+ return the class of reg to actually use. In general this is just CLASS;
+ but on some machines in some cases it is preferable to use a more
+ restrictive class. */
+
+#define PREFERRED_RELOAD_CLASS(X, CLASS) \
+ (GET_CODE (X) == CONST_DOUBLE ? \
+ (GET_MODE_CLASS (GET_MODE (X)) == MODE_FLOAT ? FP_REGS : ADDR_REGS) :\
+ (GET_CODE (X) == CONST_INT ? \
+ (GET_MODE_CLASS (GET_MODE (X)) == MODE_FLOAT ? FP_REGS : ADDR_REGS) :\
+ GET_CODE (X) == PLUS || \
+ GET_CODE (X) == LABEL_REF || \
+ GET_CODE (X) == SYMBOL_REF || \
+ GET_CODE (X) == CONST ? ADDR_REGS : (CLASS)))
+
+/* Return the maximum number of consecutive registers needed to represent
+ mode MODE in a register of class CLASS. */
+
+#define CLASS_MAX_NREGS(CLASS, MODE) \
+ ((CLASS) == FP_REGS ? \
+ (GET_MODE_CLASS (MODE) == MODE_COMPLEX_FLOAT ? 2 : 1) : \
+ (GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
+
+/* If we are copying between FP registers and anything else, we need a memory
+ location. */
+
+#define SECONDARY_MEMORY_NEEDED(CLASS1, CLASS2, MODE) \
+ ((CLASS1) != (CLASS2) && ((CLASS1) == FP_REGS || (CLASS2) == FP_REGS))
+
+/* Get_secondary_mem widens its argument to BITS_PER_WORD which loses on 64bit
+ because the movsi and movsf patterns don't handle r/f moves. */
+
+#define SECONDARY_MEMORY_NEEDED_MODE(MODE) \
+ (GET_MODE_BITSIZE (MODE) < 32 \
+ ? mode_for_size (32, GET_MODE_CLASS (MODE), 0) \
+ : MODE)
+
+
+/* A C expression whose value is nonzero if pseudos that have been
+ assigned to registers of class CLASS would likely be spilled
+ because registers of CLASS are needed for spill registers.
+
+ The default value of this macro returns 1 if CLASS has exactly one
+ register and zero otherwise. On most machines, this default
+ should be used. Only define this macro to some other expression
+ if pseudo allocated by `local-alloc.c' end up in memory because
+ their hard registers were needed for spill registers. If this
+ macro returns nonzero for those classes, those pseudos will only
+ be allocated by `global.c', which knows how to reallocate the
+ pseudo to another register. If there would not be another
+ register available for reallocation, you should not change the
+ definition of this macro since the only effect of such a
+ definition would be to slow down register allocation. */
+
+/* Stack layout; function entry, exit and calling. */
+
+/* The current return address is on Offset 56 of the current frame
+ if we are in an leaf_function. Otherwise we have to go one stack
+ back.
+ The return address of anything farther back is accessed normally
+ at an offset of 56 from the frame pointer.
+
+ FIXME: builtin_return_addr does not work correctly in a leaf
+ function, we need to find way to find out, if we
+ are in a leaf function
+ */
+
+#define _RETURN_ADDR_OFFSET (TARGET_64BIT ? 112 : 56)
+
+/* NOTE: this macro expands to an expression used by expand_builtin
+   code, so it must not end in a semicolon (the stray ';' would break
+   any expression-context use).  */
+#define RETURN_ADDR_RTX(count, frame) \
+  gen_rtx (MEM, Pmode, \
+	   memory_address (Pmode, \
+			   plus_constant ( \
+			     copy_to_reg (gen_rtx (MEM, Pmode, \
+						   memory_address (Pmode, frame))), \
+			     _RETURN_ADDR_OFFSET)))
+
+/* The following macros will turn on dwarf2 exception handling.
+ Other code locations for this exception handling are
+ in s390.md (eh_return insn) and in linux.c in the prologue. */
+
+#define INCOMING_RETURN_ADDR_RTX gen_rtx_REG (Pmode, RETURN_REGNUM)
+
+/* We have 31 bit mode. */
+
+#define MASK_RETURN_ADDR (GEN_INT (0x7fffffff))
+
+/* Location, from where return address to load. */
+
+#define DWARF_FRAME_RETURN_COLUMN 14
+
+/* Describe how we implement __builtin_eh_return. */
+#define EH_RETURN_DATA_REGNO(N) ((N) < 4 ? (N) + 6 : INVALID_REGNUM)
+#define EH_RETURN_STACKADJ_RTX gen_rtx_REG (Pmode, 10)
+#define EH_RETURN_HANDLER_RTX \
+ gen_rtx_MEM (Pmode, plus_constant (arg_pointer_rtx, -40))
+
+/* Define this if pushing a word on the stack makes the stack pointer a
+ smaller address. */
+
+#define STACK_GROWS_DOWNWARD
+
+/* Define this if the nominal address of the stack frame is at the
+ high-address end of the local variables; that is, each additional local
+ variable allocated goes at a more negative offset in the frame. */
+
+/* #define FRAME_GROWS_DOWNWARD */
+
+/* Offset from stack-pointer to first location of outgoing args. */
+
+#define STACK_POINTER_OFFSET (TARGET_64BIT ? 160 : 96)
+
+/* Offset within stack frame to start allocating local variables at.
+ If FRAME_GROWS_DOWNWARD, this is the offset to the END of the
+ first local allocated. Otherwise, it is the offset to the BEGINNING
+ of the first local allocated. */
+
+#define STARTING_FRAME_OFFSET \
+ (STACK_POINTER_OFFSET + current_function_outgoing_args_size)
+
+#define INITIAL_FRAME_POINTER_OFFSET(DEPTH) (DEPTH) = 0
+
+/* If we generate an insn to push BYTES bytes, this says how many the stack
+ pointer really advances by. On S/390, we have no push instruction. */
+
+/* #define PUSH_ROUNDING(BYTES) */
+
+/* Accumulate the outgoing argument count so we can request the right
+ DSA size and determine stack offset. */
+
+#define ACCUMULATE_OUTGOING_ARGS 1
+
+/* Offset from the stack pointer register to an item dynamically
+ allocated on the stack, e.g., by `alloca'.
+
+ The default value for this macro is `STACK_POINTER_OFFSET' plus the
+ length of the outgoing arguments. The default is correct for most
+ machines. See `function.c' for details. */
+#define STACK_DYNAMIC_OFFSET(FUNDECL) (STARTING_FRAME_OFFSET)
+
+/* Offset of first parameter from the argument pointer register value.
+ On the S/390, we define the argument pointer to the start of the fixed
+ area. */
+#define FIRST_PARM_OFFSET(FNDECL) 0
+
+/* Define this if stack space is still allocated for a parameter passed
+ in a register. The value is the number of bytes allocated to this
+ area. */
+/* #define REG_PARM_STACK_SPACE(FNDECL) 32 */
+
+/* Define this if the above stack space is to be considered part of the
+ space allocated by the caller. */
+/* #define OUTGOING_REG_PARM_STACK_SPACE */
+
+/* 1 if N is a possible register number for function argument passing.
+ On S390, general registers 2 - 6 and floating point register 0 and 2
+ are used in this way. */
+
+#define FUNCTION_ARG_REGNO_P(N) (((N) >=2 && (N) <7) || \
+ (N) == 16 || (N) == 17)
+
+/* Define a data type for recording info about an argument list during
+ the scan of that argument list. This data type should hold all
+ necessary information about the function itself and about the args
+ processed so far, enough to enable macros such as FUNCTION_ARG to
+ determine where the next arg should go. */
+
+typedef struct s390_arg_structure
+{
+ int gprs; /* gpr so far */
+ int fprs; /* fpr so far */
+}
+CUMULATIVE_ARGS;
+
+
+/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to
+ a function whose data type is FNTYPE.
+ For a library call, FNTYPE is 0. */
+
+#define INIT_CUMULATIVE_ARGS(CUM, FNTYPE, LIBNAME, NN) \
+ ((CUM).gprs=0, (CUM).fprs=0)
+
+/* Update the data in CUM to advance over an argument of mode MODE and
+ data type TYPE. (TYPE is null for libcalls where that information
+ may not be available.) */
+
+#define FUNCTION_ARG_ADVANCE(CUM, MODE, TYPE, NAMED) \
+ s390_function_arg_advance(&CUM, MODE, TYPE, NAMED)
+
+/* Define where to put the arguments to a function. Value is zero to push
+ the argument on the stack, or a hard register in which to store the
+ argument. */
+
+#define FUNCTION_ARG(CUM, MODE, TYPE, NAMED) \
+ s390_function_arg(&CUM, MODE, TYPE, NAMED)
+
+/* Define where to expect the arguments of a function. Value is zero if
+ the argument is on the stack, or a hard register in which the argument
+ is stored. This is the same as FUNCTION_ARG, except for unnamed args:
+ when varargs are used, all unnamed arguments are expected on the stack.
+ S/390 already has space on the stack for args coming in registers;
+ they are pushed in the prologue, if needed. */
+
+
+/* Define the `__builtin_va_list' type. */
+
+#define BUILD_VA_LIST_TYPE(VALIST) \
+ (VALIST) = s390_build_va_list ()
+
+/* Implement `va_start' for varargs and stdarg. */
+
+#define EXPAND_BUILTIN_VA_START(stdarg, valist, nextarg) \
+ s390_va_start (stdarg, valist, nextarg)
+
+/* Implement `va_arg'. */
+
+#define EXPAND_BUILTIN_VA_ARG(valist, type) \
+ s390_va_arg (valist, type)
+
+/* For an arg passed partly in registers and partly in memory, this is the
+ number of registers used. For args passed entirely in registers or
+ entirely in memory, zero. On S/390 an argument is never split. */
+
+#define FUNCTION_ARG_PARTIAL_NREGS(CUM, MODE, TYPE, NAMED) 0
+
+
+/* Define if returning from a function call automatically pops the
+ arguments described by the number-of-args field in the call.
+ On S/390 the caller never pops arguments. */
+
+#define RETURN_POPS_ARGS(FUNDECL, FUNTYPE, SIZE) 0
+
+
+/* Define how to find the value returned by a function. VALTYPE is the
+ data type of the value (as a tree).
+ If the precise function being called is known, FUNC is its FUNCTION_DECL;
+ otherwise, FUNC is 0. */
+
+/* gpr 2 for integer (or soft-float) values; hard regno 16 (%f0)
+ for hard-float values. */
+
+#define RET_REG(MODE) ((GET_MODE_CLASS (MODE) == MODE_INT \
+ || TARGET_SOFT_FLOAT ) ? 2 : 16)
+
+
+/* For structs the address is passed, and the callee makes a
+ copy only if needed. */
+
+#define FUNCTION_ARG_PASS_BY_REFERENCE(CUM, MODE, TYPE, NAMED) \
+ s390_function_arg_pass_by_reference (MODE, TYPE)
+
+
+/* Register 2 (and 3) for integral values
+ or floating point register 0 (and 2) for fp values are used.
+ Integral values narrower than a word (and pointers) are promoted
+ to word_mode; REAL_TYPE values go to hard regno 16 (%f0) when
+ hard float is enabled. */
+
+#define FUNCTION_VALUE(VALTYPE, FUNC) \
+ gen_rtx_REG ((INTEGRAL_TYPE_P (VALTYPE) \
+ && TYPE_PRECISION (VALTYPE) < BITS_PER_WORD) \
+ || POINTER_TYPE_P (VALTYPE) \
+ ? word_mode : TYPE_MODE (VALTYPE), \
+ TREE_CODE (VALTYPE) == REAL_TYPE && TARGET_HARD_FLOAT ? 16 : 2)
+
+/* Define how to find the value returned by a library function assuming
+ the value has mode MODE. Uses gen_rtx_REG for consistency with
+ FUNCTION_VALUE above (gen_rtx (REG, ...) is the legacy spelling),
+ and parenthesizes the macro argument. */
+
+#define LIBCALL_VALUE(MODE) gen_rtx_REG ((MODE), RET_REG (MODE))
+
+/* 1 if N is a possible register number for a function value:
+ gpr 2 for integers, hard regno 16 (%f0) for floats. */
+
+#define FUNCTION_VALUE_REGNO_P(N) ((N) == 2 || (N) == 16)
+
+/* The definition of this macro implies that there are cases where
+ a scalar value cannot be returned in registers:
+ BLKmode aggregates and complex values are returned in memory. */
+
+#define RETURN_IN_MEMORY(type) \
+ (TYPE_MODE (type) == BLKmode || \
+ TYPE_MODE (type) == DCmode || \
+ TYPE_MODE (type) == SCmode)
+
+/* Mode of stack savearea.
+ FUNCTION is VOIDmode because calling convention maintains SP.
+ BLOCK needs Pmode for SP.
+ NONLOCAL needs twice Pmode to maintain both backchain and SP.
+ The LEVEL argument is parenthesized so expression arguments
+ expand safely. */
+
+#define STACK_SAVEAREA_MODE(LEVEL) \
+ ((LEVEL) == SAVE_FUNCTION ? VOIDmode \
+ : (LEVEL) == SAVE_NONLOCAL ? (TARGET_64BIT ? TImode : DImode) : Pmode)
+
+/* Structure value address is passed as invisible first argument (gpr 2). */
+
+#define STRUCT_VALUE 0
+
+/* This macro definition sets up a default value for `main' to return. */
+
+#define DEFAULT_MAIN_RETURN c_expand_return (integer_zero_node)
+
+/* Length in units (bytes) of the trampoline for entering a nested
+ function: 36 bytes in 64-bit mode, 20 bytes in 31-bit mode. */
+
+#define TRAMPOLINE_SIZE (TARGET_64BIT ? 36 : 20)
+
+/* Initialize the dynamic part of trampoline. */
+
+#define INITIALIZE_TRAMPOLINE(ADDR, FNADDR, CXT) \
+ s390_initialize_trampoline ((ADDR), (FNADDR), (CXT))
+
+/* Template for constant part of trampoline. */
+
+#define TRAMPOLINE_TEMPLATE(FILE) \
+ s390_trampoline_template (FILE)
+
+/* Output assembler code to FILE to increment profiler label # LABELNO
+ for profiling a function entry. The return address in r14 is saved
+ and restored around the call to _mcount; UNSPEC 113 wraps the symbol
+ for PLT-relative addressing when generating PIC code. The 31-bit
+ branch prints operands out of the s390_profile table defined in the
+ .c file. Fixes: s390_pool_count was declared with implicit int
+ (invalid in C99), and an unused outer `rtx tmp;' (shadowed by the
+ inner `rtx tmp[1]') has been removed. */
+
+#define FUNCTION_PROFILER(FILE, LABELNO) \
+do { \
+ extern rtx s390_profile[]; \
+ extern int s390_pool_count; \
+ static char label[128]; \
+ fprintf (FILE, "# function profiler \n"); \
+ if (TARGET_64BIT) \
+ { \
+ rtx tmp[1]; \
+ output_asm_insn ("stg\t14,8(15)", tmp); \
+ sprintf (label, "%sP%d", LPREFIX, LABELNO); \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, label); \
+ SYMBOL_REF_FLAG (tmp[0]) = 1; \
+ output_asm_insn ("larl\t1,%0", tmp); \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, "_mcount"); \
+ if (flag_pic) \
+ { \
+ tmp[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, tmp[0]), 113); \
+ tmp[0] = gen_rtx_CONST (Pmode, tmp[0]); \
+ } \
+ output_asm_insn ("brasl\t14,%0", tmp); \
+ output_asm_insn ("lg\t14,8(15)", tmp); \
+ } \
+ else \
+ { \
+ output_asm_insn ("l 14,4(15)", s390_profile); \
+ s390_pool_count = 0; \
+ output_asm_insn ("st 14,4(15)", s390_profile); \
+ output_asm_insn ("l 14,%4", s390_profile); \
+ output_asm_insn ("l 1,%9", s390_profile); \
+ if (flag_pic) \
+ { \
+ output_asm_insn ("ar 1,13", s390_profile); \
+ output_asm_insn ("bas 14,0(14,13)", s390_profile); \
+ } \
+ else \
+ { \
+ output_asm_insn ("basr 14,14", s390_profile); \
+ } \
+ output_asm_insn ("l 14,4(15)", s390_profile); \
+ } \
+} while (0)
+
+/* #define PROFILE_BEFORE_PROLOGUE */
+
+/* There are three profiling modes for basic blocks available.
+ The modes are selected at compile time by using the options
+ -a or -ax of the gnu compiler.
+ The variable `profile_block_flag' will be set according to the
+ selected option.
+
+ profile_block_flag == 0, no option used:
+
+ No profiling done.
+
+ profile_block_flag == 1, -a option used.
+
+ Count frequency of execution of every basic block.
+
+ profile_block_flag == 2, -ax option used.
+
+ Generate code to allow several different profiling modes at run time.
+ Available modes are:
+ Produce a trace of all basic blocks.
+ Count frequency of jump instructions executed.
+ In every mode it is possible to start profiling upon entering
+ certain functions and to disable profiling of some other functions.
+
+ The result of basic-block profiling will be written to a file `bb.out'.
+ If the -ax option is used parameters for the profiling will be read
+ from file `bb.in'.
+
+*/
+
+/* The following macro shall output assembler code to FILE
+ to initialize basic-block profiling.
+
+ If profile_block_flag == 2
+
+ Output code to call the subroutine `__bb_init_trace_func'
+ and pass two parameters to it. The first parameter is
+ the address of a block allocated in the object module.
+ The second parameter is the number of the first basic block
+ of the function.
+
+ The name of the block is a local symbol made with this statement:
+
+ ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 0);
+
+ Of course, since you are writing the definition of
+ `ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
+ can take a short cut in the definition of this macro and use the
+ name that you know will result.
+
+ The number of the first basic block of the function is
+ passed to the macro in BLOCK_OR_LABEL.
+
+ If described in a virtual assembler language the code to be
+ output looks like:
+
+ parameter1 <- LPBX0
+ parameter2 <- BLOCK_OR_LABEL
+ call __bb_init_trace_func
+
+ else if profile_block_flag != 0
+
+ Output code to call the subroutine `__bb_init_func'
+ and pass one single parameter to it, which is the same
+ as the first parameter to `__bb_init_trace_func'.
+
+ The first word of this parameter is a flag which will be nonzero if
+ the object module has already been initialized. So test this word
+ first, and do not call `__bb_init_func' if the flag is nonzero.
+ Note: When profile_block_flag == 2 the test need not be done
+ but `__bb_init_trace_func' *must* be called.
+
+ BLOCK_OR_LABEL may be used to generate a label number as a
+ branch destination in case `__bb_init_func' will not be called.
+
+ If described in a virtual assembler language the code to be
+ output looks like:
+
+ cmp (LPBX0),0
+ jne local_label
+ parameter1 <- LPBX0
+ call __bb_init_func
+local_label:
+
+*/
+
+#undef FUNCTION_BLOCK_PROFILER
+/* Output code to call __bb_init_trace_func (profile_block_flag == 2)
+ or __bb_init_func (otherwise) on function entry. Clobbered registers
+ are saved around the call and the condition code is preserved via
+ ipm/spm. Fix: in the 64-bit else-branch, llill loads all 16 low
+ bits, so the value must be masked with 0xffff (masking with 0x7fff
+ silently dropped bit 15); macro argument uses are parenthesized. */
+#define FUNCTION_BLOCK_PROFILER(FILE, BLOCK_OR_LABEL) \
+do \
+ { \
+ if (TARGET_64BIT) \
+ { \
+ rtx tmp[1]; \
+ fprintf (FILE, "# function block profiler %d \n", profile_block_flag); \
+ output_asm_insn ("ipm 0", tmp); \
+ output_asm_insn ("aghi 15,-224", tmp); \
+ output_asm_insn ("stmg 14,5,160(15)", tmp); \
+ output_asm_insn ("larl 2,.LPBX0", tmp); \
+ switch (profile_block_flag) \
+ { \
+ case 2: \
+ if ((BLOCK_OR_LABEL) < 0x10000) { \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, (BLOCK_OR_LABEL)); \
+ output_asm_insn ("llill 3,%x0", tmp); \
+ } else { \
+ int bo = (BLOCK_OR_LABEL); \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, bo&0xffff); \
+ output_asm_insn ("llill 3,%x0", tmp); \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, (bo&0xffff0000)>>16); \
+ output_asm_insn ("iilh 3,%x0", tmp); \
+ } \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, "__bb_init_trace_func"); \
+ if (flag_pic) \
+ { \
+ tmp[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, tmp[0]), 113); \
+ tmp[0] = gen_rtx_CONST (Pmode, tmp[0]); \
+ } \
+ output_asm_insn ("brasl\t14,%0", tmp); \
+ break; \
+ default: \
+ output_asm_insn ("cli 7(2),0", tmp); \
+ output_asm_insn ("jne 2f", tmp); \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, "__bb_init_func"); \
+ if (flag_pic) \
+ { \
+ tmp[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, tmp[0]), 113); \
+ tmp[0] = gen_rtx_CONST (Pmode, tmp[0]); \
+ } \
+ output_asm_insn ("brasl\t14,%0", tmp); \
+ break; \
+ } \
+ output_asm_insn ("2:", tmp); \
+ output_asm_insn ("lmg 14,5,160(15)", tmp); \
+ output_asm_insn ("aghi 15,224", tmp); \
+ output_asm_insn ("spm 0", tmp); \
+ } \
+ else \
+ { \
+ extern rtx s390_profile[]; \
+ fprintf (FILE, "# function block profiler %d \n", profile_block_flag); \
+ output_asm_insn ("ipm 0", s390_profile); \
+ output_asm_insn ("ahi 15,-128", s390_profile); \
+ output_asm_insn ("stm 14,5,96(15)", s390_profile); \
+ output_asm_insn ("l 2,%6", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("ar 2,13", s390_profile); \
+ switch (profile_block_flag) \
+ { \
+ case 2: \
+ output_asm_insn ("l 4,%1", s390_profile); \
+ if ((BLOCK_OR_LABEL) < 0x8000) { \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (BLOCK_OR_LABEL)); \
+ output_asm_insn ("lhi 3,%8", s390_profile); \
+ } else { \
+ int bo = (BLOCK_OR_LABEL); \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (bo&0xffff8000)>>15); \
+ output_asm_insn ("lhi 3,%8", s390_profile); \
+ output_asm_insn ("sll 3,15", s390_profile); \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, bo&0x7fff); \
+ output_asm_insn ("ahi 3,%8", s390_profile); \
+ } \
+ break; \
+ default: \
+ output_asm_insn ("l 4,%0", s390_profile); \
+ output_asm_insn ("cli 3(2),0", s390_profile); \
+ output_asm_insn ("jne 2f", s390_profile); \
+ break; \
+ } \
+ if (flag_pic) \
+ output_asm_insn ("bas 14,0(4,13)", s390_profile); \
+ else \
+ output_asm_insn ("basr 14,4", s390_profile); \
+ output_asm_insn ("2:", s390_profile); \
+ output_asm_insn ("lm 14,5,96(15)", s390_profile); \
+ output_asm_insn ("ahi 15,128", s390_profile); \
+ output_asm_insn ("spm 0", s390_profile); \
+ } \
+ } while (0)
+
+/* The following macro shall output assembler code to FILE
+ to increment a counter associated with basic block number BLOCKNO.
+
+ If profile_block_flag == 2
+
+ Output code to initialize the global structure `__bb' and
+ call the function `__bb_trace_func' which will increment the
+ counter.
+
+ `__bb' consists of two words. In the first word the number
+ of the basic block has to be stored. In the second word
+ the address of a block allocated in the object module
+ has to be stored.
+
+ The basic block number is given by BLOCKNO.
+
+ The address of the block is given by the label created with
+
+ ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 0);
+
+ by FUNCTION_BLOCK_PROFILER.
+
+ Of course, since you are writing the definition of
+ `ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
+ can take a short cut in the definition of this macro and use the
+ name that you know will result.
+
+ If described in a virtual assembler language the code to be
+ output looks like:
+
+ move BLOCKNO -> (__bb)
+ move LPBX0 -> (__bb+4)
+ call __bb_trace_func
+
+ Note that function `__bb_trace_func' must not change the
+ machine state, especially the flag register. To grant
+ this, you must output code to save and restore registers
+ either in this macro or in the macros MACHINE_STATE_SAVE
+ and MACHINE_STATE_RESTORE. The last two macros will be
+ used in the function `__bb_trace_func', so you must make
+ sure that the function prologue does not change any
+ register prior to saving it with MACHINE_STATE_SAVE.
+
+ else if profile_block_flag != 0
+
+ Output code to increment the counter directly.
+ Basic blocks are numbered separately from zero within each
+ compiled object module. The count associated with block number
+ BLOCKNO is at index BLOCKNO in an array of words; the name of
+ this array is a local symbol made with this statement:
+
+ ASM_GENERATE_INTERNAL_LABEL (BUFFER, "LPBX", 2);
+
+ Of course, since you are writing the definition of
+ `ASM_GENERATE_INTERNAL_LABEL' as well as that of this macro, you
+ can take a short cut in the definition of this macro and use the
+ name that you know will result.
+
+ If described in a virtual assembler language the code to be
+ output looks like:
+
+ inc (LPBX2+4*BLOCKNO)
+
+*/
+
+/* Output code to record execution of basic block BLOCKNO: either set up
+ the two-word __bb structure and call __bb_trace_func
+ (profile_block_flag == 2), or increment the counter in the .LPBX2
+ array directly. Registers and the condition code (ipm/spm) are
+ saved and restored around the sequence. Fixes: the 64-bit epilogue
+ used "ahi 15,224" to undo "aghi 15,-224"; the second store in the
+ 64-bit case 2 clobbered the block number instead of filling the
+ second word of __bb; the 31-bit case 2 set s390_profile[7] while
+ printing operand %8 (and "ahi 3,%7" after storing into slot 8); and
+ the mvc wrote LPBX0 over the block number instead of __bb+4. */
+#define BLOCK_PROFILER(FILE, BLOCKNO) \
+do \
+ { \
+ if (TARGET_64BIT) \
+ { \
+ rtx tmp[1]; \
+ fprintf (FILE, "# block profiler %d block %d \n", \
+ profile_block_flag,BLOCKNO); \
+ output_asm_insn ("ipm 14", tmp); \
+ output_asm_insn ("aghi 15,-224", tmp); \
+ output_asm_insn ("stmg 14,5,160(15)", tmp); \
+ /* NOTE(review): emitted symbol is "_bb" while the runtime block \
+ structure is documented as "__bb" -- confirm assembler name. */ \
+ output_asm_insn ("larl 2,_bb", tmp); \
+ if (((BLOCKNO)*8) < 0x10000) { \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, ((BLOCKNO)*8)); \
+ output_asm_insn ("llill 3,%x0", tmp); \
+ } else { \
+ int bo = (BLOCKNO)*8; \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, bo&0xffff); \
+ output_asm_insn ("llill 3,%x0", tmp); \
+ tmp[0] = gen_rtx_CONST_INT (Pmode, (bo&0xffff0000)>>16); \
+ output_asm_insn ("iilh 3,%x0", tmp); \
+ } \
+ switch (profile_block_flag) \
+ { \
+ case 2: \
+ output_asm_insn ("stg 3,0(2)", tmp); \
+ output_asm_insn ("larl 3,.LPBX0", tmp); \
+ /* Module address goes into the SECOND word of __bb. */ \
+ output_asm_insn ("stg 3,8(2)", tmp); \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, "__bb_trace_func"); \
+ if (flag_pic) \
+ { \
+ tmp[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, tmp[0]), 113); \
+ tmp[0] = gen_rtx_CONST (Pmode, tmp[0]); \
+ } \
+ output_asm_insn ("brasl\t14,%0", tmp); \
+ break; \
+ default: \
+ output_asm_insn ("larl 2,.LPBX2", tmp); \
+ output_asm_insn ("la 2,0(2,3)", tmp); \
+ output_asm_insn ("lg 3,0(2)", tmp); \
+ output_asm_insn ("aghi 3,1", tmp); \
+ output_asm_insn ("stg 3,0(2)", tmp); \
+ break; \
+ } \
+ output_asm_insn ("lmg 14,5,160(15)", tmp); \
+ /* Must be aghi to undo the 64-bit "aghi 15,-224" above. */ \
+ output_asm_insn ("aghi 15,224", tmp); \
+ output_asm_insn ("spm 14", tmp); \
+ } \
+ else \
+ { \
+ extern rtx s390_profile[]; \
+ fprintf (FILE, "# block profiler %d block %d \n", \
+ profile_block_flag,BLOCKNO); \
+ output_asm_insn ("ipm 14", s390_profile); \
+ output_asm_insn ("ahi 15,-128", s390_profile); \
+ output_asm_insn ("stm 14,5,96(15)", s390_profile); \
+ switch (profile_block_flag) \
+ { \
+ case 2: \
+ output_asm_insn ("l 4,%2", s390_profile); \
+ output_asm_insn ("l 2,%5", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("ar 2,13", s390_profile); \
+ if ((BLOCKNO) < 0x8000) { \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (BLOCKNO)*4); \
+ output_asm_insn ("lhi 3,%8", s390_profile); \
+ } else { \
+ int bo = (BLOCKNO); \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (bo&0xffff8000)>>15); \
+ output_asm_insn ("lhi 3,%8", s390_profile); \
+ output_asm_insn ("sll 3,15", s390_profile); \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, bo&0x7fff); \
+ output_asm_insn ("ahi 3,%8", s390_profile); \
+ } \
+ output_asm_insn ("st 3,0(2)", s390_profile); \
+ /* LPBX0 belongs at __bb+4, after the block number. \
+ NOTE(review): confirm operand slot %5 really holds the \
+ LPBX0 address (FUNCTION_BLOCK_PROFILER uses %6 for it). */ \
+ output_asm_insn ("mvc 4(4,2),%5", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("bas 14,0(4,13)", s390_profile); \
+ else \
+ output_asm_insn ("basr 14,4", s390_profile); \
+ break; \
+ default: \
+ if ((BLOCKNO) < 0x2000) { \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (BLOCKNO)*4); \
+ output_asm_insn ("lhi 2,%8", s390_profile); \
+ } else { \
+ int bo = (BLOCKNO)*4; \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, (bo&0xffff8000)>>15); \
+ output_asm_insn ("lhi 2,%8", s390_profile); \
+ output_asm_insn ("sll 2,15", s390_profile); \
+ s390_profile[8] = gen_rtx_CONST_INT (Pmode, bo&0x7fff); \
+ output_asm_insn ("ahi 2,%8", s390_profile); \
+ } \
+ output_asm_insn ("a 2,%7", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("l 3,0(2,13)", s390_profile); \
+ else \
+ output_asm_insn ("l 3,0(2)", s390_profile); \
+ output_asm_insn ("ahi 3,1", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("st 3,0(2,13)", s390_profile); \
+ else \
+ output_asm_insn ("st 3,0(2)", s390_profile); \
+ break; \
+ } \
+ output_asm_insn ("lm 14,5,96(15)", s390_profile); \
+ output_asm_insn ("ahi 15,128", s390_profile); \
+ output_asm_insn ("spm 14", s390_profile); \
+ } \
+ } while (0)
+
+
+/* The following macro shall output assembler code to FILE
+ to indicate a return from function during basic-block profiling.
+
+ If profile_block_flag == 2:
+
+ Output assembler code to call function `__bb_trace_ret'.
+
+ Note that function `__bb_trace_ret' must not change the
+ machine state, especially the flag register. To grant
+ this, you must output code to save and restore registers
+ either in this macro or in the macros MACHINE_STATE_SAVE_RET
+ and MACHINE_STATE_RESTORE_RET. The last two macros will be
+ used in the function `__bb_trace_ret', so you must make
+ sure that the function prologue does not change any
+ register prior to saving it with MACHINE_STATE_SAVE_RET.
+
+ else if profile_block_flag != 0:
+
+ The macro will not be used, so it need not distinguish
+ these cases.
+*/
+
+/* Output code calling __bb_trace_ret on function return. Clobbered
+ registers are saved with stm/stmg and the condition code is
+ preserved via ipm/spm, as required (the callee must not observe any
+ machine-state change). */
+#define FUNCTION_BLOCK_PROFILER_EXIT(FILE) \
+do { \
+ if (TARGET_64BIT) \
+ { \
+ rtx tmp[1]; \
+ fprintf (FILE, "# block profiler exit \n"); \
+ output_asm_insn ("ipm 14", tmp); \
+ output_asm_insn ("aghi 15,-224", tmp); \
+ output_asm_insn ("stmg 14,5,160(15)", tmp); \
+ tmp[0] = gen_rtx_SYMBOL_REF (Pmode, "__bb_trace_ret"); \
+ if (flag_pic) \
+ { \
+ tmp[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, tmp[0]), 113); \
+ tmp[0] = gen_rtx_CONST (Pmode, tmp[0]); \
+ } \
+ output_asm_insn ("brasl 14,%0", tmp); \
+ output_asm_insn ("lmg 14,5,160(15)", tmp); \
+ output_asm_insn ("aghi 15,224", tmp); \
+ output_asm_insn ("spm 14", tmp); \
+ } \
+ else \
+ { \
+ extern rtx s390_profile[]; \
+ fprintf (FILE, "# block profiler exit \n"); \
+ output_asm_insn ("ipm 14", s390_profile); \
+ output_asm_insn ("ahi 15,-128", s390_profile); \
+ output_asm_insn ("stm 14,5,96(15)", s390_profile); \
+ output_asm_insn ("l 4,%3", s390_profile); \
+ if (flag_pic) \
+ output_asm_insn ("bas 14,0(4,13)", s390_profile); \
+ else \
+ output_asm_insn ("basr 14,4", s390_profile); \
+ output_asm_insn ("lm 14,5,96(15)", s390_profile); \
+ output_asm_insn ("ahi 15,128", s390_profile); \
+ output_asm_insn ("spm 14", s390_profile); \
+ } \
+ } while (0)
+
+/* The function `__bb_trace_func' is called in every basic block
+ and is not allowed to change the machine state. Saving (restoring)
+ the state can either be done in the BLOCK_PROFILER macro,
+ before calling function (rsp. after returning from function)
+ `__bb_trace_func', or it can be done inside the function by
+ defining the macros:
+
+ MACHINE_STATE_SAVE(ID)
+ MACHINE_STATE_RESTORE(ID)
+
+ In the latter case care must be taken, that the prologue code
+ of function `__bb_trace_func' does not already change the
+ state prior to saving it with MACHINE_STATE_SAVE.
+
+ The parameter `ID' is a string identifying a unique macro use.
+
+ On the s390 all save/restore is done in macros above
+*/
+
+/*
+#define MACHINE_STATE_SAVE(ID) \
+ fprintf (FILE, "\tahi 15,-128 # save state\n"); \
+ fprintf (FILE, "\tstm 14,5,96(15)\n"); \
+
+#define MACHINE_STATE_RESTORE(ID) \
+ fprintf (FILE, "\tlm 14,5,96(15) # restore state\n"); \
+ fprintf (FILE, "\tahi 15,128\n"); \
+*/
+
+
+/* Define EXIT_IGNORE_STACK if, when returning from a function, the stack
+ pointer does not matter (provided there is a frame pointer). */
+
+#define EXIT_IGNORE_STACK 1
+
+/* Addressing modes, and classification of registers for them.
+ S/390 has no auto-increment/decrement addressing modes, so all
+ four macros stay undefined. */
+
+/* #define HAVE_POST_INCREMENT */
+/* #define HAVE_POST_DECREMENT */
+
+/* #define HAVE_PRE_DECREMENT */
+/* #define HAVE_PRE_INCREMENT */
+
+/* These assume that REGNO is a hard or pseudo reg number. They give
+ nonzero only if REGNO is a hard reg of the suitable class or a pseudo
+ reg currently allocated to a suitable hard reg.
+ These definitions are NOT overridden anywhere. */
+
+/* General registers 1..15 (gpr 0 cannot serve as index or base on
+ S/390), the argument pointer, or a pseudo currently allocated to
+ such a hard reg. The REGNO argument is parenthesized everywhere,
+ including the reg_renumber subscripts, so expression arguments
+ expand safely. */
+
+#define REGNO_OK_FOR_INDEX_P(REGNO) \
+ (((REGNO) > 0 && (REGNO) < 16) || (REGNO) == ARG_POINTER_REGNUM \
+ /* || (REGNO) == FRAME_POINTER_REGNUM */ \
+ || (reg_renumber[(REGNO)] > 0 && reg_renumber[(REGNO)] < 16))
+
+#define REGNO_OK_FOR_BASE_P(REGNO) REGNO_OK_FOR_INDEX_P (REGNO)
+
+#define REGNO_OK_FOR_DATA_P(REGNO) \
+ ((REGNO) < 16 || (unsigned) reg_renumber[(REGNO)] < 16)
+
+#define REGNO_OK_FOR_FP_P(REGNO) \
+ FLOAT_REGNO_P (REGNO)
+
+/* Now macros that check whether X is a register and also,
+ strictly, whether it is in a specified class. */
+
+/* 1 if X is a data register. */
+
+#define DATA_REG_P(X) (REG_P (X) && REGNO_OK_FOR_DATA_P (REGNO (X)))
+
+/* 1 if X is an fp register. */
+
+#define FP_REG_P(X) (REG_P (X) && REGNO_OK_FOR_FP_P (REGNO (X)))
+
+/* 1 if X is an address register. */
+
+#define ADDRESS_REG_P(X) (REG_P (X) && REGNO_OK_FOR_BASE_P (REGNO (X)))
+
+/* Maximum number of registers that can appear in a valid memory address. */
+
+#define MAX_REGS_PER_ADDRESS 2
+
+/* Recognize any constant value that is a valid address.
+ On S/390 no constant by itself is a valid address. */
+
+#define CONSTANT_ADDRESS_P(X) 0
+
+/* Nonzero if X directly or indirectly mentions a symbolic reference. */
+
+#define SYMBOLIC_CONST(X) \
+(GET_CODE (X) == SYMBOL_REF \
+ || GET_CODE (X) == LABEL_REF \
+ || (GET_CODE (X) == CONST && symbolic_reference_mentioned_p (X)))
+
+/* General operands are everything except SYMBOL_REF, CONST and
+ CONST_DOUBLE; those have to be forced into the constant pool.
+ CONST_INTs have to be forced into the constant pool if greater than
+ 64k. Depending on the insn they may have to be forced into the pool
+ for smaller values too; in that case we work with a nonimmediate
+ operand. */
+
+#define LEGITIMATE_PIC_OPERAND_P(X) \
+ legitimate_pic_operand_p (X)
+
+/* Nonzero if the constant value X is a legitimate general operand.
+ It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
+
+#define LEGITIMATE_CONSTANT_P(X) \
+ legitimate_constant_p (X)
+
+/* The macros REG_OK_FOR..._P assume that the arg is a REG rtx and check
+ its validity for a certain class. We have two alternate definitions
+ for each of them. The usual definition accepts all pseudo regs; the
+ other rejects them all. The symbol REG_OK_STRICT causes the latter
+ definition to be used.
+
+ Most source files want to accept pseudo regs in the hope that they will
+ get allocated to the class that the insn wants them to be in.
+ Some source files that are used after register allocation
+ need to be strict. */
+
+/*
+ * Nonzero if X is a hard reg that can be used as an index or if it is
+ * a pseudo reg. Note that gpr 0 is excluded (it cannot act as an
+ * index register on S/390).
+ */
+
+#define REG_OK_FOR_INDEX_NONSTRICT_P(X) \
+((GET_MODE (X) == Pmode) && \
+ ((REGNO (X) > 0 && REGNO (X) < 16) || \
+ (REGNO (X) == ARG_POINTER_REGNUM) || \
+ (REGNO (X) >= FIRST_PSEUDO_REGISTER)))
+
+/* Nonzero if X is a hard reg that can be used as a base reg or if it is
+ a pseudo reg. */
+
+#define REG_OK_FOR_BASE_NONSTRICT_P(X) REG_OK_FOR_INDEX_NONSTRICT_P (X)
+
+/* Nonzero if X is a hard reg that can be used as an index. */
+
+#define REG_OK_FOR_INDEX_STRICT_P(X) \
+((GET_MODE (X) == Pmode) && (REGNO_OK_FOR_INDEX_P (REGNO (X))))
+
+/* Nonzero if X is a hard reg that can be used as a base reg. */
+
+#define REG_OK_FOR_BASE_STRICT_P(X) \
+((GET_MODE (X) == Pmode) && (REGNO_OK_FOR_BASE_P (REGNO (X))))
+
+
+#ifndef REG_OK_STRICT
+#define REG_OK_FOR_INDEX_P(X) REG_OK_FOR_INDEX_NONSTRICT_P(X)
+#define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_NONSTRICT_P(X)
+#else
+#define REG_OK_FOR_INDEX_P(X) REG_OK_FOR_INDEX_STRICT_P(X)
+#define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_STRICT_P(X)
+#endif
+
+
+/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression that is a
+ valid memory address for an instruction.
+ The MODE argument is the machine mode for the MEM expression
+ that wants to use this address.
+
+ Both variants delegate to legitimate_address_p; the third argument
+ selects strict (1, after reload) or non-strict (0) register checks.
+
+ The other macros defined here are used only in GO_IF_LEGITIMATE_ADDRESS,
+ except for CONSTANT_ADDRESS_P which is actually machine-independent. */
+
+#ifdef REG_OK_STRICT
+#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, ADDR) \
+{ \
+ if (legitimate_address_p (MODE, X, 1)) \
+ goto ADDR; \
+}
+#else
+#define GO_IF_LEGITIMATE_ADDRESS(MODE, X, ADDR) \
+{ \
+ if (legitimate_address_p (MODE, X, 0)) \
+ goto ADDR; \
+}
+#endif
+
+
+/* S/390 has no mode dependent addresses, so this macro is empty. */
+
+#define GO_IF_MODE_DEPENDENT_ADDRESS(ADDR, LABEL)
+
+/* Try machine-dependent ways of modifying an illegitimate address
+ to be legitimate. If we find one, return the new, valid address.
+ This macro is used in only one place: `memory_address' in explow.c. */
+
+#define LEGITIMIZE_ADDRESS(X, OLDX, MODE, WIN) \
+{ \
+ (X) = legitimize_address (X, OLDX, MODE); \
+ if (memory_address_p (MODE, X)) \
+ goto WIN; \
+}
+
+/* Specify the machine mode that this machine uses for the index in the
+ tablejump instruction. */
+
+#define CASE_VECTOR_MODE (TARGET_64BIT ? DImode : SImode)
+
+/* Define this if the tablejump instruction expects the table to contain
+ offsets from the address of the table.
+ Do not define this if the table should contain absolute addresses. */
+
+/* #define CASE_VECTOR_PC_RELATIVE */
+
+/* Loads of integral modes narrower than SImode from memory perform
+ sign_extend or zero_extend. In our case sign extension happens for
+ halfwords (HImode); QImode is zero-extended only in 64-bit mode,
+ otherwise no extension is performed. */
+
+#define LOAD_EXTEND_OP(MODE) \
+(TARGET_64BIT ? ((MODE) == QImode ? ZERO_EXTEND : \
+ (MODE) == HImode ? SIGN_EXTEND : NIL) \
+ : ((MODE) == HImode ? SIGN_EXTEND : NIL))
+
+/* Specify the tree operation to be used to convert reals to integers. */
+
+#define IMPLICIT_FIX_EXPR FIX_ROUND_EXPR
+
+/* Define this if fixuns_trunc is the same as fix_trunc. */
+
+/* #define FIXUNS_TRUNC_LIKE_FIX_TRUNC */
+
+/* We use "unsigned char" as default. */
+
+#define DEFAULT_SIGNED_CHAR 0
+
+/* This is the kind of divide that is easiest to do in the general case. */
+
+#define EASY_DIV_EXPR TRUNC_DIV_EXPR
+
+/* Max number of bytes we can move from memory to memory in one reasonably
+ fast instruction (MVC handles up to 256 bytes). */
+
+#define MOVE_MAX 256
+
+/* Define this if zero-extension is slow (more than one real instruction). */
+
+#define SLOW_ZERO_EXTEND
+
+/* Nonzero if access to memory by bytes is slow and undesirable. */
+
+#define SLOW_BYTE_ACCESS 1
+
+/* Define if shifts truncate the shift count which implies one can omit
+ a sign-extension or zero-extension of a shift count. */
+
+/* #define SHIFT_COUNT_TRUNCATED */
+
+/* Value is 1 if truncating an integer of INPREC bits to OUTPREC bits
+ is done just by pretending it is already truncated. */
+
+#define TRULY_NOOP_TRUNCATION(OUTPREC, INPREC) 1
+
+/* We assume that the store-condition-codes instructions store 0 for false
+ and some other value for true. This is the value stored for true. */
+
+/* #define STORE_FLAG_VALUE -1 */
+
+/* When a prototype says `char' or `short', really pass an `int'. */
+
+#define PROMOTE_PROTOTYPES 1
+
+/* Don't perform CSE on function addresses. */
+
+#define NO_FUNCTION_CSE
+
+/* Specify the machine mode that pointers have.
+ After generation of rtl, the compiler makes no further distinction
+ between pointers and any other objects of this machine mode. */
+
+#define Pmode (TARGET_64BIT ? DImode : SImode)
+
+/* A function address in a call instruction is a byte address (for
+ indexing purposes) so give the MEM rtx a byte's mode. */
+
+#define FUNCTION_MODE QImode
+
+
+/* A part of a C `switch' statement that describes the relative costs
+ of constant RTL expressions. It must contain `case' labels for
+ expression codes `const_int', `const', `symbol_ref', `label_ref'
+ and `const_double'. Each case must ultimately reach a `return'
+ statement to return the relative cost of the use of that kind of
+ constant value in an expression. The cost may depend on the
+ precise value of the constant, which is available for examination
+ in X, and the rtx code of the expression in which it is contained,
+ found in OUTER_CODE.
+
+ CODE is the expression code--redundant, since it can be obtained
+ with `GET_CODE (X)'. */
+/* Force_const_mem does not work out of reload, because the saveable_obstack
+ is set to reload_obstack, which does not live long enough.
+ Because of this we cannot use force_const_mem in addsi3.
+ This leads to problems with gen_add2_insn with a constant greater
+ than a short. Because of that we give an addition of larger
+ constants a cost of 3 (reload1.c 10096). */
+
+
+#define CONST_COSTS(RTX, CODE, OUTER_CODE) \
+ case CONST: \
+ if ((GET_CODE (XEXP (RTX, 0)) == MINUS) && \
+ (GET_CODE (XEXP (XEXP (RTX, 0), 1)) != CONST_INT)) \
+ return 1000; /* fallthrough */ \
+ case CONST_INT: /* NOTE(review): a CONST also falls through to here, but INTVAL is only meaningful for CONST_INT -- confirm intended */ \
+ if ((OUTER_CODE == PLUS) && \
+ ((INTVAL (RTX) > 32767) || \
+ (INTVAL (RTX) < -32768))) \
+ return 3; /* fallthrough */ \
+ case LABEL_REF: \
+ case SYMBOL_REF: \
+ case CONST_DOUBLE: \
+ return 1; \
+
+
+/* Like `CONST_COSTS' but applies to nonconstant RTL expressions.
+ This can be used, for example, to indicate how costly a multiply
+ instruction is. In writing this macro, you can use the construct
+ `COSTS_N_INSNS (N)' to specify a cost equal to N fast
+ instructions. OUTER_CODE is the code of the expression in which X
+ is contained.
+
+ This macro is optional; do not define it if the default cost
+ assumptions are adequate for the target machine. */
+
+#define RTX_COSTS(X, CODE, OUTER_CODE) \
+ case ASHIFT: \
+ case ASHIFTRT: \
+ case LSHIFTRT: \
+ case PLUS: \
+ case AND: \
+ case IOR: \
+ case XOR: \
+ case MINUS: \
+ case NEG: \
+ case NOT: \
+ return 1; \
+ case MULT: \
+ if (GET_MODE (XEXP (X, 0)) == DImode) /* 64-bit multiply is far slower */ \
+ return 40; \
+ else \
+ return 7; \
+ case DIV: \
+ case UDIV: \
+ case MOD: \
+ case UMOD: \
+ return 33;
+
+
+/* An expression giving the cost of an addressing mode that contains
+ ADDRESS. If not defined, the cost is computed from the ADDRESS
+ expression and the `CONST_COSTS' values.
+
+ For most CISC machines, the default cost is a good approximation
+ of the true cost of the addressing mode. However, on RISC
+ machines, all instructions normally have the same length and
+ execution time. Hence all addresses will have equal costs.
+
+ In cases where more than one form of an address is known, the form
+ with the lowest cost will be used. If multiple forms have the
+ same, lowest, cost, the one that is the most complex will be used.
+
+ For example, suppose an address that is equal to the sum of a
+ register and a constant is used twice in the same basic block.
+ When this macro is not defined, the address will be computed in a
+ register and memory references will be indirect through that
+ register. On machines where the cost of the addressing mode
+ containing the sum is no higher than that of a simple indirect
+ reference, this will produce an additional instruction and
+ possibly require an additional register. Proper specification of
+ this macro eliminates this overhead for such machines.
+
+ Similar use of this macro is made in strength reduction of loops.
+
+ ADDRESS need not be valid as an address. In such a case, the cost
+ is not relevant and can be any value; invalid addresses need not be
+ assigned a different cost.
+
+ On machines where an address involving more than one register is as
+ cheap as an address computation involving only one register,
+ defining `ADDRESS_COST' to reflect this can cause two registers to
+ be live over a region of code where only one would have been if
+ `ADDRESS_COST' were not defined in that manner. This effect should
+ be considered in the definition of this macro. Equivalent costs
+ should probably only be given to addresses with different numbers
+ of registers on machines with lots of registers.
+
+ This macro will normally either not be defined or be defined as a
+ constant.
+
+ On s390 symbols are expensive if compiled with -fpic. */
+
+#define ADDRESS_COST(RTX) \
+ ((flag_pic && GET_CODE (RTX) == SYMBOL_REF) ? 2 : 1)
+
+/* On s390, copy between fprs and gprs is expensive (goes through
+ memory), so cross-class moves cost 10 versus 1 otherwise. The
+ CLASS arguments are parenthesized so expression arguments expand
+ safely. */
+
+#define REGISTER_MOVE_COST(MODE, CLASS1, CLASS2) \
+ ((((CLASS1) != (CLASS2)) && \
+ ((CLASS1) == FP_REGS || (CLASS2) == FP_REGS)) ? 10 : 1)
+
+
+/* A C expression for the cost of moving data of mode M between a
+ register and memory. A value of 2 is the default; this cost is
+ relative to those in `REGISTER_MOVE_COST'.
+
+ If moving between registers and memory is more expensive than
+ between two registers, you should define this macro to express the
+ relative cost.
+
+ On s390 a register-memory move is rated cheaper (1) than the
+ default, i.e. cheaper than an fpr<->gpr copy above. */
+
+#define MEMORY_MOVE_COST(M, C, I) 1
+
+/* A C expression for the cost of a branch instruction. A value of 1
+ is the default; other values are interpreted relative to that. */
+
+#define BRANCH_COST 1
+
+/* Add any extra modes needed to represent the condition code.
+
+ CCZ -- result zero / nonzero (used for EQ/NE only)
+ CCA -- signed comparison against zero, overflow possible
+ CCU -- unsigned comparison
+ CCS -- signed comparison
+ CCT -- test under mask */
+#define EXTRA_CC_MODES \
+ CC (CCZmode, "CCZ") \
+ CC (CCAmode, "CCA") \
+ CC (CCUmode, "CCU") \
+ CC (CCSmode, "CCS") \
+ CC (CCTmode, "CCT")
+
+/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
+ return the mode to be used for the comparison: CCZ for straight
+ (in)equality, CCS for signed, CCU for unsigned orderings. */
+
+#define SELECT_CC_MODE(OP,X,Y) \
+ ( (OP) == EQ || (OP) == NE ? CCZmode \
+ : (OP) == LE || (OP) == LT || \
+ (OP) == GE || (OP) == GT ? CCSmode \
+ : (OP) == LEU || (OP) == LTU || \
+ (OP) == GEU || (OP) == GTU ? CCUmode \
+ : CCmode )
+
+
+/* Define the information needed to generate branch and scc insns. This is
+ stored from the compare operation. Note that we can't use "rtx" here
+ since it hasn't been defined! */
+
+extern struct rtx_def *s390_compare_op0, *s390_compare_op1;
+
+extern int s390_match_ccmode PARAMS ((struct rtx_def *, int));
+
+
+/* How to refer to registers in assembler output. This sequence is
+ indexed by compiler's hard-register-number (see above).
+ Note the fpr ordering: the even-numbered fprs come first
+ (%f0,%f2,...,%f14), then the odd-numbered ones. %ap is the
+ argument pointer, %cc the condition code. */
+
+#define REGISTER_NAMES \
+{ "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7", \
+ "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15", \
+ "%f0", "%f2", "%f4", "%f6", "%f1", "%f3", "%f5", "%f7", \
+ "%f8", "%f10", "%f12", "%f14", "%f9", "%f11", "%f13", "%f15", \
+ "%ap", "%cc" \
+}
+
+/* implicit call of memcpy, not bcopy */
+
+#define TARGET_MEM_FUNCTIONS
+
+/* Print operand X (an rtx) in assembler syntax to file FILE.
+ CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
+ For `%' followed by punctuation, CODE is the punctuation and X is null. */
+
+#define PRINT_OPERAND(FILE, X, CODE) print_operand (FILE, X, CODE)
+
+#define PRINT_OPERAND_ADDRESS(FILE, ADDR) print_operand_address (FILE, ADDR)
+
+
+/* Define the codes that are matched by predicates in aux-output.c.
+ Each entry names a predicate and lists the rtx codes it can accept. */
+
+#define PREDICATE_CODES \
+ {"s_operand", { MEM }}, \
+ {"bras_sym_operand",{ SYMBOL_REF, CONST }}, \
+ {"r_or_s_operand", { MEM, SUBREG, REG }}, \
+ {"r_or_im8_operand", { CONST_INT, SUBREG, REG }}, \
+ {"r_or_s_or_im8_operand", { MEM, SUBREG, REG, CONST_INT }}, \
+ {"r_or_x_or_im16_operand", { MEM, SUBREG, REG, CONST_INT }}, \
+ {"const0_operand", { CONST_INT, CONST_DOUBLE }}, \
+ {"const1_operand", { CONST_INT, CONST_DOUBLE }}, \
+ {"tmxx_operand", { CONST_INT, MEM }},
+
+
+/* A C statement (sans semicolon) to update the integer variable COST
+ based on the relationship between INSN that is dependent on
+ DEP_INSN through the dependence LINK. The default is to make no
+ adjustment to COST. This can be used for example to specify to
+ the scheduler that an output- or anti-dependence does not incur
+ the same cost as a data-dependence. */
+
+#define ADJUST_COST(insn, link, dep_insn, cost) \
+ (cost) = s390_adjust_cost (insn, link, dep_insn, cost)
+
+
+/* Constant pool bookkeeping for all symbol operands which are
+ replaced with force_const_mem during insn generation. */
+
+extern struct rtx_def *s390_pool_start_insn;
+extern int s390_pool_count;
+extern int s390_nr_constants;
+
+/* The function is split into chunks if the literal pool could
+ overflow; values need to be lowered if displacements could
+ overflow. */
+
+#define S390_REL_MAX 55000
+#define S390_CHUNK_MAX 0x2000
+#define S390_CHUNK_OV 0x8000
+#define S390_POOL_MAX 0xe00
+
+/* Emit the literal pool prologue.
+ First invocation (s390_pool_count == -1): only count the marked
+ pool entries into s390_nr_constants and return. Later
+ invocations: clear all marks, re-mark every constant referenced by
+ the insns from s390_pool_start_insn up to (and including) the
+ first insn for which s390_stop_dump_lit_p is true, propagate marks
+ to constants referenced from other marked constants, and emit the
+ prologue. NOTE(review): depends on varasm.c internals
+ (first_pool, mark_constants, struct pool_constant) -- confirm when
+ updating varasm. */
+#define ASM_OUTPUT_POOL_PROLOGUE(FILE, FUNNAME, fndecl, size) \
+{ \
+ register rtx insn; \
+ struct pool_constant *pool; \
+ \
+ if (s390_pool_count == -1) \
+ { \
+ s390_nr_constants = 0; \
+ for (pool = first_pool; pool; pool = pool->next) \
+ if (pool->mark) s390_nr_constants++; \
+ return; \
+ } \
+ if (first_pool == 0) { \
+ s390_asm_output_pool_prologue (FILE, FUNNAME, fndecl, size); \
+ return; \
+ } \
+ for (pool = first_pool; pool; pool = pool->next) \
+ pool->mark = 0; \
+ \
+ insn = s390_pool_start_insn; \
+ \
+ if (insn==NULL_RTX) \
+ insn = get_insns (); \
+ else \
+ insn = NEXT_INSN (insn); \
+ for (; insn; insn = NEXT_INSN (insn)) { \
+ if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') { \
+ if (s390_stop_dump_lit_p (insn)) { \
+ mark_constants (PATTERN (insn)); \
+ break; \
+ } else \
+ mark_constants (PATTERN (insn)); \
+ } \
+ } \
+ \
+ /* Mark entries referenced by other entries */ \
+ for (pool = first_pool; pool; pool = pool->next) \
+ if (pool->mark) \
+ mark_constants(pool->constant); \
+ \
+ s390_asm_output_pool_prologue (FILE, FUNNAME, fndecl, size); \
+}
+
+/* We need to return here, because otherwise the constant pool
+ entries would be deleted after the first output. */
+
+#define ASM_OUTPUT_POOL_EPILOGUE(FILE, FUNNAME, fndecl, size) return;
+
+/* Output one constant pool entry.
+ Emits the ".LC<n>:" label the first time an entry is output
+ (flipping LABELNO to its one's complement to record this), plus a
+ chunk-local ".LC<n>_<chunk>:" label when the pool has been split
+ (s390_pool_count > 0). Then the constant itself is emitted:
+ floats via assemble_real; symbolic integer constants via
+ s390_output_symbolic_const when compiling -fpic, other integers
+ via assemble_integer. NOTE(review): relies on locals `pool', `x'
+ and `u' of the enclosing varasm.c code -- confirm against that
+ caller. */
+#define ASM_OUTPUT_SPECIAL_POOL_ENTRY(FILE, EXP, MODE, ALIGN, LABELNO, WIN) \
+{ \
+ if ((s390_pool_count == 0) || (s390_pool_count > 0 && LABELNO >= 0)) \
+ { \
+ fprintf (FILE, ".LC%d:\n", LABELNO); \
+ LABELNO = ~LABELNO; \
+ } \
+ if (s390_pool_count > 0) \
+ { \
+ fprintf (FILE, ".LC%d_%X:\n", ~LABELNO, s390_pool_count); \
+ } \
+ \
+ /* Output the value of the constant itself. */ \
+ switch (GET_MODE_CLASS (pool->mode)) \
+ { \
+ case MODE_FLOAT: \
+ if (GET_CODE (x) != CONST_DOUBLE) \
+ abort (); \
+ \
+ memcpy ((char *) &u, (char *) &CONST_DOUBLE_LOW (x), sizeof u); \
+ assemble_real (u.d, pool->mode); \
+ break; \
+ \
+ case MODE_INT: \
+ case MODE_PARTIAL_INT: \
+ if (flag_pic && (GET_CODE (x) == CONST || \
+ GET_CODE (x) == SYMBOL_REF || \
+ GET_CODE (x) == LABEL_REF )) \
+ { \
+ fprintf (FILE, "%s\t",TARGET_64BIT ? ASM_QUAD : ASM_LONG); \
+ s390_output_symbolic_const (FILE, x); \
+ fputc ('\n', (FILE)); \
+ } \
+ else \
+ assemble_integer (x, GET_MODE_SIZE (pool->mode), 1); \
+ break; \
+ \
+ default: \
+ abort (); \
+ } \
+ goto WIN; \
+}
+
+#endif
--- /dev/null
+;;- Machine description for GNU compiler -- S/390 / zSeries version.
+;; Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
+;; Contributed by Hartmut Penner (hpenner@de.ibm.com) and
+;; Ulrich Weigand (weigand@de.ibm.com).
+;; This file is part of GNU CC.
+
+;; GNU CC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 2, or (at your option)
+;; any later version.
+
+;; GNU CC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GNU CC; see the file COPYING. If not, write to
+;; the Free Software Foundation, 59 Temple Place - Suite 330,
+;; Boston, MA 02111-1307, USA.
+
+;;
+;; Special constraints for s/390 machine description:
+;;
+;; a -- Any address register from 1 to 15.
+;; d -- Any register from 0 to 15.
+;; I -- An 8-bit constant (0..255).
+;; J -- A 12-bit constant (0..4095).
+;; K -- A 16-bit constant (-32768..32767).
+;; Q -- A memory reference without index-register.
+;; S -- Valid operand for the LARL instruction.
+;;
+;; Special formats used for outputting 390 instructions.
+;;
+;; %b -- Print a constant byte integer (hex digits "xy").
+;; %h -- Print a signed 16-bit constant (hex digits "wxyz").
+;; %N -- Print next register (second word of a DImode reg) or next word.
+;; %M -- Print next register (second word of a TImode reg) or next word.
+;; %O -- Print the offset of a memory reference (PLUS (REG) (CONST_INT)).
+;; %R -- Print the register of a memory reference (PLUS (REG) (CONST_INT)).
+;;
+;; We have a special constraint for pattern matching.
+;;
+;; s_operand -- Matches a valid S operand in a RS, SI or SS type instruction.
+;;
+;; r_or_s_operand -- Matches a register or a valid S operand in a RS, SI
+;; or SS type instruction or a register
+;;
+
+;; Insn type. Used to default other attribute values.
+
+;
+; Insns are divided into two classes:
+; mem: Use of base and/or index register for address generation
+; reg: Use of second and third register not for address generation
+;
+
+(define_attr "atype" "mem,reg" (const_string "reg"))
+
+;
+; An insn may take 1, 2, 3 or many cycles.
+; For scheduling it does not matter if an instruction has
+; an issue delay of 4 or more cycles, since the address dependency
+; between two insns needs at least 4 cycles.
+;
+
+(define_attr "cycle" "1,2,3,n" (const_string "1"))
+
+;
+; There are three classes of insns:
+; set: instruction setting a (potential) address relevant register
+; xset: instruction setting no address relevant register
+; la: instruction setting a (potential) address relevant register,
+; but behaves 'better' on the pipeline
+;
+
+(define_attr "type" "set,xset,la" (const_string "xset"))
+
+;
+; Set operations changing a target register, which could be used for
+; address generation. Adjust cost will check if really applicable.
+;
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "set")
+ (eq_attr "cycle" "1"))
+ 5 1 [(eq_attr "atype" "mem")] )
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "set")
+ (eq_attr "cycle" "2")) 5 2)
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "set")
+ (eq_attr "cycle" "3")) 5 3)
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "set")
+ (eq_attr "cycle" "n")) 5 4)
+
+(define_function_unit "memory" 1 0
+ (eq_attr "type" "la") 2 1)
+
+;
+; xset insns, which don't set any valid address register.
+; Only the issue delay matters.
+;
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "xset")
+ (eq_attr "cycle" "1")) 1 1)
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "xset")
+ (eq_attr "cycle" "2")) 1 2)
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "xset")
+ (eq_attr "cycle" "3")) 1 3)
+
+(define_function_unit "memory" 1 0
+ (and (eq_attr "type" "xset")
+ (eq_attr "cycle" "n")) 1 4)
+
+; Operand type. Used to default length attribute values.
+
+(define_attr "op_type"
+ "NN,E,RR,RRE,RX,RS,RSI,RI,SI,S,SS,SSE,RXE,RSE,RIL,RIE"
+ (const_string "RX"))
+
+;; Length in bytes. 2-byte formats: E, RR; 4-byte: RX, RI, RRE, RS,
+;; RSI, S, SI; 6-byte: SS, SSE, RXE, RSE, RIL. The duplicate RX
+;; entry of the original cond has been removed.
+;; NOTE(review): RIE is not listed and thus falls through to the
+;; default of 4 -- the z/Architecture RIE format is 6 bytes; confirm
+;; before adding any RIE-format insn.
+
+(define_attr "length" ""
+(cond [ (eq_attr "op_type" "E") (const_int 2)
+ (eq_attr "op_type" "RR") (const_int 2)
+ (eq_attr "op_type" "RX") (const_int 4)
+ (eq_attr "op_type" "RI") (const_int 4)
+ (eq_attr "op_type" "RRE") (const_int 4)
+ (eq_attr "op_type" "RS") (const_int 4)
+ (eq_attr "op_type" "RSI") (const_int 4)
+ (eq_attr "op_type" "S") (const_int 4)
+ (eq_attr "op_type" "SI") (const_int 4)
+ (eq_attr "op_type" "SS") (const_int 6)
+ (eq_attr "op_type" "SSE") (const_int 6)
+ (eq_attr "op_type" "RXE") (const_int 6)
+ (eq_attr "op_type" "RSE") (const_int 6)
+ (eq_attr "op_type" "RIL") (const_int 6)]
+ (const_int 4)))
+
+;; Define attributes for `asm' insns: treat them as not setting any
+;; address-relevant register, with unknown operand type NN
+;; (NN is not in the length cond, so it defaults to length 4).
+
+(define_asm_attributes [(set_attr "type" "xset")
+ (set_attr "op_type" "NN")])
+
+;;
+;; Condition Codes
+;;
+;
+; CCL: Zero Nonzero Zero Nonzero (AL, ALR, SL, SLR, N, NC, NI, NR, O, OC, OI, OR, X, XC, XI, XR)
+; CCA: Zero <Zero >Zero Overflow (A, AR, AH, AHI, S, SR, SH, SHI, LTR, LCR, LNR, LPR, SLA, SLDA, SLA, SRDA)
+; CCU: Equal ULess UGreater -- (CL, CLR, CLI, CLM)
+; CCS: Equal SLess SGreater -- (C, CR, CH, CHI, ICM)
+; CCT: Zero Mixed Mixed Ones (TM, TMH, TML)
+
+; CCZ -> CCL / CCZ1
+; CCZ1 -> CCA/CCU/CCS/CCT
+; CCS -> CCA
+
+; String: CLC, CLCL, CLCLE, CLST, CUSE, MVCL, MVCLE, MVPG, MVST, SRST
+; Clobber: CKSM, CFC, CS, CDS, CUUTF, CUTFU, PLO, SPM, STCK, STCKE, TS, TRT, TRE, UPT
+
+
+;;
+;;- Compare instructions.
+;;
+
+;; The cmpMM expanders emit no code themselves: they only save the
+;; operands in s390_compare_op0/s390_compare_op1 and finish (DONE).
+;; The real compare insn is emitted later together with the branch
+;; or scc pattern that consumes the condition code (reg 33), which
+;; is matched via s390_match_ccmode.
+
+(define_expand "cmpdi"
+ [(set (reg:CC 33)
+ (compare:CC (match_operand:DI 0 "register_operand" "")
+ (match_operand:DI 1 "general_operand" "")))]
+ "TARGET_64BIT"
+ "
+{
+ s390_compare_op0 = operands[0];
+ s390_compare_op1 = operands[1];
+ DONE;
+}")
+
+(define_expand "cmpsi"
+ [(set (reg:CC 33)
+ (compare:CC (match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "general_operand" "")))]
+ ""
+ "
+{
+ s390_compare_op0 = operands[0];
+ s390_compare_op1 = operands[1];
+ DONE;
+}")
+
+;(define_expand "cmphi"
+; [(set (reg:CC 33)
+; (compare:CC (match_operand:HI 0 "register_operand" "")
+; (match_operand:HI 1 "general_operand" "")))]
+; ""
+; "
+;{
+; s390_compare_op0 = operands[0];
+; s390_compare_op1 = operands[1];
+; DONE;
+;}")
+
+;(define_expand "cmpqi"
+; [(set (reg:CC 33)
+; (compare:CC (match_operand:QI 0 "register_operand" "")
+; (match_operand:QI 1 "general_operand" "")))]
+; ""
+; "
+;{
+; s390_compare_op0 = operands[0];
+; s390_compare_op1 = operands[1];
+; DONE;
+;}")
+
+(define_expand "cmpdf"
+ [(set (reg:CC 33)
+ (compare:CC (match_operand:DF 0 "register_operand" "")
+ (match_operand:DF 1 "general_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ s390_compare_op0 = operands[0];
+ s390_compare_op1 = operands[1];
+ DONE;
+}")
+
+(define_expand "cmpsf"
+ [(set (reg:CC 33)
+ (compare:CC (match_operand:SF 0 "register_operand" "")
+ (match_operand:SF 1 "general_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ s390_compare_op0 = operands[0];
+ s390_compare_op1 = operands[1];
+ DONE;
+}")
+
+
+; DI instructions
+
+; Test a single bit of a DImode register against zero. The bit
+; position (operand 2, counted from the most significant bit) selects
+; one of the four halfword test-under-mask instructions.
+; TMHH/TMHL/TMLH/TMLL are RI-format (4 bytes); the original op_type
+; "RX" happened to yield the same length but misstated the format.
+
+(define_insn "*cmpdi_tm2"
+ [(set (reg 33)
+ (compare (zero_extract:DI (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "const1_operand" "")
+ (match_operand:DI 2 "immediate_operand" "I"))
+ (const_int 0)))]
+ "s390_match_ccmode(insn, CCTmode) &&
+ INTVAL(operands[2]) >= 0 && INTVAL(operands[2]) < 64"
+ "*
+{
+ if (INTVAL (operands[2]) > 47)
+ {
+ operands[1] = GEN_INT (1 << (63 - INTVAL(operands[2])));
+ return \"tmll\\t%0,%x1\";
+ }
+ else if (INTVAL (operands[2]) > 31)
+ {
+ operands[1] = GEN_INT (1 << (47 - INTVAL(operands[2])));
+ return \"tmlh\\t%0,%x1\";
+ }
+ else if (INTVAL (operands[2]) > 15)
+ {
+ operands[1] = GEN_INT (1 << (31 - INTVAL(operands[2])));
+ return \"tmhl\\t%0,%x1\";
+ }
+ operands[1] = GEN_INT (1 << (15 - INTVAL(operands[2])));
+ return \"tmhh\\t%0,%x1\";
+}"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+
+; Test DImode register bits under a constant mask; the mask is
+; assumed to lie within a single halfword, which is selected by
+; magnitude. Bug fix: the TMHL case must trigger for i == 2^32 as
+; well (>=, not >); otherwise a mask of exactly 0x100000000 fell
+; through to TMLH and was truncated to zero. TM[HL][HL] are
+; RI-format, so op_type is RI (same 4-byte length as the original
+; "RX", but the correct format).
+
+(define_insn "*cmpdi_tm"
+ [(set (reg 33)
+ (compare (and:DI (match_operand:DI 0 "register_operand" "%d")
+ (match_operand:DI 1 "tmxx_operand" "Lm"))
+ (const_int 0)))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "*
+{
+ unsigned HOST_WIDEST_INT i;
+ /* A literal-pool reference is replaced by the constant itself. */
+ if (GET_CODE (operands[1]) == MEM &&
+ GET_CODE (XEXP (operands[1],0)) == SYMBOL_REF &&
+ CONSTANT_POOL_ADDRESS_P (XEXP (operands[1],0)))
+ {
+ operands[1] = get_pool_constant (XEXP (operands[1],0));
+ }
+
+ i = (unsigned HOST_WIDEST_INT) INTVAL (operands[1]);
+
+ if (i >= 0x1000000000000ULL)
+ {
+ operands[1] = GEN_INT (i >> 48);
+ return \"tmhh\\t%0,%x1\";
+ }
+ else if (i >= 0x100000000ULL)
+ {
+ operands[1] = GEN_INT (i >> 32);
+ return \"tmhl\\t%0,%x1\";
+ }
+ else if (i >= 0x10000ULL)
+ {
+ operands[1] = GEN_INT (i >> 16);
+ return \"tmlh\\t%0,%x1\";
+ }
+ else
+ return \"tmll\\t%0,%x1\";
+}"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+
+; Load and test: copy operand 0 into operand 2 and set CC from the
+; value at the same time.
+(define_insn "*ltgr"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "const0_operand" "")))
+ (set (match_operand:DI 2 "register_operand" "=d")
+ (match_dup 0))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_64BIT"
+ "ltgr\\t%2,%0"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+(define_insn "*cmpdi_ccs_0_64"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_64BIT"
+ "ltgr\\t%0,%0"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+; On 31-bit, set CC from a 64-bit register pair by a shift right
+; double by 0 (value unchanged; presumably only CC is relevant here
+; -- confirm).
+(define_insn "*cmpdi_ccs_0_31"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "srda\\t%0,0"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "set")])
+
+(define_insn "*cmpdi_ccs"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "register_operand" "d,d,d")
+ (match_operand:DI 1 "general_operand" "d,K,m")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_64BIT"
+ "@
+ cgr\\t%0,%1
+ cghi\\t%0,%c1
+ cg\\t%0,%1"
+ [(set_attr "op_type" "RRE,RI,RXE")
+ (set_attr "atype" "reg,reg,mem")])
+
+(define_insn "*cmpdi_ccu"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "register_operand" "d,d")
+ (match_operand:DI 1 "general_operand" "d,m")))]
+ "s390_match_ccmode(insn, CCUmode) && TARGET_64BIT"
+ "@
+ clgr\\t%0,%1
+ clg\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")])
+
+; Memory-memory unsigned compare.
+(define_insn "*cmpdi_ccu_mem"
+ [(set (reg 33)
+ (compare (match_operand:DI 0 "s_operand" "oQ")
+ (match_operand:DI 1 "s_operand" "oQ")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clc\\t%O0(8,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")])
+
+; SI instructions
+
+; Test a single bit of an SImode register (bit position counted from
+; the most significant bit).
+(define_insn "*cmpsi_cct"
+ [(set (reg 33)
+ (compare (zero_extract:SI (match_operand:SI 0 "register_operand" "%d")
+ (match_operand:SI 1 "const1_operand" "")
+ (match_operand:SI 2 "immediate_operand" "I"))
+ (const_int 0)))]
+ "s390_match_ccmode(insn, CCTmode) &&
+ INTVAL(operands[2]) >= 0 && INTVAL(operands[2]) < 32"
+ "*
+{
+ if (INTVAL (operands[2]) > 15)
+ {
+ operands[1] = GEN_INT (1 << (31 - INTVAL(operands[2])));
+ return \"tml\\t%0,%x1\";
+ }
+ operands[1] = GEN_INT (1 << (15 - INTVAL(operands[2])));
+ return \"tmh\\t%0,%x1\";
+}"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+; Test SImode register bits under a constant mask; the mask is
+; assumed to lie within a single halfword (guaranteed by
+; tmxx_operand -- TODO confirm). TMH/TML are RI-format; the original
+; op_type "RX" happened to yield the same 4-byte length but misstated
+; the format.
+
+(define_insn "*cmpsi_tm"
+ [(set (reg 33)
+ (compare (and:SI (match_operand:SI 0 "register_operand" "%d")
+ (match_operand:SI 1 "tmxx_operand" "Lm"))
+ (const_int 0)))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "*
+{
+ unsigned long i;
+ /* A literal-pool reference is replaced by the constant itself. */
+ if (GET_CODE (operands[1]) == MEM &&
+ GET_CODE (XEXP (operands[1],0)) == SYMBOL_REF &&
+ CONSTANT_POOL_ADDRESS_P (XEXP (operands[1],0)))
+ {
+ operands[1] = get_pool_constant (XEXP (operands[1],0));
+ }
+
+ i = (unsigned long) INTVAL (operands[1]);
+ if (i > 0xffff)
+ {
+ operands[1] = GEN_INT (i / 0x10000);
+ return \"tmh\\t%0,%x1\";
+ }
+ return \"tml\\t%0,%x1\";
+}"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+
+; Load and test: copy operand 0 into operand 2 and set CC from the
+; value at the same time.
+(define_insn "*ltr"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "register_operand" "d")
+ (match_operand:SI 1 "const0_operand" "")))
+ (set (match_operand:SI 2 "register_operand" "=d")
+ (match_dup 0))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "ltr\\t%2,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+; Load a word from memory with mask 15 (all four bytes), setting CC.
+(define_insn "*icm15"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "s_operand" "Qo")
+ (match_operand:SI 1 "const0_operand" "")))
+ (set (match_operand:SI 2 "register_operand" "=d")
+ (match_dup 0))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,15,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+; Same as *icm15 but the loaded value itself is unused (scratch).
+(define_insn "*icm15_cconly"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "s_operand" "Qo")
+ (match_operand:SI 1 "const0_operand" "")))
+ (clobber (match_scratch:SI 2 "=d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,15,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+(define_insn "*cmpsi_ccs_0"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "register_operand" "d")
+ (match_operand:SI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "ltr\\t%0,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+; Compare an SImode register with a sign-extended halfword from
+; memory. CH is an RX-format instruction (4 bytes); the original
+; op_type "RR" made the length attribute come out as 2 bytes, which
+; would corrupt branch-distance computations.
+(define_insn "*cmpsidi_ccs"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "register_operand" "d")
+ (sign_extend:SI (match_operand:HI 1 "memory_operand" "m"))))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "ch\\t%0,%1"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+(define_insn "*cmpsi_ccs"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "register_operand" "d,d,d")
+ (match_operand:SI 1 "general_operand" "d,K,m")))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ cr\\t%0,%1
+ chi\\t%0,%c1
+ c\\t%0,%1"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "xset,xset,xset")])
+
+(define_insn "*cmpsi_ccu"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "register_operand" "d,d")
+ (match_operand:SI 1 "general_operand" "d,m")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "@
+ clr\\t%0,%1
+ cl\\t%0,%1"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+; Memory-memory unsigned compare.
+(define_insn "*cmpsi_ccu_mem"
+ [(set (reg 33)
+ (compare (match_operand:SI 0 "s_operand" "oQ")
+ (match_operand:SI 1 "s_operand" "oQ")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clc\\t%O0(4,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+
+; HI instructions
+
+; Load a halfword from memory with mask 3 (two low bytes), setting CC.
+(define_insn "*icm3"
+ [(set (reg 33)
+ (compare (match_operand:HI 0 "s_operand" "Qo")
+ (match_operand:HI 1 "const0_operand" "")))
+ (set (match_operand:HI 2 "register_operand" "=d")
+ (match_dup 0))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,3,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+; Test the low 16 bits of a register against zero. TML is an
+; RI-format instruction; the original op_type "RX" happened to yield
+; the same 4-byte length but misstated the format.
+(define_insn "*cmphi_cct_0"
+ [(set (reg 33)
+ (compare (match_operand:HI 0 "register_operand" "d")
+ (match_operand:HI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "tml\\t%0,65535"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+; Set CC from a halfword in memory; the loaded value is discarded.
+(define_insn "*cmphi_ccs_0"
+ [(set (reg 33)
+ (compare (match_operand:HI 0 "s_operand" "Qo")
+ (match_operand:HI 1 "const0_operand" "")))
+ (clobber (match_scratch:HI 2 "=d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,3,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+; Unsigned compare of the low two register bytes with memory.
+(define_insn "*cmphi_ccu"
+ [(set (reg 33)
+ (compare (match_operand:HI 0 "register_operand" "d")
+ (match_operand:HI 1 "s_operand" "Qo")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clm\\t%0,3,%1"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Memory-memory unsigned compare.
+(define_insn "*cmphi_ccu_mem"
+ [(set (reg 33)
+ (compare (match_operand:HI 0 "s_operand" "oQ")
+ (match_operand:HI 1 "s_operand" "oQ")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clc\\t%O0(2,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+
+; QI instructions
+
+; Load one byte from memory with mask 1, setting CC.
+(define_insn "*icm1"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "s_operand" "Qo")
+ (match_operand:QI 1 "const0_operand" "")))
+ (set (match_operand:QI 2 "register_operand" "=d")
+ (match_dup 0))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,1,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+; Test bits of a byte in memory under an 8-bit immediate mask.
+; TM D1(B1),I2 is an SI-format instruction; the original op_type "RI"
+; happened to yield the same 4-byte length but misstated the format.
+(define_insn "*tm_0"
+ [(set (reg 33)
+ (compare (zero_extend:SI (and:QI (match_operand:QI 0 "s_operand" "Qo")
+ (match_operand:QI 1 "immediate_operand" "")))
+ (const_int 0)))]
+ "s390_match_ccmode(insn, CCTmode) &&
+ INTVAL(operands[1]) >= 0 && INTVAL(operands[1]) < 256"
+ "tm\\t%0,%1"
+ [(set_attr "op_type" "SI")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Test the low 8 bits of a register against zero.
+(define_insn "*cmpqi_cct_0"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "register_operand" "d")
+ (match_operand:QI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "tml\\t%0,255"
+ [(set_attr "op_type" "RI")
+ (set_attr "type" "xset")])
+
+; Set CC from a byte in memory; the loaded value is discarded.
+(define_insn "*cmpqi_ccs_0"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "s_operand" "Qo")
+ (match_operand:QI 1 "const0_operand" "")))
+ (clobber (match_scratch:QI 2 "=d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "icm\\t%2,1,%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "xset")])
+
+; Unsigned compare of a memory byte with zero.
+(define_insn "*cmpqi_ccu_0"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "s_operand" "Qo")
+ (match_operand:QI 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "cli\\t%0,0"
+ [(set_attr "op_type" "SI")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Unsigned compare of the low register byte with memory.
+(define_insn "*cmpqi_ccu"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "register_operand" "d")
+ (match_operand:QI 1 "s_operand" "Qo")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clm\\t%0,1,%1"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Unsigned compare of a memory byte with an 8-bit immediate.
+(define_insn "*cmpqi_ccu_immed"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "s_operand" "Qo")
+ (match_operand:QI 1 "immediate_operand" "")))]
+ "s390_match_ccmode(insn, CCUmode) &&
+ INTVAL(operands[1]) >= 0 && INTVAL(operands[1]) < 256"
+ "cli\\t%0,%1"
+ [(set_attr "op_type" "SI")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Memory-memory unsigned compare.
+(define_insn "*cmpqi_ccu_mem"
+ [(set (reg 33)
+ (compare (match_operand:QI 0 "s_operand" "oQ")
+ (match_operand:QI 1 "s_operand" "oQ")))]
+ "s390_match_ccmode(insn, CCUmode)"
+ "clc\\t%O0(1,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+
+; DF instructions
+
+; IEEE load-and-test. LTDBR is an RRE-format instruction (4 bytes);
+; the original op_type "RR" made the length attribute come out as 2
+; bytes, breaking branch-distance computation.
+(define_insn "*cmpdf_ccs_0"
+ [(set (reg 33)
+ (compare (match_operand:DF 0 "register_operand" "f")
+ (match_operand:DF 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "ltdbr\\t%0,%0"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+; IBM-hexadecimal-float load-and-test (LTDR, RR-format).
+(define_insn "*cmpdf_ccs_0_ibm"
+ [(set (reg 33)
+ (compare (match_operand:DF 0 "register_operand" "f")
+ (match_operand:DF 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "ltdr\\t%0,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+; IEEE DFmode compare. CDBR is RRE-format (4 bytes) and CDB is
+; RXE-format (6 bytes); the original "RR,RX" understated both
+; lengths, breaking branch-distance computation.
+(define_insn "*cmpdf_ccs"
+ [(set (reg 33)
+ (compare (match_operand:DF 0 "register_operand" "f,f")
+ (match_operand:DF 1 "nonimmediate_operand" "f,m")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ cdbr\\t%0,%1
+ cdb\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "xset,xset")])
+
+; IBM-hexadecimal-float DFmode compare (CDR RR-format, CD RX-format).
+(define_insn "*cmpdf_ccs_ibm"
+ [(set (reg 33)
+ (compare (match_operand:DF 0 "register_operand" "f,f")
+ (match_operand:DF 1 "nonimmediate_operand" "f,m")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ cdr\\t%0,%1
+ cd\\t%0,%1"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "xset,xset")])
+
+
+; SF instructions
+
+; IEEE load-and-test. LTEBR is an RRE-format instruction (4 bytes);
+; the original op_type "RR" made the length attribute come out as 2
+; bytes, breaking branch-distance computation.
+(define_insn "*cmpsf_ccs_0"
+ [(set (reg 33)
+ (compare (match_operand:SF 0 "register_operand" "f")
+ (match_operand:SF 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "ltebr\\t%0,%0"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+; IBM-hexadecimal-float load-and-test (LTER, RR-format).
+(define_insn "*cmpsf_ccs_0_ibm"
+ [(set (reg 33)
+ (compare (match_operand:SF 0 "register_operand" "f")
+ (match_operand:SF 1 "const0_operand" "")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "lter\\t%0,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+; IEEE SFmode compare. CEBR is RRE-format (4 bytes) and CEB is
+; RXE-format (6 bytes); the original "RR,RX" understated both
+; lengths, breaking branch-distance computation.
+(define_insn "*cmpsf_ccs"
+ [(set (reg 33)
+ (compare (match_operand:SF 0 "register_operand" "f,f")
+ (match_operand:SF 1 "nonimmediate_operand" "f,m")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ cebr\\t%0,%1
+ ceb\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "xset,xset")])
+
+; IBM-hexadecimal-float SFmode compare (CER RR-format, CE RX-format).
+; Renamed from the duplicate "*cmpsf_ccs" to match the DFmode naming
+; scheme (*cmpdf_ccs_ibm); the starred name is diagnostic-only, so
+; this is purely a consistency fix.
+(define_insn "*cmpsf_ccs_ibm"
+ [(set (reg 33)
+ (compare (match_operand:SF 0 "register_operand" "f,f")
+ (match_operand:SF 1 "nonimmediate_operand" "f,m")))]
+ "s390_match_ccmode(insn, CCSmode) && TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ cer\\t%0,%1
+ ce\\t%0,%1"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "xset,xset")])
+
+
+;;
+;;- Move instructions.
+;;
+
+;
+; movti instruction pattern(s).
+;
+
+; Move a 128-bit value via a 64-bit register pair. The register-
+; register alternative orders the two LGRs so an overlapping source
+; is not clobbered before it is read.
+; NOTE(review): the length entry for alternative 3 (m <- d) is 10,
+; but the two-STG fallback is 2 x 6 = 12 bytes -- confirm.
+(define_insn "movti"
+ [(set (match_operand:TI 0 "nonimmediate_operand" "=d,d,d,m,Q")
+ (match_operand:TI 1 "general_operand" "d,K,m,d,Q"))]
+ "TARGET_64BIT"
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0: /* d <- d */
+ if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
+ return \"lgr\\t%M0,%M1\;lgr\\t%0,%1\";
+ else
+ return \"lgr\\t%0,%1\;lgr\\t%M0,%M1\";
+
+ case 1: /* d <- K */
+ if (INTVAL(operands[1]) < 0)
+ return \"lghi\\t%0,-1\;lghi\\t%M0,%h1\";
+ else
+ return \"lghi\\t%0,0\;lghi\\t%M0,%h1\";
+
+ case 2: /* d <- m */
+ if (s_operand (operands[1], GET_MODE (operands[1])))
+ return \"lmg\\t%0,%M0,%1\";
+ else
+ return \"la\\t%M0,%1\;lmg\\t%0,%M0,0(%M0)\";
+
+ case 3: /* m <- d */
+ if (!s_operand (operands[0], GET_MODE (operands[0])))
+ return \"stg\\t%1,%0\;stg\\t%M1,%M0\";
+ else
+ return \"stmg\\t%1,%M1,%0\";
+
+ case 4: /* m <- m */
+ return \"mvc\\t%O0(16,%R0),%1\";
+ }
+}"
+ [(set_attr "op_type" "NN,NN,RS,RS,SS")
+ (set_attr "atype" "reg,reg,mem,mem,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "12,8,10,10,*")])
+
+;
+; movdi instruction pattern(s).
+;
+
+;; If generating PIC code and operands[1] is a symbolic CONST, emit a
+;; move to get the address of the symbolic object from the GOT.
+
+;; Force non-legitimate constants into the literal pool; under -fpic
+;; (64-bit only here) symbolic operands are loaded from the GOT.
+(define_expand "movdi"
+ [(set (match_operand:DI 0 "general_operand" "")
+ (match_operand:DI 1 "general_operand" ""))]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[1])
+ && !LEGITIMATE_CONSTANT_P (operands[1]))
+ operands[1] = force_const_mem (DImode, operands[1]);
+
+ if (TARGET_64BIT && flag_pic && SYMBOLIC_CONST (operands[1]))
+ emit_pic_move (operands, DImode);
+}")
+
+(define_insn "*movdi_64"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=d,d,d,d,m,Q")
+ (match_operand:DI 1 "general_operand" "d,K,S,m,d,Q"))]
+ "TARGET_64BIT"
+ "@
+ lgr\\t%0,%1
+ lghi\\t%0,%h1
+ larl\\t%0,%1
+ lg\\t%0,%1
+ stg\\t%1,%0
+ mvc\\t%O0(8,%R0),%1"
+ [(set_attr "op_type" "RRE,RI,RIL,RXE,RXE,SS")
+ (set_attr "atype" "reg,reg,reg,mem,mem,mem")
+ (set_attr "type" "set,set,la,set,set,set")])
+
+; 31-bit DImode move via a 32-bit register pair; the register-
+; register alternative orders the two LRs so an overlapping source
+; is not clobbered before it is read.
+(define_insn "*movdi_31"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=d,d,d,m,Q")
+ (match_operand:DI 1 "general_operand" "d,K,m,d,Q"))]
+ "!TARGET_64BIT"
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0: /* d <- d */
+ if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
+ return \"lr\\t%N0,%N1\;lr\\t%0,%1\";
+ else
+ return \"lr\\t%0,%1\;lr\\t%N0,%N1\";
+
+ case 1: /* d <- K */
+ if (INTVAL (operands[1]) < 0)
+ return \"lhi\\t%0,-1\;lhi\\t%N0,%h1\";
+ else
+ return \"lhi\\t%0,0\;lhi\\t%N0,%h1\";
+
+ case 2: /* d <- m */
+ if (s_operand (operands[1], GET_MODE (operands[1])))
+ return \"lm\\t%0,%N0,%1\";
+ else
+ return \"la\\t%N0,%1\;lm\\t%0,%N0,0(%N0)\";
+
+ case 3: /* m <- d */
+ if (s_operand (operands[0], GET_MODE (operands[0])))
+ return \"stm\\t%1,%N1,%0\";
+ else
+ return \"st\\t%1,%0\;st\\t%N1,%N0\";
+
+ case 4: /* m <- m */
+ return \"mvc\\t%O0(8,%R0),%1\";
+ }
+}"
+ [(set_attr "op_type" "NN,NN,RS,RS,SS")
+ (set_attr "atype" "reg,reg,mem,mem,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "4,8,8,8,*")])
+
+
+;
+; movsi instruction pattern(s).
+;
+
+;; If generating PIC code and operands[1] is a symbolic CONST, emit a
+;; move to get the address of the symbolic object from the GOT.
+
+;; Force non-legitimate constants into the literal pool; under -fpic
+;; symbolic operands are loaded from the GOT.
+(define_expand "movsi"
+ [(set (match_operand:SI 0 "general_operand" "")
+ (match_operand:SI 1 "general_operand" ""))]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[1])
+ && !LEGITIMATE_CONSTANT_P (operands[1]))
+ operands[1] = force_const_mem (SImode, operands[1]);
+
+ if (flag_pic && SYMBOLIC_CONST (operands[1]))
+ emit_pic_move (operands, SImode);
+}")
+
+(define_insn "*movsi"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=d,d,d,m,Q")
+ (match_operand:SI 1 "general_operand" "d,K,m,d,Q"))]
+ ""
+ "@
+ lr\\t%0,%1
+ lhi\\t%0,%h1
+ l\\t%0,%1
+ st\\t%1,%0
+ mvc\\t%O0(4,%R0),%1"
+ [(set_attr "op_type" "RR,RI,RX,RX,SS")
+ (set_attr "atype" "reg,reg,mem,mem,mem")
+ (set_attr "type" "set")])
+
+
+;
+; movhi instruction pattern(s).
+;
+
+(define_insn "movhi"
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=d,d,d,m")
+ (match_operand:HI 1 "r_or_x_or_im16_operand" "d,K,m,d"))]
+ ""
+ "@
+ lr\\t%0,%1
+ lhi\\t%0,%h1
+ lh\\t%0,%1
+ sth\\t%1,%0"
+ [(set_attr "op_type" "RR,RI,RX,RX")
+ (set_attr "atype" "reg,reg,mem,mem")
+ (set_attr "type" "xset")])
+
+
+;
+; movqi instruction pattern(s).
+;
+
+; 64-bit QImode move; loads from memory zero-extend (LLGC), and the
+; immediate alternative uses LLILL.
+(define_insn "movqi_64"
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=d,d,d,m,Q")
+ (match_operand:QI 1 "general_operand" "d,K,m,d,n"))]
+ "TARGET_64BIT"
+ "@
+ lr\\t%0,%1
+ llill\\t%0,%x1
+ llgc\\t%0,%1
+ stc\\t%1,%0
+ mvi\\t%0,%b1"
+ [(set_attr "op_type" "RR,RI,RXE,RX,SI")
+ (set_attr "atype" "reg,reg,mem,mem,mem")
+ (set_attr "type" "xset")])
+
+
+; 31-bit QImode move. LHI is an RI-format instruction; the original
+; op_type "RX" for the immediate alternative happened to yield the
+; same 4-byte length but misstated the format.
+(define_insn "movqi"
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=d,d,d,m,Q")
+ (match_operand:QI 1 "r_or_x_or_im16_operand" "d,n,m,d,n"))]
+ ""
+ "@
+ lr\\t%0,%1
+ lhi\\t%0,%c1
+ ic\\t%0,%1
+ stc\\t%1,%0
+ mvi\\t%0,%b1"
+ [(set_attr "op_type" "RR,RI,RX,RX,SI")
+ (set_attr "atype" "reg,reg,mem,mem,mem")
+ (set_attr "type" "xset")])
+
+
+;
+; movstrictqi instruction pattern(s).
+;
+
+;; Replace only the low-order byte of a register (strict_low_part):
+;; insert-character on load, store-character on store.
+(define_insn "*movstrictqi"
+ [(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+d,m"))
+ (match_operand:QI 1 "nonimmediate_operand" "m,d"))]
+ ""
+ "@
+ ic\\t%0,%1
+ stc\\t%1,%0"
+ [(set_attr "op_type" "RX,RX")
+ (set_attr "atype" "mem,mem")])
+
+
+;
+; movstricthi instruction pattern(s).
+;
+
+;; Replace only the low-order 16 bits of a register via
+;; insert/store-characters-under-mask (mask 3 = low two bytes).
+;; icm sets the condition code, hence the CC clobber.
+(define_insn "*movstricthi"
+ [(set (strict_low_part (match_operand:HI 0 "r_or_s_operand" "+d,Q"))
+ (match_operand:HI 1 "r_or_s_operand" "Q,d"))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ icm\\t%0,3,%1
+ stcm\\t%1,3,%0"
+ [(set_attr "op_type" "RS,RS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+
+;
+; movstrictsi instruction pattern(s).
+;
+
+;; Write the low-order 32 bits of a 64-bit register without touching
+;; the upper half; only meaningful when SImode is narrower than a word,
+;; i.e. on 64-bit targets.
+;; Named "movstrictsi" so the middle-end finds the standard pattern —
+;; the previous spelling "movestrictsi" was never looked up by name.
+;; Also: l and st are RX-format instructions, not RS.
+(define_insn "movstrictsi"
+ [(set (strict_low_part (match_operand:SI 0 "nonimmediate_operand" "+d,d,m"))
+ (match_operand:SI 1 "nonimmediate_operand" "d,m,d"))]
+ "TARGET_64BIT"
+ "@
+ lr\\t%0,%1
+ l\\t%0,%1
+ st\\t%1,%0"
+ [(set_attr "op_type" "RR,RX,RX")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "xset")])
+
+
+;
+; movdf instruction pattern(s).
+;
+
+;; Expander: spill floating-point constants to the literal pool;
+;; the hardware has no DFmode load-immediate.
+(define_expand "movdf"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "")
+ (match_operand:DF 1 "general_operand" ""))]
+ ""
+ "
+{
+ if (GET_CODE (operands[1]) == CONST_DOUBLE)
+ operands[1] = force_const_mem (DFmode, operands[1]);
+}")
+
+;; DFmode moves, 64-bit hard-float: FP reg ops (ldr/ld/std), 64-bit
+;; GPR ops (lg/stg/lgr), and storage-to-storage mvc.
+;; NOTE(review): alternative 5 (lgr, reg-reg) is tagged atype "mem" —
+;; looks like it should be "reg"; verify against the scheduler.
+(define_insn "*movdf_64"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f,m,d,m,d,Q")
+ (match_operand:DF 1 "general_operand" "f,m,f,m,d,d,Q"))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT"
+ "@
+ ldr\\t%0,%1
+ ld\\t%0,%1
+ std\\t%1,%0
+ lg\\t%0,%1
+ stg\\t%1,%0
+ lgr\\t%0,%1
+ mvc\\t%O0(8,%R0),%1"
+ [(set_attr "op_type" "RR,RX,RX,RXE,RXE,RR,SS")
+ (set_attr "atype" "reg,mem,mem,mem,mem,mem,mem")
+ (set_attr "type" "xset")])
+
+;; DFmode moves, 31-bit hard-float.  A GPR "register" here is really an
+;; even/odd register pair (%0/%N0), so GPR alternatives need lm/stm or
+;; two single-word moves, and the reg-reg copy must order the two lr's
+;; to cope with overlapping pairs.
+(define_insn "*movdf_31"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f,m,d,m,d,Q")
+ (match_operand:DF 1 "general_operand" "f,m,f,m,d,d,Q"))]
+ "TARGET_HARD_FLOAT"
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0: /* f <- f */
+ return \"ldr\\t%0,%1\";
+
+ case 1: /* f <- m */
+ return \"ld\\t%0,%1\";
+
+ case 2: /* m <- f */
+ return \"std\\t%1,%0\";
+
+ case 3: /* d <- m */
+ if (s_operand (operands[1], GET_MODE (operands[1])))
+ return \"lm\\t%0,%N0,%1\";
+ else
+ return \"la\\t%N0,%1\;lm\\t%0,%N0,0(%N0)\";
+
+ case 4: /* m <- d */
+ if (s_operand (operands[0], GET_MODE (operands[0])))
+ return \"stm\\t%1,%N1,%0\";
+ else
+ return \"st\\t%1,%0\;st\\t%N1,%N0\";
+
+ case 5: /* d <- d */
+ if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
+ return \"lr\\t%N0,%N1\;lr\\t%0,%1\";
+ else
+ return \"lr\\t%0,%1\;lr\\t%N0,%N1\";
+
+ case 6: /* m <- m */
+ return \"mvc\\t%O0(8,%R0),%1\";
+ }
+}"
+ [(set_attr "op_type" "RR,RX,RX,RS,RS,NN,SS")
+ (set_attr "atype" "reg,mem,mem,mem,mem,reg,mem")
+ (set_attr "length" "*,*,*,*,*,4,*")])
+
+;; DFmode moves without FP hardware, 64-bit: DFmode fits a single GPR.
+(define_insn "*movdf_soft_64"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=d,m,d,Q")
+ (match_operand:DF 1 "general_operand" "m,d,d,Q"))]
+ "TARGET_64BIT && TARGET_SOFT_FLOAT"
+ "@
+ lg\\t%0,%1
+ stg\\t%1,%0
+ lgr\\t%0,%1
+ mvc\\t%O0(8,%R0),%1"
+ [(set_attr "op_type" "RXE,RXE,RR,SS")
+ (set_attr "atype" "mem,mem,mem,mem")
+ (set_attr "type" "xset")])
+
+;; DFmode moves without FP hardware, 31-bit: operates on even/odd GPR
+;; pairs, same pairing logic as *movdf_31's GPR alternatives.
+(define_insn "*movdf_soft_31"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=!d,d,m,Q")
+ (match_operand:DF 1 "general_operand" "!d,m,d,Q"))]
+ "TARGET_SOFT_FLOAT"
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0: /* d <- d */
+ if (REGNO (operands[0]) == REGNO (operands[1]) + 1)
+ return \"lr\\t%N0,%N1\;lr\\t%0,%1\";
+ else
+ return \"lr\\t%0,%1\;lr\\t%N0,%N1\";
+
+ case 1: /* d <- m */
+ if (s_operand (operands[1], GET_MODE (operands[1])))
+ return \"lm\\t%0,%N0,%1\";
+ else
+ return \"la\\t%N0,%1\;lm\\t%0,%N0,0(%N0)\";
+
+ case 2: /* m <- d */
+ if (s_operand (operands[0], GET_MODE (operands[0])))
+ return \"stm\\t%1,%N1,%0\";
+ else
+ return \"st\\t%1,%0\;st\\t%N1,%N0\";
+
+ case 3: /* m <- m */
+ return \"mvc\\t%O0(8,%R0),%1\";
+ }
+}"
+ [(set_attr "op_type" "NN,RS,RS,SS")
+ (set_attr "atype" "reg,mem,mem,mem")
+ (set_attr "length" "8,*,*,*")])
+
+
+;
+; movsf instruction pattern(s).
+;
+
+;; Expander: spill SFmode constants to the literal pool.
+(define_expand "movsf"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "")
+ (match_operand:SF 1 "general_operand" ""))]
+ ""
+ "
+{
+ if (GET_CODE (operands[1]) == CONST_DOUBLE)
+ operands[1] = force_const_mem (SFmode, operands[1]);
+}")
+
+;; SFmode moves, 64-bit hard-float.  GPR load uses llgf (zero-extending
+;; 32-bit load) since SFmode occupies the low half of a 64-bit GPR.
+(define_insn "*movsf_64"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f,m,d,m,d,Q")
+ (match_operand:SF 1 "general_operand" "f,m,f,m,d,d,Q"))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT"
+ "@
+ ler\\t%0,%1
+ le\\t%0,%1
+ ste\\t%1,%0
+ llgf\\t%0,%1
+ st\\t%1,%0
+ lgr\\t%0,%1
+ mvc\\t%O0(4,%R0),%1"
+ [(set_attr "op_type" "RR,RX,RX,RXE,RX,RR,SS")
+ (set_attr "atype" "reg,mem,mem,mem,mem,reg,mem")])
+
+;; SFmode moves, 31-bit hard-float: one word everywhere, so plain
+;; l/st/lr serve the GPR alternatives.
+(define_insn "*movsf_31"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f,m,d,m,d,Q")
+ (match_operand:SF 1 "general_operand" "f,m,f,m,d,d,Q"))]
+ "TARGET_HARD_FLOAT"
+ "@
+ ler\\t%0,%1
+ le\\t%0,%1
+ ste\\t%1,%0
+ l\\t%0,%1
+ st\\t%1,%0
+ lr\\t%0,%1
+ mvc\\t%O0(4,%R0),%1"
+ [(set_attr "op_type" "RR,RX,RX,RX,RX,RR,SS")
+ (set_attr "atype" "reg,mem,mem,mem,mem,reg,mem")])
+
+;; SFmode moves without FP hardware: GPRs only.
+(define_insn "*movsf_soft"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=d,d,m,Q")
+ (match_operand:SF 1 "general_operand" "d,m,d,Q"))]
+ "TARGET_SOFT_FLOAT"
+ "@
+ lr\\t%0,%1
+ l\\t%0,%1
+ st\\t%1,%0
+ mvc\\t%O0(4,%R0),%1"
+ [(set_attr "op_type" "RR,RX,RX,SS")
+ (set_attr "atype" "reg,mem,mem,mem")])
+;
+; load_multiple pattern(s).
+;
+
+;; Standard "load_multiple" expander: build a PARALLEL of word loads
+;; from consecutive memory so the insn patterns below can emit lm/lmg.
+;; Only constant counts of 3..16 consecutive GPRs are supported.
+(define_expand "load_multiple"
+ [(match_par_dup 3 [(set (match_operand 0 "" "")
+ (match_operand 1 "" ""))
+ (use (match_operand 2 "" ""))])]
+ ""
+ "
+{
+ int regno;
+ int count;
+ rtx from;
+ int i;
+
+ /* Support only loading a constant number of fixed-point registers from
+ memory and only bother with this if more than two */
+ if (GET_CODE (operands[2]) != CONST_INT
+ || INTVAL (operands[2]) <= 2
+ || INTVAL (operands[2]) > 16
+ || GET_CODE (operands[1]) != MEM
+ || GET_CODE (operands[0]) != REG
+ || REGNO (operands[0]) >= 16)
+ FAIL;
+
+ count = INTVAL (operands[2]);
+ regno = REGNO (operands[0]);
+
+ operands[3] = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
+ from = force_reg (Pmode, XEXP (operands[1], 0));
+
+ /* One SET per register; element i loads reg regno+i from offset 4*i.
+ NOTE(review): the 4-byte stride and Pmode element mode assume
+ 31-bit word size — confirm intent for TARGET_64BIT.  */
+ for (i = 0; i < count; i++)
+ XVECEXP (operands[3], 0, i)
+ = gen_rtx_SET (VOIDmode, gen_rtx_REG (Pmode, regno + i),
+ change_address (operands[1], Pmode,
+ plus_constant (from, i * 4)));
+}")
+
+;; Recognize a load-multiple PARALLEL of DImode words and emit lmg
+;; from the first register through the last.
+;; NOTE(review): the expander above never creates 1-element parallels
+;; (count > 2 required), so the lg path presumably serves combiner or
+;; peephole-generated parallels — confirm it is reachable and that
+;; \"0(%2)\" prints correctly when %2 is a MEM operand.
+(define_insn "*load_multiple_di"
+ [(match_parallel 0 "load_multiple_operation"
+ [(set (match_operand:DI 1 "register_operand" "=r")
+ (match_operand:DI 2 "s_operand" "oQ"))])]
+ ""
+ "*
+{
+ int words = XVECLEN (operands[0], 0);
+
+ if (XVECLEN (operands[0], 0) == 1)
+ return \"lg\\t%1,0(%2)\";
+
+ /* Reuse operands[0] as the last register of the range.  */
+ operands[0] = gen_rtx_REG (DImode, REGNO (operands[1]) + words - 1);
+ return \"lmg\\t%1,%0,%2\";
+}"
+ [(set_attr "op_type" "RXE")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+;; Same as above for SImode words, emitting lm.
+(define_insn "*load_multiple_si"
+ [(match_parallel 0 "load_multiple_operation"
+ [(set (match_operand:SI 1 "register_operand" "=r")
+ (match_operand:SI 2 "s_operand" "oQ"))])]
+ ""
+ "*
+{
+ int words = XVECLEN (operands[0], 0);
+
+ if (XVECLEN (operands[0], 0) == 1)
+ return \"l\\t%1,0(%2)\";
+
+ operands[0] = gen_rtx_REG (SImode, REGNO (operands[1]) + words - 1);
+ return \"lm\\t%1,%0,%2\";
+}"
+ [(set_attr "op_type" "RXE")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+;
+; store multiple pattern(s).
+;
+
+;; Standard "store_multiple" expander, mirror image of load_multiple:
+;; build a PARALLEL of word stores so stm/stmg can be emitted.
+(define_expand "store_multiple"
+ [(match_par_dup 3 [(set (match_operand 0 "" "")
+ (match_operand 1 "" ""))
+ (use (match_operand 2 "" ""))])]
+ ""
+ "
+{
+ int regno;
+ int count;
+ rtx to;
+ int i;
+
+ /* Support only storing a constant number of fixed-point registers to
+ memory and only bother with this if more than two. */
+ if (GET_CODE (operands[2]) != CONST_INT
+ || INTVAL (operands[2]) <= 2
+ || INTVAL (operands[2]) > 16
+ || GET_CODE (operands[0]) != MEM
+ || GET_CODE (operands[1]) != REG
+ || REGNO (operands[1]) >= 16)
+ FAIL;
+
+ count = INTVAL (operands[2]);
+ regno = REGNO (operands[1]);
+
+ operands[3] = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
+ to = force_reg (Pmode, XEXP (operands[0], 0));
+
+ /* Element i stores reg regno+i to offset 4*i.  */
+ for (i = 0; i < count; i++)
+ XVECEXP (operands[3], 0, i)
+ = gen_rtx_SET (VOIDmode,
+ change_address (operands[0], Pmode,
+ plus_constant (to, i * 4)),
+ gen_rtx_REG (Pmode, regno + i));
+}")
+
+;; Recognize a store-multiple PARALLEL of DImode words and emit stmg.
+;; In this pattern %1 is the memory destination and %2 the first
+;; register — the single-element path must emit \"stg %2,...(%1)\",
+;; not the other way around (mirror of *load_multiple_di, where the
+;; register is %1 and the memory %2).
+(define_insn "*store_multiple_di"
+ [(match_parallel 0 "store_multiple_operation"
+ [(set (match_operand:DI 1 "s_operand" "=oQ")
+ (match_operand:DI 2 "register_operand" "r"))])]
+ ""
+ "*
+{
+ int words = XVECLEN (operands[0], 0);
+
+ if (XVECLEN (operands[0], 0) == 1)
+ return \"stg\\t%2,0(%1)\";
+
+ /* Reuse operands[0] as the last register of the range.  */
+ operands[0] = gen_rtx_REG (DImode, REGNO (operands[2]) + words - 1);
+ return \"stmg\\t%2,%0,%1\";
+}"
+ [(set_attr "op_type" "RXE")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+
+;; SImode counterpart of *store_multiple_di, emitting stm.
+;; Same operand roles: %1 is memory, %2 the first register, so the
+;; single-element store is \"st %2,...(%1)\" (operands were swapped).
+(define_insn "*store_multiple_si"
+ [(match_parallel 0 "store_multiple_operation"
+ [(set (match_operand:SI 1 "s_operand" "=oQ")
+ (match_operand:SI 2 "register_operand" "r"))])]
+ ""
+ "*
+{
+ int words = XVECLEN (operands[0], 0);
+
+ if (XVECLEN (operands[0], 0) == 1)
+ return \"st\\t%2,0(%1)\";
+
+ operands[0] = gen_rtx_REG (SImode, REGNO (operands[2]) + words - 1);
+ return \"stm\\t%2,%0,%1\";
+}"
+ [(set_attr "op_type" "RXE")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+;;
+;; String instructions.
+;;
+
+;
+; movstrdi instruction pattern(s).
+;
+
+;; Block move with DImode length, 64-bit only.  Strategy:
+;;   len <= 256 constant  -> single mvc (movstrsico)
+;;   TARGET_MVCLE         -> mvcle loop on register pairs (movstrdi_64)
+;;   otherwise            -> unrolled mvc loop + ex (movstrdix_64),
+;;                           skipped entirely when len == 0.
+(define_expand "movstrdi"
+ [(set (match_operand:BLK 0 "general_operand" "")
+ (match_operand:BLK 1 "general_operand" ""))
+ (use (match_operand:DI 2 "general_operand" ""))
+ (match_operand 3 "" "")]
+ "TARGET_64BIT"
+ "
+{
+ rtx addr0, addr1;
+
+ addr0 = force_operand (XEXP (operands[0], 0), NULL_RTX);
+ addr1 = force_operand (XEXP (operands[1], 0), NULL_RTX);
+
+ if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) <= 256)
+ {
+ operands[0] = change_address (operands[0], VOIDmode, addr0);
+ operands[1] = change_address (operands[1], VOIDmode, addr1);
+ /* mvc encodes length-1 in its length field.  */
+ operands[2] = GEN_INT (INTVAL (operands[2]) - 1);
+
+ emit_insn (gen_movstrsico (operands[0], operands[1], operands[2]));
+ DONE;
+ }
+ else
+ {
+ if (TARGET_MVCLE)
+ {
+ /* implementation suggested by Richard Henderson <rth@cygnus.com> */
+ rtx reg0 = gen_reg_rtx (TImode);
+ rtx reg1 = gen_reg_rtx (TImode);
+ rtx len = operands[2];
+
+ if (! CONSTANT_P (len))
+ len = force_reg (DImode, len);
+
+ /* Load up the address+length pairs. */
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 0), addr0);
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 8), len);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg1, 0), addr1);
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg1, 8), len);
+
+ /* MOVE */
+ emit_insn (gen_movstrdi_64 (reg0, reg1));
+ DONE;
+ }
+ else
+ {
+ rtx label = gen_label_rtx ();
+ rtx reg0, reg1, len;
+
+ reg0 = gen_reg_rtx (DImode);
+ reg1 = gen_reg_rtx (DImode);
+ len = gen_reg_rtx (DImode);
+
+ /* Guard against a zero length, then hand len-1 to the
+ ex-based mover.  */
+ emit_move_insn (len, operands[2]);
+ emit_insn (gen_cmpdi (len, const0_rtx));
+ emit_jump_insn (gen_beq (label));
+ emit_move_insn (reg0, addr0);
+ emit_move_insn (reg1, addr1);
+ emit_insn (gen_adddi3 (len, len, constm1_rtx));
+ emit_insn (gen_movstrdix_64 (reg0, reg1, len));
+ emit_label (label);
+ DONE;
+ }
+ }
+}")
+
+;
+; movstrsi instruction pattern(s).
+;
+
+;; Block move with SImode length; same three-way strategy as movstrdi.
+;; The non-constant paths are 31-bit only (FAIL on TARGET_64BIT so the
+;; DImode expander is used instead).
+(define_expand "movstrsi"
+ [(set (match_operand:BLK 0 "general_operand" "")
+ (match_operand:BLK 1 "general_operand" ""))
+ (use (match_operand:SI 2 "general_operand" ""))
+ (match_operand 3 "" "")]
+ ""
+ "
+{
+ rtx addr0 = force_operand (XEXP (operands[0], 0), NULL_RTX);
+ rtx addr1 = force_operand (XEXP (operands[1], 0), NULL_RTX);
+
+ if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) <= 256)
+ {
+ operands[0] = change_address (operands[0], VOIDmode, addr0);
+ operands[1] = change_address (operands[1], VOIDmode, addr1);
+ /* mvc encodes length-1 in its length field.  */
+ operands[2] = GEN_INT (INTVAL (operands[2]) - 1);
+
+ emit_insn (gen_movstrsico (operands[0], operands[1], operands[2]));
+ DONE;
+ }
+ else
+ {
+ if (TARGET_64BIT)
+ FAIL;
+
+ if (TARGET_MVCLE)
+ {
+ /* implementation suggested by Richard Henderson <rth@cygnus.com> */
+ rtx reg0 = gen_reg_rtx (DImode);
+ rtx reg1 = gen_reg_rtx (DImode);
+ rtx len = operands[2];
+
+
+ if (! CONSTANT_P (len))
+ len = force_reg (SImode, len);
+
+ /* Load up the address+length pairs. */
+
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg0, 0), addr0);
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg0, 4), len);
+
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg1, 0), addr1);
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg1, 4), len);
+
+ /* MOVE */
+ emit_insn (gen_movstrsi_31 (reg0, reg1));
+ DONE;
+ }
+ else
+ {
+ rtx label = gen_label_rtx ();
+ rtx reg0, reg1, len;
+
+ reg0 = gen_reg_rtx (SImode);
+ reg1 = gen_reg_rtx (SImode);
+ len = gen_reg_rtx (SImode);
+
+ /* Guard against zero length, then hand len-1 to the
+ ex-based mover.  */
+ emit_move_insn (len, operands[2]);
+ emit_insn (gen_cmpsi (len, const0_rtx));
+ emit_jump_insn (gen_beq (label));
+ emit_move_insn (reg0, addr0);
+ emit_move_insn (reg1, addr1);
+ emit_insn (gen_addsi3 (len, len, constm1_rtx));
+ emit_insn (gen_movstrsix_31 (reg0, reg1, len));
+ emit_label (label);
+ DONE;
+ }
+ }
+}")
+
+; Move a block that is less than 256 bytes in length.
+
+;; operand 2 holds length-1 (see the expanders); mvc's length field
+;; is written as %c2+1 bytes.
+(define_insn "movstrsico"
+ [(set (match_operand:BLK 0 "s_operand" "=oQ")
+ (match_operand:BLK 1 "s_operand" "oQ"))
+ (use (match_operand 2 "const_int_operand" "I"))]
+ "((unsigned) INTVAL (operands[2]) < 256)"
+ "mvc\\t%O0(%c2+1,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")])
+
+; Move a block that is more than 256 bytes in length, or whose length
+; is in a register.
+
+;; Emits an mvc loop: move 256-byte chunks (brct on len>>8), then use
+;; "ex" to execute one final mvc whose length field is the low byte of
+;; the remaining count.  Operand 2 holds length-1.
+(define_insn "movstrdix_64"
+ [(set (mem:BLK (match_operand:DI 0 "register_operand" "a"))
+ (mem:BLK (match_operand:DI 1 "register_operand" "a")))
+ (use (match_operand:DI 2 "register_operand" "a"))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (match_scratch:DI 3 "=&a"))
+ (clobber (reg:CC 33))]
+ ""
+ "*
+{
+ rtx xop[4];
+ xop[0] = gen_label_rtx ();
+ xop[1] = gen_label_rtx ();
+ xop[2] = gen_label_rtx ();
+ xop[3] = operands[3];
+ output_asm_insn (\"srag\\t%3,%2,8\",operands);
+ output_asm_insn (\"jz\\t%l1\",xop);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[0]));
+ output_asm_insn (\"mvc\\t0(256,%0),0(%1)\",operands);
+ output_asm_insn (\"la\\t%0,256(%0)\",operands);
+ output_asm_insn (\"la\\t%1,256(%1)\",operands);
+ xop[3] = operands[3];
+ output_asm_insn (\"brct\\t%3,%l0\",xop);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[1]));
+ xop[3] = operands[3];
+ output_asm_insn (\"bras\\t%3,%l2\",xop);
+ output_asm_insn (\"mvc\\t0(1,%0),0(%1)\",operands);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[2]));
+ return \"ex\\t%2,0(%3)\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "atype" "mem")
+ (set_attr "length" "44")])
+
+;; 31-bit version of the above; sra has no three-operand form, hence
+;; the extra lr into the scratch register.
+(define_insn "movstrsix_31"
+ [(set (mem:BLK (match_operand:SI 0 "register_operand" "a"))
+ (mem:BLK (match_operand:SI 1 "register_operand" "a")))
+ (use (match_operand:SI 2 "register_operand" "a"))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (match_scratch:SI 3 "=&a"))
+ (clobber (reg:CC 33))]
+ ""
+ "*
+{
+ rtx xop[4];
+ xop[0] = gen_label_rtx ();
+ xop[1] = gen_label_rtx ();
+ xop[2] = gen_label_rtx ();
+ xop[3] = operands[3];
+ output_asm_insn (\"lr\\t%3,%2\",operands);
+ output_asm_insn (\"sra\\t%3,8\",operands);
+ output_asm_insn (\"jz\\t%l1\",xop);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[0]));
+ output_asm_insn (\"mvc\\t0(256,%0),0(%1)\",operands);
+ output_asm_insn (\"la\\t%0,256(%0)\",operands);
+ output_asm_insn (\"la\\t%1,256(%1)\",operands);
+ xop[3] = operands[3];
+ output_asm_insn (\"brct\\t%3,%l0\",xop);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[1]));
+ xop[3] = operands[3];
+ output_asm_insn (\"bras\\t%3,%l2\",xop);
+ output_asm_insn (\"mvc\\t0(1,%0),0(%1)\",operands);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[2]));
+ return \"ex\\t%2,0(%3)\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "42")
+ (set_attr "atype" "mem")])
+
+; Move a block that is larger than 255 bytes in length.
+
+;; mvcle operates on even/odd register pairs holding address+length;
+;; the jo loop resumes the (interruptible) instruction until done.
+(define_insn "movstrdi_64"
+ [(set (mem:BLK (subreg:DI (match_operand:TI 0 "register_operand" "d") 0))
+ (mem:BLK (subreg:DI (match_operand:TI 1 "register_operand" "d") 0)))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (reg:CC 33))]
+ ""
+ "mvcle\\t%0,%1,0\;jo\\t.-4"
+ [(set_attr "op_type" "NN")
+ (set_attr "atype" "mem")
+ (set_attr "length" "8")])
+
+;; 31-bit mvcle variant; the DImode operand is the even/odd SI pair.
+(define_insn "movstrsi_31"
+ [(set (mem:BLK (subreg:SI (match_operand:DI 0 "register_operand" "d") 0))
+ (mem:BLK (subreg:SI (match_operand:DI 1 "register_operand" "d") 0)))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (reg:CC 33))]
+ ""
+ "mvcle\\t%0,%1,0\;jo\\t.-4"
+ [(set_attr "op_type" "NN")
+ (set_attr "atype" "mem")
+ (set_attr "length" "8")])
+
+;
+; clrstrdi instruction pattern(s).
+;
+
+;; Clear a block, DImode length, 64-bit only: small constant lengths
+;; use xc-with-itself (clrstrsico), everything else mvcle with a
+;; zero-length source pair (clrstrsi_64).
+(define_expand "clrstrdi"
+ [(set (match_operand:BLK 0 "general_operand" "")
+ (const_int 0))
+ (use (match_operand:DI 1 "general_operand" ""))
+ (match_operand 2 "" "")]
+ "TARGET_64BIT"
+ "
+{
+ rtx addr = force_operand (XEXP (operands[0], 0), NULL_RTX);
+
+ operands[0] = change_address (operands[0], VOIDmode, addr);
+
+ if (GET_CODE (operands[1]) == CONST_INT && INTVAL (operands[1]) < 256)
+ {
+ emit_insn (gen_clrstrsico (operands[0], operands[1]));
+ DONE;
+ }
+ else
+ {
+ rtx reg0 = gen_reg_rtx (TImode);
+ rtx reg1 = gen_reg_rtx (TImode);
+ rtx len = operands[1];
+
+ if (! CONSTANT_P (len))
+ len = force_reg (DImode, len);
+
+ /* Load up the address+length pairs. */
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 0), addr);
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 8), len);
+
+ /* Source pair: only the length (zero) matters; a zero-length
+ source makes mvcle pad — i.e. clear — the destination.  */
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg1, 8), const0_rtx);
+
+ /* Clear! */
+ emit_insn (gen_clrstrsi_64 (reg0, reg1));
+ DONE;
+ }
+}")
+
+;
+; clrstrsi instruction pattern(s).
+;
+
+;; 31-bit counterpart of clrstrdi: xc for small constant lengths,
+;; otherwise mvcle with a zero-length source pair.
+(define_expand "clrstrsi"
+ [(set (match_operand:BLK 0 "general_operand" "")
+ (const_int 0))
+ (use (match_operand:SI 1 "general_operand" ""))
+ (match_operand 2 "" "")]
+ "!TARGET_64BIT"
+ "
+{
+ rtx addr = force_operand (XEXP (operands[0], 0), NULL_RTX);
+
+ operands[0] = change_address (operands[0], VOIDmode, addr);
+
+ if (GET_CODE (operands[1]) == CONST_INT && INTVAL (operands[1]) < 256)
+ {
+ emit_insn (gen_clrstrsico (operands[0], operands[1]));
+ DONE;
+ }
+ else
+ {
+ rtx reg0 = gen_reg_rtx (DImode);
+ rtx reg1 = gen_reg_rtx (DImode);
+ rtx len = operands[1];
+
+ if (! CONSTANT_P (len))
+ len = force_reg (SImode, len);
+
+ /* Load up the address+length pairs. */
+
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg0, 0), addr);
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg0, 4), len);
+
+ emit_move_insn (gen_rtx_SUBREG (SImode, reg1, 4), const0_rtx);
+
+ /* Clear! */
+ emit_insn (gen_clrstrsi_31 (reg0, reg1));
+ DONE;
+ }
+}")
+
+; Clear memory with length less than 256 bytes
+
+;; xc of an area with itself zeroes it; operand 1 is the byte count.
+(define_insn "clrstrsico"
+ [(set (match_operand:BLK 0 "s_operand" "=Qo")
+ (const_int 0))
+ (use (match_operand 1 "immediate_operand" "I"))
+ (clobber (reg:CC 33))]
+ ""
+ "xc\\t%O0(%1,%R0),%0"
+ [(set_attr "op_type" "RS")
+ (set_attr "atype" "mem")])
+
+; Clear memory with length greater than 256 bytes, or length not constant
+
+;; mvcle with a zero-length source pads (clears) the destination;
+;; jo resumes the interruptible instruction until complete.
+(define_insn "clrstrsi_64"
+ [(set (mem:BLK (subreg:DI (match_operand:TI 0 "register_operand" "d") 0))
+ (const_int 0))
+ (use (match_operand:TI 1 "register_operand" "d"))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "mvcle\\t%0,%1,0\;jo\\t.-4"
+ [(set_attr "op_type" "NN")
+ (set_attr "atype" "mem")
+ (set_attr "cycle" "n")
+ (set_attr "length" "8")])
+
+;; 31-bit variant on even/odd SImode register pairs.
+(define_insn "clrstrsi_31"
+ [(set (mem:BLK (subreg:SI (match_operand:DI 0 "register_operand" "d") 0))
+ (const_int 0))
+ (use (match_operand:DI 1 "register_operand" "d"))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "mvcle\\t%0,%1,0\;jo\\t.-4"
+ [(set_attr "op_type" "NN")
+ (set_attr "atype" "mem")
+ (set_attr "cycle" "n")
+ (set_attr "length" "8")])
+
+;
+; cmpstrdi instruction pattern(s).
+;
+
+;; Block compare with DImode length, 64-bit only: clc for small
+;; constant lengths, clcl on register pairs otherwise; the CC result
+;; is then materialized as -1/0/1 by cmpint_di.
+(define_expand "cmpstrdi"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (compare:DI (match_operand:BLK 1 "s_operand" "")
+ (match_operand:BLK 2 "s_operand" "") ) )
+ (use (match_operand:DI 3 "general_operand" ""))
+ (use (match_operand:DI 4 "" ""))]
+ "TARGET_64BIT"
+ "
+{
+ rtx addr0, addr1;
+
+ /* for pre/post increment */
+ operands[1] = protect_from_queue (operands[1], 0);
+ operands[2] = protect_from_queue (operands[2], 0);
+ operands[3] = protect_from_queue (operands[3], 0);
+
+ addr0 = force_operand (XEXP (operands[1], 0), NULL_RTX);
+ addr1 = force_operand (XEXP (operands[2], 0), NULL_RTX);
+
+ if (GET_CODE (operands[3]) == CONST_INT && INTVAL (operands[3]) < 256)
+ {
+ /* Zero-length compare: the result is simply zero.  */
+ if (INTVAL (operands[3]) == 0) {
+ emit_move_insn (operands[0], operands[3]);
+ DONE;
+ }
+
+ operands[1] = change_address (operands[1], VOIDmode, addr0);
+ operands[2] = change_address (operands[2], VOIDmode, addr1);
+
+ emit_insn (gen_cmpstr_const (operands[1], operands[2], operands[3]));
+ emit_insn (gen_cmpint_di (operands[0]));
+ DONE;
+ }
+ else
+ {
+ /* implementation suggested by Richard Henderson <rth@cygnus.com> */
+ rtx reg0 = gen_reg_rtx (TImode);
+ rtx reg1 = gen_reg_rtx (TImode);
+ rtx len = operands[3];
+
+ if (! CONSTANT_P (len))
+ len = force_reg (DImode, len);
+
+ /* Load up the address+length pairs. */
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 0), addr0);
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg0, 8), len);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg1, 0), addr1);
+ emit_move_insn (gen_rtx_SUBREG (DImode, reg1, 8), len);
+
+ /* Compare! */
+ emit_insn (gen_cmpstr_64 (reg0, reg1));
+ emit_insn (gen_cmpint_di (operands[0]));
+ DONE;
+ }
+}")
+
+;
+; cmpstrsi instruction pattern(s).
+;
+
+;; Block compare with SImode length; register-pair path picks TImode
+;; pairs on 64-bit and DImode pairs on 31-bit targets.
+(define_expand "cmpstrsi"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (compare:SI (match_operand:BLK 1 "s_operand" "")
+ (match_operand:BLK 2 "s_operand" "") ) )
+ (use (match_operand:SI 3 "general_operand" ""))
+ (use (match_operand:SI 4 "" ""))]
+ ""
+ "
+{
+ rtx addr0, addr1;
+
+ /* for pre/post increment */
+ operands[1] = protect_from_queue (operands[1], 0);
+ operands[2] = protect_from_queue (operands[2], 0);
+ operands[3] = protect_from_queue (operands[3], 0);
+
+ addr0 = force_operand (XEXP (operands[1], 0), NULL_RTX);
+ addr1 = force_operand (XEXP (operands[2], 0), NULL_RTX);
+
+ if (GET_CODE (operands[3]) == CONST_INT && INTVAL (operands[3]) < 256)
+ {
+ /* Zero-length compare: the result is simply zero.  */
+ if (INTVAL (operands[3]) == 0) {
+ emit_move_insn (operands[0], operands[3]);
+ DONE;
+ }
+
+ operands[1] = change_address (operands[1], VOIDmode, addr0);
+ operands[2] = change_address (operands[2], VOIDmode, addr1);
+
+ emit_insn (gen_cmpstr_const (operands[1], operands[2], operands[3]));
+ emit_insn (gen_cmpint_si (operands[0]));
+ DONE;
+ }
+ else
+ {
+ /* implementation suggested by Richard Henderson <rth@cygnus.com> */
+ rtx reg0, reg1;
+ rtx len = operands[3];
+
+ if (TARGET_64BIT)
+ {
+ reg0 = gen_reg_rtx (TImode);
+ reg1 = gen_reg_rtx (TImode);
+ }
+ else
+ {
+ reg0 = gen_reg_rtx (DImode);
+ reg1 = gen_reg_rtx (DImode);
+ }
+
+ if (! CONSTANT_P (len))
+ len = force_reg (Pmode, len);
+
+ /* Load up the address+length pairs. */
+ emit_move_insn (gen_rtx_SUBREG (Pmode, reg0, 0), addr0);
+ emit_move_insn (gen_rtx_SUBREG (Pmode, reg0,
+ GET_MODE_SIZE (Pmode)), len);
+
+ emit_move_insn (gen_rtx_SUBREG (Pmode, reg1, 0), addr1);
+ emit_move_insn (gen_rtx_SUBREG (Pmode, reg1,
+ GET_MODE_SIZE (Pmode)), len);
+
+ /* Compare! */
+ if (TARGET_64BIT)
+ emit_insn (gen_cmpstr_64 (reg0, reg1));
+ else
+ emit_insn (gen_cmpstr_31 (reg0, reg1));
+
+ emit_insn (gen_cmpint_si (operands[0]));
+ DONE;
+ }
+}")
+
+; Compare a block that is less than 256 bytes in length.
+
+;; clc sets the unsigned condition code consumed by cmpint_si/di.
+(define_insn "cmpstr_const"
+ [(set (reg:CCU 33)
+ (compare:CCU (match_operand:BLK 0 "s_operand" "oQ")
+ (match_operand:BLK 1 "s_operand" "oQ")))
+ (use (match_operand 2 "immediate_operand" "I"))]
+ "(unsigned) INTVAL (operands[2]) < 256"
+ "clc\\t%O0(%c2,%R0),%1"
+ [(set_attr "op_type" "SS")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Compare a block that is larger than 255 bytes in length.
+
+;; clcl takes even/odd register pairs holding address+length and
+;; clobbers all four registers.
+(define_insn "cmpstr_64"
+ [(set (reg:CCU 33)
+ (compare:CCU (mem:BLK (subreg:DI (match_operand:TI 0 "register_operand" "d") 0))
+ (mem:BLK (subreg:DI (match_operand:TI 1 "register_operand" "d") 0))))
+ (clobber (subreg:DI (match_dup 0) 0))
+ (clobber (subreg:DI (match_dup 0) 8))
+ (clobber (subreg:DI (match_dup 1) 0))
+ (clobber (subreg:DI (match_dup 1) 8))]
+ "TARGET_64BIT"
+ "clcl\\t%0,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+;; 31-bit clcl on DImode (even/odd SI) register pairs.
+(define_insn "cmpstr_31"
+ [(set (reg:CCU 33)
+ (compare:CCU (mem:BLK (subreg:SI (match_operand:DI 0 "register_operand" "d") 0))
+ (mem:BLK (subreg:SI (match_operand:DI 1 "register_operand" "d") 0))))
+ (clobber (subreg:SI (match_dup 0) 0))
+ (clobber (subreg:SI (match_dup 0) 4))
+ (clobber (subreg:SI (match_dup 1) 0))
+ (clobber (subreg:SI (match_dup 1) 4))]
+ "!TARGET_64BIT"
+ "clcl\\t%0,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "atype" "mem")
+ (set_attr "type" "xset")])
+
+; Convert condition code to integer in range (-1, 0, 1)
+
+;; Sequence (byte offsets): lhi@0(4) jh@4(4) jl@8(4) sr@12(2) lcr@14(2).
+;; high: jh .+12 skips to the end leaving 1; low: jl .+6 lands on lcr,
+;; negating 1 to -1; equal: sr zeroes %0 and lcr keeps it 0.
+(define_insn "cmpint_si"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (compare:SI (reg:CCU 33) (const_int 0)))]
+ ""
+ "*
+{
+ output_asm_insn (\"lhi\\t%0,1\", operands);
+ output_asm_insn (\"jh\\t.+12\", operands);
+ output_asm_insn (\"jl\\t.+6\", operands);
+ output_asm_insn (\"sr\\t%0,%0\", operands);
+ return \"lcr\\t%0,%0\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "16")
+ (set_attr "atype" "reg")
+ (set_attr "type" "xset")])
+
+;; DImode variant of cmpint_si.  Unlike the 31-bit sequence, sgr and
+;; lcgr are RRE format (4 bytes each, not 2-byte RR), so the relative
+;; branch offsets differ: lghi@0(4) jh@4(4) jl@8(4) sgr@12(4) lcgr@16(4).
+;; high: jh .+16 skips to the end (result 1); low: jl .+8 lands on lcgr
+;; (result -1); equal: sgr then lcgr leave 0.  The previous offsets
+;; (.+12/.+6) were copied from the RR-format sequence and landed on
+;; lcgr / mid-sgr respectively.  Total length is 20 bytes.
+(define_insn "cmpint_di"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (compare:DI (reg:CCU 33) (const_int 0)))]
+ "TARGET_64BIT"
+ "*
+{
+ output_asm_insn (\"lghi\\t%0,1\", operands);
+ output_asm_insn (\"jh\\t.+16\", operands);
+ output_asm_insn (\"jl\\t.+8\", operands);
+ output_asm_insn (\"sgr\\t%0,%0\", operands);
+ return \"lcgr\\t%0,%0\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "20")
+ (set_attr "atype" "reg")
+ (set_attr "type" "xset")])
+
+;;
+;;- Conversion instructions.
+;;
+
+;
+; extendsidi2 instruction pattern(s).
+;
+
+;; Sign-extend SI to DI: lgfr (reg) or lgf (mem).
+(define_insn "extendsidi2"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (sign_extend:DI (match_operand:SI 1 "nonimmediate_operand" "d,m")))]
+ "TARGET_64BIT"
+ "@
+ lgfr\\t%0,%1
+ lgf\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+
+;
+; extendhidi2 instruction pattern(s).
+;
+
+;; Sign-extend HI to DI via shift left 48 / arithmetic shift right 48.
+(define_insn "extendhidi2"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (sign_extend:DI (match_operand:HI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "sllg\\t%0,%1,48\;srag\\t%0,%0,48"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "12")
+ (set_attr "cycle" "2")
+ (set_attr "type" "set")])
+
+
+;
+; extendqidi2 instruction pattern(s).
+;
+
+;; Sign-extend QI to DI via shift left 56 / arithmetic shift right 56.
+(define_insn "extendqidi2"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (sign_extend:DI (match_operand:QI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "sllg\\t%0,%1,56\;srag\\t%0,%0,56"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "12")
+ (set_attr "cycle" "2")
+ (set_attr "type" "set")])
+
+
+;
+; extendhisi2 instruction pattern(s).
+;
+
+;; Sign-extend HI to SI: in-place shifts, copy+shifts, or lh from memory.
+(define_insn "extendhisi2"
+ [(set (match_operand:SI 0 "register_operand" "=d,!d,d")
+ (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "0,d,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%1,16\;sra\\t%1,16
+ lr\\t%0,%1\;sll\\t%0,16\;sra\\t%0,16
+ lh\\t%0,%1"
+ [(set_attr "op_type" "NN,NN,RX")
+ (set_attr "cycle" "2,3,1")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "8,10,*")])
+
+
+;
+; extendqisi2 instruction pattern(s).
+;
+
+;; Sign-extend QI to SI: in-place shifts, or icm into the top byte
+;; (mask 8) followed by an arithmetic shift right of 24.
+(define_insn "extendqisi2"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (sign_extend:SI (match_operand:QI 1 "r_or_s_operand" "0,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,24\;sra\\t%0,24
+ icm\\t%0,8,%1\;sra\\t%0,24"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "cycle" "2")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "8,8")])
+
+
+;
+; extendqihi2 instruction pattern(s).
+;
+
+;; Sign-extend QI to HI; same sequences as extendqisi2 since HImode
+;; lives in the low end of a 32-bit register.
+(define_insn "extendqihi2"
+ [(set (match_operand:HI 0 "register_operand" "=d,d")
+ (sign_extend:HI (match_operand:QI 1 "r_or_s_operand" "0,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,24\;sra\\t%0,24
+ icm\\t%0,8,%1\;sra\\t%0,24"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "cycle" "2")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "8,8")])
+
+
+;
+; zero_extendsidi2 instruction pattern(s).
+;
+
+;; Zero-extend SI to DI: llgfr (reg) or llgf (mem).
+(define_insn "zero_extendsidi2"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "d,m")))]
+ "TARGET_64BIT"
+ "@
+ llgfr\\t%0,%1
+ llgf\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+
+;
+; zero_extendhidi2 instruction pattern(s).
+;
+
+;; Zero-extend HI to DI.  Register source: llgfr clears bits 0-31,
+;; then iilh zeroes bits 32-47, leaving only the low 16 bits.
+;; Memory source: a single llgh.
+(define_insn "zero_extendhidi2"
+ [(set (match_operand:DI 0 "register_operand" "=!d,d")
+ (zero_extend:DI (match_operand:HI 1 "nonimmediate_operand" "d,m")))]
+ "TARGET_64BIT"
+ "@
+ llgfr\\t%0,%1\;iilh\\t%0,0
+ llgh\\t%0,%1"
+ [(set_attr "op_type" "NN,RXE")
+ (set_attr "cycle" "2,1")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "12,*")
+ (set_attr "type" "set")])
+
+
+;
+; zero_extendqidi2 instruction pattern(s)
+;
+
+;; Zero-extend QI to DI: shift pair for registers, llgc for memory.
+(define_insn "zero_extendqidi2"
+ [(set (match_operand:DI 0 "register_operand" "=!d,d")
+ (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "d,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ sllg\\t%0,%1,56\;srlg\\t%0,%0,56
+ llgc\\t%0,%1"
+ [(set_attr "op_type" "NN,RXE")
+ (set_attr "cycle" "2,1")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "12,*")])
+
+
+;
+; zero_extendhisi2 instruction pattern(s).
+;
+
+;; On 31-bit targets zero-extension is done with icm reading zero
+;; bytes from a literal-pool zero word, so the expander passes that
+;; constant-pool reference as an extra operand.
+(define_expand "zero_extendhisi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (zero_extend:SI (match_operand:HI 1 "r_or_s_operand" "")))]
+ ""
+ "
+{
+ if (!TARGET_64BIT)
+ {
+ emit_insn (gen_zero_extendhisi2_31 (operands[0], operands[1],
+ force_const_mem (SImode, const0_rtx)));
+ DONE;
+ }
+}")
+
+;; 64-bit: clear the high halfword in place (iilh), or llgh from memory.
+(define_insn "*zero_extendhisi2_64"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "0,m")))]
+ "TARGET_64BIT"
+ "@
+ iilh\\t%0,0
+ llgh\\t%0,%1"
+ [(set_attr "op_type" "RI,RXE")
+ (set_attr "atype" "reg,mem")])
+
+;; 31-bit: icm mask 12 inserts two zero bytes (from operand 2, a pool
+;; zero) into bits 0-15 of the register copy; for a memory source the
+;; halfword is inserted high and shifted down.
+(define_insn "zero_extendhisi2_31"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (zero_extend:SI (match_operand:HI 1 "r_or_s_operand" "0,Q")))
+ (use (match_operand:SI 2 "memory_operand" "m,m"))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ icm\\t%0,12,%2
+ icm\\t%0,12,%1\;srl\\t%0,16"
+ [(set_attr "op_type" "RX,NN")
+ (set_attr "cycle" "1,2")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")
+ (set_attr "length" "*,8")])
+
+
+;
+; zero_extendqisi2 instruction pattern(s).
+;
+
+;; 31-bit, memory source: clear the register then insert the byte.
+;; Earlyclobber since %0 is written before %1 is read.
+(define_insn "*zero_extendqisi2_mem_31"
+ [(set (match_operand:SI 0 "register_operand" "=&d")
+ (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))
+ (use (match_operand:SI 2 "memory_operand" "m" ))
+ (clobber (reg:CC 33))]
+ ""
+ "sr\\t%0,%0\;ic\\t%0,%1"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "2")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")
+ (set_attr "length" "6")])
+
+;; 31-bit, register source: icm mask 14 inserts three zero bytes (from
+;; the literal-pool zero in operand 2) into bits 0-23, keeping the
+;; low byte.
+(define_insn "zero_extendqisi2_reg_31"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (zero_extend:SI (match_operand:QI 1 "register_operand" "0")))
+ (use (match_operand:SI 2 "memory_operand" "m" ))
+ (clobber (reg:CC 33))]
+ ""
+ "icm\\t%0,14,%2"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "set")])
+
+;; 64-bit: shift pair for registers, llgc for memory.
+(define_insn "*zero_extendqisi2_64"
+ [(set (match_operand:SI 0 "register_operand" "=!d,d")
+ (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "d,m")))]
+ "TARGET_64BIT"
+ "@
+ sllg\\t%0,%1,56\;srlg\\t%0,%0,56
+ llgc\\t%0,%1"
+ [(set_attr "op_type" "NN,RXE")
+ (set_attr "cycle" "2,1")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "12,*")])
+
+;; Expander: 31-bit goes through the reg variant with a pool zero.
+;; NOTE(review): a memory source on 31-bit would need the _mem_31
+;; pattern, but this expander always emits the reg variant — confirm
+;; reload handles that case.
+(define_expand "zero_extendqisi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (zero_extend:SI (match_operand:QI 1 "r_or_s_operand" "")))]
+ ""
+ "
+{
+ if (!TARGET_64BIT)
+ {
+ emit_insn (gen_zero_extendqisi2_reg_31 (operands[0], operands[1],
+ force_const_mem (SImode, const0_rtx)));
+ DONE;
+ }
+}")
+
+
+;
+; zero_extendqihi2 instruction pattern(s).
+;
+
+; 64-bit: mask the low byte in place (nill), or zero-extending load llgc.
+(define_insn "zero_extendqihi2_64"
+ [(set (match_operand:HI 0 "register_operand" "=d,d")
+ (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ nill\\t%0,0x00FF
+ llgc\\t%0,%1"
+ [(set_attr "op_type" "RI,RXE")
+ (set_attr "atype" "reg,mem")])
+
+; 31-bit: insert literal-pool zero bytes over bits 0-23 (register case),
+; or clear the register and insert the byte (memory case; earlyclobber
+; because the destination is zeroed before %1 is read).
+(define_insn "zero_extendqihi2_31"
+ [(set (match_operand:HI 0 "register_operand" "=d,&d")
+ (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0,m")))
+ (use (match_operand:SI 2 "memory_operand" "m,m"))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ icm\\t%0,14,%2
+ sr\\t%0,%0\;ic\\t%0,%1"
+ [(set_attr "op_type" "RX,NN")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "*,8")])
+
+; Expander: select the 31-bit or 64-bit implementation explicitly.
+(define_expand "zero_extendqihi2"
+ [(set (match_operand:HI 0 "register_operand" "")
+ (zero_extend:HI (match_operand:QI 1 "general_operand" "")))]
+ ""
+ "
+{
+ if (!TARGET_64BIT)
+ {
+ emit_insn (gen_zero_extendqihi2_31 (operands[0], operands[1],
+ force_const_mem (SImode, const0_rtx)));
+ DONE;
+ }
+ else
+ {
+ emit_insn (gen_zero_extendqihi2_64 (operands[0], operands[1]));
+ DONE;
+ }
+}")
+
+
+;
+; truncdisi2 instruction pattern(s).
+;
+
+; DI -> SI truncation: llgfr copies the low 32 bits (the high word it
+; clears is dead in an SImode result).
+(define_insn "truncdisi2"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (truncate:SI (match_operand:DI 1 "register_operand" "d")))]
+ "TARGET_64BIT"
+ "llgfr\\t%0,%1"
+ [(set_attr "op_type" "RRE")])
+
+
+;
+; truncdihi2 instruction pattern(s).
+;
+
+; DI -> HI truncation: copy the low word (llgfr), then clear its upper
+; halfword (iilh), leaving the low 16 bits.  llgfr is RRE (4 bytes) and
+; iilh is RI (4 bytes), so the sequence is 8 bytes long — the previous
+; length of 10 over-stated it (cf. truncsiqi2, iilh+nill = length 8).
+(define_insn "truncdihi2"
+ [(set (match_operand:HI 0 "register_operand" "=d")
+ (truncate:HI (match_operand:DI 1 "register_operand" "d")))]
+ "TARGET_64BIT"
+ "llgfr\\t%0,%1\;iilh\\t%0,0"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "8")])
+
+
+
+;
+; truncdiqi2 instruction pattern(s).
+;
+
+; DI -> QI truncation: shift out everything above the low byte
+; (sllg 56 / srlg 56; two 6-byte RSE instructions, length 12).
+(define_insn "truncdiqi2"
+ [(set (match_operand:QI 0 "register_operand" "=d")
+ (truncate:QI (match_operand:DI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "sllg\\t%0,%1,56\;srlg\\t%0,%0,56"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "2")
+ (set_attr "length" "12")])
+
+
+;
+; truncsihi2 instruction pattern(s).
+;
+
+; Expander: on 31-bit go through do_truncsihi2, which needs a
+; literal-pool zero word; on 64-bit fall through to *truncsihi2_64.
+(define_expand "truncsihi2"
+ [(set (match_operand:HI 0 "register_operand" "")
+ (truncate:HI (match_operand:SI 1 "register_operand" "")))]
+ ""
+ "
+{
+ if (!TARGET_64BIT)
+ {
+ emit_insn (gen_do_truncsihi2 (operands[0], operands[1],
+ force_const_mem (SImode, const0_rtx)));
+ DONE;
+ }
+}")
+
+
+; Insert two zero bytes from the literal pool over bits 0-15.
+(define_insn "do_truncsihi2"
+ [(set (match_operand:HI 0 "register_operand" "=d")
+ (truncate:HI (match_operand:SI 1 "register_operand" "0")))
+ (use (match_operand:SI 2 "memory_operand" "m"))
+ (clobber (reg:CC 33))]
+ ""
+ "icm\\t%0,12,%2"
+ [(set_attr "op_type" "RX")])
+
+; 64-bit: clear the upper halfword of the low word with iilh.
+(define_insn "*truncsihi2_64"
+ [(set (match_operand:HI 0 "register_operand" "=d")
+ (truncate:HI (match_operand:SI 1 "register_operand" "0")))]
+ "TARGET_64BIT"
+ "iilh\\t%0,0"
+ [(set_attr "op_type" "RI")])
+
+
+;
+; truncsiqi2 instruction pattern(s).
+;
+
+; SI -> QI truncation (64-bit only): clear everything above the low byte
+; with two 4-byte immediate inserts (length 8).
+(define_insn "truncsiqi2"
+ [(set (match_operand:QI 0 "register_operand" "=d")
+ (truncate:QI (match_operand:SI 1 "register_operand" "0")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "iilh\\t%0,0\;nill\\t%0,0x00FF"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "2")
+ (set_attr "length" "8")])
+
+
+;
+; trunchiqi2 instruction pattern(s).
+;
+
+; HI -> QI truncation (64-bit only): mask to the low byte.
+(define_insn "trunchiqi2"
+ [(set (match_operand:QI 0 "register_operand" "=d")
+ (truncate:QI (match_operand:HI 1 "register_operand" "0")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "nill\\t%0,0x00FF"
+ [(set_attr "op_type" "RI")])
+
+
+;
+; fixuns_truncdfdi2 and fix_truncdfdi2 instruction pattern(s).
+;
+
+; Unsigned DF -> DI conversion.  cgdbr only converts signed values, so
+; inputs >= 2^63 are first reduced by 2^64; the signed conversion of the
+; reduced value then yields the correct 64-bit bit pattern.  The rounding
+; modes passed to the _ieee pattern (5 and 7) both truncate toward zero.
+(define_expand "fixuns_truncdfdi2"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (unsigned_fix:DI (match_operand:DF 1 "register_operand" "")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx temp = gen_reg_rtx (DFmode);
+ operands[1] = force_reg (DFmode, operands[1]);
+
+ emit_insn (gen_cmpdf (operands[1], force_const_mem (DFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x8000000000000000ULL, DFmode))));
+ emit_jump_insn (gen_blt (label1));
+
+ emit_insn (gen_subdf3 (temp, operands[1], force_const_mem (DFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x10000000000000000ULL, DFmode))));
+ emit_insn (gen_fix_truncdfdi2_ieee (operands[0], temp, GEN_INT(7)));
+ emit_jump_insn (gen_jump (label2));
+
+ emit_label (label1);
+ emit_insn (gen_fix_truncdfdi2_ieee (operands[0], operands[1], GEN_INT(5)));
+ emit_label (label2);
+ DONE;
+}")
+
+; Signed DF -> DI: just cgdbr with round-toward-zero (mode 5).
+(define_expand "fix_truncdfdi2"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (fix:DI (match_operand:DF 1 "nonimmediate_operand" "")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ operands[1] = force_reg (DFmode, operands[1]);
+ emit_insn (gen_fix_truncdfdi2_ieee (operands[0], operands[1], GEN_INT(5)));
+ DONE;
+}")
+
+; The unspec carries the rounding mode, emitted as the immediate %h2.
+(define_insn "fix_truncdfdi2_ieee"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (fix:DI (match_operand:DF 1 "register_operand" "f")))
+ (unspec:DI [(match_operand:DI 2 "immediate_operand" "K")] 1)
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cgdbr\\t%0,%h2,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n")])
+
+;
+; fixuns_truncdfsi2 and fix_truncdfsi2 instruction pattern(s).
+;
+
+; Unsigned DF -> SI: same wrap-around trick as for DImode, with the
+; thresholds 2^31 and 2^32.
+(define_expand "fixuns_truncdfsi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unsigned_fix:SI (match_operand:DF 1 "register_operand" "")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx temp = gen_reg_rtx (DFmode);
+
+ operands[1] = force_reg (DFmode,operands[1]);
+ emit_insn (gen_cmpdf (operands[1], force_const_mem (DFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x80000000, DFmode))));
+ emit_jump_insn (gen_blt (label1));
+ emit_insn (gen_subdf3 (temp, operands[1], force_const_mem (DFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x100000000ULL, DFmode))));
+ emit_insn (gen_fix_truncdfsi2_ieee (operands[0], temp, GEN_INT (7)));
+ emit_jump_insn (gen_jump (label2));
+
+ emit_label (label1);
+ emit_insn (gen_fix_truncdfsi2_ieee (operands[0], operands[1], GEN_INT (5)));
+ emit_label (label2);
+ DONE;
+}")
+
+; Signed DF -> SI: IEEE hardware has cfdbr; IBM (hex) float has no
+; convert-to-fixed instruction, so the POP A.5.7.2 magic-number sequence
+; is used instead.
+(define_expand "fix_truncdfsi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (fix:SI (match_operand:DF 1 "nonimmediate_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (TARGET_IBM_FLOAT)
+ {
+ /* This is the algorithm from POP chapter A.5.7.2.  */
+
+ rtx temp = assign_stack_local (BLKmode, 2 * UNITS_PER_WORD, BITS_PER_WORD);
+ rtx two31r = force_const_mem (DFmode,
+ gen_rtx (CONST_DOUBLE, VOIDmode, cc0_rtx,
+ 0x08000000, 0x4F000000));
+ rtx two32 = force_const_mem (DFmode,
+ gen_rtx (CONST_DOUBLE, VOIDmode, cc0_rtx,
+ 0x0, 0x4E000001));
+
+ operands[1] = force_reg (DFmode, operands[1]);
+ emit_insn (gen_fix_truncdfsi2_ibm (operands[0], operands[1],
+ two31r, two32, temp));
+ }
+ else
+ {
+ operands[1] = force_reg (DFmode, operands[1]);
+ emit_insn (gen_fix_truncdfsi2_ieee (operands[0], operands[1], GEN_INT (5)));
+ }
+
+ DONE;
+}")
+
+(define_insn "fix_truncdfsi2_ieee"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (fix:SI (match_operand:DF 1 "register_operand" "f")))
+ (unspec:SI [(match_operand:SI 2 "immediate_operand" "K")] 1)
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cfdbr\\t%0,%h2,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n" )])
+
+; IBM-float conversion per POP A.5.7.2: operand 1 is destroyed (hence
+; the "+f" constraint); the integer result is assembled in the stack
+; temporary (operand 4), its sign bit flipped with XI, then reloaded.
+(define_insn "fix_truncdfsi2_ibm"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (fix:SI (match_operand:DF 1 "nonimmediate_operand" "+f")))
+ (use (match_operand:DF 2 "memory_operand" "m"))
+ (use (match_operand:DF 3 "memory_operand" "m"))
+ (use (match_operand:BLK 4 "memory_operand" "m"))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "*
+{
+ output_asm_insn (\"sd\\t%1,%2\", operands);
+ output_asm_insn (\"aw\\t%1,%3\", operands);
+ output_asm_insn (\"std\\t%1,%4\", operands);
+ output_asm_insn (\"xi\\t%N4,128\", operands);
+ return \"l\\t%0,%N4\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "n")
+ (set_attr "length" "20")])
+
+;
+; fixuns_truncsfdi2 and fix_truncsfdi2 instruction pattern(s).
+;
+
+; Unsigned SF -> DI: same reduce-by-2^64 trick as fixuns_truncdfdi2,
+; done in SFmode.
+(define_expand "fixuns_truncsfdi2"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (unsigned_fix:DI (match_operand:SF 1 "register_operand" "")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx temp = gen_reg_rtx (SFmode);
+
+ operands[1] = force_reg (SFmode, operands[1]);
+ emit_insn (gen_cmpsf (operands[1], force_const_mem (SFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x8000000000000000ULL, SFmode))));
+ emit_jump_insn (gen_blt (label1));
+
+ emit_insn (gen_subsf3 (temp, operands[1], force_const_mem (SFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x10000000000000000ULL, SFmode))));
+ emit_insn (gen_fix_truncsfdi2_ieee (operands[0], temp, GEN_INT(7)));
+ emit_jump_insn (gen_jump (label2));
+
+ emit_label (label1);
+ emit_insn (gen_fix_truncsfdi2_ieee (operands[0], operands[1], GEN_INT(5)));
+ emit_label (label2);
+ DONE;
+}")
+
+; Signed SF -> DI: cgebr with round-toward-zero (mode 5).
+(define_expand "fix_truncsfdi2"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (fix:DI (match_operand:SF 1 "nonimmediate_operand" "")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ operands[1] = force_reg (SFmode, operands[1]);
+ emit_insn (gen_fix_truncsfdi2_ieee (operands[0], operands[1], GEN_INT(5)));
+ DONE;
+}")
+
+; The unspec carries the rounding mode, emitted as the immediate %h2.
+(define_insn "fix_truncsfdi2_ieee"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (fix:DI (match_operand:SF 1 "register_operand" "f")))
+ (unspec:DI [(match_operand:DI 2 "immediate_operand" "K")] 1)
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cgebr\\t%0,%h2,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n")])
+
+;
+; fixuns_truncsfsi2 and fix_truncsfsi2 instruction pattern(s).
+;
+
+; Unsigned SF -> SI: reduce-by-2^32 trick at the 2^31 threshold.
+(define_expand "fixuns_truncsfsi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (unsigned_fix:SI (match_operand:SF 1 "register_operand" "")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "
+{
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx temp = gen_reg_rtx (SFmode);
+
+ operands[1] = force_reg (SFmode, operands[1]);
+ emit_insn (gen_cmpsf (operands[1], force_const_mem (SFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x80000000, SFmode))));
+ emit_jump_insn (gen_blt (label1));
+ emit_insn (gen_subsf3 (temp, operands[1], force_const_mem (SFmode,
+ CONST_DOUBLE_FROM_REAL_VALUE (0x100000000ULL, SFmode))));
+ emit_insn (gen_fix_truncsfsi2_ieee (operands[0], temp, GEN_INT (7)));
+ emit_jump_insn (gen_jump (label2));
+
+ emit_label (label1);
+ emit_insn (gen_fix_truncsfsi2_ieee (operands[0], operands[1], GEN_INT (5)));
+ emit_label (label2);
+ DONE;
+}")
+
+; Signed SF -> SI: cfebr for IEEE; IBM float widens to DFmode and then
+; reuses the fix_truncdfsi2 POP sequence.
+(define_expand "fix_truncsfsi2"
+ [(set (match_operand:SI 0 "register_operand" "")
+ (fix:SI (match_operand:SF 1 "nonimmediate_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (TARGET_IBM_FLOAT)
+ {
+ /* Convert to DFmode and then use the POP algorithm.  */
+ rtx temp = gen_reg_rtx (DFmode);
+ emit_insn (gen_extendsfdf2 (temp, operands[1]));
+ emit_insn (gen_fix_truncdfsi2 (operands[0], temp));
+ }
+ else
+ {
+ operands[1] = force_reg (SFmode, operands[1]);
+ emit_insn (gen_fix_truncsfsi2_ieee (operands[0], operands[1], GEN_INT (5)));
+ }
+
+ DONE;
+}")
+
+(define_insn "fix_truncsfsi2_ieee"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (fix:SI (match_operand:SF 1 "register_operand" "f")))
+ (unspec:SI [(match_operand:SI 2 "immediate_operand" "K")] 1)
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cfebr\\t%0,%h2,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n")])
+
+;
+; floatdidf2 instruction pattern(s).
+;
+
+; Signed DI -> DF conversion (BFP only).
+(define_insn "floatdidf2"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (float:DF (match_operand:DI 1 "register_operand" "d")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cdgbr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n" )])
+
+;
+; floatdisf2 instruction pattern(s).
+;
+
+; Signed DI -> SF conversion (BFP only).
+(define_insn "floatdisf2"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (float:SF (match_operand:DI 1 "register_operand" "d")))]
+ "TARGET_64BIT && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cegbr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n" )])
+
+;
+; floatsidf2 instruction pattern(s).
+;
+
+; Signed SI -> DF: cdfbr for IEEE; IBM float uses the POP A.5.7.1
+; magic-number sequence via floatsidf2_ibm.
+(define_expand "floatsidf2"
+ [(set (match_operand:DF 0 "register_operand" "")
+ (float:DF (match_operand:SI 1 "register_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (TARGET_IBM_FLOAT)
+ {
+ /* This is the algorithm from POP chapter A.5.7.1.  */
+
+ rtx temp = assign_stack_local (BLKmode, 2 * UNITS_PER_WORD, BITS_PER_WORD);
+ rtx two31 = force_const_mem (DFmode,
+ gen_rtx (CONST_DOUBLE, VOIDmode, cc0_rtx,
+ 0x80000000, 0x4E000000));
+
+ emit_insn (gen_floatsidf2_ibm (operands[0], operands[1], two31, temp));
+ DONE;
+ }
+}")
+
+(define_insn "floatsidf2_ieee"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (float:DF (match_operand:SI 1 "register_operand" "d")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cdfbr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n" )])
+
+; IBM-float conversion per POP A.5.7.1: store the integer with its sign
+; bit flipped next to the 2^31-biased magic constant in the stack
+; temporary, load the pair as a double and subtract the bias.
+(define_insn "floatsidf2_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (float:DF (match_operand:SI 1 "register_operand" "d")))
+ (use (match_operand:DF 2 "memory_operand" "m"))
+ (use (match_operand:BLK 3 "memory_operand" "m"))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "*
+{
+ output_asm_insn (\"st\\t%0,%N3\", operands);
+ output_asm_insn (\"xi\\t%N3,128\", operands);
+ output_asm_insn (\"mvc\\t%O3(4,%R3),%2\", operands);
+ output_asm_insn (\"ld\\t%0,%3\", operands);
+ return \"sd\\t%0,%2\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "n" )
+ (set_attr "length" "20")])
+
+;
+; floatsisf2 instruction pattern(s).
+;
+
+; Signed SI -> SF: cefbr for IEEE; IBM float converts to DFmode via the
+; POP algorithm and then rounds down to SFmode.
+(define_expand "floatsisf2"
+ [(set (match_operand:SF 0 "register_operand" "")
+ (float:SF (match_operand:SI 1 "register_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (TARGET_IBM_FLOAT)
+ {
+ /* Use the POP algorithm to convert to DFmode and then truncate.  */
+ rtx temp = gen_reg_rtx (DFmode);
+ emit_insn (gen_floatsidf2 (temp, operands[1]));
+ emit_insn (gen_truncdfsf2 (operands[0], temp));
+ DONE;
+ }
+}")
+
+(define_insn "floatsisf2_ieee"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (float:SF (match_operand:SI 1 "register_operand" "d")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "cefbr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "cycle" "n" )])
+
+;
+; truncdfsf2 instruction pattern(s).
+;
+
+; Expander: DF constants go through the literal pool; the insn patterns
+; below then select the IEEE or IBM variant by target flags.
+(define_expand "truncdfsf2"
+ [(set (match_operand:SF 0 "register_operand" "")
+ (float_truncate:SF (match_operand:DF 1 "general_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (CONSTANT_P(operands[1]))
+ operands[1] = force_const_mem (DFmode, operands[1]);
+}")
+
+; IEEE (BFP) DF -> SF rounding.  LEDBR is an RRE-format instruction,
+; like the other BFP register-register operations in this file (cfdbr,
+; ldebr, ...), not an old-style RR instruction.
+(define_insn "truncdfsf2_ieee"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (float_truncate:SF (match_operand:DF 1 "nonimmediate_operand" "f")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "ledbr\\t%0,%1"
+ [(set_attr "op_type" "RRE")])
+
+; IBM (hex) float DF -> SF: plain short-load, register or storage form.
+(define_insn "truncdfsf2_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (float_truncate:SF (match_operand:DF 1 "nonimmediate_operand" "f,m")))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ lrer\\t%0,%1
+ le\\t%0,%1"
+ [(set_attr "op_type" "RR,RX")])
+
+;
+; extendsfdf2 instruction pattern(s).
+;
+
+; Expander: IBM float needs the explicit clear-then-load sequence of
+; extendsfdf2_ibm; IEEE falls through to extendsfdf2_ieee.
+(define_expand "extendsfdf2"
+ [(set (match_operand:DF 0 "register_operand" "")
+ (float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "")))]
+ "TARGET_HARD_FLOAT"
+ "
+{
+ if (TARGET_IBM_FLOAT)
+ {
+ emit_insn (gen_extendsfdf2_ibm (operands[0], operands[1]));
+ DONE;
+ }
+}")
+
+(define_insn "extendsfdf2_ieee"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "f,m")))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ ldebr\\t%0,%1
+ ldeb\\t%0,%1"
+ [(set_attr "op_type" "RRE,RXE")])
+
+; IBM-float SF -> DF extension: clear the full double register (sdr),
+; then load the short operand into its high half.  Each alternative
+; expands to TWO machine instructions, so per the file's convention the
+; op_type is NN with explicit lengths (sdr+ler = 2+2 = 4 bytes,
+; sdr+le = 2+4 = 6 bytes); the previous "RRE,RXE" described neither insn.
+(define_insn "extendsfdf2_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ sdr\\t%0,%0\;ler\\t%0,%1
+ sdr\\t%0,%0\;le\\t%0,%1"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "4,6")])
+
+
+;;
+;; ARITHMETIC OPERATIONS
+;;
+; Arithmetic operations set the condition code.  Because of unpredictable
+; bits in the registers for halfword and byte operands, the condition
+; code can be set incorrectly by halfword and byte operations.
+
+;;
+;;- Add instructions.
+;;
+
+;
+; adddi3 instruction pattern(s).
+;
+
+; Address-style add via LA, usable when operand 1 is the stack pointer
+; or the literal-pool base register and operand 2 either fits the
+; displacement field (constraint J) or is an address register.
+(define_insn "addaddr_esame"
+ [(set (match_operand:DI 0 "register_operand" "=a,a")
+ (plus:DI (match_operand:DI 1 "register_operand" "%a,a")
+ (match_operand:DI 2 "nonmemory_operand" "J,a")))]
+ "TARGET_64BIT && (((REGNO (operands[1]) == STACK_POINTER_REGNUM ) ||
+ (REGNO (operands[1]) == BASE_REGISTER)) &&
+ (GET_CODE (operands[2]) == REG ||
+ CONST_OK_FOR_LETTER_P (INTVAL (operands[2]),'J')))"
+ "@
+ la\\t%0,%c2(,%1)
+ la\\t%0,0(%1,%2)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; 64-bit add: register (agr), 16-bit immediate (aghi), or storage (ag).
+(define_insn "adddi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d,d")
+ (plus:DI (match_operand:DI 1 "register_operand" "%0,0,0")
+ (match_operand:DI 2 "general_operand" "d,K,m") ) )
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ agr\\t%0,%2
+ aghi\\t%0,%h2
+ ag\\t%0,%2"
+ [(set_attr "op_type" "RRE,RI,RXE")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+;
+; Reload sometimes presents the commuted form
+; (set (reg x) (plus (reg y) (reg x))), so provide it explicitly.
+;
+
+(define_insn "adddi3_inv_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d,d")
+ (plus:DI (match_operand:DI 1 "general_operand" "%d,K,m")
+ (match_operand:DI 2 "register_operand" "0,0,0") ) )
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ agr\\t%0,%1
+ aghi\\t%0,%h1
+ ag\\t%0,%1"
+ [(set_attr "op_type" "RRE,RI,RXE")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; 31-bit DImode add on a register pair: add the high words, add-logical
+; the low words, then propagate the carry into the high word (brc 12
+; skips the ahi when the alr produced no carry).
+(define_insn "adddi3_31"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (plus:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:DI 2 "general_operand" "d,m") ) )
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "*
+{
+ switch(which_alternative)
+ {
+ case 0: /* d <- d */
+ output_asm_insn (\"ar\\t%0,%2\", operands);
+ output_asm_insn (\"alr\\t%N0,%N2\", operands);
+ break;
+
+ case 1: /* d <- m */
+ output_asm_insn (\"a\\t%0,%2\", operands);
+ output_asm_insn (\"al\\t%N0,%N2\", operands);
+ break;
+ }
+
+ output_asm_insn (\"brc\\t12,.+8\", operands);
+ return \"ahi\\t%0,1\";
+}"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "12,16")])
+
+; Expander: select the 31-bit or 64-bit implementation explicitly.
+(define_expand "adddi3"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (plus:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:DI 2 "general_operand" "")))]
+ ""
+ "
+{
+ if (TARGET_64BIT)
+ emit_insn(gen_adddi3_64 (operands[0],operands[1],operands[2]));
+ else
+ emit_insn(gen_adddi3_31 (operands[0],operands[1],operands[2]));
+ DONE;
+}")
+
+; Load an arbitrary address expression into a register during reload.
+(define_insn "reload_load_address"
+ [(set (match_operand:DI 0 "register_operand" "=a")
+ (match_operand:QI 1 "address_operand" "p"))]
+ "TARGET_64BIT"
+ "la\\t%0,%a1"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; Register-register add without disturbing the CC: brxlg adds %2 to %0
+; as a side effect; its branch target is .+6, the next insn, so control
+; flow is unaffected whether or not the branch is taken.
+(define_insn "*reload_load_address_reg_0"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (plus:DI (match_operand:DI 1 "register_operand" "%0")
+ (match_operand:DI 2 "register_operand" "d")))]
+ "TARGET_64BIT"
+ "brxlg\\t%0,%2,.+6"
+ [(set_attr "op_type" "RIE")
+ (set_attr "atype" "reg")
+ (set_attr "type" "set")])
+
+; Placeholder matched only during reload; the splits below break it up.
+(define_insn "*reload_la_64"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (plus:DI (match_operand:DI 1 "general_operand" "g")
+ (match_operand:DI 2 "general_operand" "g")))]
+ "TARGET_64BIT && reload_in_progress"
+ "#")
+
+; Split a non-address add into move + two-operand add once the
+; destination is known to be distinct from both sources.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (plus:DI (match_operand:DI 1 "general_operand" "")
+ (match_operand:DI 2 "register_operand" "")))]
+ "TARGET_64BIT && reload_completed
+ && !address_operand (gen_rtx_PLUS (DImode, operands[1], operands[2]), QImode)
+ && !rtx_equal_p (operands[0], operands[1])
+ && !rtx_equal_p (operands[0], operands[2])"
+ [(set (match_dup 0) (match_dup 1))
+ (set (match_dup 0) (plus:DI (match_dup 0) (match_dup 2)))]
+ "")
+
+; Same as above with the operands in the other order.
+(define_split
+ [(set (match_operand:DI 0 "register_operand" "")
+ (plus:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:DI 2 "general_operand" "")))]
+ "TARGET_64BIT && reload_completed
+ && !address_operand (gen_rtx_PLUS (DImode, operands[1], operands[2]), QImode)
+ && !rtx_equal_p (operands[0], operands[1])
+ && !rtx_equal_p (operands[0], operands[2])"
+ [(set (match_dup 0) (match_dup 2))
+ (set (match_dup 0) (plus:DI (match_dup 0) (match_dup 1)))]
+ "")
+
+
+;
+; addsi3 instruction pattern(s).
+;
+; The following insn is used when it is known that operand one is the
+; stack pointer or the literal-pool base register, and operand two is
+; small enough to fit in the displacement field; the result is then an
+; address and can be computed with LA.
+;
+
+(define_insn "addaddr"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (plus:SI (match_operand:SI 1 "register_operand" "%a,a")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))]
+ "(((REGNO (operands[1]) == STACK_POINTER_REGNUM ) ||
+ (REGNO (operands[1]) == BASE_REGISTER)) &&
+ (GET_CODE (operands[2]) == REG ||
+ CONST_OK_FOR_LETTER_P (INTVAL (operands[2]),'J')))"
+ "@
+ la\\t%0,%c2(,%1)
+ la\\t%0,0(%1,%2)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; LA forms combining a register with an unspec-101 (PIC address
+; arithmetic) operand, unspec on the right ...
+(define_insn "*addaddr_picR"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (plus:SI (match_operand:SI 1 "register_operand" "a")
+ (unspec:SI [(match_operand:SI 2 "register_operand" "a")] 101)))]
+ ""
+ "la\\t%0,0(%1,%2)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; ... unspec on the left ...
+(define_insn "*addaddr_picL"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (plus:SI (unspec:SI [(match_operand:SI 2 "register_operand" "a")] 101)
+ (match_operand:SI 1 "register_operand" "a")))]
+ ""
+ "la\\t%0,0(%1,%2)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; ... and the bare unspec.
+(define_insn "*addaddr_picN"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "a")] 101))]
+ ""
+ "la\\t%0,0(%1)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; Add that also produces the condition code for a following consumer
+; (matched via s390_match_ccmode).
+(define_insn "*addsi3_cc"
+ [(set (reg 33)
+ (compare (plus:SI (match_operand:SI 1 "register_operand" "%0,0,0")
+ (match_operand:SI 2 "general_operand" "d,K,m"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "register_operand" "=d,d,d")
+ (plus:SI (match_dup 1) (match_dup 2)))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2
+ a\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; Add for the condition code only; the sum lands in a scratch register.
+(define_insn "*addsi3_cconly"
+ [(set (reg 33)
+ (compare (plus:SI (match_operand:SI 1 "register_operand" "%0,0,0")
+ (match_operand:SI 2 "general_operand" "d,K,m"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=d,d,d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2
+ a\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; Comparing %1 against -(%2) is the same test as adding and comparing
+; the sum against zero, so it can also be done with an add.
+(define_insn "*addsi3_cconly2"
+ [(set (reg 33)
+ (compare (match_operand:SI 1 "register_operand" "%0,0,0")
+ (neg:SI (match_operand:SI 2 "general_operand" "d,K,m"))))
+ (clobber (match_scratch:SI 0 "=d,d,d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2
+ a\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; Plain add: register (ar), 16-bit immediate (ahi), or storage (a).
+(define_insn "addsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d,d")
+ (plus:SI (match_operand:SI 1 "register_operand" "%0,0,0")
+ (match_operand:SI 2 "general_operand" "d,K,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2
+ a\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; Load an arbitrary address expression into a register.
+; NOTE(review): the insn condition is "volatile_ok", which is unusual
+; for an insn predicate — confirm this is intended.
+(define_insn "do_la"
+ [(set (match_operand:SI 0 "register_operand" "=a")
+ (match_operand:QI 1 "address_operand" "p"))]
+ "volatile_ok"
+ "la\\t%0,%a1"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+; Register-register add without disturbing the CC: brxle adds %2 to %0
+; as a side effect; the branch targets .+4 (the next insn), so control
+; flow is unaffected either way.
+(define_insn "*do_la_reg_0"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (plus:SI (match_operand:SI 1 "register_operand" "%0")
+ (match_operand:SI 2 "register_operand" "d")))]
+ ""
+ "brxle\\t%0,%2,.+4"
+ [(set_attr "op_type" "RSI")
+ (set_attr "atype" "reg")
+ (set_attr "type" "set")])
+
+; Placeholder matched only during reload; the splits below break it up.
+(define_insn "*reload_la_31"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (plus:SI (match_operand:SI 1 "general_operand" "g")
+ (match_operand:SI 2 "general_operand" "g")))]
+ "reload_in_progress"
+ "#")
+
+; Split a non-address add into move + two-operand add once the
+; destination is known to be distinct from both sources.
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (plus:SI (match_operand:SI 1 "general_operand" "")
+ (match_operand:SI 2 "register_operand" "")))]
+ "reload_completed
+ && !address_operand (gen_rtx_PLUS (SImode, operands[1], operands[2]), QImode)
+ && !rtx_equal_p (operands[0], operands[1])
+ && !rtx_equal_p (operands[0], operands[2])"
+ [(set (match_dup 0) (match_dup 1))
+ (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 2)))]
+ "")
+
+; Same as above with the operands in the other order.
+(define_split
+ [(set (match_operand:SI 0 "register_operand" "")
+ (plus:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:SI 2 "general_operand" "")))]
+ "reload_completed
+ && !address_operand (gen_rtx_PLUS (SImode, operands[1], operands[2]), QImode)
+ && !rtx_equal_p (operands[0], operands[1])
+ && !rtx_equal_p (operands[0], operands[2])"
+ [(set (match_dup 0) (match_dup 2))
+ (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))]
+ "")
+
+; 64-bit: SImode address-style add via LA when operand 1 is an address
+; register and operand 2 fits the displacement or is an address register.
+(define_insn "addsi_64"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (plus:SI (match_operand:SI 1 "register_operand" "%a,a")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))]
+ "TARGET_64BIT"
+ "@
+ la\\t%0,%c2(,%1)
+ la\\t%0,0(%1,%2)"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")
+ (set_attr "type" "la")])
+
+;
+; addhi3 instruction pattern(s).
+;
+
+; Halfword add: register (ar), immediate (ahi), or halfword storage (ah).
+(define_insn "addhi3"
+ [(set (match_operand:HI 0 "register_operand" "=d,d,d")
+ (plus:HI (match_operand:HI 1 "register_operand" "%0,0,0")
+ (match_operand:HI 2 "general_operand" "d,K,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2
+ ah\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")])
+
+
+;
+; addqi3 instruction pattern(s).
+;
+
+; QImode add on the low byte of a GPR.  Alternative 0 emits "ar" (an RR
+; instruction) and alternative 1 emits "ahi" (an RI instruction); the
+; previous attributes claimed "RX,RX" and a memory access for
+; alternative 1, neither of which matches the emitted code — no
+; alternative here touches memory.
+(define_insn "addqi3"
+ [(set (match_operand:QI 0 "register_operand" "=d,d")
+ (plus:QI (match_operand:QI 1 "register_operand" "%0,0")
+ (match_operand:QI 2 "general_operand" "a,n")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ ar\\t%0,%2
+ ahi\\t%0,%h2"
+ [(set_attr "op_type" "RR,RI")
+ (set_attr "atype" "reg,reg")])
+
+
+;
+; adddf3 instruction pattern(s).
+;
+
+; Expander just wraps the add in a parallel with the CC clobber; the
+; IEEE and IBM insn variants below are selected by their conditions.
+(define_expand "adddf3"
+ [(parallel
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (plus:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; IEEE (BFP) double add.  ADBR is RRE format and ADB is RXE format
+; (cf. ldebr/ldeb tagged "RRE,RXE" above); the previous "RR,RX"
+; described the IBM hex-float adr/ad instead.
+(define_insn "*adddf3"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (plus:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ adbr\\t%0,%2
+ adb\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")])
+
+; IBM (hex) float double add: adr (RR) or ad (RX).
+(define_insn "*adddf3_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (plus:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ adr\\t%0,%2
+ ad\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+;
+; addsf3 instruction pattern(s).
+;
+
+; Expander just wraps the add in a parallel with the CC clobber; the
+; IEEE and IBM insn variants below are selected by their conditions.
+(define_expand "addsf3"
+ [(parallel
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (plus:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; IEEE (BFP) single add.  AEBR is RRE format and AEB is RXE format;
+; the previous "RR,RX" described the IBM hex-float aer/ae instead.
+(define_insn "*addsf3"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (plus:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ aebr\\t%0,%2
+ aeb\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")])
+
+; IBM (hex) float single add: aer (RR) or ae (RX).  Renamed from
+; "*addsf3", which collided with the IEEE variant above; "*addsf3_ibm"
+; matches the *adddf3 / *adddf3_ibm naming.  Starred names generate no
+; gen_* functions, so no callers are affected.
+(define_insn "*addsf3_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (plus:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ aer\\t%0,%2
+ ae\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+
+;;
+;;- Subtract instructions.
+;;
+
+;
+; subdi3 instruction pattern(s).
+;
+
+; 64-bit subtract: register (sgr, RRE) or storage (sg, RXE).  SG is an
+; RXE-format storage instruction like AG, which adddi3_64 tags RXE; the
+; previous "RRE,RRE" mis-described the second alternative.
+(define_insn "*subdi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (minus:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:DI 2 "general_operand" "d,m") ) )
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ sgr\\t%0,%2
+ sg\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+; 31-bit DImode subtract on a register pair: subtract the high words,
+; subtract-logical the low words, then adjust the high word for the
+; borrow (brc 11 skips the ahi when no borrow occurred).
+; NOTE(review): this named pattern has an empty condition although it
+; emits 32-bit register-pair code; adddi3_31 is guarded by !TARGET_64BIT
+; — confirm whether the same guard is missing here.
+(define_insn "subdi3"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (minus:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:DI 2 "nonimmediate_operand" "d,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0: /* d <- d */
+ output_asm_insn (\"sr\\t%0,%2\", operands);
+ output_asm_insn (\"slr\\t%N0,%N2\", operands);
+ break;
+ case 1: /* d <- m */
+ output_asm_insn (\"s\\t%0,%2\", operands);
+ output_asm_insn (\"sl\\t%N0,%N2\", operands);
+ break;
+ }
+
+ output_asm_insn (\"brc\\t11,.+8\", operands);
+ return \"ahi\\t%0,-1\";
+}"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "atype" "reg,mem")
+ (set_attr "length" "12,16")])
+
+;
+; subsi3 instruction pattern(s).
+;
+
+; Combined subtract-and-test: used when a consumer of CC wants the
+; signed-compare-against-zero (CCS) result of the subtraction as well
+; as the difference itself.
+(define_insn "*subsi3_cc"
+ [(set (reg 33)
+ (compare (minus:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "general_operand" "d,m"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "register_operand" "=d,d")
+ (minus:SI (match_dup 1) (match_dup 2)))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ sr\\t%0,%2
+ s\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+; As above, but only the condition code is used; the difference goes
+; to a scratch register.
+(define_insn "*subsi3_cconly"
+ [(set (reg 33)
+ (compare (minus:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "general_operand" "d,m"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=d,d"))]
+ "s390_match_ccmode(insn, CCSmode)"
+ "@
+ sr\\t%0,%2
+ s\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+; Plain 32-bit subtract; the condition code is clobbered.
+(define_insn "subsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (minus:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "general_operand" "d,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sr\\t%0,%2
+ s\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+;
+; subhi3 instruction pattern(s).
+;
+
+; 16-bit subtract.  The K alternative handles a halfword-immediate
+; subtrahend by adding its negation ("ahi %0,-%h2"); the m alternative
+; uses the halfword memory form SH.
+(define_insn "subhi3"
+ [(set (match_operand:HI 0 "register_operand" "=d,d,d")
+ (minus:HI (match_operand:HI 1 "register_operand" "0,0,0")
+ (match_operand:HI 2 "nonimmediate_operand" "d,K,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sr\\t%0,%2
+ ahi\\t%0,-%h2
+ sh\\t%0,%2"
+ [(set_attr "op_type" "RR,RI,RX")
+ (set_attr "atype" "reg,reg,mem")])
+
+;
+; subqi3 instruction pattern(s).
+;
+
+; 8-bit subtract, register-to-register only; done in a full 32-bit
+; register since only the low byte is significant.
+(define_insn "subqi3"
+ [(set (match_operand:QI 0 "register_operand" "=d")
+ (minus:QI (match_operand:QI 1 "register_operand" "0")
+ (match_operand:QI 2 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ ""
+ "sr\\t%0,%2"
+ [(set_attr "op_type" "RR")])
+
+;
+; subdf3 instruction pattern(s).
+;
+
+; Expander only adds the CC clobber; the named insn splits into an
+; IEEE (binary FP) and an IBM (hex FP) variant below.
+(define_expand "subdf3"
+ [(parallel
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (minus:DF (match_operand:DF 1 "register_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): SDBR/SDB are RRE/RXE-format instructions; op_type
+; "RR,RX" looks wrong and would under-report the insn lengths --
+; verify against the instruction-format tables.
+(define_insn "*subdf3"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (minus:DF (match_operand:DF 1 "register_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ sdbr\\t%0,%2
+ sdb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float double subtract (SDR/SD are true RR/RX).
+(define_insn "*subdf3_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (minus:DF (match_operand:DF 1 "register_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ sdr\\t%0,%2
+ sd\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+;
+; subsf3 instruction pattern(s).
+;
+
+(define_expand "subsf3"
+ [(parallel
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (minus:SF (match_operand:SF 1 "register_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): SEBR/SEB are RRE/RXE formats -- see note at *subdf3.
+(define_insn "*subsf3"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (minus:SF (match_operand:SF 1 "register_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ sebr\\t%0,%2
+ seb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float single subtract.
+(define_insn "*subsf3_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (minus:SF (match_operand:SF 1 "register_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ ser\\t%0,%2
+ se\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")])
+
+
+;;
+;;- Multiply instructions.
+;;
+
+; 64-bit multiply.  On 64-bit targets this expands to *muldi3_64
+; below; on 31-bit targets the low 64 bits of the product are
+; synthesized from 32x32 multiplies:
+;   - MR (via mulsi_6432) forms the signed 64-bit product of the two
+;     low halves; when either low half has its sign bit set, the other
+;     low half is added to the high word to correct the signed product
+;     to the unsigned one (the two compare-and-branch sequences);
+;   - the two cross products low(1)*high(2) and low(2)*high(1) are
+;     then added into the high word.
+(define_expand "muldi3"
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (mult:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:DI 2 "register_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (!TARGET_64BIT)
+ {
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx op0_0 = operand_subword (operands[0], 0 ,1, DImode);
+ rtx op0_1 = operand_subword (operands[0], 1 ,1, DImode);
+ rtx temp1_0 = gen_reg_rtx (SImode);
+ rtx temp1_1 = gen_reg_rtx (SImode);
+ rtx temp2_0 = gen_reg_rtx (SImode);
+ rtx temp2_1 = gen_reg_rtx (SImode);
+
+ /* Copy both operands into fresh SImode halves.  */
+ emit_move_insn (temp1_0, operand_subword (operands[1], 0 ,1, DImode));
+ emit_move_insn (temp1_1, operand_subword (operands[1], 1 ,1, DImode));
+ emit_move_insn (temp2_0, operand_subword (operands[2], 0 ,1, DImode));
+ emit_move_insn (temp2_1, operand_subword (operands[2], 1 ,1, DImode));
+ /* Signed 32x32->64 product of the low halves (MR).  */
+ emit_move_insn (op0_1, temp1_1);
+ emit_insn (gen_mulsi_6432 (operands[0], operands[0], temp2_1));
+
+ /* Correct the signed product to an unsigned one: if a low half is
+ negative, the signed result is short by 2^32 times the other
+ low half.  */
+ emit_insn (gen_cmpsi (temp1_1, const0_rtx));
+ emit_jump_insn (gen_bge (label1));
+ emit_insn (gen_addsi3 (op0_0, op0_0, temp2_1));
+ emit_label (label1);
+ emit_insn (gen_cmpsi (temp2_1, const0_rtx));
+ emit_jump_insn (gen_bge (label2));
+ emit_insn (gen_addsi3 (op0_0, op0_0, temp1_1));
+ emit_label (label2);
+
+ /* Add the two cross products into the high word.  */
+ emit_insn (gen_mulsi3 (temp2_1, temp2_1, temp1_0));
+ emit_insn (gen_addsi3 (op0_0, op0_0, temp2_1));
+
+ emit_insn (gen_mulsi3 (temp1_1, temp1_1, temp2_0));
+ emit_insn (gen_addsi3 (op0_0, op0_0, temp1_1));
+
+ DONE;
+ }
+}")
+
+; 64-bit multiply for 64-bit targets: MSGR (register), MGHI (halfword
+; immediate via constraint K), MSG (memory).
+; Fixed op_type of the memory alternative: MSG is a 6-byte RXE-format
+; instruction, not RX (4 bytes); RX would make length computation and
+; scheduling attributes wrong.
+(define_insn "*muldi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d,d")
+ (mult:DI (match_operand:DI 1 "register_operand" "%0,0,0")
+ (match_operand:DI 2 "general_operand" "d,K,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ msgr\\t%0,%2
+ mghi\\t%0,%h2
+ msg\\t%0,%2"
+ [(set_attr "op_type" "RRE,RI,RXE")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+;
+; mulsi3 instruction pattern(s).
+;
+
+; 32-bit multiply: MSR (register, RRE), MHI (halfword immediate),
+; MS (memory).
+(define_insn "mulsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d,d")
+ (mult:SI (match_operand:SI 1 "register_operand" "%0,0,0")
+ (match_operand:SI 2 "general_operand" "d,K,m")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ msr\\t%0,%2
+ mhi\\t%0,%h2
+ ms\\t%0,%2"
+ [(set_attr "op_type" "RRE,RI,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,reg,mem")
+ (set_attr "type" "set")])
+
+; Signed 32x32->64 multiply for 31-bit targets (MR/M): the multiplicand
+; is the low word of the even/odd DImode register pair (subreg byte
+; offset 4 on this big-endian target); the 64-bit product fills the
+; whole pair.  Used by the muldi3 expander above.
+(define_insn "mulsi_6432"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (mult:DI (sign_extend:DI
+ (subreg:SI (match_operand:DI 1 "register_operand" "0,0") 4))
+ (sign_extend:DI
+ (match_operand:SI 2 "general_operand" "d,m"))))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "@
+ mr\\t%0,%2
+ m\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+
+;
+; muldf3 instruction pattern(s).
+;
+
+; Expander adds the CC clobber; IEEE and IBM hex-float variants below.
+(define_expand "muldf3"
+ [(parallel
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (mult:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): MDBR/MDB are RRE/RXE formats; op_type "RR,RX" looks
+; wrong for length computation -- verify.
+(define_insn "*muldf3"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (mult:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ mdbr\\t%0,%2
+ mdb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float double multiply.
+(define_insn "*muldf3_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f,f")
+ (mult:DF (match_operand:DF 1 "register_operand" "%0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ mdr\\t%0,%2
+ md\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+;
+; mulsf3 instruction pattern(s).
+;
+
+(define_expand "mulsf3"
+ [(parallel
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (mult:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): MEEBR/MEEB are RRE/RXE formats -- see note at *muldf3.
+(define_insn "*mulsf3"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (mult:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ meebr\\t%0,%2
+ meeb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float single multiply.
+(define_insn "*mulsf3_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f,f")
+ (mult:SF (match_operand:SF 1 "register_operand" "%0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ mer\\t%0,%2
+ me\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+
+;;
+;;- Divide and modulo instructions.
+;;
+
+;
+; divdi3 and moddi3 instruction pattern(s).
+;
+
+; Signed 64-bit divide via DSG(R): the dividend is placed in the low
+; doubleword of a TImode register pair, the quotient comes back in the
+; low doubleword and the remainder in the high doubleword.  The high
+; doubleword is cleared first so the pair is fully defined.
+(define_expand "divdi3"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (div:DI (match_operand:DI 1 "register_operand" "d")
+ (match_operand:DI 2 "general_operand" "")))]
+ "TARGET_64BIT"
+ "
+{
+ rtx op3 = gen_reg_rtx (TImode);
+
+ /* DSG has no immediate form; take constants from the literal pool.  */
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (DImode, operands[2]);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 8), operands[1]);
+ emit_insn (gen_divmodtidi3 (op3, op3, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (DImode, op3, 8));
+ DONE;
+}")
+
+; As divdi3, but the remainder (high doubleword) is the result.
+(define_expand "moddi3"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (mod:DI (match_operand:DI 1 "register_operand" "d")
+ (match_operand:DI 2 "general_operand" "")))]
+ "TARGET_64BIT"
+ "
+{
+ rtx op3 = gen_reg_rtx (TImode);
+
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (DImode, operands[2]);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 8), operands[1]);
+ emit_insn (gen_divmodtidi3 (op3, op3, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (DImode, op3, 0));
+ DONE;
+}")
+
+; Combined signed divide/modulo: remainder to subreg 0 (high),
+; quotient to subreg 8 (low), matching DSG(R) semantics.
+(define_insn "divmodtidi3"
+ [(set (subreg:DI (match_operand:TI 0 "register_operand" "=d,d") 0)
+ (truncate:DI
+ (mod:TI (match_operand:TI 1 "register_operand" "0,0")
+ (sign_extend:TI
+ (match_operand:DI 2 "nonimmediate_operand" "d,m")))))
+ (set (subreg:DI (match_dup 0) 8)
+ (truncate:DI (div:TI (match_dup 1) (sign_extend:TI (match_dup 2)))))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ dsgr\\t%0,%2
+ dsg\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+;
+; udivdi3 and umoddi3 instruction pattern(s).
+;
+
+; Unsigned 64-bit divide via DLG(R): the 128-bit dividend occupies the
+; TImode register pair, so the high doubleword must be zeroed;
+; quotient returns in the low, remainder in the high doubleword.
+(define_expand "udivdi3"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (udiv:DI (match_operand:DI 1 "register_operand" "d")
+ (match_operand:DI 2 "general_operand" "")))]
+ "TARGET_64BIT"
+ "
+{
+ rtx op3 = gen_reg_rtx(TImode);
+
+ /* No immediate form; constants go through the literal pool.  */
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (DImode, operands[2]);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 8), operands[1]);
+ emit_insn (gen_udivmodtidi3 (op3, op3, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (DImode, op3, 8));
+ DONE;
+}")
+
+; As udivdi3, but the remainder (high doubleword) is the result.
+(define_expand "umoddi3"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (umod:DI (match_operand:DI 1 "register_operand" "d")
+ (match_operand:DI 2 "general_operand" "")))]
+ "TARGET_64BIT"
+ "
+{
+ rtx op3 = gen_reg_rtx (TImode);
+
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (DImode, operands[2]);
+
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (DImode, op3, 8), operands[1]);
+ emit_insn (gen_udivmodtidi3 (op3, op3, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (DImode, op3, 0));
+ DONE;
+}")
+
+; Combined unsigned divide/modulo: remainder to subreg 0, quotient to
+; subreg 8, matching DLG(R) semantics.
+(define_insn "udivmodtidi3"
+ [(set (subreg:DI (match_operand:TI 0 "register_operand" "=d,d") 0)
+ (truncate:DI
+ (umod:TI (match_operand:TI 1 "register_operand" "0,0")
+ (zero_extend:TI
+ (match_operand:DI 2 "nonimmediate_operand" "d,m")))))
+ (set (subreg:DI (match_dup 0) 8)
+ (truncate:DI (udiv:TI (match_dup 1) (zero_extend:TI (match_dup 2)))))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ dlgr\\t%0,%2
+ dlg\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+;
+; divsi3 and modsi3 instruction pattern(s).
+;
+
+; Signed 32-bit divide via DR/D: the dividend is sign-extended into a
+; 64-bit even/odd pair by loading it into the high word and shifting
+; right arithmetically by 32; DR leaves the remainder in the high word
+; (subreg 0) and the quotient in the low word (subreg 4).
+(define_expand "divsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (div:SI (match_operand:SI 1 "register_operand" "d")
+ (match_operand:SI 2 "nonimmediate_operand" "")))]
+ "!TARGET_64BIT"
+ "
+{
+ rtx tmp = gen_reg_rtx (DImode);
+
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (SImode, operands[2]);
+ else
+ operands[2] = force_reg (SImode, operands[2]);
+
+ /* Clobber first so flow does not see a use of the uninitialized
+ low word of the pair.  */
+ emit_insn (gen_rtx_CLOBBER (DImode, tmp));
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 0), operands[1]);
+ emit_insn (gen_ashrdi3 (tmp, tmp, GEN_INT (32)));
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (SImode, tmp, 4));
+ DONE;
+}")
+
+; As divsi3, but the remainder (subreg 0) is the result.
+; NOTE(review): uses gen_movsi where divsi3 uses emit_move_insn --
+; behaviorally the same here, but inconsistent.
+(define_expand "modsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (mod:SI (match_operand:SI 1 "register_operand" "d")
+ (match_operand:SI 2 "nonimmediate_operand" "")))]
+ "!TARGET_64BIT"
+ "
+{
+ rtx tmp = gen_reg_rtx (DImode);
+
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (SImode, operands[2]);
+ else
+ operands[2] = force_reg (SImode, operands[2]);
+
+ emit_insn (gen_rtx_CLOBBER (DImode, tmp));
+ emit_insn (gen_movsi (gen_rtx_SUBREG (SImode, tmp, 0), operands[1]));
+ emit_insn (gen_ashrdi3 (tmp, tmp, GEN_INT (32)));
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ emit_move_insn (operands[0], gen_rtx_SUBREG (SImode, tmp, 0));
+ DONE;
+}")
+
+; Combined signed 64/32 divide/modulo (DR/D): remainder to subreg 0,
+; quotient to subreg 4.
+(define_insn "divmoddisi3"
+ [(set (subreg:SI (match_operand:DI 0 "register_operand" "=d,d") 0)
+ (truncate:SI
+ (mod:DI (match_operand:DI 1 "register_operand" "0,0")
+ (sign_extend:DI
+ (match_operand:SI 2 "nonimmediate_operand" "d,m")))))
+ (set (subreg:SI (match_dup 0) 4)
+ (truncate:SI (div:DI (match_dup 1) (sign_extend:DI (match_dup 2)))))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "@
+ dr\\t%0,%2
+ d\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+;
+; udivsi3 and umodsi3 instruction pattern(s).
+;
+
+; Unsigned 32-bit divide synthesized from the signed DR instruction.
+; Since DR is a signed 64/32 divide, divisors with the top bit set
+; (>= 2^31 unsigned) cannot be handled directly: for those the
+; quotient can only be 0 or 1 and is computed by an unsigned compare.
+; Otherwise a zero-extended 64-bit dividend makes the signed divide
+; produce the correct unsigned result.
+(define_expand "udivsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (udiv:SI (match_operand:SI 1 "general_operand" "")
+ (match_operand:SI 2 "general_operand" "")))]
+ "!TARGET_64BIT"
+ "
+{
+ rtx dr_0, dr_1, tmp;
+
+ tmp = gen_reg_rtx (DImode);
+ dr_0 = gen_rtx_SUBREG (SImode, tmp, 0);
+ dr_1 = gen_rtx_SUBREG (SImode, tmp, 4);
+
+ if (CONSTANT_P (operands[2]))
+ {
+ /* Known divisor >= 2^31: quotient is (op1 >= op2) ? 1 : 0.  */
+ if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0)
+ {
+ rtx label1 = gen_label_rtx ();
+
+ emit_move_insn (dr_0, operands[1]);
+ emit_move_insn (dr_1, const0_rtx);
+ emit_insn (gen_cmpsi (dr_0, operands[2]));
+ emit_jump_insn (gen_bltu (label1));
+ emit_move_insn (dr_1, const1_rtx);
+ emit_label (label1);
+ }
+ else
+ {
+ /* Small positive constant divisor: zero-extend dividend and
+ use the signed divide directly.  */
+ operands[2] = force_const_mem (SImode, operands[2]);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 4), operands[1]);
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ }
+ }
+ else
+ {
+ /* Runtime divisor: dispatch on divisor > dividend (quotient 0),
+ divisor with top bit set (quotient 1, since divisor <= dividend
+ here), divisor == 1 (quotient = dividend), else signed DR.  */
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx label3 = gen_label_rtx ();
+
+ operands[1] = force_reg (SImode, operands[1]);
+ operands[2] = force_reg (SImode, operands[2]);
+
+ emit_move_insn (dr_1, const0_rtx);
+ emit_insn (gen_cmpsi (operands[2], operands[1]));
+ emit_jump_insn (gen_bgtu (label3));
+ emit_insn (gen_cmpsi (operands[2], const1_rtx));
+ emit_jump_insn (gen_blt (label2));
+ emit_insn (gen_cmpsi (operands[2], const1_rtx));
+ emit_jump_insn (gen_beq (label1));
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 4), operands[1]);
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ emit_jump_insn (gen_jump (label3));
+ emit_label (label1);
+ emit_move_insn (dr_1, operands[1]);
+ emit_jump_insn (gen_jump (label3));
+ emit_label (label2);
+ emit_move_insn (dr_1, const1_rtx);
+ emit_label (label3);
+ }
+
+ emit_move_insn (operands[0], dr_1);
+ DONE;
+}")
+
+; Unsigned 32-bit modulo, synthesized like udivsi3 above: divisors
+; with the top bit set are handled by compare/subtract sequences, the
+; rest via the signed DR divide on a zero-extended dividend (remainder
+; lands in the high word, dr_0).
+(define_expand "umodsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (umod:SI (match_operand:SI 1 "nonimmediate_operand" "")
+ (match_operand:SI 2 "nonimmediate_operand" "")))]
+ "!TARGET_64BIT"
+ "
+{
+ rtx dr_0, dr_1, tmp;
+
+ tmp = gen_reg_rtx (DImode);
+ dr_0 = gen_rtx_SUBREG (SImode, tmp, 0);
+ dr_1 = gen_rtx_SUBREG (SImode, tmp, 4);
+
+ if (CONSTANT_P (operands[2]))
+ {
+ /* Known divisor >= 2^31 (or zero): remainder is op1 when
+ op1 < op2 unsigned, otherwise op1 - op2.  */
+ if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) <= 0)
+ {
+ rtx label1 = gen_label_rtx ();
+
+ emit_move_insn (dr_0, operands[1]);
+ emit_insn (gen_cmpsi (dr_0, operands[2]));
+ emit_jump_insn (gen_bltu (label1));
+ emit_insn (gen_abssi2 (dr_0, operands[2]));
+ emit_insn (gen_addsi3 (dr_0,dr_0, operands[1]));
+ emit_label (label1);
+ }
+ else
+ {
+ /* Small positive constant divisor: zero-extend and use the
+ signed divide; remainder in dr_0.  */
+ operands[2] = force_const_mem (SImode, operands[2]);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 4), operands[1]);
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ }
+ }
+ else
+ {
+ /* Runtime divisor: divisor > dividend -> remainder = dividend;
+ divisor top bit set -> remainder = dividend - divisor;
+ divisor == 1 -> remainder = 0; else signed DR.  */
+ rtx label1 = gen_label_rtx ();
+ rtx label2 = gen_label_rtx ();
+ rtx label3 = gen_label_rtx ();
+
+ operands[1] = force_reg (SImode, operands[1]);
+ operands[2] = force_reg (SImode, operands[2]);
+
+ emit_move_insn(dr_0, operands[1]);
+ emit_insn (gen_cmpsi (operands[2], dr_0));
+ emit_jump_insn (gen_bgtu (label3));
+ emit_insn (gen_cmpsi (operands[2], const1_rtx));
+ emit_jump_insn (gen_blt (label2));
+ emit_insn (gen_cmpsi (operands[2], const1_rtx));
+ emit_jump_insn (gen_beq (label1));
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 0), const0_rtx);
+ emit_move_insn (gen_rtx_SUBREG (SImode, tmp, 4), operands[1]);
+ emit_insn (gen_divmoddisi3 (tmp, tmp, operands[2]));
+ emit_jump_insn (gen_jump (label3));
+ emit_label (label1);
+ emit_move_insn (dr_0, const0_rtx);
+ emit_jump_insn (gen_jump (label3));
+ emit_label (label2);
+ emit_insn (gen_subsi3 (dr_0, dr_0, operands[2]));
+ emit_label (label3);
+ }
+
+ emit_move_insn (operands[0], dr_0);
+ DONE;
+}")
+
+;
+; divdf3 instruction pattern(s).
+;
+
+; Double-precision FP divide; expander adds CC clobber, insns split
+; into IEEE and IBM hex-float variants.
+(define_expand "divdf3"
+ [(parallel
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f")
+ (div:DF (match_operand:DF 1 "general_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): DDBR/DDB are RRE/RXE formats; op_type "RR,RX" looks
+; wrong for length computation -- verify.
+(define_insn "*divdf3"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f")
+ (div:DF (match_operand:DF 1 "general_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ ddbr\\t%0,%2
+ ddb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float double divide.
+(define_insn "*divdf3_ibm"
+ [(set (match_operand:DF 0 "nonimmediate_operand" "=f,f")
+ (div:DF (match_operand:DF 1 "general_operand" "0,0")
+ (match_operand:DF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ ddr\\t%0,%2
+ dd\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+;
+; divsf3 instruction pattern(s).
+;
+
+; Single-precision FP divide; expander adds CC clobber.
+(define_expand "divsf3"
+ [(parallel
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f")
+ (div:SF (match_operand:SF 1 "nonimmediate_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; IEEE binary-FP single divide.
+; NOTE(review): DEBR/DEB are RRE/RXE formats -- see note at *divdf3.
+(define_insn "*divsf3"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f")
+ (div:SF (match_operand:SF 1 "nonimmediate_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "@
+ debr\\t%0,%2
+ deb\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+; IBM hexadecimal-float single divide.  Renamed from "*divsf3" to
+; "*divsf3_ibm": the IEEE pattern above already uses that name, and
+; every other IBM-float variant in this file carries the _ibm suffix
+; (*subsf3_ibm, *mulsf3_ibm, *divdf3_ibm, ...).
+(define_insn "*divsf3_ibm"
+ [(set (match_operand:SF 0 "nonimmediate_operand" "=f,f")
+ (div:SF (match_operand:SF 1 "nonimmediate_operand" "0,0")
+ (match_operand:SF 2 "nonimmediate_operand" "f,m")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "@
+ der\\t%0,%2
+ de\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "reg,mem")])
+
+
+;;
+;;- And instructions.
+;;
+
+;
+; anddi3 instruction pattern(s).
+;
+
+; 64-bit AND that also yields the CCT (test) condition-code result.
+; Fixed op_type: NGR is RRE format (4 bytes) and NG is RXE format
+; (6 bytes); the previous "RR,RX" would under-report insn lengths
+; (compare xordi3, which correctly uses RRE,RXE,SS).
+(define_insn "*anddi3_cc"
+ [(set (reg 33)
+ (compare (and:DI (match_operand:DI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:DI 2 "r_or_s_operand" "d,m,Q"))
+ (const_int 0)))
+ (set (match_operand:DI 0 "r_or_s_operand" "=d,d,Q")
+ (and:DI (match_dup 1) (match_dup 2)))]
+ "s390_match_ccmode(insn, CCTmode) && TARGET_64BIT"
+ "@
+ ngr\\t%0,%2
+ ng\\t%0,%2
+ nc\\t%O0(8,%R0),%2"
+ [(set_attr "op_type" "RRE,RXE,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+; 64-bit AND used only for its condition-code result; the AND result
+; itself goes to a scratch register.
+; Fixed op_type: NGR/NG are RRE/RXE formats, not RR/RX.
+(define_insn "*anddi3_cconly"
+ [(set (reg 33)
+ (compare (and:DI (match_operand:DI 1 "register_operand" "%0,0")
+ (match_operand:DI 2 "r_or_s_operand" "d,m"))
+ (const_int 0)))
+ (clobber (match_scratch:DI 0 "=d,d"))]
+ "s390_match_ccmode(insn, CCTmode) && TARGET_64BIT"
+ "@
+ ngr\\t%0,%2
+ ng\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+; Plain 64-bit AND: register (NGR), memory (NG), or
+; storage-to-storage (NC) on an 8-byte field.
+; Fixed op_type: NGR/NG are RRE/RXE formats, not RR/RX (compare
+; xordi3 below, which already uses RRE,RXE,SS).
+(define_insn "anddi3"
+ [(set (match_operand:DI 0 "r_or_s_operand" "=d,d,Q")
+ (and:DI (match_operand:DI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:DI 2 "r_or_s_operand" "d,m,Q")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ ngr\\t%0,%2
+ ng\\t%0,%2
+ nc\\t%O0(8,%R0),%2"
+ [(set_attr "op_type" "RRE,RXE,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+;
+; andsi3 instruction pattern(s).
+;
+
+; 32-bit AND that also yields the CCT condition-code result.
+(define_insn "*andsi3_cc"
+ [(set (reg 33)
+ (compare (and:SI (match_operand:SI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:SI 2 "r_or_s_operand" "d,m,Q"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "r_or_s_operand" "=d,d,Q")
+ (and:SI (match_dup 1) (match_dup 2)))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "@
+ nr\\t%0,%2
+ n\\t%0,%2
+ nc\\t%O0(4,%R0),%2"
+ [(set_attr "op_type" "RR,RX,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+; 32-bit AND used only for the condition code; result to a scratch.
+(define_insn "*andsi3_cconly"
+ [(set (reg 33)
+ (compare (and:SI (match_operand:SI 1 "register_operand" "%0,0")
+ (match_operand:SI 2 "r_or_s_operand" "d,m"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=d,d"))]
+ "s390_match_ccmode(insn, CCTmode)"
+ "@
+ nr\\t%0,%2
+ n\\t%0,%2"
+ [(set_attr "op_type" "RR,RX")
+ (set_attr "atype" "reg,mem")
+ (set_attr "type" "set")])
+
+; Plain 32-bit AND: NR, N, or storage-to-storage NC on a 4-byte field.
+(define_insn "andsi3"
+ [(set (match_operand:SI 0 "r_or_s_operand" "=d,d,Q")
+ (and:SI (match_operand:SI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:SI 2 "r_or_s_operand" "d,m,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ nr\\t%0,%2
+ n\\t%0,%2
+ nc\\t%O0(4,%R0),%2"
+ [(set_attr "op_type" "RR,RX,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+;
+; andhi3 instruction pattern(s).
+;
+
+; 16-bit AND.  There is no halfword AND-immediate, so constant
+; operands are spilled to the literal pool and handled by NC.
+(define_expand "andhi3"
+ [(parallel
+ [(set (match_operand:HI 0 "r_or_s_operand" "")
+ (and:HI (match_operand:HI 1 "r_or_s_operand" "")
+ (match_operand:HI 2 "r_or_s_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (HImode, operands[2]);
+}")
+
+; Register (NR) or storage-to-storage (NC, 2-byte field) halfword AND.
+(define_insn "*andhi3"
+ [(set (match_operand:HI 0 "r_or_s_operand" "=d,Q")
+ (and:HI (match_operand:HI 1 "r_or_s_operand" "%0,0")
+ (match_operand:HI 2 "r_or_s_operand" "d,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ nr\\t%0,%2
+ nc\\t%O0(2,%R0),%2"
+ [(set_attr "op_type" "RR,SS")
+ (set_attr "atype" "reg,mem")])
+
+;
+; andqi3 instruction pattern(s).
+;
+
+; 8-bit AND: register (NR), immediate (NI on the byte in storage),
+; or storage-to-storage (NC, 1-byte field).
+(define_insn "andqi3"
+ [(set (match_operand:QI 0 "r_or_s_operand" "=d,Q,Q")
+ (and:QI (match_operand:QI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:QI 2 "r_or_s_or_im8_operand" "d,n,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ nr\\t%0,%2
+ ni\\t%0,%b2
+ nc\\t%O0(1,%R0),%2"
+ [(set_attr "op_type" "RR,SI,SS")
+ (set_attr "atype" "reg,mem,mem")])
+
+
+;;
+;;- Bit set (inclusive or) instructions.
+;;
+
+;
+; iordi3 instruction pattern(s).
+;
+
+; 64-bit inclusive OR: register (OGR), memory (OG), storage-to-storage
+; (OC, 8-byte field), or low-halfword immediate (OILL, constraint L).
+(define_insn "iordi3"
+ [(set (match_operand:DI 0 "r_or_s_operand" "=d,d,Q,d")
+ (ior:DI (match_operand:DI 1 "r_or_s_operand" "%0,0,0,0")
+ (match_operand:DI 2 "r_or_s_operand" "d,m,Q,L")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ ogr\\t%0,%2
+ og\\t%0,%2
+ oc\\t%O0(8,%R0),%2
+ oill\\t%0,%2"
+ [(set_attr "op_type" "RRE,RXE,SS,RI")
+ (set_attr "atype" "reg,mem,mem,reg")
+ (set_attr "type" "set")])
+
+;
+; iorsi3 instruction pattern(s).
+;
+
+; 32-bit inclusive OR; constants go through the literal pool since
+; there is no 32-bit OR-immediate here.
+(define_expand "iorsi3"
+ [(parallel
+ [(set (match_operand:SI 0 "r_or_s_operand" "")
+ (ior:SI (match_operand:SI 1 "r_or_s_operand" "")
+ (match_operand:SI 2 "r_or_s_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (SImode, operands[2]);
+}")
+
+; Register (OR), memory (O), or storage-to-storage (OC) word OR.
+(define_insn "*iorsi3"
+ [(set (match_operand:SI 0 "r_or_s_operand" "=d,d,Q")
+ (ior:SI (match_operand:SI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:SI 2 "r_or_s_operand" "d,m,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ or\\t%0,%2
+ o\\t%0,%2
+ oc\\t%O0(4,%R0),%2"
+ [(set_attr "op_type" "RR,RX,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+;
+; iorhi3 instruction pattern(s).
+;
+
+; 16-bit inclusive OR; constants via the literal pool.
+(define_expand "iorhi3"
+ [(parallel
+ [(set (match_operand:HI 0 "r_or_s_operand" "")
+ (ior:HI (match_operand:HI 1 "r_or_s_operand" "")
+ (match_operand:HI 2 "r_or_s_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (HImode, operands[2]);
+}")
+
+; Register (OR) or storage-to-storage (OC, 2-byte field) halfword OR.
+(define_insn "*iorhi3"
+ [(set (match_operand:HI 0 "r_or_s_operand" "=d,Q")
+ (ior:HI (match_operand:HI 1 "r_or_s_operand" "%0,0")
+ (match_operand:HI 2 "r_or_s_operand" "d,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ or\\t%0,%2
+ oc\\t%O0(2,%R0),%2"
+ [(set_attr "op_type" "RR,SS")
+ (set_attr "atype" "reg,mem")])
+
+;
+; iorqi3 instruction pattern(s).
+;
+
+; 8-bit inclusive OR: register (OR), immediate (OI), or
+; storage-to-storage (OC, 1-byte field).
+(define_insn "iorqi3"
+ [(set (match_operand:QI 0 "r_or_s_operand" "=d,Q,Q")
+ (ior:QI (match_operand:QI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:QI 2 "r_or_s_or_im8_operand" "d,n,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ or\\t%0,%2
+ oi\\t%0,%b2
+ oc\\t%O0(1,%R0),%2"
+ [(set_attr "op_type" "RR,SI,SS")
+ (set_attr "atype" "reg,mem,mem")])
+
+
+;;
+;;- Xor instructions.
+;;
+
+;
+; xordi3 instruction pattern(s).
+;
+
+; 64-bit exclusive OR: register (XGR), memory (XG), or
+; storage-to-storage (XC, 8-byte field).
+(define_insn "xordi3"
+ [(set (match_operand:DI 0 "r_or_s_operand" "=d,d,Q")
+ (xor:DI (match_operand:DI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:DI 2 "r_or_s_operand" "d,m,Q")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ xgr\\t%0,%2
+ xg\\t%0,%2
+ xc\\t%O0(8,%R0),%2"
+ [(set_attr "op_type" "RRE,RXE,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+;
+; xorsi3 instruction pattern(s).
+;
+
+; 32-bit exclusive OR; constants via the literal pool.
+(define_expand "xorsi3"
+ [(parallel
+ [(set (match_operand:SI 0 "r_or_s_operand" "")
+ (xor:SI (match_operand:SI 1 "r_or_s_operand" "")
+ (match_operand:SI 2 "r_or_s_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (SImode, operands[2]);
+}")
+
+; Register (XR), memory (X), or storage-to-storage (XC) word XOR.
+(define_insn "*xorsi3"
+ [(set (match_operand:SI 0 "r_or_s_operand" "=d,d,Q")
+ (xor:SI (match_operand:SI 1 "r_or_s_operand" "%0,0,0")
+ (match_operand:SI 2 "r_or_s_operand" "d,m,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ xr\\t%0,%2
+ x\\t%0,%2
+ xc\\t%O0(4,%R0),%2"
+ [(set_attr "op_type" "RR,RX,SS")
+ (set_attr "atype" "reg,mem,mem")
+ (set_attr "type" "set")])
+
+;
+; xorhi3 instruction pattern(s).
+;
+
+; 16-bit exclusive OR; constants via the literal pool.
+(define_expand "xorhi3"
+ [(parallel
+ [(set (match_operand:HI 0 "r_or_s_operand" "")
+ (xor:HI (match_operand:HI 1 "r_or_s_operand" "")
+ (match_operand:HI 2 "r_or_s_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "
+{
+ if (CONSTANT_P (operands[2]))
+ operands[2] = force_const_mem (HImode, operands[2]);
+}")
+
+; Register (XR) or storage-to-storage (XC, 2-byte field) halfword XOR.
+(define_insn "*xorhi3"
+ [(set (match_operand:HI 0 "r_or_s_operand" "=d,Q")
+ (xor:HI (match_operand:HI 1 "r_or_s_operand" "%0,0")
+ (match_operand:HI 2 "r_or_s_operand" "d,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ xr\\t%0,%2
+ xc\\t%O0(2,%R0),%2"
+ [(set_attr "op_type" "RR,SS")
+ (set_attr "atype" "reg,mem")])
+
+;
+; xorqi3 instruction pattern(s).
+;
+
+; 8-bit exclusive OR: register (XR), immediate (XI), or
+; storage-to-storage (XC, 1-byte field).
+; NOTE(review): operand 1 lacks the "%" commutativity marker that the
+; otherwise-parallel andqi3/iorqi3 patterns have -- confirm whether
+; that is intentional.
+(define_insn "xorqi3"
+ [(set (match_operand:QI 0 "r_or_s_operand" "=d,Q,Q")
+ (xor:QI (match_operand:QI 1 "r_or_s_operand" "0,0,0")
+ (match_operand:QI 2 "r_or_s_or_im8_operand" "d,n,Q")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ xr\\t%0,%2
+ xi\\t%0,%b2
+ xc\\t%O0(1,%R0),%2"
+ [(set_attr "op_type" "RR,SI,SS")
+ (set_attr "atype" "reg,mem,mem")])
+
+
+;;
+;;- Negate instructions.
+;;
+
+;
+; negdi2 instruction pattern(s).
+;
+
+; 64-bit negate; splits into the 64-bit and 31-bit insns below.
+(define_expand "negdi2"
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (neg:DI (match_operand:DI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))])]
+ ""
+ "")
+
+; 64-bit negate on 64-bit targets (LCGR).
+; Fixed op_type: LCGR is an RRE-format (4-byte) instruction; "RR"
+; would imply a 2-byte length (compare absdi2, which already uses RRE
+; for LPGR).
+(define_insn "*negdi2_64"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (neg:DI (match_operand:DI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "lcgr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+; 64-bit negate on 31-bit targets: complement both halves (LCR), then
+; if the low half was non-zero (LCR set CC accordingly) decrement the
+; high half with "bctr %0,0" to account for the borrow; "je" skips the
+; decrement when the low half is zero.
+(define_insn "*negdi2_31"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (neg:DI (match_operand:DI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "*
+{
+ rtx xop[1];
+ xop[0] = gen_label_rtx ();
+ output_asm_insn (\"lcr\\t%0,%1\", operands);
+ output_asm_insn (\"lcr\\t%N0,%N1\", operands);
+ output_asm_insn (\"je\\t%l0\", xop);
+ output_asm_insn (\"bctr\\t%0,0\", operands);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
+ CODE_LABEL_NUMBER (xop[0]));
+ return \"\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "length" "10")])
+
+;
+; negsi2 instruction pattern(s).
+;
+
+; 32-bit negate (LCR).
+(define_insn "negsi2"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (neg:SI (match_operand:SI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ ""
+ "lcr\\t%0,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+;
+; negdf2 instruction pattern(s).
+;
+
+; Double-precision FP negate; IEEE and IBM hex-float variants below.
+(define_expand "negdf2"
+ [(parallel
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (neg:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; NOTE(review): LCDBR is an RRE-format instruction; op_type "RR"
+; looks wrong -- verify.
+(define_insn "*negdf2"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (neg:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "lcdbr\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+; IBM hexadecimal-float double negate.
+(define_insn "*negdf2_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (neg:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "lcdr\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+;
+; negsf2 instruction pattern(s).
+;
+
+; Single-precision FP negate; IEEE and IBM hex-float variants below.
+(define_expand "negsf2"
+ [(parallel
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (neg:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; IEEE binary-FP single negate.
+; NOTE(review): LCEBR is RRE format; op_type "RR" looks wrong -- verify.
+(define_insn "*negsf2"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (neg:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "lcebr\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+; IBM hexadecimal-float single negate.  Renamed from "*negsf2" to
+; "*negsf2_ibm": the IEEE pattern above already uses that name, and
+; the double-precision twin is already called "*negdf2_ibm".
+(define_insn "*negsf2_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (neg:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "lcer\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+
+;;
+;;- Absolute value instructions.
+;;
+
+;
+; absdi2 instruction pattern(s).
+;
+
+; 64-bit absolute value (LPGR).
+(define_insn "absdi2"
+ [(set (match_operand:DI 0 "register_operand" "=d")
+ (abs:DI (match_operand:DI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "lpgr\\t%0,%1"
+ [(set_attr "op_type" "RRE")
+ (set_attr "type" "set")])
+
+;
+; abssi2 instruction pattern(s).
+;
+
+; 32-bit absolute value (LPR).
+(define_insn "abssi2"
+ [(set (match_operand:SI 0 "register_operand" "=d")
+ (abs:SI (match_operand:SI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ ""
+ "lpr\\t%0,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "type" "set")])
+
+;
+; abshi2 instruction pattern(s).
+;
+
+; 16-bit absolute value: sign-extend %1 in place (SLL/SRA by 16),
+; then LPR into %0.
+; NOTE(review): the SLL/SRA pair modifies operand 1, but the pattern
+; does not declare that clobber (no match_scratch / "0" tie / "&"),
+; so reload may assume %1 is unchanged -- verify.
+(define_insn "abshi2"
+ [(set (match_operand:HI 0 "register_operand" "=d")
+ (abs:HI (match_operand:HI 1 "register_operand" "d")))
+ (clobber (reg:CC 33))]
+ ""
+ "sll\\t%1,16\;sra\\t%1,16\;lpr\\t%0,%1"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "3")
+ (set_attr "length" "10")])
+
+;
+; absdf2 instruction pattern(s).
+;
+
+(define_expand "absdf2"
+ [(parallel
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (abs:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+(define_insn "*absdf2"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (abs:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "lpdbr\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+; IBM-hex-float double-precision absolute value (lpdr, RR format).
+(define_insn "*absdf2_ibm"
+ [(set (match_operand:DF 0 "register_operand" "=f")
+ (abs:DF (match_operand:DF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "lpdr\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+;
+; abssf2 instruction pattern(s).
+;
+
+; Expander for single-precision absolute value; adds the CC clobber so
+; the IEEE or IBM insn below can match.
+(define_expand "abssf2"
+ [(parallel
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (abs:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))])]
+ "TARGET_HARD_FLOAT"
+ "")
+
+; IEEE single-precision absolute value via LOAD POSITIVE (lpebr).
+; Fixed: lpebr is an RRE-format (4-byte) instruction, not RR.
+(define_insn "*abssf2"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (abs:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT"
+ "lpebr\\t%0,%1"
+ [(set_attr "op_type" "RRE")])
+
+; IBM-hex-float single-precision absolute value (lper, RR format).
+(define_insn "*abssf2_ibm"
+ [(set (match_operand:SF 0 "register_operand" "=f")
+ (abs:SF (match_operand:SF 1 "register_operand" "f")))
+ (clobber (reg:CC 33))]
+ "TARGET_HARD_FLOAT && TARGET_IBM_FLOAT"
+ "lper\\t%0,%1"
+ [(set_attr "op_type" "RR")])
+
+
+;;
+;;- One's complement instructions.
+;;
+
+;
+; one_cmpldi2 instruction pattern(s).
+;
+
+; 64-bit one's complement, implemented as XOR with an all-ones constant
+; forced into the literal pool (operand 2); CC is clobbered by XOR.
+(define_expand "one_cmpldi2"
+ [(parallel
+ [(set (match_operand:DI 0 "r_or_s_operand" "=d")
+ (not:DI (match_operand:DI 1 "r_or_s_operand" "0")))
+ (use (match_dup 2))
+ (clobber (reg:CC 33))])]
+ "TARGET_64BIT"
+ "{ operands[2] = force_const_mem (DImode, constm1_rtx); }")
+
+; XOR the register or storage operand with the pool constant -1:
+; xg for the register alternative, xc for memory-to-memory.
+; Fixed: xg is a z/Architecture-only instruction, so the insn must be
+; gated on TARGET_64BIT like its expander; and xg is E3 (RXE) format,
+; not RR, so the op_type attribute was wrong for length computation.
+(define_insn "*one_cmpldi2"
+ [(set (match_operand:DI 0 "r_or_s_operand" "=d,Q")
+ (not:DI (match_operand:DI 1 "r_or_s_operand" "0,0")))
+ (use (match_operand:DI 2 "memory_operand" "m,m"))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ xg\\t%0,%2
+ xc\\t%O0(8,%R0),%2"
+ [(set_attr "op_type" "RXE,SS")
+ (set_attr "atype" "mem")])
+
+;
+; one_cmplsi2 instruction pattern(s).
+;
+
+; 32-bit one's complement via XOR with a pooled all-ones constant.
+(define_expand "one_cmplsi2"
+ [(parallel
+ [(set (match_operand:SI 0 "r_or_s_operand" "=d")
+ (not:SI (match_operand:SI 1 "r_or_s_operand" "0")))
+ (use (match_dup 2))
+ (clobber (reg:CC 33))])]
+ ""
+ "{ operands[2] = force_const_mem (SImode, constm1_rtx); }")
+
+; XOR with the pool constant -1: x (register) or xc (storage).
+; Fixed: x is an RX-format instruction, not RR -- the sibling
+; "*one_cmplhi2" below already uses "RX,SS" for the same pair.
+(define_insn "*one_cmplsi2"
+ [(set (match_operand:SI 0 "r_or_s_operand" "=d,Q")
+ (not:SI (match_operand:SI 1 "r_or_s_operand" "0,0")))
+ (use (match_operand:SI 2 "memory_operand" "m,m"))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ x\\t%0,%2
+ xc\\t%O0(4,%R0),%2"
+ [(set_attr "op_type" "RX,SS")
+ (set_attr "atype" "mem")])
+
+;
+; one_cmplhi2 instruction pattern(s).
+;
+
+; 16-bit one's complement.  The pool constant is SImode -1; the xc
+; alternative only touches the 2 bytes of the HImode destination.
+(define_expand "one_cmplhi2"
+ [(parallel
+ [(set (match_operand:HI 0 "r_or_s_operand" "=d")
+ (not:HI (match_operand:HI 1 "r_or_s_operand" "0")))
+ (use (match_dup 2))
+ (clobber (reg:CC 33))])]
+ ""
+ "{ operands[2] = force_const_mem (SImode, constm1_rtx); }")
+
+; x XORs the full 32-bit register with -1 (upper bits are don't-care
+; for HImode); xc XORs just the 2-byte storage operand.
+(define_insn "*one_cmplhi2"
+ [(set (match_operand:HI 0 "r_or_s_operand" "=d,Q")
+ (not:HI (match_operand:HI 1 "r_or_s_operand" "0,0")))
+ (use (match_operand:SI 2 "memory_operand" "m,m"))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ x\\t%0,%2
+ xc\\t%O0(2,%R0),%2"
+ [(set_attr "op_type" "RX,SS")
+ (set_attr "atype" "mem")])
+
+;
+; one_cmplqi2 instruction pattern(s).
+;
+
+; 8-bit one's complement of a storage operand via XOR-immediate.
+; Fixed: the pattern name was misspelled "one_cmpqi2"; the standard
+; optab name (and the section comment above) is "one_cmplqi2", so the
+; pattern was never found by the middle end.
+(define_insn "one_cmplqi2"
+ [(set (match_operand:QI 0 "memory_operand" "=Q")
+ (not:QI (match_operand:QI 1 "memory_operand" "0")))
+ (clobber (reg:CC 33))]
+ ""
+ "xi\\t%0,255"
+ [(set_attr "op_type" "SI")])
+
+
+;;
+;;- Rotate instructions.
+;;
+
+;
+; rotldi3 instruction pattern(s).
+;
+
+; 64-bit rotate left (rllg): first alternative takes an immediate
+; count ("J"), second a count in an address register.
+(define_insn "rotldi3"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (rotate:DI (match_operand:DI 1 "register_operand" "d,d")
+ (match_operand:DI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ rllg\\t%0,%1,%c2
+ rllg\\t%0,%1,0(%2)"
+ [(set_attr "op_type" "RSE")
+ (set_attr "type" "set")])
+
+;
+; rotlsi3 instruction pattern(s).
+;
+
+; 32-bit rotate left (rll); only available on 64-bit capable hardware,
+; hence the TARGET_64BIT condition.
+(define_insn "rotlsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (rotate:SI (match_operand:SI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ rll\\t%0,%1,%c2
+ rll\\t%0,%1,0(%2)"
+ [(set_attr "op_type" "RSE")
+ (set_attr "type" "set")])
+
+
+;;
+;;- Arithmetic shift instructions.
+;;
+;; For left shifts the logical shift instructions are always used, since
+;; arithmetic and logical left shift produce the same bit pattern (ANSI C).
+
+;
+; ashldi3 instruction pattern(s).
+;
+
+; 64-bit left shift: the expander just adds the CC clobber; one of the
+; two insns below matches depending on TARGET_64BIT.
(define_expand "ashldi3"
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:SI 2 "nonmemory_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "")
+
+; 31-bit mode: shift the even/odd register pair with sldl; the input
+; must therefore be tied to the output pair (constraint "0").
+(define_insn "*ashldi3_31"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (ashift:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "@
+ sldl\\t%0,%c2
+ sldl\\t%0,0(%2)"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "set")])
+
+; 64-bit mode: three-operand sllg, so source need not be tied to dest.
+; Fixed: the immediate alternative printed the count with plain %2;
+; every sibling pattern (srlg in *lshrdi3_64, srag in *ashrdi3_64)
+; prints constant shift counts with %c2 -- made consistent.
+(define_insn "*ashldi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (ashift:DI (match_operand:DI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ sllg\\t%0,%1,%c2
+ sllg\\t%0,%1,0(%2)"
+ [(set_attr "op_type" "RSE")
+ (set_attr "type" "set")])
+
+;
+; ashrdi3 instruction pattern(s).
+;
+
+; 64-bit arithmetic right shift; CC clobber added here, insn selected
+; below by TARGET_64BIT.
+(define_expand "ashrdi3"
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:SI 2 "nonmemory_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "")
+
+; 31-bit mode: srda shifts the register pair in place (input tied "0").
+(define_insn "*ashrdi3_31"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "@
+ srda\\t%0,%c2
+ srda\\t%0,0(%2)"
+ [(set_attr "op_type" "RS")])
+
+; 64-bit mode: three-operand srag.
+(define_insn "*ashrdi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ srag\\t%0,%1,%c2
+ srag\\t%0,%1,0(%2)"
+ [(set_attr "op_type" "RSE")
+ (set_attr "type" "set")])
+
+;
+; ashlsi3 instruction pattern(s).
+;
+; all 32 bits have to be shifted (testcase co750c)
+
+; 32-bit left shift (sll); two-operand form, so input tied to output.
+(define_insn "ashlsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (ashift:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,%c2
+ sll\\t%0,0(%2)"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "set")])
+
+;
+; ashrsi3 instruction pattern(s).
+;
+
+; 32-bit arithmetic right shift (sra).
+(define_insn "ashrsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sra\\t%0,%c2
+ sra\\t%0,0(%2)"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "set")])
+
+;
+; ashlhi3 instruction pattern(s).
+;
+
+; 16-bit left shift: plain sll is sufficient since only the low
+; halfword of the result is significant for HImode.
+(define_insn "ashlhi3"
+ [(set (match_operand:HI 0 "register_operand" "=d,d")
+ (ashift:HI (match_operand:HI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,%c2
+ sll\\t%0,0(%2)"
+ [(set_attr "op_type" "RS,RS")])
+
+;
+; ashrhi3 instruction pattern(s).
+;
+
+; 16-bit arithmetic right shift: first sign-extend the halfword by
+; shifting it to the top (sll 16), then shift right by 16 plus the
+; requested count.  For the register alternative, "16(%2)" encodes the
+; count as base register %2 plus displacement 16.  Two RS insns,
+; hence length 8 and the composite op_type "NN".
+(define_insn "ashrhi3"
+ [(set (match_operand:HI 0 "register_operand" "=d,d")
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,16\;sra\\t%0,16+%c2
+ sll\\t%0,16\;sra\\t%0,16(%2)"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "length" "8,8")])
+
+
+;;
+;;- logical shift instructions.
+;;
+
+;
+; lshrdi3 instruction pattern(s).
+;
+
+; 64-bit logical right shift; CC clobber added here, insn selected
+; below by TARGET_64BIT.
+(define_expand "lshrdi3"
+ [(parallel
+ [(set (match_operand:DI 0 "register_operand" "")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:SI 2 "nonmemory_operand" "")))
+ (clobber (reg:CC 33))])]
+ ""
+ "")
+
+; 31-bit mode: srdl shifts the register pair in place (input tied "0").
+(define_insn "*lshrdi3_31"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "!TARGET_64BIT"
+ "@
+ srdl\\t%0,%c2
+ srdl\\t%0,0(%2)"
+ [(set_attr "op_type" "RS,RS")])
+
+; 64-bit mode: three-operand srlg.
+; Fixed: srlg is an RSE-format instruction (like sllg in *ashldi3_64
+; and srag in *ashrdi3_64, both marked RSE here), not RS; the wrong
+; op_type would yield a wrong instruction length.
+(define_insn "*lshrdi3_64"
+ [(set (match_operand:DI 0 "register_operand" "=d,d")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "d,d")
+ (match_operand:SI 2 "nonmemory_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ "TARGET_64BIT"
+ "@
+ srlg\\t%0,%1,%c2
+ srlg\\t%0,%1,0(%2)"
+ [(set_attr "op_type" "RSE")
+ (set_attr "type" "set")])
+
+;
+; lshrsi3 instruction pattern(s).
+;
+
+; 32-bit logical right shift (srl); two-operand, input tied to output.
+(define_insn "lshrsi3"
+ [(set (match_operand:SI 0 "register_operand" "=d,d")
+ (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ srl\\t%0,%c2
+ srl\\t%0,0(%2)"
+ [(set_attr "op_type" "RS")
+ (set_attr "type" "set")])
+
+;
+; lshrhi3 instruction pattern(s).
+;
+
+; 16-bit logical right shift: clear the upper bits by shifting the
+; halfword to the top (sll 16), then shift right by 16 plus the count
+; ("16(%2)" = base register %2 + displacement 16).  Two RS insns.
+(define_insn "lshrhi3"
+ [(set (match_operand:HI 0 "register_operand" "=d,d")
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0")
+ (match_operand:SI 2 "r_or_im8_operand" "J,a")))
+ (clobber (reg:CC 33))]
+ ""
+ "@
+ sll\\t%0,16\;srl\\t%0,16+%c2
+ sll\\t%0,16\;srl\\t%0,16(%2)"
+ [(set_attr "op_type" "NN,NN")
+ (set_attr "length" "8,8")])
+
+
+;;
+;; Branch instruction patterns.
+;;
+
+; Standard conditional-branch expanders.  Each fetches the operands
+; saved by the "cmpxx" expander (s390_compare_op0/op1), emits the
+; compare into CC register 33 and then the conditional branch.
+; (In)equality uses CCZmode, signed orderings CCSmode, unsigned
+; orderings CCUmode.
+(define_expand "beq"
+ [(set (reg:CCZ 33) (compare:CCZ (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (eq (reg:CCZ 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bne"
+ [(set (reg:CCZ 33) (compare:CCZ (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (ne (reg:CCZ 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bgt"
+ [(set (reg:CCS 33) (compare:CCS (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (gt (reg:CCS 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bgtu"
+ [(set (reg:CCU 33) (compare:CCU (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (gtu (reg:CCU 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "blt"
+ [(set (reg:CCS 33) (compare:CCS (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (lt (reg:CCS 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bltu"
+ [(set (reg:CCU 33) (compare:CCU (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (ltu (reg:CCU 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bge"
+ [(set (reg:CCS 33) (compare:CCS (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (ge (reg:CCS 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bgeu"
+ [(set (reg:CCU 33) (compare:CCU (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (geu (reg:CCU 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "ble"
+ [(set (reg:CCS 33) (compare:CCS (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (le (reg:CCS 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+(define_expand "bleu"
+ [(set (reg:CCU 33) (compare:CCU (match_dup 1) (match_dup 2)))
+ (set (pc)
+ (if_then_else (leu (reg:CCU 33) (const_int 0))
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "{ operands[1] = s390_compare_op0; operands[2] = s390_compare_op1; }")
+
+
+;;
+;;- Conditional jump instructions.
+;;
+
+; Conditional jump on the CC register: short-form brc (j%C1, 4 bytes)
+; when the target is near or on 31-bit targets, long-form brcl
+; (jg%C1, 6 bytes) otherwise.  The length attribute picks 4 when the
+; branch distance is below 60000, else 6.
+(define_insn "cjump"
+ [(set (pc)
+ (if_then_else
+ (match_operator 1 "comparison_operator" [(reg 33) (const_int 0)])
+ (label_ref (match_operand 0 "" ""))
+ (pc)))]
+ ""
+ "*
+{
+  if (get_attr_length (insn) == 4 || !TARGET_64BIT)
+    return \"j%C1\\t%l0\";
+  else
+    return \"jg%C1\\t%l0\";
+}"
+ [(set_attr "op_type" "RI")
+ (set (attr "length") (if_then_else
+ (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
+ (const_int 4) (const_int 6)))])
+
+; Conditional jump to a computed target via BRANCH ON CONDITION (b%C1).
+; NOTE(review): operand 0 is a memory_operand printed with plain %0,
+; while the unconditional "jump_long" below uses address_operand with
+; %a0 -- confirm this asymmetry is intended.
+(define_insn "cjump_long"
+ [(set (pc)
+ (if_then_else
+ (match_operator 1 "comparison_operator" [(reg 33) (const_int 0)])
+ (match_operand 0 "memory_operand" "m")
+ (pc)))]
+ ""
+ "b%C1\\t%0"
+ [(set_attr "op_type" "RX")])
+
+
+;;
+;;- Negated conditional jump instructions.
+;;
+
+; Inverted conditional jump (branch taken on the arms being swapped):
+; %D1 emits the negated condition mnemonic.  Same short/long form
+; selection as "cjump" above.
+(define_insn "icjump"
+ [(set (pc)
+ (if_then_else
+ (match_operator 1 "comparison_operator" [(reg 33) (const_int 0)])
+ (pc)
+ (label_ref (match_operand 0 "" ""))))]
+ ""
+ "*
+{
+  if (get_attr_length (insn) == 4 || !TARGET_64BIT)
+    return \"j%D1\\t%l0\";
+  else
+    return \"jg%D1\\t%l0\";
+}"
+ [(set_attr "op_type" "RI")
+ (set (attr "length") (if_then_else
+ (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
+ (const_int 4) (const_int 6)))])
+
+; Inverted conditional jump to a computed target (b%D1).
+(define_insn "icjump_long"
+ [(set (pc)
+ (if_then_else
+ (match_operator 1 "comparison_operator" [(reg 33) (const_int 0)])
+ (pc)
+ (match_operand 0 "memory_operand" "m")))]
+ ""
+ "b%D1\\t%0"
+ [(set_attr "op_type" "RX")])
+
+
+;;
+;;- Subtract one and jump if not zero.
+;;
+
+;(define_expand "decrement_and_branch_on_count"
+; [(use (match_operand 0 "register_operand" ""))
+; (use (label_ref (match_operand 1 "" "")))]
+; ""
+; "
+;{
+;/* if (TARGET_64BIT)
+; emit_jump_insn (gen_brctdi (operands[0], operands[1]));
+; else */
+; emit_jump_insn (gen_brctsi (operands[0], operands[1]));
+; DONE;
+;}")
+;
+;(define_insn "brctsi"
+; [(set (pc)
+; (if_then_else
+; (ne (match_operand:SI 0 "register_operand" "+a")
+; (const_int 1))
+; (label_ref (match_operand 1 "" ""))
+; (pc)))
+; (set (match_dup 0)
+; (plus:SI (match_dup 0) (const_int -1)))]
+; ""
+; "brct\\t%0,%l1"
+; [(set_attr "op_type" "RI")
+; (set_attr "type" "branch")]
+;)
+;
+;(define_insn "ibrctsi"
+; [(set (pc)
+; (if_then_else
+; (eq (match_operand:SI 0 "register_operand" "+a")
+; (const_int 1))
+; (pc)
+; (label_ref (match_operand 1 "" ""))))
+; (set (match_dup 0)
+; (plus:SI (match_dup 0) (const_int -1)))]
+; ""
+; "brct\\t%0,%l1"
+; [(set_attr "op_type" "RI")
+; (set_attr "type" "branch")]
+;)
+
+
+;;
+;;- Unconditional jump instructions.
+;;
+
+;
+; jump instruction pattern(s).
+;
+
+; Unconditional jump: short-form bru (j, 4 bytes) when near or in
+; 31-bit mode, long-form brul (jg, 6 bytes) otherwise; length chosen
+; by branch distance as in "cjump".
+(define_insn "jump"
+ [(set (pc) (label_ref (match_operand 0 "" "")))]
+ ""
+ "*
+{
+  if (get_attr_length (insn) == 4 || !TARGET_64BIT)
+    return \"j\\t%l0\";
+  else
+    return \"jg\\t%l0\";
+}"
+ [(set_attr "op_type" "RI")
+ (set (attr "length") (if_then_else
+ (lt (abs (minus (pc) (match_dup 0))) (const_int 60000))
+ (const_int 4) (const_int 6)))])
+
+;
+; indirect-jump instruction pattern(s).
+;
+
+; Indirect jump through a register via BRANCH ON CONDITION (br).
+; Fixed: br is an RR-format (2-byte) instruction, not RX -- the
+; basr patterns in this file are correctly marked RR; the wrong
+; op_type would yield a wrong instruction length.
+(define_insn "indirect_jump"
+ [(set (pc) (match_operand 0 "register_operand" "a"))]
+ ""
+ "br\\t%0"
+ [(set_attr "op_type" "RR")])
+
+; Unconditional jump to an arbitrary address (b, RX format); %a0
+; prints the operand as an address.
+(define_insn "jump_long"
+ [(set (pc) (match_operand 0 "address_operand" "p"))]
+ ""
+ "b\\t%a0"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")])
+
+
+;
+; tablejump instruction pattern(s).
+;
+
+; Jump-table dispatch.  When PIC, the table entries are relative to
+; the literal-pool base register, so the target is rewritten as
+; base + offset (the UNSPEC 101 wraps the base register).
+(define_expand "tablejump"
+ [(parallel
+ [(set (pc) (match_operand 0 "register_operand" "a"))
+ (use (label_ref (match_operand 1 "" "")))])]
+ ""
+ "
+{
+  if (flag_pic)
+    {
+      rtx base;
+      base = gen_rtx_REG (Pmode, BASE_REGISTER);
+      base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
+      operands[0] = gen_rtx_PLUS (Pmode, base, operands[0]);
+    }
+}")
+
+; Table jump through a register (br); the use of the table label keeps
+; the jump table alive.
+; Fixed: br is an RR-format (2-byte) instruction, not RX.
+(define_insn "*tablejump1"
+ [(set (pc) (match_operand 0 "register_operand" "a"))
+ (use (label_ref (match_operand 1 "" "")))]
+ ""
+ "br\\t%0"
+ [(set_attr "op_type" "RR")])
+
+; Table jump to a computed address (b, RX format); matches the PIC
+; base+offset form built by the expander above.
+(define_insn "*tablejump2"
+ [(set (pc) (match_operand 0 "address_operand" "p"))
+ (use (label_ref (match_operand 1 "" "")))]
+ ""
+ "b\\t%a0"
+ [(set_attr "op_type" "RX")
+ (set_attr "atype" "mem")])
+
+
+;;
+;;- Jump to subroutine.
+;;
+;;
+
+;
+; untyped call instruction pattern(s).
+;
+
+;; Call subroutine returning any type.
+(define_expand "untyped_call"
+ [(parallel [(call (match_operand 0 "" "")
+ (const_int 0))
+ (match_operand 1 "" "")
+ (match_operand 2 "" "")])]
+ ""
+ "
+{
+ int i;
+
+ emit_call_insn (gen_call (operands[0], const0_rtx, const0_rtx));
+
+ for (i = 0; i < XVECLEN (operands[2], 0); i++)
+ {
+ rtx set = XVECEXP (operands[2], 0, i);
+ emit_move_insn (SET_DEST (set), SET_SRC (set));
+ }
+
+ /* The optimizer does not know that the call sets the function value
+ registers we stored in the result block. We avoid problems by
+ claiming that all hard registers are used and clobbered at this
+ point. */
+ emit_insn (gen_blockage ());
+
+ DONE;
+}")
+
+;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
+;; all of memory. This blocks insns from being moved across this point.
+
+(define_insn "blockage"
+ [(unspec_volatile [(const_int 0)] 0)]
+ ""
+ "")
+
+
+;
+; call instruction pattern(s).
+;
+
+; Call expander: prepares the callee address (PLT stub when PIC and
+; the symbol is not known local; forced into a register when neither
+; bras nor brasl is usable) and records RETURN_REGNUM as the clobbered
+; link register in operand 2.  The matching insns follow below.
+(define_expand "call"
+ [(parallel [(call (match_operand 0 "" "")
+ (match_operand 1 "" ""))
+ (clobber (match_operand 2 "" ""))])]
+ ""
+ "
+{
+  /* Abuse operand 2 to hold the return register.  */
+  operands[2] = gen_rtx_REG (Pmode, RETURN_REGNUM);
+
+  /* In 31-bit, we must load the GOT register even if the
+     compiler doesn't know about it, because the PLT glue
+     code uses it.  In 64-bit, this is not necessary.  */
+  if (flag_pic && !TARGET_64BIT)
+    current_function_uses_pic_offset_table = 1;
+
+  /* Direct function calls need special treatment.  */
+  if (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF)
+    {
+      rtx sym = XEXP (operands[0], 0);
+
+      /* When calling a global routine in PIC mode, we must
+         replace the symbol itself with the PLT stub.  */
+      if (flag_pic && !SYMBOL_REF_FLAG(sym))
+        {
+          sym = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), 113);
+          sym = gen_rtx_CONST (Pmode, sym);
+        }
+
+      /* Unless we can use the bras(l) insn, force the
+         routine address into a register.  */
+      if (!TARGET_SMALL_EXEC && !TARGET_64BIT)
+        {
+          rtx target = gen_reg_rtx (Pmode);
+          emit_move_insn (target, sym);
+          sym = target;
+        }
+
+      operands[0] = gen_rtx_MEM (QImode, sym);
+    }
+}")
+
+; Direct call, 64-bit: pc-relative BRANCH RELATIVE AND SAVE LONG.
+(define_insn "brasl"
+ [(call (mem:QI (match_operand:DI 0 "bras_sym_operand" "X"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:DI 2 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "brasl\\t%2,%0"
+ [(set_attr "op_type" "RIL")
+ (set_attr "cycle" "n")])
+
+; Direct call, small-exec model: short-range BRANCH RELATIVE AND SAVE.
+(define_insn "bras"
+ [(call (mem:QI (match_operand:SI 0 "bras_sym_operand" "X"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:SI 2 "register_operand" "=r"))]
+ "TARGET_SMALL_EXEC"
+ "bras\\t%2,%0"
+ [(set_attr "op_type" "RI")
+ (set_attr "cycle" "n")])
+
+; Indirect call through a register (basr), 64-bit mode.
+(define_insn "basr_64"
+ [(call (mem:QI (match_operand:DI 0 "register_operand" "a"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:DI 2 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "basr\\t%2,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Indirect call through a register (basr), 31-bit mode.
+(define_insn "basr_31"
+ [(call (mem:QI (match_operand:SI 0 "register_operand" "a"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:SI 2 "register_operand" "=r"))]
+ "!TARGET_64BIT"
+ "basr\\t%2,%0"
+ [(set_attr "op_type" "RR")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Call to an arbitrary address (bas), 64-bit mode.
+(define_insn "bas_64"
+ [(call (mem:QI (match_operand:QI 0 "address_operand" "p"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:DI 2 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "bas\\t%2,%a0"
+ [(set_attr "op_type" "RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Call to an arbitrary address (bas), 31-bit mode.
+(define_insn "bas_31"
+ [(call (mem:QI (match_operand:QI 0 "address_operand" "p"))
+ (match_operand:SI 1 "const_int_operand" "n"))
+ (clobber (match_operand:SI 2 "register_operand" "=r"))]
+ "!TARGET_64BIT"
+ "bas\\t%2,%a0"
+ [(set_attr "op_type" "RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+
+;
+; call_value instruction pattern(s).
+;
+
+; Value-returning call expander; identical address preparation to
+; "call" above, with the result register in operand 0 and the link
+; register recorded in operand 3.
+(define_expand "call_value"
+ [(parallel [(set (match_operand 0 "" "")
+ (call (match_operand 1 "" "")
+ (match_operand 2 "" "")))
+ (clobber (match_operand 3 "" ""))])]
+ ""
+ "
+{
+  /* Abuse operand 3 to hold the return register.  */
+  operands[3] = gen_rtx_REG (Pmode, RETURN_REGNUM);
+
+  /* In 31-bit, we must load the GOT register even if the
+     compiler doesn't know about it, because the PLT glue
+     code uses it.  In 64-bit, this is not necessary.  */
+  if (flag_pic && !TARGET_64BIT)
+    current_function_uses_pic_offset_table = 1;
+
+  /* Direct function calls need special treatment.  */
+  if (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF)
+    {
+      rtx sym = XEXP (operands[1], 0);
+
+      /* When calling a global routine in PIC mode, we must
+         replace the symbol itself with the PLT stub.  */
+      if (flag_pic && !SYMBOL_REF_FLAG(sym))
+        {
+          sym = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), 113);
+          sym = gen_rtx_CONST (Pmode, sym);
+        }
+
+      /* Unless we can use the bras(l) insn, force the
+         routine address into a register.  */
+      if (!TARGET_SMALL_EXEC && !TARGET_64BIT)
+        {
+          rtx target = gen_reg_rtx (Pmode);
+          emit_move_insn (target, sym);
+          sym = target;
+        }
+
+      operands[1] = gen_rtx_MEM (QImode, sym);
+    }
+}")
+
+; Value-returning direct call, 64-bit (brasl); result lands in a
+; general or floating-point register ("=df").
+(define_insn "brasl_r"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:DI 1 "bras_sym_operand" "X"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:DI 3 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "brasl\\t%3,%1"
+ [(set_attr "op_type" "RIL")
+ (set_attr "cycle" "n")])
+
+; Value-returning direct call, small-exec model (bras).
+(define_insn "bras_r"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:SI 1 "bras_sym_operand" "X"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:SI 3 "register_operand" "=r"))]
+ "TARGET_SMALL_EXEC"
+ "bras\\t%3,%1"
+ [(set_attr "op_type" "RI")
+ (set_attr "cycle" "n")])
+
+; Value-returning indirect call through a register (basr), 64-bit mode.
+; Fixed: added the atype "mem" attribute for consistency -- both the
+; void counterpart "basr_64" and the 31-bit twin "basr_r_31" carry it.
+(define_insn "basr_r_64"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:DI 1 "register_operand" "a"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:DI 3 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "basr\\t%3,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Value-returning indirect call through a register (basr), 31-bit mode.
+(define_insn "basr_r_31"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:SI 1 "register_operand" "a"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:SI 3 "register_operand" "=r"))]
+ "!TARGET_64BIT"
+ "basr\\t%3,%1"
+ [(set_attr "op_type" "RR")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Value-returning call to an arbitrary address (bas), 64-bit mode.
+(define_insn "bas_r_64"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:QI 1 "address_operand" "p"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:DI 3 "register_operand" "=r"))]
+ "TARGET_64BIT"
+ "bas\\t%3,%a1"
+ [(set_attr "op_type" "RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+; Value-returning call to an arbitrary address (bas), 31-bit mode.
+(define_insn "bas_r_31"
+ [(set (match_operand 0 "register_operand" "=df")
+ (call (mem:QI (match_operand:QI 1 "address_operand" "p"))
+ (match_operand:SI 2 "const_int_operand" "n")))
+ (clobber (match_operand:SI 3 "register_operand" "=r"))]
+ "!TARGET_64BIT"
+ "bas\\t%3,%a1"
+ [(set_attr "op_type" "RX")
+ (set_attr "cycle" "n")
+ (set_attr "atype" "mem")])
+
+
+;;
+;;- Miscellaneous instructions.
+;;
+
+;
+; allocate stack instruction pattern(s).
+;
+
+; Dynamic stack allocation: save the backchain word, move the stack
+; pointer down by the requested amount, store the backchain at the new
+; stack bottom, and return the new dynamic area address in operand 0.
+(define_expand "allocate_stack"
+ [(set (reg 15)
+ (plus (reg 15) (match_operand 1 "general_operand" "")))
+ (set (match_operand 0 "general_operand" "")
+ (reg 15))]
+ ""
+ "
+{
+  rtx stack = gen_rtx (REG, Pmode, STACK_POINTER_REGNUM);
+  rtx chain = gen_rtx (MEM, Pmode, stack);
+  rtx temp = gen_reg_rtx (Pmode);
+
+  /* Save the backchain word currently at the stack bottom.  */
+  emit_move_insn (temp, chain);
+
+  if (TARGET_64BIT)
+    emit_insn (gen_adddi3 (stack, stack, negate_rtx (Pmode, operands[1])));
+  else
+    emit_insn (gen_addsi3 (stack, stack, negate_rtx (Pmode, operands[1])));
+
+  /* Re-establish the backchain at the new stack bottom.  */
+  emit_move_insn (chain, temp);
+
+  emit_move_insn (operands[0], virtual_stack_dynamic_rtx);
+  DONE;
+}")
+
+
+;
+; setjmp/longjmp instruction pattern(s).
+;
+
+; builtin_setjmp support: store the GOT (reg 12) and literal-pool base
+; (reg 13) registers into the jump buffer (see do_builtin_setjmp_setup
+; below) so builtin_longjmp can restore them.
+(define_expand "builtin_setjmp_setup"
+ [(unspec [(match_operand 0 "register_operand" "a")] 1)]
+ ""
+ "
+{
+  emit_insn (gen_do_builtin_setjmp_setup (operands[0]));
+  DONE;
+}")
+
+; Landing point after a builtin_longjmp: just a blockage so nothing is
+; scheduled across the receiver.
+(define_expand "builtin_setjmp_receiver"
+ [(unspec_volatile [(label_ref (match_operand 0 "" ""))] 2)]
+ ""
+ "
+{
+  emit_insn (gen_blockage ());
+  DONE;
+}")
+
+; Stores reg 12 at buffer offset 12 and reg 13 at offset 16.
+; NOTE(review): the SImode accesses and fixed offsets assume 4-byte
+; buffer slots; confirm behavior for TARGET_64BIT.
+(define_expand "do_builtin_setjmp_setup"
+ [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "a")
+ (const_int 12)))
+ (reg:SI 12))
+ (set (mem:SI (plus:SI (match_dup 0)
+ (const_int 16)))
+ (reg:SI 13)) ]
+ ""
+ "")
+
+; builtin_longjmp: reload frame pointer, label, stack pointer, GOT
+; (reg 12) and literal-pool base (reg 13) from the jump buffer, then
+; jump through reg 14.  The USEs keep the restored registers live up
+; to the jump.
+; NOTE(review): the buffer offsets 4/8/12/16 assume 4-byte (SImode)
+; slots; for TARGET_64BIT the slots would presumably be Pmode-sized --
+; verify against do_builtin_setjmp_setup above.
+(define_expand "builtin_longjmp"
+ [(unspec_volatile [(match_operand 0 "register_operand" "r")] 3)]
+ ""
+ "
+{
+  /* The elements of the buffer are, in order:  */
+  rtx fp = gen_rtx_MEM (Pmode, operands[0]);
+  rtx lab = gen_rtx_MEM (Pmode, plus_constant (operands[0], 4));
+  rtx stack = gen_rtx_MEM (Pmode, plus_constant (operands[0], 8));
+  rtx gotv = gen_rtx_MEM (Pmode, plus_constant (operands[0], 12));
+  rtx basev = gen_rtx_MEM (Pmode, plus_constant (operands[0], 16));
+  rtx base = gen_rtx_REG (Pmode, 13);
+  rtx got = gen_rtx_REG (Pmode, 12);
+  rtx jmp = gen_rtx_REG (Pmode, 14);
+
+  emit_move_insn (jmp, lab);
+  emit_move_insn (got, gotv);
+  emit_move_insn (base, basev);
+  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
+  emit_move_insn (hard_frame_pointer_rtx, fp);
+
+  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+  emit_insn (gen_rtx_USE (VOIDmode, got));
+  emit_insn (gen_rtx_USE (VOIDmode, base));
+  emit_indirect_jump (jmp);
+  DONE;
+}")
+
+
+;; These patterns say how to save and restore the stack pointer. We need not
+;; save the stack pointer at function level since we are careful to
+;; preserve the backchain. At block level, we have to restore the backchain
+;; when we restore the stack pointer.
+;;
+;; For nonlocal gotos, we must save both the stack pointer and its
+;; backchain and restore both. Note that in the nonlocal case, the
+;; save area is a memory location.
+
+; Function-level stack save/restore are no-ops: the backchain is
+; maintained by the prologue/epilogue (see the file comment above).
+(define_expand "save_stack_function"
+ [(match_operand 0 "general_operand" "")
+ (match_operand 1 "general_operand" "")]
+ ""
+ "DONE;")
+
+(define_expand "restore_stack_function"
+ [(match_operand 0 "general_operand" "")
+ (match_operand 1 "general_operand" "")]
+ ""
+ "DONE;")
+
+; Block-level restore: save the backchain word (operand 3, the word
+; at the old stack bottom) into a temporary, restore the stack
+; pointer, then write the backchain back at the new bottom.
+(define_expand "restore_stack_block"
+ [(use (match_operand 0 "register_operand" ""))
+ (set (match_dup 2) (match_dup 3))
+ (set (match_dup 0) (match_operand 1 "register_operand" ""))
+ (set (match_dup 3) (match_dup 2))]
+ ""
+ "
+{
+  operands[2] = gen_reg_rtx (Pmode);
+  operands[3] = gen_rtx_MEM (Pmode, operands[0]);
+}")
+
+; Nonlocal-goto stack save: the save area (operand 0) is a double-word
+; memory slot holding the backchain word in its first subword and the
+; stack pointer in its second.
+(define_expand "save_stack_nonlocal"
+ [(match_operand 0 "memory_operand" "")
+ (match_operand 1 "register_operand" "")]
+ ""
+ "
+{
+  rtx temp = gen_reg_rtx (Pmode);
+
+  /* Copy the backchain to the first word, sp to the second.  */
+  emit_move_insn (temp, gen_rtx_MEM (Pmode, operands[1]));
+  emit_move_insn (operand_subword (operands[0], 0, 0,
+      TARGET_64BIT ? TImode : DImode),
+      temp);
+  emit_move_insn (operand_subword (operands[0], 1, 0,
+      TARGET_64BIT ? TImode : DImode),
+      operands[1]);
+  DONE;
+}")
+
+; Nonlocal-goto stack restore: read back stack pointer and backchain
+; from the save area and re-establish the backchain word.
+(define_expand "restore_stack_nonlocal"
+ [(match_operand 0 "register_operand" "")
+ (match_operand 1 "memory_operand" "")]
+ ""
+ "
+{
+  rtx temp = gen_reg_rtx (Pmode);
+
+  /* Restore the backchain from the first word, sp from the second.  */
+  emit_move_insn (temp,
+      operand_subword (operands[1], 0, 0,
+          TARGET_64BIT ? TImode : DImode));
+  emit_move_insn (operands[0],
+      operand_subword (operands[1], 1, 0,
+          TARGET_64BIT ? TImode : DImode));
+  emit_move_insn (gen_rtx_MEM (Pmode, operands[0]), temp);
+  DONE;
+}")
+
+
+;
+; nop instruction pattern(s).
+;
+
+; No-operation: a register-to-itself load (lr 0,0).
+(define_insn "nop"
+ [(const_int 0)]
+ ""
+ "lr\\t0,0"
+ [(set_attr "op_type" "RR")])
+
+
+;
+; Special literal pool access instruction pattern(s).
+;
+
+; Re-establish the literal-pool base register 13: pc-relative larl on
+; 64-bit targets, basr plus an additive offset (%Y0) on 31-bit.
+(define_insn "reload_base"
+ [(parallel [(set (reg 13) (pc))
+ (use (label_ref (match_operand 0 "" "")))])]
+ ""
+ "*
+{
+  if (TARGET_64BIT)
+    return \"larl\\t13,%y0\";
+  else
+    return \"basr\\t13,0\;ahi\\t13,%Y0\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "2")
+ (set_attr "length" "8")])
+
+; Emit the literal pool at this point (s390_dump_literal_pool) and a
+; local label "0:" marking its start.  The large length attribute
+; reserves worst-case space for the pool contents.
+(define_insn "ltorg"
+ [(parallel [(set (reg 13) (pc))
+ (use (match_operand:SI 0 "const_int_operand" ""))])]
+ ""
+ "*
+{
+  s390_dump_literal_pool (insn, operands[0]);
+  return \"0:\";
+}"
+ [(set_attr "op_type" "NN")
+ (set_attr "cycle" "n")
+ (set_attr "length" "4096")])
+
+
+;;
+;; Peephole optimization patterns.
+;;
+
+; Store followed by a reload of the same value into the same register:
+; the load is redundant, keep only the store.
+(define_peephole
+ [(set (match_operand:SI 0 "memory_operand" "m")
+ (match_operand:SI 1 "register_operand" "d"))
+ (set (match_dup 1)
+ (match_dup 0))]
+ ""
+ "st\\t%1,%0")
+
+; Two identical consecutive stores: keep only one.
+(define_peephole
+ [(set (match_operand:SI 0 "memory_operand" "m")
+ (match_operand:SI 1 "register_operand" "d"))
+ (set (match_dup 0)
+ (match_dup 1))]
+ ""
+ "st\\t%1,%0")
+
+; Register copy followed by adding a small positive constant: fold
+; both into a single LOAD ADDRESS when one of the registers is the
+; stack pointer or the literal-pool base (guaranteed valid addresses,
+; so la's address arithmetic is safe) and the constant fits in a
+; 12-bit displacement (0 < x < 4096).
+(define_peephole
+ [(set (match_operand:SI 0 "register_operand" "")
+ (match_operand:SI 1 "register_operand" ""))
+ (parallel
+ [(set (match_dup 0)
+ (plus:SI (match_dup 0)
+ (match_operand:SI 2 "immediate_operand" "")))
+ (clobber (reg:CC 33))])]
+ "(REGNO (operands[0]) == STACK_POINTER_REGNUM ||
+ REGNO (operands[1]) == STACK_POINTER_REGNUM ||
+ REGNO (operands[0]) == BASE_REGISTER ||
+ REGNO (operands[1]) == BASE_REGISTER) &&
+ INTVAL (operands[2]) > 0 && INTVAL (operands[2]) < 4096"
+ "la\\t%0,%c2(%1)")
+
+;
+; peepholes for fast char instructions
+;
+
+;(define_peephole
+; [(set (match_operand:QI 0 "register_operand" "d")
+; (match_operand:QI 1 "s_operand" "Q"))
+; (set (match_operand:SI 2 "register_operand" "0")
+; (zero_extend:SI (match_dup 0)))]
+; "REGNO(operands[0]) == REGNO(operands[2])"
+; "icm\\t%0,8,%1\;srl\\t%0,24")
+
+;(define_peephole
+; [(set (match_operand:QI 0 "register_operand" "d")
+; (match_operand:QI 1 "s_operand" "Q"))
+; (set (match_operand:SI 2 "register_operand" "0")
+; (sign_extend:SI (match_dup 0)))]
+; "REGNO(operands[0]) == REGNO(operands[2])"
+; "icm\\t%0,8,%1\;sra\\t%0,24")
+
+; QImode immediate load followed by sign extension into the same
+; register: a single LOAD HALFWORD IMMEDIATE does both.
+(define_peephole
+ [(set (match_operand:QI 0 "register_operand" "d")
+ (match_operand:QI 1 "immediate_operand" "J"))
+ (set (match_operand:SI 2 "register_operand" "0" )
+ (sign_extend:SI (match_dup 0) ) )]
+ "REGNO(operands[0]) == REGNO(operands[2])"
+ "lhi\\t%0,%h1")
+
+;
+; peepholes for fast short instructions
+;
+
+;(define_peephole
+; [(set (match_operand:HI 0 "register_operand" "d")
+; (match_operand:HI 1 "s_operand" "Q"))
+; (set (match_operand:SI 2 "register_operand" "0" )
+; (zero_extend:SI (match_dup 0)))]
+; "REGNO(operands[0]) == REGNO(operands[2])"
+; "icm\\t%0,12,%1\;srl\\t%0,16")
+
+; HImode memory load followed by sign extension into the same
+; register: a single LOAD HALFWORD (lh) does both.
+(define_peephole
+ [(set (match_operand:HI 0 "register_operand" "d")
+ (match_operand:HI 1 "memory_operand" "m"))
+ (set (match_operand:SI 2 "register_operand" "0" )
+ (sign_extend:SI (match_dup 0)))]
+ "REGNO(operands[0]) == REGNO(operands[2])"
+ "lh\\t%0,%1")
+
+; HImode immediate load followed by sign extension into the same
+; register: a single LOAD HALFWORD IMMEDIATE does both.
+(define_peephole
+ [(set (match_operand:HI 0 "register_operand" "d")
+ (match_operand:HI 1 "immediate_operand" "K"))
+ (set (match_operand:SI 2 "register_operand" "0" )
+ (sign_extend:SI (match_dup 0) ) )]
+ "REGNO(operands[0]) == REGNO(operands[2])"
+ "lhi\\t%0,%h1")
+
+;
+; peepholes for divide instructions
+;
+
+; Load / shift-right / 32-bit divide / store-back sequence: emit it as
+; l, srdl, d(r) directly.  The quotient ends up in the odd register of
+; the pair, so the final store uses %N0/%N1 (the second word).
+(define_peephole
+ [(set (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "memory_operand" "m"))
+ (set (match_dup 0)
+ (lshiftrt:DI (match_dup 0)
+ (match_operand:SI 2 "immediate_operand" "J")))
+ (set (match_dup 0)
+ (div:SI (match_dup 0)
+ (match_operand:SI 3 "nonimmediate_operand" "g")))
+ (set (match_dup 1)
+ (match_dup 0))]
+ ""
+ "*
+{
+  output_asm_insn (\"l\\t%0,%1\", operands);
+  output_asm_insn (\"srdl\\t%0,%b2\", operands);
+
+  if (REG_P (operands[3]))
+    output_asm_insn (\"dr\\t%0,%3\", operands);
+  else
+    output_asm_insn (\"d\\t%0,%3\", operands);
+
+  return \"st\\t%N0,%N1\";
+}")
+
+; Same sequence for modulo: the remainder ends up in the even register
+; of the pair, so the final store uses %0/%1 (the first word).
+(define_peephole
+ [(set (match_operand:DI 0 "register_operand" "d")
+ (match_operand:DI 1 "memory_operand" "m"))
+ (set (match_dup 0)
+ (lshiftrt:DI (match_dup 0)
+ (match_operand:SI 2 "immediate_operand" "J")))
+ (set (match_dup 0)
+ (mod:SI (match_dup 0)
+ (match_operand:SI 3 "nonimmediate_operand" "g")))
+ (set (match_dup 1)
+ (match_dup 0))]
+ ""
+ "*
+{
+  output_asm_insn (\"l\\t%0,%1\", operands);
+  output_asm_insn (\"srdl\\t%0,%b2\", operands);
+
+  if (REG_P (operands[3]))
+    output_asm_insn (\"dr\\t%0,%3\", operands);
+  else
+    output_asm_insn (\"d\\t%0,%3\", operands);
+
+  return \"st\\t%0,%1\";
+}")
+