From ca695ac93dca6da6f9bcb2916cd3798f016084b2 Mon Sep 17 00:00:00 2001 From: Jan Brittenson Date: Tue, 21 Sep 1993 14:25:24 -0700 Subject: [PATCH] bytecode From-SVN: r5379 --- gcc/Makefile.in | 83 +- gcc/c-pragma.c | 36 +- gcc/emit-rtl.c | 41 +- gcc/expr.c | 5926 +++++++++++++++++++++++++++++------------------ gcc/integrate.c | 10 +- gcc/regclass.c | 8 + gcc/rtl.h | 17 + gcc/stmt.c | 930 +++++++- gcc/toplev.c | 153 +- gcc/varasm.c | 554 ++++- 10 files changed, 5248 insertions(+), 2510 deletions(-) diff --git a/gcc/Makefile.in b/gcc/Makefile.in index e9993c97fc1..2c044eedadf 100644 --- a/gcc/Makefile.in +++ b/gcc/Makefile.in @@ -399,6 +399,9 @@ CPLUS_OBJS = cp-parse.o cp-decl.o cp-decl2.o \ cp-expr.o cp-pt.o cp-edsel.o cp-xref.o \ $(CPLUS_INPUT) cp-spew.o c-common.o +# Files specific to the C interpreter bytecode compiler(s). +BC_OBJS = bc-emit.o bc-optab.o + # Language-independent object files. OBJS = toplev.o version.o tree.o print-tree.o stor-layout.o fold-const.o \ function.o stmt.o expr.o calls.o expmed.o explow.o optabs.o varasm.o \ @@ -461,6 +464,7 @@ CONFIG_H = RTL_H = rtl.h rtl.def machmode.h machmode.def TREE_H = tree.h real.h tree.def machmode.h machmode.def CPLUS_TREE_H = $(TREE_H) cp-tree.h cp-tree.def +BYTECODE_H = bytecode.h bc-emit.h bc-optab.h # Avoid a lot of time thinking about remaking Makefile.in and *.def. .SUFFIXES: .in .def @@ -484,7 +488,7 @@ for-bootstrap: start.encap $(LIBGCC) rest.encap: $(LIBGCC) stmp-headers $(STMP_FIXPROTO) $(EXTRA_PARTS) # This is what is made with the host's compiler # whether making a cross compiler or not. -native: config.status cpp $(LANGUAGES) $(EXTRA_PASSES) $(EXTRA_PROGRAMS) $(USE_COLLECT2) +native: bytecode config.status cpp $(LANGUAGES) $(EXTRA_PASSES) $(EXTRA_PROGRAMS) $(USE_COLLECT2) # Define the names for selecting languages in LANGUAGES. 
C c: cc1 @@ -545,14 +549,14 @@ g++-cross: $(srcdir)/g++.c $(CC) $(ALL_CFLAGS) $(INCLUDES) $(LDFLAGS) -o g++-cross \ -DGCC_NAME=\"$(target)-gcc\" $(srcdir)/g++.c version.o $(LIBS) -cc1:$(P) $(C_OBJS) $(OBJS) $(LIBDEPS) - $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1 $(C_OBJS) $(OBJS) $(LIBS) +cc1:$(P) $(C_OBJS) $(OBJS) $(BC_OBJS) $(LIBDEPS) + $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1 $(C_OBJS) $(OBJS) $(BC_OBJS) $(LIBS) -cc1plus:$(P) $(CPLUS_OBJS) $(OBJS) $(LIBDEPS) - $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1plus $(CPLUS_OBJS) $(OBJS) $(LIBS) +cc1plus:$(P) $(CPLUS_OBJS) $(OBJS) $(BC_OBJS) $(LIBDEPS) + $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1plus $(CPLUS_OBJS) $(BC_OBJS) $(OBJS) $(LIBS) -cc1obj:$(P) $(OBJC_OBJS) $(OBJS) $(LIBDEPS) - $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1obj $(OBJC_OBJS) $(OBJS) $(LIBS) +cc1obj:$(P) $(OBJC_OBJS) $(OBJS) $(BC_OBJS) $(LIBDEPS) + $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o cc1obj $(OBJC_OBJS) $(OBJS) $(BC_OBJS) $(LIBS) # Copy float.h from its source. gfloat.h: $(FLOAT_H) @@ -1247,6 +1251,63 @@ $(HOST_PREFIX_1)malloc.o: malloc.c $(HOST_PREFIX_1): touch $(HOST_PREFIX_1) +# Remake bytecode files. 
+# BI_ALL=bi-run.o +BI_ALL= +BC_ALL=bc-opname.h bc-opcode.h bc-arity.h +BI_OBJ=bi-parser.o bi-lexer.o bi-reverse.o + + +bc-emit.o : bc-emit.c $(CONFIG_H) $(BYTECODE_H) +bc-optab.o : bc-optab.c bc-typecd.def $(CONFIG_H) $(BYTECODE_H) + + +bytecode: $(BI_ALL) $(BC_ALL) + +bi-arity: bi-arity.o +bi-opcode: bi-opcode.o +bi-opname: bi-opname.o +bi-unparse: bi-unparse.o +bi-lexer: bi-lexer.o + +bi-arity bi-opcode bi-opname bi-unparse bi-lexer: $(BI_OBJ) + $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $^ $(LEXLIB) + +bi-run.o: $(srcdir)/bi-run.c $(srcdir)/bi-run.h $(srcdir)/bc-typecd.h bc-opname.h bc-arity.h bc-opcode.h + $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) -c $< + +bi-parser.c: $(srcdir)/bi-parser.y $(srcdir)/bi-parser.h + +bi-parser.o: $(srcdir)/bi-parser.c $(srcdir)/bi-defs.h + $(CC) $(CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) -c $< + +bi-lexer.c: $(srcdir)/bi-lexer.l $(srcdir)/bi-parser.h + +bi-lexer.o: bi-lexer.c bi-parser.h + $(CC) $(CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) -c $< + +bc-arity.h: $(srcdir)/bytecode.def bi-arity + -rm -f $@ + bi-arity <$< >$@ + +bc-opcode.h: $(srcdir)/bytecode.def bi-opcode + -rm -f $@ + bi-opcode <$< >$@ + +bc-opname.h: $(srcdir)/bytecode.def bi-opname + -rm -f $@ + bi-opname <$< >$@ + +bytecode.mostlyclean: + -rm -f bc-arity.h bc-opcode.h bc-opname.h + +bytecode.distclean bytecode.clean: bytecode.mostlyclean + -rm -f bi-arity bi-opcode bi-opname bi-unparse bi-lexer + +bytecode.realclean: bytecode.clean + -rm -f bi-parser.c bi-lexer.c bi-parser.h + + # Remake cpp and protoize. # Making the preprocessor @@ -1507,7 +1568,7 @@ $(srcdir)/INSTALL: install1.texi install.texi # `realclean' also deletes everything that could be regenerated automatically. -mostlyclean: +mostlyclean: bytecode.mostlyclean -rm -f $(STAGESTUFF) # Clean the objc subdir if we created one. if [ -d objc ]; then \ @@ -1545,7 +1606,7 @@ mostlyclean: # Delete all files made by compilation # that don't exist in the distribution. 
-clean: mostlyclean +clean: mostlyclean bytecode.clean # It may not be quite desirable to delete unprotoize.c here, # but the spec for `make clean' requires it. # Using unprotoize.c is not quite right in the first place, @@ -1557,7 +1618,7 @@ clean: mostlyclean # Delete all files that users would normally create # while building and installing GCC. -distclean: clean +distclean: clean bytecode.distclean -rm -f tm.h aux-output.c config.h md config.status tconfig.h hconfig.h -rm -f Makefile *.oaux -rm -fr stage1 stage2 stage3 stage4 @@ -1581,7 +1642,7 @@ extraclean: distclean # Get rid of every file that's generated from some other file. # Most of these files ARE PRESENT in the GCC distribution. -realclean: distclean +realclean: distclean bytecode.realclean -rm -f c-parse.y objc-parse.y -rm -f cp-parse.c cp-parse.h cp-parse.output -rm -f objc-parse.c objc-parse.output diff --git a/gcc/c-pragma.c b/gcc/c-pragma.c index 9bb92661d2b..002c0946780 100644 --- a/gcc/c-pragma.c +++ b/gcc/c-pragma.c @@ -20,6 +20,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. 
*/ #include #include "config.h" #include "tree.h" +#include "function.h" #ifdef HANDLE_SYSV_PRAGMA @@ -45,20 +46,7 @@ handle_pragma_token (string, token) char *string; tree token; { - static enum pragma_state - { - ps_start, - ps_done, - ps_bad, - ps_weak, - ps_name, - ps_equals, - ps_value, - ps_pack, - ps_left, - ps_align, - ps_right - } state = ps_start, type; + static enum pragma_state state = ps_start, type; static char *name; static char *value; static int align; @@ -76,24 +64,8 @@ handle_pragma_token (string, token) { #ifdef HANDLE_PRAGMA_WEAK if (HANDLE_PRAGMA_WEAK) - { - if (state == ps_name || state == ps_value) - { - fprintf (asm_out_file, "\t%s\t", WEAK_ASM_OP); - ASM_OUTPUT_LABELREF (asm_out_file, name); - fputc ('\n', asm_out_file); - if (state == ps_value) - { - fprintf (asm_out_file, "\t%s\t", SET_ASM_OP); - ASM_OUTPUT_LABELREF (asm_out_file, name); - fputc (',', asm_out_file); - ASM_OUTPUT_LABELREF (asm_out_file, value); - fputc ('\n', asm_out_file); - } - } - else if (! (state == ps_done || state == ps_start)) - warning ("malformed `#pragma weak'"); - } + handle_pragma_weak (state, asm_out_file, name, value); + #endif /* HANDLE_PRAMA_WEAK */ } diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c index 04b9069ea58..a927fd0a5ce 100644 --- a/gcc/emit-rtl.c +++ b/gcc/emit-rtl.c @@ -42,8 +42,29 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #include "regs.h" #include "insn-config.h" #include "real.h" +#include "obstack.h" + +#include "bytecode.h" +#include "machmode.h" +#include "bc-opcode.h" +#include "bc-typecd.h" +#include "bc-optab.h" +#include "bc-emit.h" + #include + +/* Opcode names */ +#ifdef BCDEBUG_PRINT_CODE +char *opcode_name[] = +{ +#include "bc-opname.h" + +"***END***" +}; +#endif + + /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function. After rtl generation, it is 1 plus the largest register number used. 
*/ @@ -203,6 +224,11 @@ extern int emit_lineno; rtx change_address (); void init_emit (); +extern struct obstack *rtl_obstack; + +extern int stack_depth; +extern int max_stack_depth; + /* rtx gen_rtx (code, mode, [element1, ..., elementn]) ** ** This routine generates an RTX of the size specified by @@ -1216,8 +1242,12 @@ change_address (memref, mode, addr) rtx gen_label_rtx () { - register rtx label = gen_rtx (CODE_LABEL, VOIDmode, 0, 0, 0, - label_num++, NULL_PTR); + register rtx label; + + label = output_bytecode + ? bc_gen_rtx (0, 0, bc_get_bytecode_label ()) + : gen_rtx (CODE_LABEL, VOIDmode, 0, 0, 0, label_num++, NULL_PTR); + LABEL_NUSES (label) = 0; return label; } @@ -2559,6 +2589,13 @@ emit_line_note (file, line) char *file; int line; { + if (output_bytecode) + { + /* FIXME: for now we do nothing, but eventually we will have to deal with + debugging information. */ + return 0; + } + emit_filename = file; emit_lineno = line; diff --git a/gcc/expr.c b/gcc/expr.c index 33dccf4f0e0..33f1bae588a 100644 --- a/gcc/expr.c +++ b/gcc/expr.c @@ -19,8 +19,10 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #include "config.h" +#include "machmode.h" #include "rtl.h" #include "tree.h" +#include "obstack.h" #include "flags.h" #include "function.h" #include "insn-flags.h" @@ -31,6 +33,13 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. 
*/ #include "output.h" #include "typeclass.h" +#include "bytecode.h" +#include "bc-opcode.h" +#include "bc-typecd.h" +#include "bc-optab.h" +#include "bc-emit.h" + + #define CEIL(x,y) (((x) + (y) - 1) / (y)) /* Decide whether a function's arguments should be processed @@ -137,6 +146,20 @@ static rtx expand_builtin_apply_args PROTO((void)); static rtx expand_builtin_apply PROTO((rtx, rtx, rtx)); static void expand_builtin_return PROTO((rtx)); static rtx expand_increment PROTO((tree, int)); +rtx bc_expand_increment PROTO((struct increment_operator *, tree)); +tree bc_runtime_type_code PROTO((tree)); +rtx bc_allocate_local PROTO((int, int)); +void bc_store_memory PROTO((tree, tree)); +tree bc_expand_component_address PROTO((tree)); +tree bc_expand_address PROTO((tree)); +void bc_expand_constructor PROTO((tree)); +void bc_adjust_stack PROTO((int)); +tree bc_canonicalize_array_ref PROTO((tree)); +void bc_load_memory PROTO((tree, tree)); +void bc_load_externaddr PROTO((rtx)); +void bc_load_externaddr_id PROTO((tree, int)); +void bc_load_localaddr PROTO((rtx)); +void bc_load_parmaddr PROTO((rtx)); static void preexpand_calls PROTO((tree)); static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx)); static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx)); @@ -183,6 +206,32 @@ enum insn_code movstr_optab[NUM_MACHINE_MODES]; #define OUTGOING_REGNO(IN) (IN) #endif +/* Maps used to convert modes to const, load, and store bytecodes. */ +enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE]; +enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE]; +enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE]; + +/* Initialize maps used to convert modes to const, load, and store + bytecodes. 
*/ +void +bc_init_mode_to_opcode_maps () +{ + int mode; + + for (mode = 0; mode < MAX_MACHINE_MODE; mode++) + mode_to_const_map[mode] = + mode_to_load_map[mode] = + mode_to_store_map[mode] = neverneverland; + +#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \ + mode_to_const_map[(enum machine_mode) SYM] = CONST; \ + mode_to_load_map[(enum machine_mode) SYM] = LOAD; \ + mode_to_store_map[(enum machine_mode) SYM] = STORE; + +#include "modemap.def" +#undef DEF_MODEMAP +} + /* This is run once per compilation to set up which modes can be used directly in memory and to initialize the block move optab. */ @@ -2224,6 +2273,22 @@ expand_assignment (to, from, want_value, suggest_reg) return want_value ? result : NULL_RTX; } + if (output_bytecode) + { + tree dest_innermost; + + bc_expand_expr (from); + bc_emit_instruction (dup); + + dest_innermost = bc_expand_address (to); + + /* Can't deduce from TYPE that we're dealing with a bitfield, so + take care of it here. */ + + bc_store_memory (TREE_TYPE (to), dest_innermost); + return NULL; + } + /* Assignment of a structure component needs special treatment if the structure component's rtx is not simply a MEM. Assignment of an array element at a constant index @@ -3428,6 +3493,7 @@ expand_expr (exp, target, tmode, modifier) /* Use subtarget as the target for operand 0 of a binary operation. */ rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0); rtx original_target = target; + /* Maybe defer this until sure not doing bytecode? */ int ignore = (target == const0_rtx || ((code == NON_LVALUE_EXPR || code == NOP_EXPR || code == CONVERT_EXPR || code == REFERENCE_EXPR @@ -3435,6 +3501,13 @@ expand_expr (exp, target, tmode, modifier) && TREE_CODE (type) == VOID_TYPE)); tree context; + + if (output_bytecode) + { + bc_expand_expr (exp); + return NULL; + } + /* Don't use hard regs as subtargets, because the combiner can only handle pseudo regs. 
*/ if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER) @@ -5615,2728 +5688,3963 @@ expand_expr (exp, target, tmode, modifier) abort (); return temp; } - -/* Return the alignment in bits of EXP, a pointer valued expression. - But don't return more than MAX_ALIGN no matter what. - The alignment returned is, by default, the alignment of the thing that - EXP points to (if it is not a POINTER_TYPE, 0 is returned). - Otherwise, look at the expression to see if we can do better, i.e., if the - expression is actually pointing at an object whose alignment is tighter. */ -static int -get_pointer_alignment (exp, max_align) - tree exp; - unsigned max_align; +/* Emit bytecode to evaluate the given expression EXP to the stack. */ +void +bc_expand_expr (exp) + tree exp; { - unsigned align, inner; - - if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) - return 0; - - align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); - align = MIN (align, max_align); - - while (1) + enum tree_code code; + tree type, arg0; + rtx r; + struct binary_operator *binoptab; + struct unary_operator *unoptab; + struct increment_operator *incroptab; + struct bc_label *lab, *lab1; + enum bytecode_opcode opcode; + + + code = TREE_CODE (exp); + + switch (code) { - switch (TREE_CODE (exp)) + case PARM_DECL: + + if (DECL_RTL (exp) == 0) { - case NOP_EXPR: - case CONVERT_EXPR: - case NON_LVALUE_EXPR: - exp = TREE_OPERAND (exp, 0); - if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) - return align; - inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); - inner = MIN (inner, max_align); - align = MAX (align, inner); - break; - - case PLUS_EXPR: - /* If sum of pointer + int, restrict our maximum alignment to that - imposed by the integer. If not, we can't do any better than - ALIGN. 
*/ - if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST) - return align; - - while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) - & (max_align - 1)) - != 0) - max_align >>= 1; - - exp = TREE_OPERAND (exp, 0); - break; - - case ADDR_EXPR: - /* See what we are pointing at and look at its alignment. */ - exp = TREE_OPERAND (exp, 0); - if (TREE_CODE (exp) == FUNCTION_DECL) - align = MAX (align, FUNCTION_BOUNDARY); - else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd') - align = MAX (align, DECL_ALIGN (exp)); -#ifdef CONSTANT_ALIGNMENT - else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') - align = CONSTANT_ALIGNMENT (exp, align); -#endif - return MIN (align, max_align); - - default: - return align; + error_with_decl (exp, "prior parameter's size depends on `%s'"); + return; } - } -} - -/* Return the tree node and offset if a given argument corresponds to - a string constant. */ - -static tree -string_constant (arg, ptr_offset) - tree arg; - tree *ptr_offset; -{ - STRIP_NOPS (arg); - - if (TREE_CODE (arg) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) - { - *ptr_offset = integer_zero_node; - return TREE_OPERAND (arg, 0); - } - else if (TREE_CODE (arg) == PLUS_EXPR) - { - tree arg0 = TREE_OPERAND (arg, 0); - tree arg1 = TREE_OPERAND (arg, 1); - - STRIP_NOPS (arg0); - STRIP_NOPS (arg1); - - if (TREE_CODE (arg0) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) + + bc_load_parmaddr (DECL_RTL (exp)); + bc_load_memory (TREE_TYPE (exp), exp); + + return; + + case VAR_DECL: + + if (DECL_RTL (exp) == 0) + abort (); + +#if 0 + if (DECL_RTL (exp)->label) + bc_load_externaddr (DECL_RTL (exp)); + else + bc_load_localaddr (DECL_RTL (exp)); +#endif + if (TREE_PUBLIC (exp)) + bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp), DECL_RTL (exp)->offset); + else + bc_load_localaddr (DECL_RTL (exp)); + + bc_load_memory (TREE_TYPE (exp), exp); + return; + + case INTEGER_CST: + +#ifdef DEBUG_PRINT_CODE + fprintf (stderr, " [%x]\n", 
TREE_INT_CST_LOW (exp)); +#endif + bc_emit_instruction (mode_to_const_map[DECL_BIT_FIELD (exp) + ? SImode + : TYPE_MODE (TREE_TYPE (exp))], + (HOST_WIDE_INT) TREE_INT_CST_LOW (exp)); + return; + + case REAL_CST: + +#ifdef DEBUG_PRINT_CODE + fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp)); +#endif + bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))], + (double) TREE_REAL_CST (exp)); + return; + + case CALL_EXPR: + + /* We build a call description vector describing the type of + the return value and of the arguments; this call vector, + together with a pointer to a location for the return value + and the base of the argument list, is passed to the low + level machine dependent call subroutine, which is responsible + for putting the arguments wherever real functions expect + them, as well as getting the return value back. */ + { + tree calldesc = 0, arg; + int nargs = 0, i; + rtx retval; + + /* Push the evaluated args on the evaluation stack in reverse + order. Also make an entry for each arg in the calldesc + vector while we're at it. */ + + TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1)); + + for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg)) + { + ++nargs; + bc_expand_expr (TREE_VALUE (arg)); + + calldesc = tree_cons ((tree) 0, + size_in_bytes (TREE_TYPE (TREE_VALUE (arg))), + calldesc); + calldesc = tree_cons ((tree) 0, + bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))), + calldesc); + } + + TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1)); + + /* Allocate a location for the return value and push its + address on the evaluation stack. Also make an entry + at the front of the calldesc for the return value type. 
*/ + + type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))); + retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type)); + bc_load_localaddr (retval); + + calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc); + calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc); + + /* Prepend the argument count. */ + calldesc = tree_cons ((tree) 0, + build_int_2 (nargs, 0), + calldesc); + + /* Push the address of the call description vector on the stack. */ + calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc); + TREE_TYPE (calldesc) = build_array_type (integer_type_node, + build_index_type (build_int_2 (nargs * 2, 0))); + r = output_constant_def (calldesc); + bc_load_externaddr (r); + + /* Push the address of the function to be called. */ + bc_expand_expr (TREE_OPERAND (exp, 0)); + + /* Call the function, popping its address and the calldesc vector + address off the evaluation stack in the process. */ + bc_emit_instruction (call); + + /* Pop the arguments off the stack. */ + bc_adjust_stack (nargs); + + /* Load the return value onto the stack. 
*/ + bc_load_localaddr (retval); + bc_load_memory (type, TREE_OPERAND (exp, 0)); + } + return; + + case SAVE_EXPR: + + if (!SAVE_EXPR_RTL (exp)) { - *ptr_offset = arg1; - return TREE_OPERAND (arg0, 0); + /* First time around: copy to local variable */ + SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)), + TYPE_ALIGN (TREE_TYPE(exp))); + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_emit_instruction (dup); + + bc_load_localaddr (SAVE_EXPR_RTL (exp)); + bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0)); } - else if (TREE_CODE (arg1) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) + else { - *ptr_offset = arg0; - return TREE_OPERAND (arg1, 0); + /* Consecutive reference: use saved copy */ + bc_load_localaddr (SAVE_EXPR_RTL (exp)); + bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0)); } - } - - return 0; -} - -/* Compute the length of a C string. TREE_STRING_LENGTH is not the right - way, because it could contain a zero byte in the middle. - TREE_STRING_LENGTH is the size of the character array, not the string. - - Unfortunately, string_constant can't access the values of const char - arrays with initializers, so neither can we do so here. */ - -static tree -c_strlen (src) - tree src; -{ - tree offset_node; - int offset, max; - char *ptr; - - src = string_constant (src, &offset_node); - if (src == 0) - return 0; - max = TREE_STRING_LENGTH (src); - ptr = TREE_STRING_POINTER (src); - if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) - { - /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't - compute the offset to the following null if we don't know where to - start searching for it. */ - int i; - for (i = 0; i < max; i++) - if (ptr[i] == 0) - return 0; - /* We don't know the starting offset, but we do know that the string - has no internal zero bytes. We can assume that the offset falls - within the bounds of the string; otherwise, the programmer deserves - what he gets. 
Subtract the offset from the length of the string, - and return that. */ - /* This would perhaps not be valid if we were dealing with named - arrays in addition to literal string constants. */ - return size_binop (MINUS_EXPR, size_int (max), offset_node); - } - - /* We have a known offset into the string. Start searching there for - a null character. */ - if (offset_node == 0) - offset = 0; - else - { - /* Did we get a long long offset? If so, punt. */ - if (TREE_INT_CST_HIGH (offset_node) != 0) - return 0; - offset = TREE_INT_CST_LOW (offset_node); - } - /* If the offset is known to be out of bounds, warn, and call strlen at - runtime. */ - if (offset < 0 || offset > max) - { - warning ("offset outside bounds of constant string"); - return 0; - } - /* Use strlen to search for the first zero byte. Since any strings - constructed with build_string will have nulls appended, we win even - if we get handed something like (char[4])"abcd". - - Since OFFSET is our starting index into the string, no further - calculation is needed. */ - return size_int (strlen (ptr + offset)); -} - -/* Expand an expression EXP that calls a built-in function, - with result going to TARGET if that's convenient - (and in mode MODE if that's convenient). - SUBTARGET may be used as the target for computing one of EXP's operands. - IGNORE is nonzero if the value is to be ignored. */ + return; + +#if 0 + /* FIXME: the XXXX_STMT codes have been removed in GCC2, but + how are they handled instead? 
*/ + case LET_STMT: + + TREE_USED (exp) = 1; + bc_expand_expr (STMT_BODY (exp)); + return; +#endif + + case NOP_EXPR: + case CONVERT_EXPR: + + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp)); + return; + + case MODIFY_EXPR: + + expand_assignment (TREE_TYPE (exp), TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1)); + return; + + case ADDR_EXPR: + + bc_expand_address (TREE_OPERAND (exp, 0)); + return; + + case INDIRECT_REF: + + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0)); + return; + + case ARRAY_REF: + + bc_expand_expr (bc_canonicalize_array_ref (exp)); + return; + + case COMPONENT_REF: + + bc_expand_component_address (exp); + + /* If we have a bitfield, generate a proper load */ + bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1)); + return; + + case COMPOUND_EXPR: + + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_emit_instruction (drop); + bc_expand_expr (TREE_OPERAND (exp, 1)); + return; + + case COND_EXPR: + + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0))); + lab = bc_get_bytecode_label (); + bc_emit_bytecode (jumpifnot); + bc_emit_bytecode_labelref (lab); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + bc_expand_expr (TREE_OPERAND (exp, 1)); + lab1 = bc_get_bytecode_label (); + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (lab1); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + bc_emit_bytecode_labeldef (lab); + bc_expand_expr (TREE_OPERAND (exp, 2)); + bc_emit_bytecode_labeldef (lab1); + return; + + case TRUTH_ANDIF_EXPR: + + opcode = jumpifnot; + goto andorif; + + case TRUTH_ORIF_EXPR: + + opcode = jumpif; + goto andorif; + + case PLUS_EXPR: + + binoptab = optab_plus_expr; + goto binop; + + case MINUS_EXPR: + + binoptab = optab_minus_expr; + goto binop; + + case MULT_EXPR: + + binoptab = optab_mult_expr; + goto binop; + + case 
TRUNC_DIV_EXPR: + case FLOOR_DIV_EXPR: + case CEIL_DIV_EXPR: + case ROUND_DIV_EXPR: + case EXACT_DIV_EXPR: + + binoptab = optab_trunc_div_expr; + goto binop; + + case TRUNC_MOD_EXPR: + case FLOOR_MOD_EXPR: + case CEIL_MOD_EXPR: + case ROUND_MOD_EXPR: + + binoptab = optab_trunc_mod_expr; + goto binop; + + case FIX_ROUND_EXPR: + case FIX_FLOOR_EXPR: + case FIX_CEIL_EXPR: + abort (); /* Not used for C. */ + + case FIX_TRUNC_EXPR: + case FLOAT_EXPR: + case MAX_EXPR: + case MIN_EXPR: + case FFS_EXPR: + case LROTATE_EXPR: + case RROTATE_EXPR: + abort (); /* FIXME */ + + case RDIV_EXPR: + + binoptab = optab_rdiv_expr; + goto binop; + + case BIT_AND_EXPR: + + binoptab = optab_bit_and_expr; + goto binop; + + case BIT_IOR_EXPR: + + binoptab = optab_bit_ior_expr; + goto binop; + + case BIT_XOR_EXPR: + + binoptab = optab_bit_xor_expr; + goto binop; + + case LSHIFT_EXPR: + + binoptab = optab_lshift_expr; + goto binop; + + case RSHIFT_EXPR: + + binoptab = optab_rshift_expr; + goto binop; + + case TRUTH_AND_EXPR: + + binoptab = optab_truth_and_expr; + goto binop; + + case TRUTH_OR_EXPR: + + binoptab = optab_truth_or_expr; + goto binop; + + case LT_EXPR: + + binoptab = optab_lt_expr; + goto binop; + + case LE_EXPR: + + binoptab = optab_le_expr; + goto binop; + + case GE_EXPR: + + binoptab = optab_ge_expr; + goto binop; + + case GT_EXPR: + + binoptab = optab_gt_expr; + goto binop; + + case EQ_EXPR: + + binoptab = optab_eq_expr; + goto binop; + + case NE_EXPR: + + binoptab = optab_ne_expr; + goto binop; + + case NEGATE_EXPR: + + unoptab = optab_negate_expr; + goto unop; + + case BIT_NOT_EXPR: + + unoptab = optab_bit_not_expr; + goto unop; + + case TRUTH_NOT_EXPR: + + unoptab = optab_truth_not_expr; + goto unop; + + case PREDECREMENT_EXPR: + + incroptab = optab_predecrement_expr; + goto increment; + + case PREINCREMENT_EXPR: + + incroptab = optab_preincrement_expr; + goto increment; + + case POSTDECREMENT_EXPR: + + incroptab = optab_postdecrement_expr; + goto increment; + + case 
POSTINCREMENT_EXPR: + + incroptab = optab_postincrement_expr; + goto increment; + + case CONSTRUCTOR: + + bc_expand_constructor (exp); + return; + + case ERROR_MARK: + case RTL_EXPR: + + return; + + case BIND_EXPR: + { + tree vars = TREE_OPERAND (exp, 0); + int vars_need_expansion = 0; + + /* Need to open a binding contour here because + if there are any cleanups they most be contained here. */ + expand_start_bindings (0); + + /* Mark the corresponding BLOCK for output. */ + if (TREE_OPERAND (exp, 2) != 0) + TREE_USED (TREE_OPERAND (exp, 2)) = 1; + + /* If VARS have not yet been expanded, expand them now. */ + while (vars) + { + if (DECL_RTL (vars) == 0) + { + vars_need_expansion = 1; + bc_expand_decl (vars, 0); + } + bc_expand_decl_init (vars); + vars = TREE_CHAIN (vars); + } + + bc_expand_expr (TREE_OPERAND (exp, 1)); + + expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0); + + return; + } + } + + abort (); + + binop: + + bc_expand_binary_operation (binoptab, TREE_TYPE (exp), + TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1)); + return; + + + unop: + + bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0)); + return; + + + andorif: + + bc_expand_expr (TREE_OPERAND (exp, 0)); + bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0))); + lab = bc_get_bytecode_label (); + + bc_emit_instruction (dup); + bc_emit_bytecode (opcode); + bc_emit_bytecode_labelref (lab); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + bc_emit_instruction (drop); + + bc_expand_expr (TREE_OPERAND (exp, 1)); + bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1))); + bc_emit_bytecode_labeldef (lab); + return; + + + increment: + + type = TREE_TYPE (TREE_OPERAND (exp, 0)); + + /* Push the quantum. */ + bc_expand_expr (TREE_OPERAND (exp, 1)); + + /* Convert it to the lvalue's type. 
*/ + bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type); + + /* Push the address of the lvalue */ + expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0))); + + /* Perform actual increment */ + expand_increment (incroptab, type); + return; +} + +/* Return the alignment in bits of EXP, a pointer valued expression. + But don't return more than MAX_ALIGN no matter what. + The alignment returned is, by default, the alignment of the thing that + EXP points to (if it is not a POINTER_TYPE, 0 is returned). + + Otherwise, look at the expression to see if we can do better, i.e., if the + expression is actually pointing at an object whose alignment is tighter. */ + +static int +get_pointer_alignment (exp, max_align) + tree exp; + unsigned max_align; +{ + unsigned align, inner; + + if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) + return 0; + + align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); + align = MIN (align, max_align); + + while (1) + { + switch (TREE_CODE (exp)) + { + case NOP_EXPR: + case CONVERT_EXPR: + case NON_LVALUE_EXPR: + exp = TREE_OPERAND (exp, 0); + if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) + return align; + inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); + inner = MIN (inner, max_align); + align = MAX (align, inner); + break; + + case PLUS_EXPR: + /* If sum of pointer + int, restrict our maximum alignment to that + imposed by the integer. If not, we can't do any better than + ALIGN. */ + if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST) + return align; + + while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) + & (max_align - 1)) + != 0) + max_align >>= 1; + + exp = TREE_OPERAND (exp, 0); + break; + + case ADDR_EXPR: + /* See what we are pointing at and look at its alignment. 
*/ + exp = TREE_OPERAND (exp, 0); + if (TREE_CODE (exp) == FUNCTION_DECL) + align = MAX (align, FUNCTION_BOUNDARY); + else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd') + align = MAX (align, DECL_ALIGN (exp)); +#ifdef CONSTANT_ALIGNMENT + else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') + align = CONSTANT_ALIGNMENT (exp, align); +#endif + return MIN (align, max_align); + + default: + return align; + } + } +} + +/* Return the tree node and offset if a given argument corresponds to + a string constant. */ + +static tree +string_constant (arg, ptr_offset) + tree arg; + tree *ptr_offset; +{ + STRIP_NOPS (arg); + + if (TREE_CODE (arg) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) + { + *ptr_offset = integer_zero_node; + return TREE_OPERAND (arg, 0); + } + else if (TREE_CODE (arg) == PLUS_EXPR) + { + tree arg0 = TREE_OPERAND (arg, 0); + tree arg1 = TREE_OPERAND (arg, 1); + + STRIP_NOPS (arg0); + STRIP_NOPS (arg1); + + if (TREE_CODE (arg0) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) + { + *ptr_offset = arg1; + return TREE_OPERAND (arg0, 0); + } + else if (TREE_CODE (arg1) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) + { + *ptr_offset = arg0; + return TREE_OPERAND (arg1, 0); + } + } + + return 0; +} + +/* Compute the length of a C string. TREE_STRING_LENGTH is not the right + way, because it could contain a zero byte in the middle. + TREE_STRING_LENGTH is the size of the character array, not the string. + + Unfortunately, string_constant can't access the values of const char + arrays with initializers, so neither can we do so here. 
*/ + +static tree +c_strlen (src) + tree src; +{ + tree offset_node; + int offset, max; + char *ptr; + + src = string_constant (src, &offset_node); + if (src == 0) + return 0; + max = TREE_STRING_LENGTH (src); + ptr = TREE_STRING_POINTER (src); + if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) + { + /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't + compute the offset to the following null if we don't know where to + start searching for it. */ + int i; + for (i = 0; i < max; i++) + if (ptr[i] == 0) + return 0; + /* We don't know the starting offset, but we do know that the string + has no internal zero bytes. We can assume that the offset falls + within the bounds of the string; otherwise, the programmer deserves + what he gets. Subtract the offset from the length of the string, + and return that. */ + /* This would perhaps not be valid if we were dealing with named + arrays in addition to literal string constants. */ + return size_binop (MINUS_EXPR, size_int (max), offset_node); + } + + /* We have a known offset into the string. Start searching there for + a null character. */ + if (offset_node == 0) + offset = 0; + else + { + /* Did we get a long long offset? If so, punt. */ + if (TREE_INT_CST_HIGH (offset_node) != 0) + return 0; + offset = TREE_INT_CST_LOW (offset_node); + } + /* If the offset is known to be out of bounds, warn, and call strlen at + runtime. */ + if (offset < 0 || offset > max) + { + warning ("offset outside bounds of constant string"); + return 0; + } + /* Use strlen to search for the first zero byte. Since any strings + constructed with build_string will have nulls appended, we win even + if we get handed something like (char[4])"abcd". + + Since OFFSET is our starting index into the string, no further + calculation is needed. 
*/ + return size_int (strlen (ptr + offset)); +} + +/* Expand an expression EXP that calls a built-in function, + with result going to TARGET if that's convenient + (and in mode MODE if that's convenient). + SUBTARGET may be used as the target for computing one of EXP's operands. + IGNORE is nonzero if the value is to be ignored. */ + +static rtx +expand_builtin (exp, target, subtarget, mode, ignore) + tree exp; + rtx target; + rtx subtarget; + enum machine_mode mode; + int ignore; +{ + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); + tree arglist = TREE_OPERAND (exp, 1); + rtx op0; + rtx lab1, insns; + enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); + optab builtin_optab; + + switch (DECL_FUNCTION_CODE (fndecl)) + { + case BUILT_IN_ABS: + case BUILT_IN_LABS: + case BUILT_IN_FABS: + /* build_function_call changes these into ABS_EXPR. */ + abort (); + + case BUILT_IN_SIN: + case BUILT_IN_COS: + case BUILT_IN_FSQRT: + /* If not optimizing, call the library function. */ + if (! optimize) + break; + + if (arglist == 0 + /* Arg could be wrong type if user redeclared this fcn wrong. */ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE) + return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp))); + + /* Stabilize and compute the argument. */ + if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL + && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL) + { + exp = copy_node (exp); + arglist = copy_node (arglist); + TREE_OPERAND (exp, 1) = arglist; + TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist)); + } + op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); + + /* Make a suitable register to place result in. 
*/ + target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); + + emit_queue (); + start_sequence (); + + switch (DECL_FUNCTION_CODE (fndecl)) + { + case BUILT_IN_SIN: + builtin_optab = sin_optab; break; + case BUILT_IN_COS: + builtin_optab = cos_optab; break; + case BUILT_IN_FSQRT: + builtin_optab = sqrt_optab; break; + default: + abort (); + } + + /* Compute into TARGET. + Set TARGET to wherever the result comes back. */ + target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), + builtin_optab, op0, target, 0); + + /* If we were unable to expand via the builtin, stop the + sequence (without outputting the insns) and break, causing + a call the the library function. */ + if (target == 0) + { + end_sequence (); + break; + } + + /* Check the results by default. But if flag_fast_math is turned on, + then assume sqrt will always be called with valid arguments. */ + + if (! flag_fast_math) + { + /* Don't define the builtin FP instructions + if your machine is not IEEE. */ + if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT) + abort (); + + lab1 = gen_label_rtx (); + + /* Test the result; if it is NaN, set errno=EDOM because + the argument was not in the domain. */ + emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0); + emit_jump_insn (gen_beq (lab1)); + +#if TARGET_EDOM + { +#ifdef GEN_ERRNO_RTX + rtx errno_rtx = GEN_ERRNO_RTX; +#else + rtx errno_rtx + = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno")); +#endif + + emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM)); + } +#else + /* We can't set errno=EDOM directly; let the library call do it. + Pop the arguments right away in case the call gets deleted. */ + NO_DEFER_POP; + expand_call (exp, target, 0); + OK_DEFER_POP; +#endif + + emit_label (lab1); + } + + /* Output the entire sequence. 
*/ + insns = get_insns (); + end_sequence (); + emit_insns (insns); + + return target; + + /* __builtin_apply_args returns block of memory allocated on + the stack into which is stored the arg pointer, structure + value address, static chain, and all the registers that might + possibly be used in performing a function call. The code is + moved to the start of the function so the incoming values are + saved. */ + case BUILT_IN_APPLY_ARGS: + /* Don't do __builtin_apply_args more than once in a function. + Save the result of the first call and reuse it. */ + if (apply_args_value != 0) + return apply_args_value; + { + /* When this function is called, it means that registers must be + saved on entry to this function. So we migrate the + call to the first insn of this function. */ + rtx temp; + rtx seq; + + start_sequence (); + temp = expand_builtin_apply_args (); + seq = get_insns (); + end_sequence (); + + apply_args_value = temp; + + /* Put the sequence after the NOTE that starts the function. + If this is inside a SEQUENCE, make the outer-level insn + chain current, so the code is placed at the start of the + function. */ + push_topmost_sequence (); + emit_insns_before (seq, NEXT_INSN (get_insns ())); + pop_topmost_sequence (); + return temp; + } + + /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes + FUNCTION with a copy of the parameters described by + ARGUMENTS, and ARGSIZE. It returns a block of memory + allocated on the stack into which is stored all the registers + that might possibly be used for returning the result of a + function. ARGUMENTS is the value returned by + __builtin_apply_args. ARGSIZE is the number of bytes of + arguments that must be copied. ??? How should this value be + computed? We'll also need a safe worst case value for varargs + functions. */ + case BUILT_IN_APPLY: + if (arglist == 0 + /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE + || TREE_CHAIN (arglist) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE + || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) + return const0_rtx; + else + { + int i; + tree t; + rtx ops[3]; + + for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) + ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); + + return expand_builtin_apply (ops[0], ops[1], ops[2]); + } + + /* __builtin_return (RESULT) causes the function to return the + value described by RESULT. RESULT is address of the block of + memory returned by __builtin_apply. */ + case BUILT_IN_RETURN: + if (arglist + /* Arg could be non-pointer if user redeclared this fcn wrong. */ + && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) + expand_builtin_return (expand_expr (TREE_VALUE (arglist), + NULL_RTX, VOIDmode, 0)); + return const0_rtx; + + case BUILT_IN_SAVEREGS: + /* Don't do __builtin_saveregs more than once in a function. + Save the result of the first call and reuse it. */ + if (saveregs_value != 0) + return saveregs_value; + { + /* When this function is called, it means that registers must be + saved on entry to this function. So we migrate the + call to the first insn of this function. */ + rtx temp; + rtx seq; + rtx valreg, saved_valreg; + + /* Now really call the function. `expand_call' does not call + expand_builtin, so there is no danger of infinite recursion here. */ + start_sequence (); + +#ifdef EXPAND_BUILTIN_SAVEREGS + /* Do whatever the machine needs done in this case. */ + temp = EXPAND_BUILTIN_SAVEREGS (arglist); +#else + /* The register where the function returns its value + is likely to have something else in it, such as an argument. + So preserve that register around the call. 
*/ + if (value_mode != VOIDmode) + { + valreg = hard_libcall_value (value_mode); + saved_valreg = gen_reg_rtx (value_mode); + emit_move_insn (saved_valreg, valreg); + } + + /* Generate the call, putting the value in a pseudo. */ + temp = expand_call (exp, target, ignore); + + if (value_mode != VOIDmode) + emit_move_insn (valreg, saved_valreg); +#endif + + seq = get_insns (); + end_sequence (); + + saveregs_value = temp; + + /* Put the sequence after the NOTE that starts the function. + If this is inside a SEQUENCE, make the outer-level insn + chain current, so the code is placed at the start of the + function. */ + push_topmost_sequence (); + emit_insns_before (seq, NEXT_INSN (get_insns ())); + pop_topmost_sequence (); + return temp; + } + + /* __builtin_args_info (N) returns word N of the arg space info + for the current function. The number and meanings of words + is controlled by the definition of CUMULATIVE_ARGS. */ + case BUILT_IN_ARGS_INFO: + { + int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); + int i; + int *word_ptr = (int *) ¤t_function_args_info; + tree type, elts, result; + + if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) + fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d", + __FILE__, __LINE__); + + if (arglist != 0) + { + tree arg = TREE_VALUE (arglist); + if (TREE_CODE (arg) != INTEGER_CST) + error ("argument of `__builtin_args_info' must be constant"); + else + { + int wordnum = TREE_INT_CST_LOW (arg); + + if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) + error ("argument of `__builtin_args_info' out of range"); + else + return GEN_INT (word_ptr[wordnum]); + } + } + else + error ("missing argument in `__builtin_args_info'"); + + return const0_rtx; + +#if 0 + for (i = 0; i < nwords; i++) + elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); + + type = build_array_type (integer_type_node, + build_index_type (build_int_2 (nwords, 0))); + result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); + 
TREE_CONSTANT (result) = 1; + TREE_STATIC (result) = 1; + result = build (INDIRECT_REF, build_pointer_type (type), result); + TREE_CONSTANT (result) = 1; + return expand_expr (result, NULL_RTX, VOIDmode, 0); +#endif + } + + /* Return the address of the first anonymous stack arg. */ + case BUILT_IN_NEXT_ARG: + { + tree fntype = TREE_TYPE (current_function_decl); + if (!(TYPE_ARG_TYPES (fntype) != 0 + && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) + != void_type_node))) + { + error ("`va_start' used in function with fixed args"); + return const0_rtx; + } + } + + return expand_binop (Pmode, add_optab, + current_function_internal_arg_pointer, + current_function_arg_offset_rtx, + NULL_RTX, 0, OPTAB_LIB_WIDEN); + + case BUILT_IN_CLASSIFY_TYPE: + if (arglist != 0) + { + tree type = TREE_TYPE (TREE_VALUE (arglist)); + enum tree_code code = TREE_CODE (type); + if (code == VOID_TYPE) + return GEN_INT (void_type_class); + if (code == INTEGER_TYPE) + return GEN_INT (integer_type_class); + if (code == CHAR_TYPE) + return GEN_INT (char_type_class); + if (code == ENUMERAL_TYPE) + return GEN_INT (enumeral_type_class); + if (code == BOOLEAN_TYPE) + return GEN_INT (boolean_type_class); + if (code == POINTER_TYPE) + return GEN_INT (pointer_type_class); + if (code == REFERENCE_TYPE) + return GEN_INT (reference_type_class); + if (code == OFFSET_TYPE) + return GEN_INT (offset_type_class); + if (code == REAL_TYPE) + return GEN_INT (real_type_class); + if (code == COMPLEX_TYPE) + return GEN_INT (complex_type_class); + if (code == FUNCTION_TYPE) + return GEN_INT (function_type_class); + if (code == METHOD_TYPE) + return GEN_INT (method_type_class); + if (code == RECORD_TYPE) + return GEN_INT (record_type_class); + if (code == UNION_TYPE || code == QUAL_UNION_TYPE) + return GEN_INT (union_type_class); + if (code == ARRAY_TYPE) + return GEN_INT (array_type_class); + if (code == STRING_TYPE) + return GEN_INT (string_type_class); + if (code == SET_TYPE) + return GEN_INT (set_type_class); 
+ if (code == FILE_TYPE) + return GEN_INT (file_type_class); + if (code == LANG_TYPE) + return GEN_INT (lang_type_class); + } + return GEN_INT (no_type_class); + + case BUILT_IN_CONSTANT_P: + if (arglist == 0) + return const0_rtx; + else + return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c' + ? const1_rtx : const0_rtx); + + case BUILT_IN_FRAME_ADDRESS: + /* The argument must be a nonnegative integer constant. + It counts the number of frames to scan up the stack. + The value is the address of that frame. */ + case BUILT_IN_RETURN_ADDRESS: + /* The argument must be a nonnegative integer constant. + It counts the number of frames to scan up the stack. + The value is the return address saved in that frame. */ + if (arglist == 0) + /* Warning about missing arg was already issued. */ + return const0_rtx; + else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST) + { + error ("invalid arg to `__builtin_return_address'"); + return const0_rtx; + } + else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node)) + { + error ("invalid arg to `__builtin_return_address'"); + return const0_rtx; + } + else + { + int count = TREE_INT_CST_LOW (TREE_VALUE (arglist)); + rtx tem = frame_pointer_rtx; + int i; + + /* Some machines need special handling before we can access arbitrary + frames. For example, on the sparc, we must first flush all + register windows to the stack. */ +#ifdef SETUP_FRAME_ADDRESSES + SETUP_FRAME_ADDRESSES (); +#endif + + /* On the sparc, the return address is not in the frame, it is + in a register. There is no way to access it off of the current + frame pointer, but it can be accessed off the previous frame + pointer by reading the value from the register window save + area. */ +#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME + if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS) + count--; +#endif + + /* Scan back COUNT frames to the specified frame. 
*/ + for (i = 0; i < count; i++) + { + /* Assume the dynamic chain pointer is in the word that + the frame address points to, unless otherwise specified. */ +#ifdef DYNAMIC_CHAIN_ADDRESS + tem = DYNAMIC_CHAIN_ADDRESS (tem); +#endif + tem = memory_address (Pmode, tem); + tem = copy_to_reg (gen_rtx (MEM, Pmode, tem)); + } + + /* For __builtin_frame_address, return what we've got. */ + if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) + return tem; + + /* For __builtin_return_address, + Get the return address from that frame. */ +#ifdef RETURN_ADDR_RTX + return RETURN_ADDR_RTX (count, tem); +#else + tem = memory_address (Pmode, + plus_constant (tem, GET_MODE_SIZE (Pmode))); + return copy_to_reg (gen_rtx (MEM, Pmode, tem)); +#endif + } + + case BUILT_IN_ALLOCA: + if (arglist == 0 + /* Arg could be non-integer if user redeclared this fcn wrong. */ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) + return const0_rtx; + current_function_calls_alloca = 1; + /* Compute the argument. */ + op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); + + /* Allocate the desired space. */ + target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); + + /* Record the new stack level for nonlocal gotos. */ + if (nonlocal_goto_handler_slot != 0) + emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX); + return target; + + case BUILT_IN_FFS: + /* If not optimizing, call the library function. */ + if (!optimize) + break; + + if (arglist == 0 + /* Arg could be non-integer if user redeclared this fcn wrong. */ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) + return const0_rtx; + + /* Compute the argument. */ + op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); + /* Compute ffs, into TARGET if possible. + Set TARGET to wherever the result comes back. 
*/ + target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), + ffs_optab, op0, target, 1); + if (target == 0) + abort (); + return target; + + case BUILT_IN_STRLEN: + /* If not optimizing, call the library function. */ + if (!optimize) + break; + + if (arglist == 0 + /* Arg could be non-pointer if user redeclared this fcn wrong. */ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) + return const0_rtx; + else + { + tree src = TREE_VALUE (arglist); + tree len = c_strlen (src); + + int align + = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + + rtx result, src_rtx, char_rtx; + enum machine_mode insn_mode = value_mode, char_mode; + enum insn_code icode; + + /* If the length is known, just return it. */ + if (len != 0) + return expand_expr (len, target, mode, 0); + + /* If SRC is not a pointer type, don't do this operation inline. */ + if (align == 0) + break; + + /* Call a function if we can't compute strlen in the right mode. */ + + while (insn_mode != VOIDmode) + { + icode = strlen_optab->handlers[(int) insn_mode].insn_code; + if (icode != CODE_FOR_nothing) + break; -static rtx -expand_builtin (exp, target, subtarget, mode, ignore) - tree exp; - rtx target; - rtx subtarget; - enum machine_mode mode; - int ignore; -{ - tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); - tree arglist = TREE_OPERAND (exp, 1); - rtx op0; - rtx lab1, insns; - enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); - optab builtin_optab; + insn_mode = GET_MODE_WIDER_MODE (insn_mode); + } + if (insn_mode == VOIDmode) + break; - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_ABS: - case BUILT_IN_LABS: - case BUILT_IN_FABS: - /* build_function_call changes these into ABS_EXPR. */ - abort (); + /* Make a place to write the result of the instruction. */ + result = target; + if (! 
(result != 0 + && GET_CODE (result) == REG + && GET_MODE (result) == insn_mode + && REGNO (result) >= FIRST_PSEUDO_REGISTER)) + result = gen_reg_rtx (insn_mode); - case BUILT_IN_SIN: - case BUILT_IN_COS: - case BUILT_IN_FSQRT: + /* Make sure the operands are acceptable to the predicates. */ + + if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode)) + result = gen_reg_rtx (insn_mode); + + src_rtx = memory_address (BLKmode, + expand_expr (src, NULL_RTX, Pmode, + EXPAND_NORMAL)); + if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode)) + src_rtx = copy_to_mode_reg (Pmode, src_rtx); + + char_rtx = const0_rtx; + char_mode = insn_operand_mode[(int)icode][2]; + if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode)) + char_rtx = copy_to_mode_reg (char_mode, char_rtx); + + emit_insn (GEN_FCN (icode) (result, + gen_rtx (MEM, BLKmode, src_rtx), + char_rtx, GEN_INT (align))); + + /* Return the value in the proper mode for this function. */ + if (GET_MODE (result) == value_mode) + return result; + else if (target != 0) + { + convert_move (target, result, 0); + return target; + } + else + return convert_to_mode (value_mode, result, 0); + } + + case BUILT_IN_STRCPY: /* If not optimizing, call the library function. */ - if (! optimize) + if (!optimize) break; if (arglist == 0 - /* Arg could be wrong type if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE) - return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp))); - - /* Stabilize and compute the argument. */ - if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL - && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL) + /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE + || TREE_CHAIN (arglist) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) + return const0_rtx; + else { - exp = copy_node (exp); - arglist = copy_node (arglist); - TREE_OPERAND (exp, 1) = arglist; - TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist)); - } - op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); + tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); - /* Make a suitable register to place result in. */ - target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); + if (len == 0) + break; - emit_queue (); - start_sequence (); + len = size_binop (PLUS_EXPR, len, integer_one_node); - switch (DECL_FUNCTION_CODE (fndecl)) - { - case BUILT_IN_SIN: - builtin_optab = sin_optab; break; - case BUILT_IN_COS: - builtin_optab = cos_optab; break; - case BUILT_IN_FSQRT: - builtin_optab = sqrt_optab; break; - default: - abort (); + chainon (arglist, build_tree_list (NULL_TREE, len)); } - /* Compute into TARGET. - Set TARGET to wherever the result comes back. */ - target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), - builtin_optab, op0, target, 0); + /* Drops in. */ + case BUILT_IN_MEMCPY: + /* If not optimizing, call the library function. */ + if (!optimize) + break; - /* If we were unable to expand via the builtin, stop the - sequence (without outputting the insns) and break, causing - a call the the library function. */ - if (target == 0) + if (arglist == 0 + /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE + || TREE_CHAIN (arglist) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE + || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) + return const0_rtx; + else { - end_sequence (); - break; - } + tree dest = TREE_VALUE (arglist); + tree src = TREE_VALUE (TREE_CHAIN (arglist)); + tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); - /* Check the results by default. But if flag_fast_math is turned on, - then assume sqrt will always be called with valid arguments. */ + int src_align + = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + int dest_align + = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + rtx dest_rtx, dest_mem, src_mem; - if (! flag_fast_math) - { - /* Don't define the builtin FP instructions - if your machine is not IEEE. */ - if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT) - abort (); + /* If either SRC or DEST is not a pointer type, don't do + this operation in-line. */ + if (src_align == 0 || dest_align == 0) + { + if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY) + TREE_CHAIN (TREE_CHAIN (arglist)) = 0; + break; + } + + dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL); + dest_mem = gen_rtx (MEM, BLKmode, + memory_address (BLKmode, dest_rtx)); + src_mem = gen_rtx (MEM, BLKmode, + memory_address (BLKmode, + expand_expr (src, NULL_RTX, + Pmode, + EXPAND_NORMAL))); + + /* Copy word part most expediently. */ + emit_block_move (dest_mem, src_mem, + expand_expr (len, NULL_RTX, VOIDmode, 0), + MIN (src_align, dest_align)); + return dest_rtx; + } + +/* These comparison functions need an instruction that returns an actual + index. An ordinary compare that just sets the condition codes + is not enough. */ +#ifdef HAVE_cmpstrsi + case BUILT_IN_STRCMP: + /* If not optimizing, call the library function. 
*/ + if (!optimize) + break; + + if (arglist == 0 + /* Arg could be non-pointer if user redeclared this fcn wrong. */ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE + || TREE_CHAIN (arglist) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) + return const0_rtx; + else if (!HAVE_cmpstrsi) + break; + { + tree arg1 = TREE_VALUE (arglist); + tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); + tree offset; + tree len, len2; + + len = c_strlen (arg1); + if (len) + len = size_binop (PLUS_EXPR, integer_one_node, len); + len2 = c_strlen (arg2); + if (len2) + len2 = size_binop (PLUS_EXPR, integer_one_node, len2); + + /* If we don't have a constant length for the first, use the length + of the second, if we know it. We don't require a constant for + this case; some cost analysis could be done if both are available + but neither is constant. For now, assume they're equally cheap. + + If both strings have constant lengths, use the smaller. This + could arise if optimization results in strcpy being called with + two fixed strings, or if the code was machine-generated. We should + add some code to the `memcmp' handler below to deal with such + situations, someday. */ + if (!len || TREE_CODE (len) != INTEGER_CST) + { + if (len2) + len = len2; + else if (len == 0) + break; + } + else if (len2 && TREE_CODE (len2) == INTEGER_CST) + { + if (tree_int_cst_lt (len2, len)) + len = len2; + } + + chainon (arglist, build_tree_list (NULL_TREE, len)); + } + + /* Drops in. */ + case BUILT_IN_MEMCMP: + /* If not optimizing, call the library function. */ + if (!optimize) + break; + + if (arglist == 0 + /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ + || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE + || TREE_CHAIN (arglist) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE + || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 + || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) + return const0_rtx; + else if (!HAVE_cmpstrsi) + break; + { + tree arg1 = TREE_VALUE (arglist); + tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); + tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); + rtx result; + + int arg1_align + = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + int arg2_align + = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + enum machine_mode insn_mode + = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0]; - lab1 = gen_label_rtx (); + /* If we don't have POINTER_TYPE, call the function. */ + if (arg1_align == 0 || arg2_align == 0) + { + if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP) + TREE_CHAIN (TREE_CHAIN (arglist)) = 0; + break; + } - /* Test the result; if it is NaN, set errno=EDOM because - the argument was not in the domain. */ - emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0); - emit_jump_insn (gen_beq (lab1)); + /* Make a place to write the result of the instruction. */ + result = target; + if (! 
(result != 0 + && GET_CODE (result) == REG && GET_MODE (result) == insn_mode + && REGNO (result) >= FIRST_PSEUDO_REGISTER)) + result = gen_reg_rtx (insn_mode); -#if TARGET_EDOM - { -#ifdef GEN_ERRNO_RTX - rtx errno_rtx = GEN_ERRNO_RTX; -#else - rtx errno_rtx - = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno")); -#endif + emit_insn (gen_cmpstrsi (result, + gen_rtx (MEM, BLKmode, + expand_expr (arg1, NULL_RTX, Pmode, + EXPAND_NORMAL)), + gen_rtx (MEM, BLKmode, + expand_expr (arg2, NULL_RTX, Pmode, + EXPAND_NORMAL)), + expand_expr (len, NULL_RTX, VOIDmode, 0), + GEN_INT (MIN (arg1_align, arg2_align)))); - emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM)); + /* Return the value in the proper mode for this function. */ + mode = TYPE_MODE (TREE_TYPE (exp)); + if (GET_MODE (result) == mode) + return result; + else if (target != 0) + { + convert_move (target, result, 0); + return target; } + else + return convert_to_mode (mode, result, 0); + } #else - /* We can't set errno=EDOM directly; let the library call do it. - Pop the arguments right away in case the call gets deleted. */ - NO_DEFER_POP; - expand_call (exp, target, 0); - OK_DEFER_POP; + case BUILT_IN_STRCMP: + case BUILT_IN_MEMCMP: + break; #endif - emit_label (lab1); - } + default: /* just do library call, if unknown builtin */ + error ("built-in function `%s' not currently supported", + IDENTIFIER_POINTER (DECL_NAME (fndecl))); + } - /* Output the entire sequence. */ - insns = get_insns (); - end_sequence (); - emit_insns (insns); - - return target; + /* The switch statement above can drop through to cause the function + to be called normally. */ - /* __builtin_apply_args returns block of memory allocated on - the stack into which is stored the arg pointer, structure - value address, static chain, and all the registers that might - possibly be used in performing a function call. The code is - moved to the start of the function so the incoming values are - saved. 
*/ - case BUILT_IN_APPLY_ARGS: - /* Don't do __builtin_apply_args more than once in a function. - Save the result of the first call and reuse it. */ - if (apply_args_value != 0) - return apply_args_value; - { - /* When this function is called, it means that registers must be - saved on entry to this function. So we migrate the - call to the first insn of this function. */ - rtx temp; - rtx seq; + return expand_call (exp, target, ignore); +} + +/* Built-in functions to perform an untyped call and return. */ - start_sequence (); - temp = expand_builtin_apply_args (); - seq = get_insns (); - end_sequence (); +/* For each register that may be used for calling a function, this + gives a mode used to copy the register's value. VOIDmode indicates + the register is not used for calling a function. If the machine + has register windows, this gives only the outbound registers. + INCOMING_REGNO gives the corresponding inbound register. */ +static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; - apply_args_value = temp; +/* For each register that may be used for returning values, this gives + a mode used to copy the register's value. VOIDmode indicates the + register is not used for returning values. If the machine has + register windows, this gives only the outbound registers. + INCOMING_REGNO gives the corresponding inbound register. */ +static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; - /* Put the sequence after the NOTE that starts the function. - If this is inside a SEQUENCE, make the outer-level insn - chain current, so the code is placed at the start of the - function. */ - push_topmost_sequence (); - emit_insns_before (seq, NEXT_INSN (get_insns ())); - pop_topmost_sequence (); - return temp; - } +/* For each register that may be used for calling a function, this + gives the offset of that register into the block returned by + __bultin_apply_args. 0 indicates that the register is not + used for calling a function. 
*/ +static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; - /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes - FUNCTION with a copy of the parameters described by - ARGUMENTS, and ARGSIZE. It returns a block of memory - allocated on the stack into which is stored all the registers - that might possibly be used for returning the result of a - function. ARGUMENTS is the value returned by - __builtin_apply_args. ARGSIZE is the number of bytes of - arguments that must be copied. ??? How should this value be - computed? We'll also need a safe worst case value for varargs - functions. */ - case BUILT_IN_APPLY: - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) - return const0_rtx; - else - { - int i; - tree t; - rtx ops[3]; +/* Return the offset of register REGNO into the block returned by + __builtin_apply_args. This is not declared static, since it is + needed in objc-act.c. */ - for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) - ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); +int +apply_args_register_offset (regno) + int regno; +{ + apply_args_size (); - return expand_builtin_apply (ops[0], ops[1], ops[2]); - } + /* Arguments are always put in outgoing registers (in the argument + block) if such make sense. */ +#ifdef OUTGOING_REGNO + regno = OUTGOING_REGNO(regno); +#endif + return apply_args_reg_offset[regno]; +} - /* __builtin_return (RESULT) causes the function to return the - value described by RESULT. RESULT is address of the block of - memory returned by __builtin_apply. */ - case BUILT_IN_RETURN: - if (arglist - /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ - && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) - expand_builtin_return (expand_expr (TREE_VALUE (arglist), - NULL_RTX, VOIDmode, 0)); - return const0_rtx; +/* Return the size required for the block returned by __builtin_apply_args, + and initialize apply_args_mode. */ - case BUILT_IN_SAVEREGS: - /* Don't do __builtin_saveregs more than once in a function. - Save the result of the first call and reuse it. */ - if (saveregs_value != 0) - return saveregs_value; - { - /* When this function is called, it means that registers must be - saved on entry to this function. So we migrate the - call to the first insn of this function. */ - rtx temp; - rtx seq; - rtx valreg, saved_valreg; +static int +apply_args_size () +{ + static int size = -1; + int align, regno; + enum machine_mode mode; - /* Now really call the function. `expand_call' does not call - expand_builtin, so there is no danger of infinite recursion here. */ - start_sequence (); + /* The values computed by this function never change. */ + if (size < 0) + { + /* The first value is the incoming arg-pointer. */ + size = GET_MODE_SIZE (Pmode); -#ifdef EXPAND_BUILTIN_SAVEREGS - /* Do whatever the machine needs done in this case. */ - temp = EXPAND_BUILTIN_SAVEREGS (arglist); -#else - /* The register where the function returns its value - is likely to have something else in it, such as an argument. - So preserve that register around the call. */ - if (value_mode != VOIDmode) + /* The second value is the structure value address unless this is + passed as an "invisible" first argument. */ + if (struct_value_rtx) + size += GET_MODE_SIZE (Pmode); + + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if (FUNCTION_ARG_REGNO_P (regno)) { - valreg = hard_libcall_value (value_mode); - saved_valreg = gen_reg_rtx (value_mode); - emit_move_insn (saved_valreg, valreg); + /* Search for the proper mode for copying this register's + value. I'm not sure this is right, but it works so far. 
*/ + enum machine_mode best_mode = VOIDmode; + + for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + if (HARD_REGNO_MODE_OK (regno, mode) + && HARD_REGNO_NREGS (regno, mode) == 1) + best_mode = mode; + + if (best_mode == VOIDmode) + for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + if (HARD_REGNO_MODE_OK (regno, mode) + && (mov_optab->handlers[(int) mode].insn_code + != CODE_FOR_nothing)) + best_mode = mode; + + mode = best_mode; + if (mode == VOIDmode) + abort (); + + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + apply_args_reg_offset[regno] = size; + size += GET_MODE_SIZE (mode); + apply_args_mode[regno] = mode; + } + else + { + apply_args_mode[regno] = VOIDmode; + apply_args_reg_offset[regno] = 0; } + } + return size; +} - /* Generate the call, putting the value in a pseudo. */ - temp = expand_call (exp, target, ignore); - - if (value_mode != VOIDmode) - emit_move_insn (valreg, saved_valreg); -#endif +/* Return the size required for the block returned by __builtin_apply, + and initialize apply_result_mode. */ - seq = get_insns (); - end_sequence (); +static int +apply_result_size () +{ + static int size = -1; + int align, regno; + enum machine_mode mode; - saveregs_value = temp; + /* The values computed by this function never change. */ + if (size < 0) + { + size = 0; - /* Put the sequence after the NOTE that starts the function. - If this is inside a SEQUENCE, make the outer-level insn - chain current, so the code is placed at the start of the - function. */ - push_topmost_sequence (); - emit_insns_before (seq, NEXT_INSN (get_insns ())); - pop_topmost_sequence (); - return temp; - } + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if (FUNCTION_VALUE_REGNO_P (regno)) + { + /* Search for the proper mode for copying this register's + value. 
I'm not sure this is right, but it works so far. */ + enum machine_mode best_mode = VOIDmode; - /* __builtin_args_info (N) returns word N of the arg space info - for the current function. The number and meanings of words - is controlled by the definition of CUMULATIVE_ARGS. */ - case BUILT_IN_ARGS_INFO: - { - int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); - int i; - int *word_ptr = (int *) ¤t_function_args_info; - tree type, elts, result; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); + mode != TImode; + mode = GET_MODE_WIDER_MODE (mode)) + if (HARD_REGNO_MODE_OK (regno, mode)) + best_mode = mode; - if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) - fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d", - __FILE__, __LINE__); + if (best_mode == VOIDmode) + for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + if (HARD_REGNO_MODE_OK (regno, mode) + && (mov_optab->handlers[(int) mode].insn_code + != CODE_FOR_nothing)) + best_mode = mode; - if (arglist != 0) - { - tree arg = TREE_VALUE (arglist); - if (TREE_CODE (arg) != INTEGER_CST) - error ("argument of `__builtin_args_info' must be constant"); - else - { - int wordnum = TREE_INT_CST_LOW (arg); + mode = best_mode; + if (mode == VOIDmode) + abort (); - if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) - error ("argument of `__builtin_args_info' out of range"); - else - return GEN_INT (word_ptr[wordnum]); - } + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + size += GET_MODE_SIZE (mode); + apply_result_mode[regno] = mode; } else - error ("missing argument in `__builtin_args_info'"); + apply_result_mode[regno] = VOIDmode; - return const0_rtx; + /* Allow targets that use untyped_call and untyped_return to override + the size so that machine-specific information can be stored here. 
*/ +#ifdef APPLY_RESULT_SIZE + size = APPLY_RESULT_SIZE; +#endif + } + return size; +} -#if 0 - for (i = 0; i < nwords; i++) - elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); +#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) +/* Create a vector describing the result block RESULT. If SAVEP is true, + the result block is used to save the values; otherwise it is used to + restore the values. */ - type = build_array_type (integer_type_node, - build_index_type (build_int_2 (nwords, 0))); - result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); - TREE_CONSTANT (result) = 1; - TREE_STATIC (result) = 1; - result = build (INDIRECT_REF, build_pointer_type (type), result); - TREE_CONSTANT (result) = 1; - return expand_expr (result, NULL_RTX, VOIDmode, 0); -#endif +static rtx +result_vector (savep, result) + int savep; + rtx result; +{ + int regno, size, align, nelts; + enum machine_mode mode; + rtx reg, mem; + rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); + + size = nelts = 0; + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if ((mode = apply_result_mode[regno]) != VOIDmode) + { + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno); + mem = change_address (result, mode, + plus_constant (XEXP (result, 0), size)); + savevec[nelts++] = (savep + ? gen_rtx (SET, VOIDmode, mem, reg) + : gen_rtx (SET, VOIDmode, reg, mem)); + size += GET_MODE_SIZE (mode); } + return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec)); +} +#endif /* HAVE_untyped_call or HAVE_untyped_return */ - /* Return the address of the first anonymous stack arg. */ - case BUILT_IN_NEXT_ARG: +/* Save the state required to perform an untyped call with the same + arguments as were passed to the current function. 
*/ + +static rtx +expand_builtin_apply_args () +{ + rtx registers; + int size, align, regno; + enum machine_mode mode; + + /* Create a block where the arg-pointer, structure value address, + and argument registers can be saved. */ + registers = assign_stack_local (BLKmode, apply_args_size (), -1); + + /* Walk past the arg-pointer and structure value address. */ + size = GET_MODE_SIZE (Pmode); + if (struct_value_rtx) + size += GET_MODE_SIZE (Pmode); + + /* Save each register used in calling a function to the block. */ + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if ((mode = apply_args_mode[regno]) != VOIDmode) { - tree fntype = TREE_TYPE (current_function_decl); - if (!(TYPE_ARG_TYPES (fntype) != 0 - && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) - != void_type_node))) - { - error ("`va_start' used in function with fixed args"); - return const0_rtx; - } + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + emit_move_insn (change_address (registers, mode, + plus_constant (XEXP (registers, 0), + size)), + gen_rtx (REG, mode, INCOMING_REGNO (regno))); + size += GET_MODE_SIZE (mode); } - return expand_binop (Pmode, add_optab, - current_function_internal_arg_pointer, - current_function_arg_offset_rtx, - NULL_RTX, 0, OPTAB_LIB_WIDEN); + /* Save the arg pointer to the block. 
*/ + emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)), + copy_to_reg (virtual_incoming_args_rtx)); + size = GET_MODE_SIZE (Pmode); - case BUILT_IN_CLASSIFY_TYPE: - if (arglist != 0) - { - tree type = TREE_TYPE (TREE_VALUE (arglist)); - enum tree_code code = TREE_CODE (type); - if (code == VOID_TYPE) - return GEN_INT (void_type_class); - if (code == INTEGER_TYPE) - return GEN_INT (integer_type_class); - if (code == CHAR_TYPE) - return GEN_INT (char_type_class); - if (code == ENUMERAL_TYPE) - return GEN_INT (enumeral_type_class); - if (code == BOOLEAN_TYPE) - return GEN_INT (boolean_type_class); - if (code == POINTER_TYPE) - return GEN_INT (pointer_type_class); - if (code == REFERENCE_TYPE) - return GEN_INT (reference_type_class); - if (code == OFFSET_TYPE) - return GEN_INT (offset_type_class); - if (code == REAL_TYPE) - return GEN_INT (real_type_class); - if (code == COMPLEX_TYPE) - return GEN_INT (complex_type_class); - if (code == FUNCTION_TYPE) - return GEN_INT (function_type_class); - if (code == METHOD_TYPE) - return GEN_INT (method_type_class); - if (code == RECORD_TYPE) - return GEN_INT (record_type_class); - if (code == UNION_TYPE || code == QUAL_UNION_TYPE) - return GEN_INT (union_type_class); - if (code == ARRAY_TYPE) - return GEN_INT (array_type_class); - if (code == STRING_TYPE) - return GEN_INT (string_type_class); - if (code == SET_TYPE) - return GEN_INT (set_type_class); - if (code == FILE_TYPE) - return GEN_INT (file_type_class); - if (code == LANG_TYPE) - return GEN_INT (lang_type_class); - } - return GEN_INT (no_type_class); + /* Save the structure value address unless this is passed as an + "invisible" first argument. */ + if (struct_value_incoming_rtx) + { + emit_move_insn (change_address (registers, Pmode, + plus_constant (XEXP (registers, 0), + size)), + copy_to_reg (struct_value_incoming_rtx)); + size += GET_MODE_SIZE (Pmode); + } + + /* Return the address of the block. 
*/ + return copy_addr_to_reg (XEXP (registers, 0)); +} + +/* Perform an untyped call and save the state required to perform an + untyped return of whatever value was returned by the given function. */ + +static rtx +expand_builtin_apply (function, arguments, argsize) + rtx function, arguments, argsize; +{ + int size, align, regno; + enum machine_mode mode; + rtx incoming_args, result, reg, dest, call_insn; + rtx old_stack_level = 0; + rtx use_insns = 0; - case BUILT_IN_CONSTANT_P: - if (arglist == 0) - return const0_rtx; - else - return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c' - ? const1_rtx : const0_rtx); + /* Create a block where the return registers can be saved. */ + result = assign_stack_local (BLKmode, apply_result_size (), -1); - case BUILT_IN_FRAME_ADDRESS: - /* The argument must be a nonnegative integer constant. - It counts the number of frames to scan up the stack. - The value is the address of that frame. */ - case BUILT_IN_RETURN_ADDRESS: - /* The argument must be a nonnegative integer constant. - It counts the number of frames to scan up the stack. - The value is the return address saved in that frame. */ - if (arglist == 0) - /* Warning about missing arg was already issued. */ - return const0_rtx; - else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST) - { - error ("invalid arg to `__builtin_return_address'"); - return const0_rtx; - } - else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node)) - { - error ("invalid arg to `__builtin_return_address'"); - return const0_rtx; - } - else - { - int count = TREE_INT_CST_LOW (TREE_VALUE (arglist)); - rtx tem = frame_pointer_rtx; - int i; + /* ??? The argsize value should be adjusted here. */ - /* Some machines need special handling before we can access arbitrary - frames. For example, on the sparc, we must first flush all - register windows to the stack. */ -#ifdef SETUP_FRAME_ADDRESSES - SETUP_FRAME_ADDRESSES (); + /* Fetch the arg pointer from the ARGUMENTS block. 
*/ + incoming_args = gen_reg_rtx (Pmode); + emit_move_insn (incoming_args, + gen_rtx (MEM, Pmode, arguments)); +#ifndef STACK_GROWS_DOWNWARD + incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize, + incoming_args, 0, OPTAB_LIB_WIDEN); #endif - /* On the sparc, the return address is not in the frame, it is - in a register. There is no way to access it off of the current - frame pointer, but it can be accessed off the previous frame - pointer by reading the value from the register window save - area. */ -#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS) - count--; -#endif + /* Perform postincrements before actually calling the function. */ + emit_queue (); - /* Scan back COUNT frames to the specified frame. */ - for (i = 0; i < count; i++) - { - /* Assume the dynamic chain pointer is in the word that - the frame address points to, unless otherwise specified. */ -#ifdef DYNAMIC_CHAIN_ADDRESS - tem = DYNAMIC_CHAIN_ADDRESS (tem); -#endif - tem = memory_address (Pmode, tem); - tem = copy_to_reg (gen_rtx (MEM, Pmode, tem)); - } + /* Push a new argument block and copy the arguments. */ + do_pending_stack_adjust (); + emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); - /* For __builtin_frame_address, return what we've got. */ - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) - return tem; + /* Push a block of memory onto the stack to store the memory arguments. + Save the address in a register, and copy the memory arguments. ??? I + haven't figured out how the calling convention macros effect this, + but it's likely that the source and/or destination addresses in + the block copy will need updating in machine specific ways. */ + dest = copy_addr_to_reg (push_block (argsize, 0, 0)); + emit_block_move (gen_rtx (MEM, BLKmode, dest), + gen_rtx (MEM, BLKmode, incoming_args), + argsize, + PARM_BOUNDARY / BITS_PER_UNIT); - /* For __builtin_return_address, - Get the return address from that frame. 
*/ -#ifdef RETURN_ADDR_RTX - return RETURN_ADDR_RTX (count, tem); -#else - tem = memory_address (Pmode, - plus_constant (tem, GET_MODE_SIZE (Pmode))); - return copy_to_reg (gen_rtx (MEM, Pmode, tem)); -#endif + /* Refer to the argument block. */ + apply_args_size (); + arguments = gen_rtx (MEM, BLKmode, arguments); + + /* Walk past the arg-pointer and structure value address. */ + size = GET_MODE_SIZE (Pmode); + if (struct_value_rtx) + size += GET_MODE_SIZE (Pmode); + + /* Restore each of the registers previously saved. Make USE insns + for each of these registers for use in making the call. */ + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if ((mode = apply_args_mode[regno]) != VOIDmode) + { + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + reg = gen_rtx (REG, mode, regno); + emit_move_insn (reg, + change_address (arguments, mode, + plus_constant (XEXP (arguments, 0), + size))); + + push_to_sequence (use_insns); + emit_insn (gen_rtx (USE, VOIDmode, reg)); + use_insns = get_insns (); + end_sequence (); + size += GET_MODE_SIZE (mode); + } + + /* Restore the structure value address unless this is passed as an + "invisible" first argument. */ + size = GET_MODE_SIZE (Pmode); + if (struct_value_rtx) + { + rtx value = gen_reg_rtx (Pmode); + emit_move_insn (value, + change_address (arguments, Pmode, + plus_constant (XEXP (arguments, 0), + size))); + emit_move_insn (struct_value_rtx, value); + if (GET_CODE (struct_value_rtx) == REG) + { + push_to_sequence (use_insns); + emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx)); + use_insns = get_insns (); + end_sequence (); } + size += GET_MODE_SIZE (Pmode); + } - case BUILT_IN_ALLOCA: - if (arglist == 0 - /* Arg could be non-integer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) - return const0_rtx; - current_function_calls_alloca = 1; - /* Compute the argument. 
*/ - op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); + /* All arguments and registers used for the call are set up by now! */ + function = prepare_call_address (function, NULL_TREE, &use_insns); - /* Allocate the desired space. */ - target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); + /* Ensure address is valid. SYMBOL_REF is already valid, so no need, + and we don't want to load it into a register as an optimization, + because prepare_call_address already did it if it should be done. */ + if (GET_CODE (function) != SYMBOL_REF) + function = memory_address (FUNCTION_MODE, function); - /* Record the new stack level for nonlocal gotos. */ - if (nonlocal_goto_handler_slot != 0) - emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX); - return target; + /* Generate the actual call instruction and save the return value. */ +#ifdef HAVE_untyped_call + if (HAVE_untyped_call) + emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function), + result, result_vector (1, result))); + else +#endif +#ifdef HAVE_call_value + if (HAVE_call_value) + { + rtx valreg = 0; - case BUILT_IN_FFS: - /* If not optimizing, call the library function. */ - if (!optimize) - break; + /* Locate the unique return register. It is not possible to + express a call that sets more than one return register using + call_value; use untyped_call for that. In fact, untyped_call + only needs to save the return registers in the given block. */ + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if ((mode = apply_result_mode[regno]) != VOIDmode) + { + if (valreg) + abort (); /* HAVE_untyped_call required. */ + valreg = gen_rtx (REG, mode, regno); + } - if (arglist == 0 - /* Arg could be non-integer if user redeclared this fcn wrong. 
*/ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) - return const0_rtx; + emit_call_insn (gen_call_value (valreg, + gen_rtx (MEM, FUNCTION_MODE, function), + const0_rtx, NULL_RTX, const0_rtx)); - /* Compute the argument. */ - op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); - /* Compute ffs, into TARGET if possible. - Set TARGET to wherever the result comes back. */ - target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), - ffs_optab, op0, target, 1); - if (target == 0) - abort (); - return target; + emit_move_insn (change_address (result, GET_MODE (valreg), + XEXP (result, 0)), + valreg); + } + else +#endif + abort (); - case BUILT_IN_STRLEN: - /* If not optimizing, call the library function. */ - if (!optimize) - break; + /* Find the CALL insn we just emitted and write the USE insns before it. */ + for (call_insn = get_last_insn (); + call_insn && GET_CODE (call_insn) != CALL_INSN; + call_insn = PREV_INSN (call_insn)) + ; - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) - return const0_rtx; - else - { - tree src = TREE_VALUE (arglist); - tree len = c_strlen (src); + if (! call_insn) + abort (); - int align - = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; + /* Put the USE insns before the CALL. */ + emit_insns_before (use_insns, call_insn); - rtx result, src_rtx, char_rtx; - enum machine_mode insn_mode = value_mode, char_mode; - enum insn_code icode; + /* Restore the stack. */ + emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); - /* If the length is known, just return it. */ - if (len != 0) - return expand_expr (len, target, mode, 0); + /* Return the address of the result block. */ + return copy_addr_to_reg (XEXP (result, 0)); +} - /* If SRC is not a pointer type, don't do this operation inline. */ - if (align == 0) - break; +/* Perform an untyped return. 
*/ - /* Call a function if we can't compute strlen in the right mode. */ +static void +expand_builtin_return (result) + rtx result; +{ + int size, align, regno; + enum machine_mode mode; + rtx reg; + rtx use_insns = 0; - while (insn_mode != VOIDmode) - { - icode = strlen_optab->handlers[(int) insn_mode].insn_code; - if (icode != CODE_FOR_nothing) - break; + apply_result_size (); + result = gen_rtx (MEM, BLKmode, result); - insn_mode = GET_MODE_WIDER_MODE (insn_mode); - } - if (insn_mode == VOIDmode) - break; +#ifdef HAVE_untyped_return + if (HAVE_untyped_return) + { + emit_jump_insn (gen_untyped_return (result, result_vector (0, result))); + emit_barrier (); + return; + } +#endif - /* Make a place to write the result of the instruction. */ - result = target; - if (! (result != 0 - && GET_CODE (result) == REG - && GET_MODE (result) == insn_mode - && REGNO (result) >= FIRST_PSEUDO_REGISTER)) - result = gen_reg_rtx (insn_mode); + /* Restore the return value and note that each value is used. */ + size = 0; + for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) + if ((mode = apply_result_mode[regno]) != VOIDmode) + { + align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; + if (size % align != 0) + size = CEIL (size, align) * align; + reg = gen_rtx (REG, mode, INCOMING_REGNO (regno)); + emit_move_insn (reg, + change_address (result, mode, + plus_constant (XEXP (result, 0), + size))); - /* Make sure the operands are acceptable to the predicates. */ + push_to_sequence (use_insns); + emit_insn (gen_rtx (USE, VOIDmode, reg)); + use_insns = get_insns (); + end_sequence (); + size += GET_MODE_SIZE (mode); + } - if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode)) - result = gen_reg_rtx (insn_mode); + /* Put the USE insns before the return. */ + emit_insns (use_insns); - src_rtx = memory_address (BLKmode, - expand_expr (src, NULL_RTX, Pmode, - EXPAND_NORMAL)); - if (! 
(*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode)) - src_rtx = copy_to_mode_reg (Pmode, src_rtx); + /* Return whatever values was restored by jumping directly to the end + of the function. */ + expand_null_return (); +} + +/* Expand code for a post- or pre- increment or decrement + and return the RTX for the result. + POST is 1 for postinc/decrements and 0 for preinc/decrements. */ - char_rtx = const0_rtx; - char_mode = insn_operand_mode[(int)icode][2]; - if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode)) - char_rtx = copy_to_mode_reg (char_mode, char_rtx); +static rtx +expand_increment (exp, post) + register tree exp; + int post; +{ + register rtx op0, op1; + register rtx temp, value; + register tree incremented = TREE_OPERAND (exp, 0); + optab this_optab = add_optab; + int icode; + enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); + int op0_is_copy = 0; + int single_insn = 0; - emit_insn (GEN_FCN (icode) (result, - gen_rtx (MEM, BLKmode, src_rtx), - char_rtx, GEN_INT (align))); + if (output_bytecode) + return bc_expand_increment (exp, post); - /* Return the value in the proper mode for this function. */ - if (GET_MODE (result) == value_mode) - return result; - else if (target != 0) - { - convert_move (target, result, 0); - return target; - } - else - return convert_to_mode (value_mode, result, 0); - } + /* Stabilize any component ref that might need to be + evaluated more than once below. */ + if (!post + || TREE_CODE (incremented) == BIT_FIELD_REF + || (TREE_CODE (incremented) == COMPONENT_REF + && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF + || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1))))) + incremented = stabilize_reference (incremented); + /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost + ones into save exprs so that they don't accidentally get evaluated + more than once by the code below. 
*/ + if (TREE_CODE (incremented) == PREINCREMENT_EXPR + || TREE_CODE (incremented) == PREDECREMENT_EXPR) + incremented = save_expr (incremented); - case BUILT_IN_STRCPY: - /* If not optimizing, call the library function. */ - if (!optimize) - break; + /* Compute the operands as RTX. + Note whether OP0 is the actual lvalue or a copy of it: + I believe it is a copy iff it is a register or subreg + and insns were generated in computing it. */ - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) - return const0_rtx; - else - { - tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); + temp = get_last_insn (); + op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0); - if (len == 0) - break; + /* If OP0 is a SUBREG made for a promoted variable, we cannot increment + in place but intead must do sign- or zero-extension during assignment, + so we copy it into a new register and let the code below use it as + a copy. - len = size_binop (PLUS_EXPR, len, integer_one_node); + Note that we can safely modify this SUBREG since it is know not to be + shared (it was made by the expand_expr call above). */ - chainon (arglist, build_tree_list (NULL_TREE, len)); - } + if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0)) + SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0)); - /* Drops in. */ - case BUILT_IN_MEMCPY: - /* If not optimizing, call the library function. */ - if (!optimize) - break; + op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG) + && temp != get_last_insn ()); + op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) - return const0_rtx; - else - { - tree dest = TREE_VALUE (arglist); - tree src = TREE_VALUE (TREE_CHAIN (arglist)); - tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); + /* Decide whether incrementing or decrementing. */ + if (TREE_CODE (exp) == POSTDECREMENT_EXPR + || TREE_CODE (exp) == PREDECREMENT_EXPR) + this_optab = sub_optab; - int src_align - = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - int dest_align - = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - rtx dest_rtx, dest_mem, src_mem; + /* Convert decrement by a constant into a negative increment. */ + if (this_optab == sub_optab + && GET_CODE (op1) == CONST_INT) + { + op1 = GEN_INT (- INTVAL (op1)); + this_optab = add_optab; + } - /* If either SRC or DEST is not a pointer type, don't do - this operation in-line. */ - if (src_align == 0 || dest_align == 0) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY) - TREE_CHAIN (TREE_CHAIN (arglist)) = 0; - break; - } + /* For a preincrement, see if we can do this with a single instruction. */ + if (!post) + { + icode = (int) this_optab->handlers[(int) mode].insn_code; + if (icode != (int) CODE_FOR_nothing + /* Make sure that OP0 is valid for operands 0 and 1 + of the insn we want to queue. 
*/ + && (*insn_operand_predicate[icode][0]) (op0, mode) + && (*insn_operand_predicate[icode][1]) (op0, mode) + && (*insn_operand_predicate[icode][2]) (op1, mode)) + single_insn = 1; + } - dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL); - dest_mem = gen_rtx (MEM, BLKmode, - memory_address (BLKmode, dest_rtx)); - src_mem = gen_rtx (MEM, BLKmode, - memory_address (BLKmode, - expand_expr (src, NULL_RTX, - Pmode, - EXPAND_NORMAL))); + /* If OP0 is not the actual lvalue, but rather a copy in a register, + then we cannot just increment OP0. We must therefore contrive to + increment the original value. Then, for postincrement, we can return + OP0 since it is a copy of the old value. For preincrement, expand here + unless we can do it with a single insn. */ + if (op0_is_copy || (!post && !single_insn)) + { + /* This is the easiest way to increment the value wherever it is. + Problems with multiple evaluation of INCREMENTED are prevented + because either (1) it is a component_ref or preincrement, + in which case it was stabilized above, or (2) it is an array_ref + with constant index in an array in a register, which is + safe to reevaluate. */ + tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR + || TREE_CODE (exp) == PREDECREMENT_EXPR) + ? MINUS_EXPR : PLUS_EXPR), + TREE_TYPE (exp), + incremented, + TREE_OPERAND (exp, 1)); + temp = expand_assignment (incremented, newexp, ! post, 0); + return post ? op0 : temp; + } - /* Copy word part most expediently. */ - emit_block_move (dest_mem, src_mem, - expand_expr (len, NULL_RTX, VOIDmode, 0), - MIN (src_align, dest_align)); - return dest_rtx; - } + if (post) + { + /* We have a true reference to the value in OP0. + If there is an insn to add or subtract in this mode, queue it. + Queueing the increment insn avoids the register shuffling + that often results if we must increment now and first save + the old value for subsequent use. 
*/ -/* These comparison functions need an instruction that returns an actual - index. An ordinary compare that just sets the condition codes - is not enough. */ -#ifdef HAVE_cmpstrsi - case BUILT_IN_STRCMP: - /* If not optimizing, call the library function. */ - if (!optimize) - break; +#if 0 /* Turned off to avoid making extra insn for indexed memref. */ + op0 = stabilize (op0); +#endif - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. */ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) - return const0_rtx; - else if (!HAVE_cmpstrsi) - break; - { - tree arg1 = TREE_VALUE (arglist); - tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); - tree offset; - tree len, len2; + icode = (int) this_optab->handlers[(int) mode].insn_code; + if (icode != (int) CODE_FOR_nothing + /* Make sure that OP0 is valid for operands 0 and 1 + of the insn we want to queue. */ + && (*insn_operand_predicate[icode][0]) (op0, mode) + && (*insn_operand_predicate[icode][1]) (op0, mode)) + { + if (! (*insn_operand_predicate[icode][2]) (op1, mode)) + op1 = force_reg (mode, op1); - len = c_strlen (arg1); - if (len) - len = size_binop (PLUS_EXPR, integer_one_node, len); - len2 = c_strlen (arg2); - if (len2) - len2 = size_binop (PLUS_EXPR, integer_one_node, len2); + return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); + } + } - /* If we don't have a constant length for the first, use the length - of the second, if we know it. We don't require a constant for - this case; some cost analysis could be done if both are available - but neither is constant. For now, assume they're equally cheap. + /* Preincrement, or we can't increment with one simple insn. */ + if (post) + /* Save a copy of the value before inc or dec, to return it later. */ + temp = value = copy_to_reg (op0); + else + /* Arrange to return the incremented value. 
*/ + /* Copy the rtx because expand_binop will protect from the queue, + and the results of that would be invalid for us to return + if our caller does emit_queue before using our result. */ + temp = copy_rtx (value = op0); - If both strings have constant lengths, use the smaller. This - could arise if optimization results in strcpy being called with - two fixed strings, or if the code was machine-generated. We should - add some code to the `memcmp' handler below to deal with such - situations, someday. */ - if (!len || TREE_CODE (len) != INTEGER_CST) - { - if (len2) - len = len2; - else if (len == 0) - break; - } - else if (len2 && TREE_CODE (len2) == INTEGER_CST) - { - if (tree_int_cst_lt (len2, len)) - len = len2; - } + /* Increment however we can. */ + op1 = expand_binop (mode, this_optab, value, op1, op0, + TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); + /* Make sure the value is stored into OP0. */ + if (op1 != op0) + emit_move_insn (op0, op1); - chainon (arglist, build_tree_list (NULL_TREE, len)); - } + return temp; +} + +/* Expand all function calls contained within EXP, innermost ones first. + But don't look within expressions that have sequence points. + For each CALL_EXPR, record the rtx for its value + in the CALL_EXPR_RTL field. */ - /* Drops in. */ - case BUILT_IN_MEMCMP: - /* If not optimizing, call the library function. */ - if (!optimize) - break; +static void +preexpand_calls (exp) + tree exp; +{ + register int nops, i; + int type = TREE_CODE_CLASS (TREE_CODE (exp)); - if (arglist == 0 - /* Arg could be non-pointer if user redeclared this fcn wrong. 
*/ - || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE - || TREE_CHAIN (arglist) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE - || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 - || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) - return const0_rtx; - else if (!HAVE_cmpstrsi) - break; - { - tree arg1 = TREE_VALUE (arglist); - tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); - tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); - rtx result; + if (! do_preexpand_calls) + return; - int arg1_align - = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - int arg2_align - = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; - enum machine_mode insn_mode - = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0]; + /* Only expressions and references can contain calls. */ - /* If we don't have POINTER_TYPE, call the function. */ - if (arg1_align == 0 || arg2_align == 0) - { - if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP) - TREE_CHAIN (TREE_CHAIN (arglist)) = 0; - break; - } + if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r') + return; - /* Make a place to write the result of the instruction. */ - result = target; - if (! (result != 0 - && GET_CODE (result) == REG && GET_MODE (result) == insn_mode - && REGNO (result) >= FIRST_PSEUDO_REGISTER)) - result = gen_reg_rtx (insn_mode); + switch (TREE_CODE (exp)) + { + case CALL_EXPR: + /* Do nothing if already expanded. */ + if (CALL_EXPR_RTL (exp) != 0) + return; - emit_insn (gen_cmpstrsi (result, - gen_rtx (MEM, BLKmode, - expand_expr (arg1, NULL_RTX, Pmode, - EXPAND_NORMAL)), - gen_rtx (MEM, BLKmode, - expand_expr (arg2, NULL_RTX, Pmode, - EXPAND_NORMAL)), - expand_expr (len, NULL_RTX, VOIDmode, 0), - GEN_INT (MIN (arg1_align, arg2_align)))); + /* Do nothing to built-in functions. 
*/ + if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR + || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL + || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) + CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0); + return; - /* Return the value in the proper mode for this function. */ - mode = TYPE_MODE (TREE_TYPE (exp)); - if (GET_MODE (result) == mode) - return result; - else if (target != 0) - { - convert_move (target, result, 0); - return target; - } - else - return convert_to_mode (mode, result, 0); - } -#else - case BUILT_IN_STRCMP: - case BUILT_IN_MEMCMP: - break; -#endif + case COMPOUND_EXPR: + case COND_EXPR: + case TRUTH_ANDIF_EXPR: + case TRUTH_ORIF_EXPR: + /* If we find one of these, then we can be sure + the adjust will be done for it (since it makes jumps). + Do it now, so that if this is inside an argument + of a function, we don't get the stack adjustment + after some other args have already been pushed. */ + do_pending_stack_adjust (); + return; - default: /* just do library call, if unknown builtin */ - error ("built-in function `%s' not currently supported", - IDENTIFIER_POINTER (DECL_NAME (fndecl))); - } + case BLOCK: + case RTL_EXPR: + case WITH_CLEANUP_EXPR: + return; - /* The switch statement above can drop through to cause the function - to be called normally. */ + case SAVE_EXPR: + if (SAVE_EXPR_RTL (exp) != 0) + return; + } - return expand_call (exp, target, ignore); + nops = tree_code_length[(int) TREE_CODE (exp)]; + for (i = 0; i < nops; i++) + if (TREE_OPERAND (exp, i) != 0) + { + type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i))); + if (type == 'e' || type == '<' || type == '1' || type == '2' + || type == 'r') + preexpand_calls (TREE_OPERAND (exp, i)); + } } -/* Built-in functions to perform an untyped call and return. */ - -/* For each register that may be used for calling a function, this - gives a mode used to copy the register's value. 
VOIDmode indicates - the register is not used for calling a function. If the machine - has register windows, this gives only the outbound registers. - INCOMING_REGNO gives the corresponding inbound register. */ -static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; - -/* For each register that may be used for returning values, this gives - a mode used to copy the register's value. VOIDmode indicates the - register is not used for returning values. If the machine has - register windows, this gives only the outbound registers. - INCOMING_REGNO gives the corresponding inbound register. */ -static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; +/* At the start of a function, record that we have no previously-pushed + arguments waiting to be popped. */ -/* For each register that may be used for calling a function, this - gives the offset of that register into the block returned by - __bultin_apply_args. 0 indicates that the register is not - used for calling a function. */ -static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; +void +init_pending_stack_adjust () +{ + pending_stack_adjust = 0; +} -/* Return the offset of register REGNO into the block returned by - __builtin_apply_args. This is not declared static, since it is - needed in objc-act.c. */ +/* When exiting from function, if safe, clear out any pending stack adjust + so the adjustment won't get done. */ -int -apply_args_register_offset (regno) - int regno; +void +clear_pending_stack_adjust () { - apply_args_size (); - - /* Arguments are always put in outgoing registers (in the argument - block) if such make sense. */ -#ifdef OUTGOING_REGNO - regno = OUTGOING_REGNO(regno); +#ifdef EXIT_IGNORE_STACK + if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK + && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline) + && ! 
flag_inline_functions) + pending_stack_adjust = 0; #endif - return apply_args_reg_offset[regno]; } -/* Return the size required for the block returned by __builtin_apply_args, - and initialize apply_args_mode. */ +/* Pop any previously-pushed arguments that have not been popped yet. */ + +void +do_pending_stack_adjust () +{ + if (inhibit_defer_pop == 0) + { + if (pending_stack_adjust != 0) + adjust_stack (GEN_INT (pending_stack_adjust)); + pending_stack_adjust = 0; + } +} + +/* Expand all cleanups up to OLD_CLEANUPS. + Needed here, and also for language-dependent calls. */ -static int -apply_args_size () +void +expand_cleanups_to (old_cleanups) + tree old_cleanups; { - static int size = -1; - int align, regno; - enum machine_mode mode; - - /* The values computed by this function never change. */ - if (size < 0) + while (cleanups_this_call != old_cleanups) { - /* The first value is the incoming arg-pointer. */ - size = GET_MODE_SIZE (Pmode); + expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0); + cleanups_this_call = TREE_CHAIN (cleanups_this_call); + } +} + +/* Expand conditional expressions. */ - /* The second value is the structure value address unless this is - passed as an "invisible" first argument. */ - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); +/* Generate code to evaluate EXP and jump to LABEL if the value is zero. + LABEL is an rtx of code CODE_LABEL, in this function and all the + functions here. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if (FUNCTION_ARG_REGNO_P (regno)) - { - /* Search for the proper mode for copying this register's - value. I'm not sure this is right, but it works so far. 
*/ - enum machine_mode best_mode = VOIDmode; +void +jumpifnot (exp, label) + tree exp; + rtx label; +{ + do_jump (exp, label, NULL_RTX); +} - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && HARD_REGNO_NREGS (regno, mode) == 1) - best_mode = mode; +/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */ - if (best_mode == VOIDmode) - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && (mov_optab->handlers[(int) mode].insn_code - != CODE_FOR_nothing)) - best_mode = mode; +void +jumpif (exp, label) + tree exp; + rtx label; +{ + do_jump (exp, NULL_RTX, label); +} - mode = best_mode; - if (mode == VOIDmode) - abort (); +/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if + the result is zero, or IF_TRUE_LABEL if the result is one. + Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero, + meaning fall through in that case. - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - apply_args_reg_offset[regno] = size; - size += GET_MODE_SIZE (mode); - apply_args_mode[regno] = mode; - } - else - { - apply_args_mode[regno] = VOIDmode; - apply_args_reg_offset[regno] = 0; - } - } - return size; -} + do_jump always does any pending stack adjust except when it does not + actually perform a jump. An example where there is no jump + is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. -/* Return the size required for the block returned by __builtin_apply, - and initialize apply_result_mode. */ + This function is responsible for optimizing cases such as + &&, || and comparison operators in EXP. 
*/ -static int -apply_result_size () +void +do_jump (exp, if_false_label, if_true_label) + tree exp; + rtx if_false_label, if_true_label; { - static int size = -1; - int align, regno; - enum machine_mode mode; - - /* The values computed by this function never change. */ - if (size < 0) - { - size = 0; + register enum tree_code code = TREE_CODE (exp); + /* Some cases need to create a label to jump to + in order to properly fall through. + These cases set DROP_THROUGH_LABEL nonzero. */ + rtx drop_through_label = 0; + rtx temp; + rtx comparison = 0; + int i; + tree type; - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if (FUNCTION_VALUE_REGNO_P (regno)) - { - /* Search for the proper mode for copying this register's - value. I'm not sure this is right, but it works so far. */ - enum machine_mode best_mode = VOIDmode; + emit_queue (); - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); - mode != TImode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode)) - best_mode = mode; + switch (code) + { + case ERROR_MARK: + break; - if (best_mode == VOIDmode) - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); - mode != VOIDmode; - mode = GET_MODE_WIDER_MODE (mode)) - if (HARD_REGNO_MODE_OK (regno, mode) - && (mov_optab->handlers[(int) mode].insn_code - != CODE_FOR_nothing)) - best_mode = mode; + case INTEGER_CST: + temp = integer_zerop (exp) ? if_false_label : if_true_label; + if (temp) + emit_jump (temp); + break; - mode = best_mode; - if (mode == VOIDmode) - abort (); +#if 0 + /* This is not true with #pragma weak */ + case ADDR_EXPR: + /* The address of something can never be zero. 
*/ + if (if_true_label) + emit_jump (if_true_label); + break; +#endif - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - size += GET_MODE_SIZE (mode); - apply_result_mode[regno] = mode; - } - else - apply_result_mode[regno] = VOIDmode; + case NOP_EXPR: + if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF + || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF + || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF) + goto normal; + case CONVERT_EXPR: + /* If we are narrowing the operand, we have to do the compare in the + narrower mode. */ + if ((TYPE_PRECISION (TREE_TYPE (exp)) + < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))) + goto normal; + case NON_LVALUE_EXPR: + case REFERENCE_EXPR: + case ABS_EXPR: + case NEGATE_EXPR: + case LROTATE_EXPR: + case RROTATE_EXPR: + /* These cannot change zero->non-zero or vice versa. */ + do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); + break; - /* Allow targets that use untyped_call and untyped_return to override - the size so that machine-specific information can be stored here. */ -#ifdef APPLY_RESULT_SIZE - size = APPLY_RESULT_SIZE; +#if 0 + /* This is never less insns than evaluating the PLUS_EXPR followed by + a test and can be longer if the test is eliminated. */ + case PLUS_EXPR: + /* Reduce to minus. */ + exp = build (MINUS_EXPR, TREE_TYPE (exp), + TREE_OPERAND (exp, 0), + fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)), + TREE_OPERAND (exp, 1)))); + /* Process as MINUS. */ #endif - } - return size; -} -#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) -/* Create a vector describing the result block RESULT. If SAVEP is true, - the result block is used to save the values; otherwise it is used to - restore the values. */ + case MINUS_EXPR: + /* Non-zero iff operands of minus differ. 
*/ + comparison = compare (build (NE_EXPR, TREE_TYPE (exp), + TREE_OPERAND (exp, 0), + TREE_OPERAND (exp, 1)), + NE, NE); + break; -static rtx -result_vector (savep, result) - int savep; - rtx result; -{ - int regno, size, align, nelts; - enum machine_mode mode; - rtx reg, mem; - rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); - - size = nelts = 0; - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno); - mem = change_address (result, mode, - plus_constant (XEXP (result, 0), size)); - savevec[nelts++] = (savep - ? gen_rtx (SET, VOIDmode, mem, reg) - : gen_rtx (SET, VOIDmode, reg, mem)); - size += GET_MODE_SIZE (mode); - } - return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec)); -} -#endif /* HAVE_untyped_call or HAVE_untyped_return */ + case BIT_AND_EXPR: + /* If we are AND'ing with a small constant, do this comparison in the + smallest type that fits. If the machine doesn't have comparisons + that small, it will be converted back to the wider comparison. + This helps if we are testing the sign bit of a narrower object. + combine can't do this for us because it can't know whether a + ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */ -/* Save the state required to perform an untyped call with the same - arguments as were passed to the current function. */ + if (! 
SLOW_BYTE_ACCESS + && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST + && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT + && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0 + && (type = type_for_size (i + 1, 1)) != 0 + && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) + && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code + != CODE_FOR_nothing)) + { + do_jump (convert (type, exp), if_false_label, if_true_label); + break; + } + goto normal; -static rtx -expand_builtin_apply_args () -{ - rtx registers; - int size, align, regno; - enum machine_mode mode; + case TRUTH_NOT_EXPR: + do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); + break; - /* Create a block where the arg-pointer, structure value address, - and argument registers can be saved. */ - registers = assign_stack_local (BLKmode, apply_args_size (), -1); + case TRUTH_ANDIF_EXPR: + if (if_false_label == 0) + if_false_label = drop_through_label = gen_label_rtx (); + do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX); + do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); + break; - /* Walk past the arg-pointer and structure value address. */ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); + case TRUTH_ORIF_EXPR: + if (if_true_label == 0) + if_true_label = drop_through_label = gen_label_rtx (); + do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label); + do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); + break; - /* Save each register used in calling a function to the block. 
*/ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_args_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - emit_move_insn (change_address (registers, mode, - plus_constant (XEXP (registers, 0), - size)), - gen_rtx (REG, mode, INCOMING_REGNO (regno))); - size += GET_MODE_SIZE (mode); - } + case COMPOUND_EXPR: + expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); + free_temp_slots (); + emit_queue (); + do_pending_stack_adjust (); + do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); + break; - /* Save the arg pointer to the block. */ - emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)), - copy_to_reg (virtual_incoming_args_rtx)); - size = GET_MODE_SIZE (Pmode); + case COMPONENT_REF: + case BIT_FIELD_REF: + case ARRAY_REF: + { + int bitsize, bitpos, unsignedp; + enum machine_mode mode; + tree type; + tree offset; + int volatilep = 0; - /* Save the structure value address unless this is passed as an - "invisible" first argument. */ - if (struct_value_incoming_rtx) - { - emit_move_insn (change_address (registers, Pmode, - plus_constant (XEXP (registers, 0), - size)), - copy_to_reg (struct_value_incoming_rtx)); - size += GET_MODE_SIZE (Pmode); - } + /* Get description of this reference. We don't actually care + about the underlying object here. */ + get_inner_reference (exp, &bitsize, &bitpos, &offset, + &mode, &unsignedp, &volatilep); - /* Return the address of the block. */ - return copy_addr_to_reg (XEXP (registers, 0)); -} + type = type_for_size (bitsize, unsignedp); + if (! 
SLOW_BYTE_ACCESS + && type != 0 && bitsize >= 0 + && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) + && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code + != CODE_FOR_nothing)) + { + do_jump (convert (type, exp), if_false_label, if_true_label); + break; + } + goto normal; + } -/* Perform an untyped call and save the state required to perform an - untyped return of whatever value was returned by the given function. */ + case COND_EXPR: + /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */ + if (integer_onep (TREE_OPERAND (exp, 1)) + && integer_zerop (TREE_OPERAND (exp, 2))) + do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); -static rtx -expand_builtin_apply (function, arguments, argsize) - rtx function, arguments, argsize; -{ - int size, align, regno; - enum machine_mode mode; - rtx incoming_args, result, reg, dest, call_insn; - rtx old_stack_level = 0; - rtx use_insns = 0; + else if (integer_zerop (TREE_OPERAND (exp, 1)) + && integer_onep (TREE_OPERAND (exp, 2))) + do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); - /* Create a block where the return registers can be saved. */ - result = assign_stack_local (BLKmode, apply_result_size (), -1); + else + { + register rtx label1 = gen_label_rtx (); + drop_through_label = gen_label_rtx (); + do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX); + /* Now the THEN-expression. */ + do_jump (TREE_OPERAND (exp, 1), + if_false_label ? if_false_label : drop_through_label, + if_true_label ? if_true_label : drop_through_label); + /* In case the do_jump just above never jumps. */ + do_pending_stack_adjust (); + emit_label (label1); + /* Now the ELSE-expression. */ + do_jump (TREE_OPERAND (exp, 2), + if_false_label ? if_false_label : drop_through_label, + if_true_label ? if_true_label : drop_through_label); + } + break; - /* ??? The argsize value should be adjusted here. 
*/ + case EQ_EXPR: + if (integer_zerop (TREE_OPERAND (exp, 1))) + do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); + else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && + !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_equality (exp, if_false_label, if_true_label); + else + comparison = compare (exp, EQ, EQ); + break; - /* Fetch the arg pointer from the ARGUMENTS block. */ - incoming_args = gen_reg_rtx (Pmode); - emit_move_insn (incoming_args, - gen_rtx (MEM, Pmode, arguments)); -#ifndef STACK_GROWS_DOWNWARD - incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize, - incoming_args, 0, OPTAB_LIB_WIDEN); -#endif + case NE_EXPR: + if (integer_zerop (TREE_OPERAND (exp, 1))) + do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); + else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && + !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_equality (exp, if_true_label, if_false_label); + else + comparison = compare (exp, NE, NE); + break; - /* Perform postincrements before actually calling the function. */ - emit_queue (); + case LT_EXPR: + if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); + else + comparison = compare (exp, LT, LTU); + break; - /* Push a new argument block and copy the arguments. 
*/ - do_pending_stack_adjust (); - emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); + case LE_EXPR: + if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); + else + comparison = compare (exp, LE, LEU); + break; - /* Push a block of memory onto the stack to store the memory arguments. - Save the address in a register, and copy the memory arguments. ??? I - haven't figured out how the calling convention macros effect this, - but it's likely that the source and/or destination addresses in - the block copy will need updating in machine specific ways. */ - dest = copy_addr_to_reg (push_block (argsize, 0, 0)); - emit_block_move (gen_rtx (MEM, BLKmode, dest), - gen_rtx (MEM, BLKmode, incoming_args), - argsize, - PARM_BOUNDARY / BITS_PER_UNIT); + case GT_EXPR: + if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); + else + comparison = compare (exp, GT, GTU); + break; - /* Refer to the argument block. */ - apply_args_size (); - arguments = gen_rtx (MEM, BLKmode, arguments); + case GE_EXPR: + if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + == MODE_INT) + && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) + do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); + else + comparison = compare (exp, GE, GEU); + break; - /* Walk past the arg-pointer and structure value address. */ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) - size += GET_MODE_SIZE (Pmode); + default: + normal: + temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); +#if 0 + /* This is not needed any more and causes poor code since it causes + comparisons and tests from non-SI objects to have different code + sequences. 
*/ + /* Copy to register to avoid generating bad insns by cse + from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */ + if (!cse_not_expected && GET_CODE (temp) == MEM) + temp = copy_to_reg (temp); +#endif + do_pending_stack_adjust (); + if (GET_CODE (temp) == CONST_INT) + comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx); + else if (GET_CODE (temp) == LABEL_REF) + comparison = const_true_rtx; + else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT + && !can_compare_p (GET_MODE (temp))) + /* Note swapping the labels gives us not-equal. */ + do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); + else if (GET_MODE (temp) != VOIDmode) + comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)), + NE, TREE_UNSIGNED (TREE_TYPE (exp)), + GET_MODE (temp), NULL_RTX, 0); + else + abort (); + } - /* Restore each of the registers previously saved. Make USE insns - for each of these registers for use in making the call. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_args_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx (REG, mode, regno); - emit_move_insn (reg, - change_address (arguments, mode, - plus_constant (XEXP (arguments, 0), - size))); + /* Do any postincrements in the expression that was tested. */ + emit_queue (); - push_to_sequence (use_insns); - emit_insn (gen_rtx (USE, VOIDmode, reg)); - use_insns = get_insns (); - end_sequence (); - size += GET_MODE_SIZE (mode); - } + /* If COMPARISON is nonzero here, it is an rtx that can be substituted + straight into a conditional jump instruction as the jump condition. + Otherwise, all the work has been done already. */ - /* Restore the structure value address unless this is passed as an - "invisible" first argument. 
*/ - size = GET_MODE_SIZE (Pmode); - if (struct_value_rtx) + if (comparison == const_true_rtx) { - rtx value = gen_reg_rtx (Pmode); - emit_move_insn (value, - change_address (arguments, Pmode, - plus_constant (XEXP (arguments, 0), - size))); - emit_move_insn (struct_value_rtx, value); - if (GET_CODE (struct_value_rtx) == REG) - { - push_to_sequence (use_insns); - emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx)); - use_insns = get_insns (); - end_sequence (); - } - size += GET_MODE_SIZE (Pmode); + if (if_true_label) + emit_jump (if_true_label); } + else if (comparison == const0_rtx) + { + if (if_false_label) + emit_jump (if_false_label); + } + else if (comparison) + do_jump_for_compare (comparison, if_false_label, if_true_label); - /* All arguments and registers used for the call are set up by now! */ - function = prepare_call_address (function, NULL_TREE, &use_insns); - - /* Ensure address is valid. SYMBOL_REF is already valid, so no need, - and we don't want to load it into a register as an optimization, - because prepare_call_address already did it if it should be done. */ - if (GET_CODE (function) != SYMBOL_REF) - function = memory_address (FUNCTION_MODE, function); + free_temp_slots (); - /* Generate the actual call instruction and save the return value. */ -#ifdef HAVE_untyped_call - if (HAVE_untyped_call) - emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function), - result, result_vector (1, result))); - else -#endif -#ifdef HAVE_call_value - if (HAVE_call_value) + if (drop_through_label) { - rtx valreg = 0; - - /* Locate the unique return register. It is not possible to - express a call that sets more than one return register using - call_value; use untyped_call for that. In fact, untyped_call - only needs to save the return registers in the given block. */ - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - if (valreg) - abort (); /* HAVE_untyped_call required. 
*/ - valreg = gen_rtx (REG, mode, regno); - } + /* If do_jump produces code that might be jumped around, + do any stack adjusts from that code, before the place + where control merges in. */ + do_pending_stack_adjust (); + emit_label (drop_through_label); + } +} + +/* Given a comparison expression EXP for values too wide to be compared + with one insn, test the comparison and jump to the appropriate label. + The code of EXP is ignored; we always test GT if SWAP is 0, + and LT if SWAP is 1. */ - emit_call_insn (gen_call_value (valreg, - gen_rtx (MEM, FUNCTION_MODE, function), - const0_rtx, NULL_RTX, const0_rtx)); +static void +do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) + tree exp; + int swap; + rtx if_false_label, if_true_label; +{ + rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); + rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); + enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); + rtx drop_through_label = 0; + int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); + int i; - emit_move_insn (change_address (result, GET_MODE (valreg), - XEXP (result, 0)), - valreg); - } - else -#endif - abort (); + if (! if_true_label || ! if_false_label) + drop_through_label = gen_label_rtx (); + if (! if_true_label) + if_true_label = drop_through_label; + if (! if_false_label) + if_false_label = drop_through_label; - /* Find the CALL insn we just emitted and write the USE insns before it. */ - for (call_insn = get_last_insn (); - call_insn && GET_CODE (call_insn) != CALL_INSN; - call_insn = PREV_INSN (call_insn)) - ; + /* Compare a word at a time, high order first. */ + for (i = 0; i < nwords; i++) + { + rtx comp; + rtx op0_word, op1_word; - if (! 
call_insn) - abort (); + if (WORDS_BIG_ENDIAN) + { + op0_word = operand_subword_force (op0, i, mode); + op1_word = operand_subword_force (op1, i, mode); + } + else + { + op0_word = operand_subword_force (op0, nwords - 1 - i, mode); + op1_word = operand_subword_force (op1, nwords - 1 - i, mode); + } - /* Put the USE insns before the CALL. */ - emit_insns_before (use_insns, call_insn); + /* All but high-order word must be compared as unsigned. */ + comp = compare_from_rtx (op0_word, op1_word, + (unsignedp || i > 0) ? GTU : GT, + unsignedp, word_mode, NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_true_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, NULL_RTX, if_true_label); - /* Restore the stack. */ - emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); + /* Consider lower words only if these are equal. */ + comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, + NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_false_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, NULL_RTX, if_false_label); + } - /* Return the address of the result block. */ - return copy_addr_to_reg (XEXP (result, 0)); + if (if_false_label) + emit_jump (if_false_label); + if (drop_through_label) + emit_label (drop_through_label); } -/* Perform an untyped return. */ +/* Compare OP0 with OP1, word at a time, in mode MODE. + UNSIGNEDP says to do unsigned comparison. + Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. 
*/ static void -expand_builtin_return (result) - rtx result; +do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label) + enum machine_mode mode; + int unsignedp; + rtx op0, op1; + rtx if_false_label, if_true_label; { - int size, align, regno; - enum machine_mode mode; - rtx reg; - rtx use_insns = 0; + int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); + rtx drop_through_label = 0; + int i; - apply_result_size (); - result = gen_rtx (MEM, BLKmode, result); + if (! if_true_label || ! if_false_label) + drop_through_label = gen_label_rtx (); + if (! if_true_label) + if_true_label = drop_through_label; + if (! if_false_label) + if_false_label = drop_through_label; -#ifdef HAVE_untyped_return - if (HAVE_untyped_return) + /* Compare a word at a time, high order first. */ + for (i = 0; i < nwords; i++) { - emit_jump_insn (gen_untyped_return (result, result_vector (0, result))); - emit_barrier (); - return; - } -#endif + rtx comp; + rtx op0_word, op1_word; - /* Restore the return value and note that each value is used. */ - size = 0; - for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) - if ((mode = apply_result_mode[regno]) != VOIDmode) - { - align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; - if (size % align != 0) - size = CEIL (size, align) * align; - reg = gen_rtx (REG, mode, INCOMING_REGNO (regno)); - emit_move_insn (reg, - change_address (result, mode, - plus_constant (XEXP (result, 0), - size))); + if (WORDS_BIG_ENDIAN) + { + op0_word = operand_subword_force (op0, i, mode); + op1_word = operand_subword_force (op1, i, mode); + } + else + { + op0_word = operand_subword_force (op0, nwords - 1 - i, mode); + op1_word = operand_subword_force (op1, nwords - 1 - i, mode); + } - push_to_sequence (use_insns); - emit_insn (gen_rtx (USE, VOIDmode, reg)); - use_insns = get_insns (); - end_sequence (); - size += GET_MODE_SIZE (mode); - } + /* All but high-order word must be compared as unsigned. 
*/ + comp = compare_from_rtx (op0_word, op1_word, + (unsignedp || i > 0) ? GTU : GT, + unsignedp, word_mode, NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_true_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, NULL_RTX, if_true_label); - /* Put the USE insns before the return. */ - emit_insns (use_insns); + /* Consider lower words only if these are equal. */ + comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, + NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_false_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, NULL_RTX, if_false_label); + } - /* Return whatever values was restored by jumping directly to the end - of the function. */ - expand_null_return (); + if (if_false_label) + emit_jump (if_false_label); + if (drop_through_label) + emit_label (drop_through_label); } - -/* Expand code for a post- or pre- increment or decrement - and return the RTX for the result. - POST is 1 for postinc/decrements and 0 for preinc/decrements. */ -static rtx -expand_increment (exp, post) - register tree exp; - int post; +/* Given an EQ_EXPR expression EXP for values too wide to be compared + with one insn, test the comparison and jump to the appropriate label. 
*/ + +static void +do_jump_by_parts_equality (exp, if_false_label, if_true_label) + tree exp; + rtx if_false_label, if_true_label; { - register rtx op0, op1; - register rtx temp, value; - register tree incremented = TREE_OPERAND (exp, 0); - optab this_optab = add_optab; - int icode; - enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); - int op0_is_copy = 0; - int single_insn = 0; + rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); + rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); + enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); + int i; + rtx drop_through_label = 0; - /* Stabilize any component ref that might need to be - evaluated more than once below. */ - if (!post - || TREE_CODE (incremented) == BIT_FIELD_REF - || (TREE_CODE (incremented) == COMPONENT_REF - && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF - || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1))))) - incremented = stabilize_reference (incremented); - /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost - ones into save exprs so that they don't accidentally get evaluated - more than once by the code below. */ - if (TREE_CODE (incremented) == PREINCREMENT_EXPR - || TREE_CODE (incremented) == PREDECREMENT_EXPR) - incremented = save_expr (incremented); + if (! if_false_label) + drop_through_label = if_false_label = gen_label_rtx (); - /* Compute the operands as RTX. - Note whether OP0 is the actual lvalue or a copy of it: - I believe it is a copy iff it is a register or subreg - and insns were generated in computing it. 
*/ + for (i = 0; i < nwords; i++) + { + rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode), + operand_subword_force (op1, i, mode), + EQ, TREE_UNSIGNED (TREE_TYPE (exp)), + word_mode, NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_false_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, if_false_label, NULL_RTX); + } - temp = get_last_insn (); - op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0); + if (if_true_label) + emit_jump (if_true_label); + if (drop_through_label) + emit_label (drop_through_label); +} + +/* Jump according to whether OP0 is 0. + We assume that OP0 has an integer mode that is too wide + for the available compare insns. */ - /* If OP0 is a SUBREG made for a promoted variable, we cannot increment - in place but intead must do sign- or zero-extension during assignment, - so we copy it into a new register and let the code below use it as - a copy. +static void +do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) + rtx op0; + rtx if_false_label, if_true_label; +{ + int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; + int i; + rtx drop_through_label = 0; - Note that we can safely modify this SUBREG since it is know not to be - shared (it was made by the expand_expr call above). */ + if (! 
if_false_label) + drop_through_label = if_false_label = gen_label_rtx (); - if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0)) - SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0)); + for (i = 0; i < nwords; i++) + { + rtx comp = compare_from_rtx (operand_subword_force (op0, i, + GET_MODE (op0)), + const0_rtx, EQ, 1, word_mode, NULL_RTX, 0); + if (comp == const_true_rtx) + emit_jump (if_false_label); + else if (comp != const0_rtx) + do_jump_for_compare (comp, if_false_label, NULL_RTX); + } - op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG) - && temp != get_last_insn ()); - op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); + if (if_true_label) + emit_jump (if_true_label); + if (drop_through_label) + emit_label (drop_through_label); +} - /* Decide whether incrementing or decrementing. */ - if (TREE_CODE (exp) == POSTDECREMENT_EXPR - || TREE_CODE (exp) == PREDECREMENT_EXPR) - this_optab = sub_optab; +/* Given a comparison expression in rtl form, output conditional branches to + IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */ - /* Convert decrement by a constant into a negative increment. */ - if (this_optab == sub_optab - && GET_CODE (op1) == CONST_INT) +static void +do_jump_for_compare (comparison, if_false_label, if_true_label) + rtx comparison, if_false_label, if_true_label; +{ + if (if_true_label) { - op1 = GEN_INT (- INTVAL (op1)); - this_optab = add_optab; - } + if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) + emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label)); + else + abort (); - /* For a preincrement, see if we can do this with a single instruction. */ - if (!post) - { - icode = (int) this_optab->handlers[(int) mode].insn_code; - if (icode != (int) CODE_FOR_nothing - /* Make sure that OP0 is valid for operands 0 and 1 - of the insn we want to queue. 
*/ - && (*insn_operand_predicate[icode][0]) (op0, mode) - && (*insn_operand_predicate[icode][1]) (op0, mode) - && (*insn_operand_predicate[icode][2]) (op1, mode)) - single_insn = 1; + if (if_false_label) + emit_jump (if_false_label); } - - /* If OP0 is not the actual lvalue, but rather a copy in a register, - then we cannot just increment OP0. We must therefore contrive to - increment the original value. Then, for postincrement, we can return - OP0 since it is a copy of the old value. For preincrement, expand here - unless we can do it with a single insn. */ - if (op0_is_copy || (!post && !single_insn)) + else if (if_false_label) { - /* This is the easiest way to increment the value wherever it is. - Problems with multiple evaluation of INCREMENTED are prevented - because either (1) it is a component_ref or preincrement, - in which case it was stabilized above, or (2) it is an array_ref - with constant index in an array in a register, which is - safe to reevaluate. */ - tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR - || TREE_CODE (exp) == PREDECREMENT_EXPR) - ? MINUS_EXPR : PLUS_EXPR), - TREE_TYPE (exp), - incremented, - TREE_OPERAND (exp, 1)); - temp = expand_assignment (incremented, newexp, ! post, 0); - return post ? op0 : temp; - } + rtx insn; + rtx prev = PREV_INSN (get_last_insn ()); + rtx branch = 0; - if (post) - { - /* We have a true reference to the value in OP0. - If there is an insn to add or subtract in this mode, queue it. - Queueing the increment insn avoids the register shuffling - that often results if we must increment now and first save - the old value for subsequent use. */ + /* Output the branch with the opposite condition. Then try to invert + what is generated. If more than one insn is a branch, or if the + branch is not the last insn written, abort. If we can't invert + the branch, emit make a true label, redirect this jump to that, + emit a jump to the false label and define the true label. 
*/ -#if 0 /* Turned off to avoid making extra insn for indexed memref. */ - op0 = stabilize (op0); -#endif + if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) + emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label)); + else + abort (); - icode = (int) this_optab->handlers[(int) mode].insn_code; - if (icode != (int) CODE_FOR_nothing - /* Make sure that OP0 is valid for operands 0 and 1 - of the insn we want to queue. */ - && (*insn_operand_predicate[icode][0]) (op0, mode) - && (*insn_operand_predicate[icode][1]) (op0, mode)) - { - if (! (*insn_operand_predicate[icode][2]) (op1, mode)) - op1 = force_reg (mode, op1); + /* Here we get the insn before what was just emitted. + On some machines, emitting the branch can discard + the previous compare insn and emit a replacement. */ + if (prev == 0) + /* If there's only one preceding insn... */ + insn = get_insns (); + else + insn = NEXT_INSN (prev); - return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); + for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == JUMP_INSN) + { + if (branch) + abort (); + branch = insn; + } + + if (branch != get_last_insn ()) + abort (); + + if (! invert_jump (branch, if_false_label)) + { + if_true_label = gen_label_rtx (); + redirect_jump (branch, if_true_label); + emit_jump (if_false_label); + emit_label (if_true_label); } } +} + +/* Generate code for a comparison expression EXP + (including code to compute the values to be compared) + and set (CC0) according to the result. + SIGNED_CODE should be the rtx operation for this comparison for + signed data; UNSIGNED_CODE, likewise for use if data is unsigned. + + We force a stack adjustment unless there are currently + things pushed on the stack that aren't yet used. 
*/ + +static rtx +compare (exp, signed_code, unsigned_code) + register tree exp; + enum rtx_code signed_code, unsigned_code; +{ + register rtx op0 + = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); + register rtx op1 + = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); + register tree type = TREE_TYPE (TREE_OPERAND (exp, 0)); + register enum machine_mode mode = TYPE_MODE (type); + int unsignedp = TREE_UNSIGNED (type); + enum rtx_code code = unsignedp ? unsigned_code : signed_code; - /* Preincrement, or we can't increment with one simple insn. */ - if (post) - /* Save a copy of the value before inc or dec, to return it later. */ - temp = value = copy_to_reg (op0); - else - /* Arrange to return the incremented value. */ - /* Copy the rtx because expand_binop will protect from the queue, - and the results of that would be invalid for us to return - if our caller does emit_queue before using our result. */ - temp = copy_rtx (value = op0); + return compare_from_rtx (op0, op1, code, unsignedp, mode, + ((mode == BLKmode) + ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), + TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); +} - /* Increment however we can. */ - op1 = expand_binop (mode, this_optab, value, op1, op0, - TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); - /* Make sure the value is stored into OP0. */ - if (op1 != op0) - emit_move_insn (op0, op1); +/* Like compare but expects the values to compare as two rtx's. + The decision as to signed or unsigned comparison must be made by the caller. - return temp; -} - -/* Expand all function calls contained within EXP, innermost ones first. - But don't look within expressions that have sequence points. - For each CALL_EXPR, record the rtx for its value - in the CALL_EXPR_RTL field. */ + If MODE is BLKmode, SIZE is an RTX giving the size of the objects being + compared. 
-static void -preexpand_calls (exp) - tree exp; -{ - register int nops, i; - int type = TREE_CODE_CLASS (TREE_CODE (exp)); + If ALIGN is non-zero, it is the alignment of this type; if zero, the + size of MODE should be used. */ - if (! do_preexpand_calls) - return; +rtx +compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) + register rtx op0, op1; + enum rtx_code code; + int unsignedp; + enum machine_mode mode; + rtx size; + int align; +{ + rtx tem; - /* Only expressions and references can contain calls. */ + /* If one operand is constant, make it the second one. Only do this + if the other operand is not constant as well. */ - if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r') - return; + if ((CONSTANT_P (op0) && ! CONSTANT_P (op1)) + || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT)) + { + tem = op0; + op0 = op1; + op1 = tem; + code = swap_condition (code); + } - switch (TREE_CODE (exp)) + if (flag_force_mem) { - case CALL_EXPR: - /* Do nothing if already expanded. */ - if (CALL_EXPR_RTL (exp) != 0) - return; + op0 = force_not_mem (op0); + op1 = force_not_mem (op1); + } - /* Do nothing to built-in functions. */ - if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR - || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL - || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) - CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0); - return; + do_pending_stack_adjust (); - case COMPOUND_EXPR: - case COND_EXPR: - case TRUTH_ANDIF_EXPR: - case TRUTH_ORIF_EXPR: - /* If we find one of these, then we can be sure - the adjust will be done for it (since it makes jumps). - Do it now, so that if this is inside an argument - of a function, we don't get the stack adjustment - after some other args have already been pushed. 
*/ - do_pending_stack_adjust (); - return; + if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT + && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) + return tem; - case BLOCK: - case RTL_EXPR: - case WITH_CLEANUP_EXPR: - return; +#if 0 + /* There's no need to do this now that combine.c can eliminate lots of + sign extensions. This can be less efficient in certain cases on other + machines. */ - case SAVE_EXPR: - if (SAVE_EXPR_RTL (exp) != 0) - return; + /* If this is a signed equality comparison, we can do it as an + unsigned comparison since zero-extension is cheaper than sign + extension and comparisons with zero are done as unsigned. This is + the case even on machines that can do fast sign extension, since + zero-extension is easier to combine with other operations than + sign-extension is. If we are comparing against a constant, we must + convert it to what it would look like unsigned. */ + if ((code == EQ || code == NE) && ! unsignedp + && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) + { + if (GET_CODE (op1) == CONST_INT + && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) + op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); + unsignedp = 1; } +#endif + + emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align); - nops = tree_code_length[(int) TREE_CODE (exp)]; - for (i = 0; i < nops; i++) - if (TREE_OPERAND (exp, i) != 0) - { - type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i))); - if (type == 'e' || type == '<' || type == '1' || type == '2' - || type == 'r') - preexpand_calls (TREE_OPERAND (exp, i)); - } + return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx); } -/* At the start of a function, record that we have no previously-pushed - arguments waiting to be popped. */ +/* Generate code to calculate EXP using a store-flag instruction + and return an rtx for the result. EXP is either a comparison + or a TRUTH_NOT_EXPR whose operand is a comparison. 
-void -init_pending_stack_adjust () -{ - pending_stack_adjust = 0; -} + If TARGET is nonzero, store the result there if convenient. -/* When exiting from function, if safe, clear out any pending stack adjust - so the adjustment won't get done. */ + If ONLY_CHEAP is non-zero, only do this if it is likely to be very + cheap. -void -clear_pending_stack_adjust () -{ -#ifdef EXIT_IGNORE_STACK - if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK - && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline) - && ! flag_inline_functions) - pending_stack_adjust = 0; -#endif -} + Return zero if there is no suitable set-flag instruction + available on this machine. -/* Pop any previously-pushed arguments that have not been popped yet. */ + Once expand_expr has been called on the arguments of the comparison, + we are committed to doing the store flag, since it is not safe to + re-evaluate the expression. We emit the store-flag insn by calling + emit_store_flag, but only expand the arguments if we have a reason + to believe that emit_store_flag will be successful. If we think that + it will, but it isn't, we have to simulate the store-flag with a + set/jump/set sequence. */ -void -do_pending_stack_adjust () +static rtx +do_store_flag (exp, target, mode, only_cheap) + tree exp; + rtx target; + enum machine_mode mode; + int only_cheap; { - if (inhibit_defer_pop == 0) - { - if (pending_stack_adjust != 0) - adjust_stack (GEN_INT (pending_stack_adjust)); - pending_stack_adjust = 0; - } -} + enum rtx_code code; + tree arg0, arg1, type; + tree tem; + enum machine_mode operand_mode; + int invert = 0; + int unsignedp; + rtx op0, op1; + enum insn_code icode; + rtx subtarget = target; + rtx result, label, pattern, jump_pat; -/* Expand all cleanups up to OLD_CLEANUPS. - Needed here, and also for language-dependent calls. */ + /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the + result at the end. 
We can't simply invert the test since it would + have already been inverted if it were valid. This case occurs for + some floating-point comparisons. */ + + if (TREE_CODE (exp) == TRUTH_NOT_EXPR) + invert = 1, exp = TREE_OPERAND (exp, 0); + + arg0 = TREE_OPERAND (exp, 0); + arg1 = TREE_OPERAND (exp, 1); + type = TREE_TYPE (arg0); + operand_mode = TYPE_MODE (type); + unsignedp = TREE_UNSIGNED (type); + + /* We won't bother with BLKmode store-flag operations because it would mean + passing a lot of information to emit_store_flag. */ + if (operand_mode == BLKmode) + return 0; + + STRIP_NOPS (arg0); + STRIP_NOPS (arg1); + + /* Get the rtx comparison code to use. We know that EXP is a comparison + operation of some type. Some comparisons against 1 and -1 can be + converted to comparisons with zero. Do so here so that the tests + below will be aware that we have a comparison with zero. These + tests will not catch constants in the first operand, but constants + are rarely passed as the first operand. */ + + switch (TREE_CODE (exp)) + { + case EQ_EXPR: + code = EQ; + break; + case NE_EXPR: + code = NE; + break; + case LT_EXPR: + if (integer_onep (arg1)) + arg1 = integer_zero_node, code = unsignedp ? LEU : LE; + else + code = unsignedp ? LTU : LT; + break; + case LE_EXPR: + if (! unsignedp && integer_all_onesp (arg1)) + arg1 = integer_zero_node, code = LT; + else + code = unsignedp ? LEU : LE; + break; + case GT_EXPR: + if (! unsignedp && integer_all_onesp (arg1)) + arg1 = integer_zero_node, code = GE; + else + code = unsignedp ? GTU : GT; + break; + case GE_EXPR: + if (integer_onep (arg1)) + arg1 = integer_zero_node, code = unsignedp ? GTU : GT; + else + code = unsignedp ? GEU : GE; + break; + default: + abort (); + } -void -expand_cleanups_to (old_cleanups) - tree old_cleanups; -{ - while (cleanups_this_call != old_cleanups) + /* Put a constant second. 
*/ + if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) { - expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0); - cleanups_this_call = TREE_CHAIN (cleanups_this_call); + tem = arg0; arg0 = arg1; arg1 = tem; + code = swap_condition (code); } -} - -/* Expand conditional expressions. */ -/* Generate code to evaluate EXP and jump to LABEL if the value is zero. - LABEL is an rtx of code CODE_LABEL, in this function and all the - functions here. */ + /* If this is an equality or inequality test of a single bit, we can + do this by shifting the bit being tested to the low-order bit and + masking the result with the constant 1. If the condition was EQ, + we xor it with 1. This does not require an scc insn and is faster + than an scc insn even if we have it. */ -void -jumpifnot (exp, label) - tree exp; - rtx label; -{ - do_jump (exp, label, NULL_RTX); -} + if ((code == NE || code == EQ) + && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) + && integer_pow2p (TREE_OPERAND (arg0, 1)) + && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT) + { + tree inner = TREE_OPERAND (arg0, 0); + int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1), + NULL_RTX, VOIDmode, 0))); + int ops_unsignedp; -/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */ + /* If INNER is a right shift of a constant and it plus BITNUM does + not overflow, adjust BITNUM and INNER. */ -void -jumpif (exp, label) - tree exp; - rtx label; -{ - do_jump (exp, NULL_RTX, label); -} + if (TREE_CODE (inner) == RSHIFT_EXPR + && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST + && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 + && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)) + < TYPE_PRECISION (type))) + { + bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); + inner = TREE_OPERAND (inner, 0); + } -/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if - the result is zero, or IF_TRUE_LABEL if the result is one. 
- Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero, - meaning fall through in that case. + /* If we are going to be able to omit the AND below, we must do our + operations as unsigned. If we must use the AND, we have a choice. + Normally unsigned is faster, but for some machines signed is. */ + ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1 +#ifdef BYTE_LOADS_SIGN_EXTEND + : 0 +#else + : 1 +#endif + ); - do_jump always does any pending stack adjust except when it does not - actually perform a jump. An example where there is no jump - is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. + if (subtarget == 0 || GET_CODE (subtarget) != REG + || GET_MODE (subtarget) != operand_mode + || ! safe_from_p (subtarget, inner)) + subtarget = 0; - This function is responsible for optimizing cases such as - &&, || and comparison operators in EXP. */ + op0 = expand_expr (inner, subtarget, VOIDmode, 0); -void -do_jump (exp, if_false_label, if_true_label) - tree exp; - rtx if_false_label, if_true_label; -{ - register enum tree_code code = TREE_CODE (exp); - /* Some cases need to create a label to jump to - in order to properly fall through. - These cases set DROP_THROUGH_LABEL nonzero. */ - rtx drop_through_label = 0; - rtx temp; - rtx comparison = 0; - int i; - tree type; + if (bitnum != 0) + op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0, + size_int (bitnum), target, ops_unsignedp); - emit_queue (); + if (GET_MODE (op0) != mode) + op0 = convert_to_mode (mode, op0, ops_unsignedp); - switch (code) - { - case ERROR_MARK: - break; + if ((code == EQ && ! invert) || (code == NE && invert)) + op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, + ops_unsignedp, OPTAB_LIB_WIDEN); - case INTEGER_CST: - temp = integer_zerop (exp) ? if_false_label : if_true_label; - if (temp) - emit_jump (temp); - break; + /* Put the AND last so it can combine with more things. 
*/ + if (bitnum != TYPE_PRECISION (type) - 1) + op0 = expand_and (op0, const1_rtx, target); -#if 0 - /* This is not true with #pragma weak */ - case ADDR_EXPR: - /* The address of something can never be zero. */ - if (if_true_label) - emit_jump (if_true_label); - break; -#endif + return op0; + } - case NOP_EXPR: - if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF - || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF - || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF) - goto normal; - case CONVERT_EXPR: - /* If we are narrowing the operand, we have to do the compare in the - narrower mode. */ - if ((TYPE_PRECISION (TREE_TYPE (exp)) - < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))) - goto normal; - case NON_LVALUE_EXPR: - case REFERENCE_EXPR: - case ABS_EXPR: - case NEGATE_EXPR: - case LROTATE_EXPR: - case RROTATE_EXPR: - /* These cannot change zero->non-zero or vice versa. */ - do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); - break; + /* Now see if we are likely to be able to do this. Return if not. */ + if (! can_compare_p (operand_mode)) + return 0; + icode = setcc_gen_code[(int) code]; + if (icode == CODE_FOR_nothing + || (only_cheap && insn_operand_mode[(int) icode][0] != mode)) + { + /* We can only do this if it is one of the special cases that + can be handled without an scc insn. */ + if ((code == LT && integer_zerop (arg1)) + || (! only_cheap && code == GE && integer_zerop (arg1))) + ; + else if (BRANCH_COST >= 0 + && ! only_cheap && (code == NE || code == EQ) + && TREE_CODE (type) != REAL_TYPE + && ((abs_optab->handlers[(int) operand_mode].insn_code + != CODE_FOR_nothing) + || (ffs_optab->handlers[(int) operand_mode].insn_code + != CODE_FOR_nothing))) + ; + else + return 0; + } + + preexpand_calls (exp); + if (subtarget == 0 || GET_CODE (subtarget) != REG + || GET_MODE (subtarget) != operand_mode + || ! 
safe_from_p (subtarget, arg1)) + subtarget = 0; -#if 0 - /* This is never less insns than evaluating the PLUS_EXPR followed by - a test and can be longer if the test is eliminated. */ - case PLUS_EXPR: - /* Reduce to minus. */ - exp = build (MINUS_EXPR, TREE_TYPE (exp), - TREE_OPERAND (exp, 0), - fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)), - TREE_OPERAND (exp, 1)))); - /* Process as MINUS. */ -#endif + op0 = expand_expr (arg0, subtarget, VOIDmode, 0); + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); - case MINUS_EXPR: - /* Non-zero iff operands of minus differ. */ - comparison = compare (build (NE_EXPR, TREE_TYPE (exp), - TREE_OPERAND (exp, 0), - TREE_OPERAND (exp, 1)), - NE, NE); - break; + if (target == 0) + target = gen_reg_rtx (mode); - case BIT_AND_EXPR: - /* If we are AND'ing with a small constant, do this comparison in the - smallest type that fits. If the machine doesn't have comparisons - that small, it will be converted back to the wider comparison. - This helps if we are testing the sign bit of a narrower object. - combine can't do this for us because it can't know whether a - ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */ + /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe + because, if the emit_store_flag does anything it will succeed and + OP0 and OP1 will not be used subsequently. */ - if (! SLOW_BYTE_ACCESS - && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST - && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT - && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0 - && (type = type_for_size (i + 1, 1)) != 0 - && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) - && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code - != CODE_FOR_nothing)) - { - do_jump (convert (type, exp), if_false_label, if_true_label); - break; - } - goto normal; + result = emit_store_flag (target, code, + queued_subexp_p (op0) ? copy_rtx (op0) : op0, + queued_subexp_p (op1) ? 
copy_rtx (op1) : op1, + operand_mode, unsignedp, 1); - case TRUTH_NOT_EXPR: - do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); - break; + if (result) + { + if (invert) + result = expand_binop (mode, xor_optab, result, const1_rtx, + result, 0, OPTAB_LIB_WIDEN); + return result; + } - case TRUTH_ANDIF_EXPR: - if (if_false_label == 0) - if_false_label = drop_through_label = gen_label_rtx (); - do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX); - do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); - break; + /* If this failed, we have to do this with set/compare/jump/set code. */ + if (target == 0 || GET_CODE (target) != REG + || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) + target = gen_reg_rtx (GET_MODE (target)); - case TRUTH_ORIF_EXPR: - if (if_true_label == 0) - if_true_label = drop_through_label = gen_label_rtx (); - do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label); - do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); - break; + emit_move_insn (target, invert ? const0_rtx : const1_rtx); + result = compare_from_rtx (op0, op1, code, unsignedp, + operand_mode, NULL_RTX, 0); + if (GET_CODE (result) == CONST_INT) + return (((result == const0_rtx && ! invert) + || (result != const0_rtx && invert)) + ? const0_rtx : const1_rtx); - case COMPOUND_EXPR: - expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); - free_temp_slots (); - emit_queue (); - do_pending_stack_adjust (); - do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); - break; + label = gen_label_rtx (); + if (bcc_gen_fctn[(int) code] == 0) + abort (); - case COMPONENT_REF: - case BIT_FIELD_REF: - case ARRAY_REF: - { - int bitsize, bitpos, unsignedp; - enum machine_mode mode; - tree type; - tree offset; - int volatilep = 0; + emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); + emit_move_insn (target, invert ? const1_rtx : const0_rtx); + emit_label (label); - /* Get description of this reference. 
We don't actually care - about the underlying object here. */ - get_inner_reference (exp, &bitsize, &bitpos, &offset, - &mode, &unsignedp, &volatilep); + return target; +} + +/* Generate a tablejump instruction (used for switch statements). */ - type = type_for_size (bitsize, unsignedp); - if (! SLOW_BYTE_ACCESS - && type != 0 && bitsize >= 0 - && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) - && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code - != CODE_FOR_nothing)) - { - do_jump (convert (type, exp), if_false_label, if_true_label); - break; - } - goto normal; - } +#ifdef HAVE_tablejump - case COND_EXPR: - /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */ - if (integer_onep (TREE_OPERAND (exp, 1)) - && integer_zerop (TREE_OPERAND (exp, 2))) - do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); +/* INDEX is the value being switched on, with the lowest value + in the table already subtracted. + MODE is its expected mode (needed if INDEX is constant). + RANGE is the length of the jump table. + TABLE_LABEL is a CODE_LABEL rtx for the table itself. - else if (integer_zerop (TREE_OPERAND (exp, 1)) - && integer_onep (TREE_OPERAND (exp, 2))) - do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); + DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the + index value is out of range. */ - else - { - register rtx label1 = gen_label_rtx (); - drop_through_label = gen_label_rtx (); - do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX); - /* Now the THEN-expression. */ - do_jump (TREE_OPERAND (exp, 1), - if_false_label ? if_false_label : drop_through_label, - if_true_label ? if_true_label : drop_through_label); - /* In case the do_jump just above never jumps. */ - do_pending_stack_adjust (); - emit_label (label1); - /* Now the ELSE-expression. */ - do_jump (TREE_OPERAND (exp, 2), - if_false_label ? if_false_label : drop_through_label, - if_true_label ? 
if_true_label : drop_through_label); - } - break; +void +do_tablejump (index, mode, range, table_label, default_label) + rtx index, range, table_label, default_label; + enum machine_mode mode; +{ + register rtx temp, vector; - case EQ_EXPR: - if (integer_zerop (TREE_OPERAND (exp, 1))) - do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); - else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && - !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_equality (exp, if_false_label, if_true_label); - else - comparison = compare (exp, EQ, EQ); - break; + /* Do an unsigned comparison (in the proper mode) between the index + expression and the value which represents the length of the range. + Since we just finished subtracting the lower bound of the range + from the index expression, this comparison allows us to simultaneously + check that the original index expression value is both greater than + or equal to the minimum value of the range and less than or equal to + the maximum value of the range. */ - case NE_EXPR: - if (integer_zerop (TREE_OPERAND (exp, 1))) - do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); - else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && - !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_equality (exp, if_true_label, if_false_label); - else - comparison = compare (exp, NE, NE); - break; + emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0); + emit_jump_insn (gen_bltu (default_label)); - case LT_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); - else - comparison = compare (exp, LT, LTU); - break; + /* If index is in range, it must fit in Pmode. + Convert to Pmode so we can index with it. 
*/ + if (mode != Pmode) + index = convert_to_mode (Pmode, index, 1); - case LE_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); - else - comparison = compare (exp, LE, LEU); - break; + /* Don't let a MEM slip thru, because then INDEX that comes + out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, + and break_out_memory_refs will go to work on it and mess it up. */ +#ifdef PIC_CASE_VECTOR_ADDRESS + if (flag_pic && GET_CODE (index) != REG) + index = copy_to_mode_reg (Pmode, index); +#endif - case GT_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); - else - comparison = compare (exp, GT, GTU); - break; + /* If flag_force_addr were to affect this address + it could interfere with the tricky assumptions made + about addresses that contain label-refs, + which may be valid only very near the tablejump itself. */ + /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the + GET_MODE_SIZE, because this indicates how large insns are. The other + uses should all be Pmode, because they are addresses. This code + could fail if addresses and insns are not the same size. 
*/ + index = gen_rtx (PLUS, Pmode, + gen_rtx (MULT, Pmode, index, + GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), + gen_rtx (LABEL_REF, Pmode, table_label)); +#ifdef PIC_CASE_VECTOR_ADDRESS + if (flag_pic) + index = PIC_CASE_VECTOR_ADDRESS (index); + else +#endif + index = memory_address_noforce (CASE_VECTOR_MODE, index); + temp = gen_reg_rtx (CASE_VECTOR_MODE); + vector = gen_rtx (MEM, CASE_VECTOR_MODE, index); + RTX_UNCHANGING_P (vector) = 1; + convert_move (temp, vector, 0); - case GE_EXPR: - if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) - == MODE_INT) - && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) - do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); - else - comparison = compare (exp, GE, GEU); - break; + emit_jump_insn (gen_tablejump (temp, table_label)); - default: - normal: - temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); -#if 0 - /* This is not needed any more and causes poor code since it causes - comparisons and tests from non-SI objects to have different code - sequences. */ - /* Copy to register to avoid generating bad insns by cse - from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */ - if (!cse_not_expected && GET_CODE (temp) == MEM) - temp = copy_to_reg (temp); +#ifndef CASE_VECTOR_PC_RELATIVE + /* If we are generating PIC code or if the table is PC-relative, the + table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ + if (! flag_pic) + emit_barrier (); #endif - do_pending_stack_adjust (); - if (GET_CODE (temp) == CONST_INT) - comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx); - else if (GET_CODE (temp) == LABEL_REF) - comparison = const_true_rtx; - else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT - && !can_compare_p (GET_MODE (temp))) - /* Note swapping the labels gives us not-equal. 
*/ - do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); - else if (GET_MODE (temp) != VOIDmode) - comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)), - NE, TREE_UNSIGNED (TREE_TYPE (exp)), - GET_MODE (temp), NULL_RTX, 0); - else - abort (); - } +} - /* Do any postincrements in the expression that was tested. */ - emit_queue (); +#endif /* HAVE_tablejump */ - /* If COMPARISON is nonzero here, it is an rtx that can be substituted - straight into a conditional jump instruction as the jump condition. - Otherwise, all the work has been done already. */ - if (comparison == const_true_rtx) - { - if (if_true_label) - emit_jump (if_true_label); - } - else if (comparison == const0_rtx) - { - if (if_false_label) - emit_jump (if_false_label); - } - else if (comparison) - do_jump_for_compare (comparison, if_false_label, if_true_label); +/* Emit a suitable bytecode to load a value from memory, assuming a pointer + to that value is on the top of the stack. The resulting type is TYPE, and + the source declaration is DECL. */ - free_temp_slots (); +void +bc_load_memory (type, decl) + tree type, decl; +{ + enum bytecode_opcode opcode; + + + /* Bit fields are special. We only know about signed and + unsigned ints, and enums. The latter are treated as + signed integers. */ + + if (DECL_BIT_FIELD (decl)) + if (TREE_CODE (type) == ENUMERAL_TYPE + || TREE_CODE (type) == INTEGER_TYPE) + opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI; + else + abort (); + else + /* See corresponding comment in bc_store_memory(). */ + if (TYPE_MODE (type) == BLKmode + || TYPE_MODE (type) == VOIDmode) + return; + else + opcode = mode_to_load_map [TYPE_MODE (type)]; - if (drop_through_label) - { - /* If do_jump produces code that might be jumped around, - do any stack adjusts from that code, before the place - where control merges in. 
*/ - do_pending_stack_adjust (); - emit_label (drop_through_label); - } + if (opcode == neverneverland) + abort (); + + bc_emit_bytecode (opcode); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif } - -/* Given a comparison expression EXP for values too wide to be compared - with one insn, test the comparison and jump to the appropriate label. - The code of EXP is ignored; we always test GT if SWAP is 0, - and LT if SWAP is 1. */ -static void -do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) - tree exp; - int swap; - rtx if_false_label, if_true_label; -{ - rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); - rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); - enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); - int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); - rtx drop_through_label = 0; - int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); - int i; - if (! if_true_label || ! if_false_label) - drop_through_label = gen_label_rtx (); - if (! if_true_label) - if_true_label = drop_through_label; - if (! if_false_label) - if_false_label = drop_through_label; +/* Store the contents of the second stack slot to the address in the + top stack slot. DECL is the declaration of the destination and is used + to determine whether we're dealing with a bitfield. */ - /* Compare a word at a time, high order first. 
*/ - for (i = 0; i < nwords; i++) +void +bc_store_memory (type, decl) + tree type, decl; +{ + enum bytecode_opcode opcode; + + + if (DECL_BIT_FIELD (decl)) { - rtx comp; - rtx op0_word, op1_word; - - if (WORDS_BIG_ENDIAN) - { - op0_word = operand_subword_force (op0, i, mode); - op1_word = operand_subword_force (op1, i, mode); - } + if (TREE_CODE (type) == ENUMERAL_TYPE + || TREE_CODE (type) == INTEGER_TYPE) + opcode = sstoreBI; else - { - op0_word = operand_subword_force (op0, nwords - 1 - i, mode); - op1_word = operand_subword_force (op1, nwords - 1 - i, mode); - } - - /* All but high-order word must be compared as unsigned. */ - comp = compare_from_rtx (op0_word, op1_word, - (unsignedp || i > 0) ? GTU : GT, - unsignedp, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_true_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_true_label); - - /* Consider lower words only if these are equal. */ - comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, - NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_false_label); + abort (); } + else + if (TYPE_MODE (type) == BLKmode) + { + /* Copy structure. This expands to a block copy instruction, storeBLK. + In addition to the arguments expected by the other store instructions, + it also expects a type size (SImode) on top of the stack, which is the + structure size in size units (usually bytes). The two first arguments + are already on the stack; so we just put the size on level 1. For some + other languages, the size may be variable, this is why we don't encode + it as a storeBLK literal, but rather treat it as a full-fledged expression. 
*/ + + bc_expand_expr (TYPE_SIZE (type)); + opcode = storeBLK; + } + else + opcode = mode_to_store_map [TYPE_MODE (type)]; - if (if_false_label) - emit_jump (if_false_label); - if (drop_through_label) - emit_label (drop_through_label); + if (opcode == neverneverland) + abort (); + + bc_emit_bytecode (opcode); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif } -/* Compare OP0 with OP1, word at a time, in mode MODE. - UNSIGNEDP says to do unsigned comparison. - Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */ -static void -do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label) - enum machine_mode mode; - int unsignedp; - rtx op0, op1; - rtx if_false_label, if_true_label; +/* Allocate local stack space sufficient to hold a value of the given + SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an + integral power of 2. A special case is locals of type VOID, which + have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is + remapped into the corresponding attribute of SI. */ + +rtx +bc_allocate_local (size, alignment) + int size, alignment; { - int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); - rtx drop_through_label = 0; - int i; + rtx retval; + int byte_alignment; - if (! if_true_label || ! if_false_label) - drop_through_label = gen_label_rtx (); - if (! if_true_label) - if_true_label = drop_through_label; - if (! if_false_label) - if_false_label = drop_through_label; + if (size < 0) + abort (); - /* Compare a word at a time, high order first. 
*/ - for (i = 0; i < nwords; i++) - { - rtx comp; - rtx op0_word, op1_word; + /* Normalize size and alignment */ + if (!size) + size = UNITS_PER_WORD; - if (WORDS_BIG_ENDIAN) - { - op0_word = operand_subword_force (op0, i, mode); - op1_word = operand_subword_force (op1, i, mode); - } - else - { - op0_word = operand_subword_force (op0, nwords - 1 - i, mode); - op1_word = operand_subword_force (op1, nwords - 1 - i, mode); - } + if (alignment < BITS_PER_UNIT) + byte_alignment = 1 << (INT_ALIGN - 1); + else + /* Align */ + byte_alignment = alignment / BITS_PER_UNIT; - /* All but high-order word must be compared as unsigned. */ - comp = compare_from_rtx (op0_word, op1_word, - (unsignedp || i > 0) ? GTU : GT, - unsignedp, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_true_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_true_label); + if (local_vars_size & (byte_alignment - 1)) + local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1)); - /* Consider lower words only if these are equal. */ - comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, - NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, NULL_RTX, if_false_label); - } + retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0); + local_vars_size += size; - if (if_false_label) - emit_jump (if_false_label); - if (drop_through_label) - emit_label (drop_through_label); + return retval; } -/* Given an EQ_EXPR expression EXP for values too wide to be compared - with one insn, test the comparison and jump to the appropriate label. */ -static void -do_jump_by_parts_equality (exp, if_false_label, if_true_label) - tree exp; - rtx if_false_label, if_true_label; +/* Allocate variable-sized local array. Variable-sized arrays are + actually pointers to the address in memory where they are stored. 
*/ + +rtx +bc_allocate_variable_array (size) + tree size; { - rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); - rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); - enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); - int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); - int i; - rtx drop_through_label = 0; + rtx retval; + const int ptralign = (1 << (PTR_ALIGN - 1)); - if (! if_false_label) - drop_through_label = if_false_label = gen_label_rtx (); + /* Align pointer */ + if (local_vars_size & ptralign) + local_vars_size += ptralign - (local_vars_size & ptralign); - for (i = 0; i < nwords; i++) - { - rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode), - operand_subword_force (op1, i, mode), - EQ, TREE_UNSIGNED (TREE_TYPE (exp)), - word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, if_false_label, NULL_RTX); - } + /* Note down local space needed: pointer to block; also return + dummy rtx */ - if (if_true_label) - emit_jump (if_true_label); - if (drop_through_label) - emit_label (drop_through_label); + retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0); + local_vars_size += POINTER_SIZE / BITS_PER_UNIT; + return retval; } - -/* Jump according to whether OP0 is 0. - We assume that OP0 has an integer mode that is too wide - for the available compare insns. */ - -static void -do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) - rtx op0; - rtx if_false_label, if_true_label; -{ - int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; - int i; - rtx drop_through_label = 0; - if (! 
if_false_label) - drop_through_label = if_false_label = gen_label_rtx (); - for (i = 0; i < nwords; i++) - { - rtx comp = compare_from_rtx (operand_subword_force (op0, i, - GET_MODE (op0)), - const0_rtx, EQ, 1, word_mode, NULL_RTX, 0); - if (comp == const_true_rtx) - emit_jump (if_false_label); - else if (comp != const0_rtx) - do_jump_for_compare (comp, if_false_label, NULL_RTX); - } +/* Push the machine address for the given external variable offset. */ +void +bc_load_externaddr (externaddr) + rtx externaddr; +{ + bc_emit_bytecode (constP); + bc_emit_code_labelref (externaddr->label, externaddr->offset); - if (if_true_label) - emit_jump (if_true_label); - if (drop_through_label) - emit_label (drop_through_label); +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif } -/* Given a comparison expression in rtl form, output conditional branches to - IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */ -static void -do_jump_for_compare (comparison, if_false_label, if_true_label) - rtx comparison, if_false_label, if_true_label; +static char * +bc_strdup (s) + char *s; { - if (if_true_label) - { - if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) - emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label)); - else - abort (); - - if (if_false_label) - emit_jump (if_false_label); - } - else if (if_false_label) - { - rtx insn; - rtx prev = PREV_INSN (get_last_insn ()); - rtx branch = 0; + return strcpy (xmalloc ((strlen (s) + 1) * sizeof *s), s); +} - /* Output the branch with the opposite condition. Then try to invert - what is generated. If more than one insn is a branch, or if the - branch is not the last insn written, abort. If we can't invert - the branch, emit make a true label, redirect this jump to that, - emit a jump to the false label and define the true label. 
*/ - if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0) - emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label)); - else - abort (); +/* Like above, but expects an IDENTIFIER. */ +void +bc_load_externaddr_id (id, offset) + tree id; + int offset; +{ + if (!IDENTIFIER_POINTER (id)) + abort (); - /* Here we get the insn before what was just emitted. - On some machines, emitting the branch can discard - the previous compare insn and emit a replacement. */ - if (prev == 0) - /* If there's only one preceding insn... */ - insn = get_insns (); - else - insn = NEXT_INSN (prev); + bc_emit_bytecode (constP); + bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset); - for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn)) - if (GET_CODE (insn) == JUMP_INSN) - { - if (branch) - abort (); - branch = insn; - } +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif +} - if (branch != get_last_insn ()) - abort (); - if (! invert_jump (branch, if_false_label)) - { - if_true_label = gen_label_rtx (); - redirect_jump (branch, if_true_label); - emit_jump (if_false_label); - emit_label (if_true_label); - } - } +/* Push the machine address for the given local variable offset. */ +void +bc_load_localaddr (localaddr) + rtx localaddr; +{ + bc_emit_instruction (localP, (HOST_WIDE_INT) localaddr->offset); } - -/* Generate code for a comparison expression EXP - (including code to compute the values to be compared) - and set (CC0) according to the result. - SIGNED_CODE should be the rtx operation for this comparison for - signed data; UNSIGNED_CODE, likewise for use if data is unsigned. - We force a stack adjustment unless there are currently - things pushed on the stack that aren't yet used. */ -static rtx -compare (exp, signed_code, unsigned_code) - register tree exp; - enum rtx_code signed_code, unsigned_code; +/* Push the machine address for the given parameter offset. + NOTE: offset is in bits. 
*/ +void +bc_load_parmaddr (parmaddr) + rtx parmaddr; { - register rtx op0 - = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); - register rtx op1 - = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); - register tree type = TREE_TYPE (TREE_OPERAND (exp, 0)); - register enum machine_mode mode = TYPE_MODE (type); - int unsignedp = TREE_UNSIGNED (type); - enum rtx_code code = unsignedp ? unsigned_code : signed_code; + bc_emit_instruction (argP, (HOST_WIDE_INT) parmaddr->offset / BITS_PER_UNIT); +} - return compare_from_rtx (op0, op1, code, unsignedp, mode, - ((mode == BLKmode) - ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), - TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); + +/* Convert a[i] into *(a + i). */ +tree +bc_canonicalize_array_ref (exp) + tree exp; +{ + tree type = TREE_TYPE (exp); + tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type), + TREE_OPERAND (exp, 0)); + tree index = TREE_OPERAND (exp, 1); + + + /* Convert the integer argument to a type the same size as a pointer + so the multiply won't overflow spuriously. */ + + if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE) + index = convert (type_for_size (POINTER_SIZE, 0), index); + + /* The array address isn't volatile even if the array is. + (Of course this isn't terribly relevant since the bytecode + translator treats nearly everything as volatile anyway.) */ + TREE_THIS_VOLATILE (array_adr) = 0; + + return build1 (INDIRECT_REF, type, + fold (build (PLUS_EXPR, + TYPE_POINTER_TO (type), + array_adr, + fold (build (MULT_EXPR, + TYPE_POINTER_TO (type), + index, + size_in_bytes (type)))))); } -/* Like compare but expects the values to compare as two rtx's. - The decision as to signed or unsigned comparison must be made by the caller. - If MODE is BLKmode, SIZE is an RTX giving the size of the objects being - compared. +/* Load the address of the component referenced by the given + COMPONENT_REF expression. 
- If ALIGN is non-zero, it is the alignment of this type; if zero, the - size of MODE should be used. */ + Returns innermost lvalue. */ -rtx -compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) - register rtx op0, op1; - enum rtx_code code; - int unsignedp; - enum machine_mode mode; - rtx size; - int align; +tree +bc_expand_component_address (exp) + tree exp; { - rtx tem; + tree tem, chain; + enum machine_mode mode; + int bitpos = 0; + HOST_WIDE_INT SIval; - /* If one operand is constant, make it the second one. Only do this - if the other operand is not constant as well. */ - if ((CONSTANT_P (op0) && ! CONSTANT_P (op1)) - || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT)) - { - tem = op0; - op0 = op1; - op1 = tem; - code = swap_condition (code); - } + tem = TREE_OPERAND (exp, 1); + mode = DECL_MODE (tem); - if (flag_force_mem) + + /* Compute cumulative bit offset for nested component refs + and array refs, and find the ultimate containing object. */ + + for (tem = exp;; tem = TREE_OPERAND (tem, 0)) { - op0 = force_not_mem (op0); - op1 = force_not_mem (op1); - } + if (TREE_CODE (tem) == COMPONENT_REF) + bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1))); + else + if (TREE_CODE (tem) == ARRAY_REF + && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST + && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST) - do_pending_stack_adjust (); + bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)) + * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem))) + /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */); + else + break; + } - if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT - && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) - return tem; + expand_expr (tem); -#if 0 - /* There's no need to do this now that combine.c can eliminate lots of - sign extensions. This can be less efficient in certain cases on other - machines. 
*/ - /* If this is a signed equality comparison, we can do it as an - unsigned comparison since zero-extension is cheaper than sign - extension and comparisons with zero are done as unsigned. This is - the case even on machines that can do fast sign extension, since - zero-extension is easier to combine with other operations than - sign-extension is. If we are comparing against a constant, we must - convert it to what it would look like unsigned. */ - if ((code == EQ || code == NE) && ! unsignedp - && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) - { - if (GET_CODE (op1) == CONST_INT - && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) - op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); - unsignedp = 1; - } -#endif - - emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align); + /* For bitfields also push their offset and size */ + if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) + bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1))); + else + if (SIval = bitpos / BITS_PER_UNIT) + bc_emit_instruction (addconstPSI, SIval); - return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx); + return (TREE_OPERAND (exp, 1)); } - -/* Generate code to calculate EXP using a store-flag instruction - and return an rtx for the result. EXP is either a comparison - or a TRUTH_NOT_EXPR whose operand is a comparison. - If TARGET is nonzero, store the result there if convenient. - If ONLY_CHEAP is non-zero, only do this if it is likely to be very - cheap. +/* Emit code to push two SI constants */ +void +bc_push_offset_and_size (offset, size) + HOST_WIDE_INT offset, size; +{ + bc_emit_instruction (constSI, offset); + bc_emit_instruction (constSI, size); +} - Return zero if there is no suitable set-flag instruction - available on this machine. - Once expand_expr has been called on the arguments of the comparison, - we are committed to doing the store flag, since it is not safe to - re-evaluate the expression. 
We emit the store-flag insn by calling - emit_store_flag, but only expand the arguments if we have a reason - to believe that emit_store_flag will be successful. If we think that - it will, but it isn't, we have to simulate the store-flag with a - set/jump/set sequence. */ +/* Emit byte code to push the address of the given lvalue expression to + the stack. If it's a bit field, we also push offset and size info. -static rtx -do_store_flag (exp, target, mode, only_cheap) + Returns innermost component, which allows us to determine not only + its type, but also whether it's a bitfield. */ + +tree +bc_expand_address (exp) tree exp; - rtx target; - enum machine_mode mode; - int only_cheap; { - enum rtx_code code; - tree arg0, arg1, type; - tree tem; - enum machine_mode operand_mode; - int invert = 0; - int unsignedp; - rtx op0, op1; - enum insn_code icode; - rtx subtarget = target; - rtx result, label, pattern, jump_pat; + /* Safeguard */ + if (!exp || TREE_CODE (exp) == ERROR_MARK) + return (exp); - /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the - result at the end. We can't simply invert the test since it would - have already been inverted if it were valid. This case occurs for - some floating-point comparisons. */ - if (TREE_CODE (exp) == TRUTH_NOT_EXPR) - invert = 1, exp = TREE_OPERAND (exp, 0); + switch (TREE_CODE (exp)) + { + case ARRAY_REF: - arg0 = TREE_OPERAND (exp, 0); - arg1 = TREE_OPERAND (exp, 1); - type = TREE_TYPE (arg0); - operand_mode = TYPE_MODE (type); - unsignedp = TREE_UNSIGNED (type); + return (bc_expand_address (bc_canonicalize_array_ref (exp))); - /* We won't bother with BLKmode store-flag operations because it would mean - passing a lot of information to emit_store_flag. */ - if (operand_mode == BLKmode) - return 0; + case COMPONENT_REF: - STRIP_NOPS (arg0); - STRIP_NOPS (arg1); + return (bc_expand_component_address (exp)); - /* Get the rtx comparison code to use. 
We know that EXP is a comparison - operation of some type. Some comparisons against 1 and -1 can be - converted to comparisons with zero. Do so here so that the tests - below will be aware that we have a comparison with zero. These - tests will not catch constants in the first operand, but constants - are rarely passed as the first operand. */ + case INDIRECT_REF: - switch (TREE_CODE (exp)) - { - case EQ_EXPR: - code = EQ; - break; - case NE_EXPR: - code = NE; + bc_expand_expr (TREE_OPERAND (exp, 0)); + + /* For variable-sized types: retrieve pointer. Sometimes the + TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's + also make sure we have an operand, just in case... */ + + if (TREE_OPERAND (exp, 0) + && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0))) + && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST) + bc_emit_instruction (loadP); + + /* If packed, also return offset and size */ + if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0))) + + bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))), + TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0)))); + + return (TREE_OPERAND (exp, 0)); + + case FUNCTION_DECL: + + bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp), DECL_RTL (exp)->offset); break; - case LT_EXPR: - if (integer_onep (arg1)) - arg1 = integer_zero_node, code = unsignedp ? LEU : LE; - else - code = unsignedp ? LTU : LT; + + case PARM_DECL: + + bc_load_parmaddr (DECL_RTL (exp)); + + /* For variable-sized types: retrieve pointer */ + if (TYPE_SIZE (TREE_TYPE (exp)) + && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST) + bc_emit_instruction (loadP); + + /* If packed, also return offset and size */ + if (DECL_BIT_FIELD (exp)) + bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)), + TREE_INT_CST_LOW (DECL_SIZE (exp))); + break; - case LE_EXPR: - if (! unsignedp && integer_all_onesp (arg1)) - arg1 = integer_zero_node, code = LT; - else - code = unsignedp ? 
LEU : LE; + + case RESULT_DECL: + + bc_emit_instruction (returnP); break; - case GT_EXPR: - if (! unsignedp && integer_all_onesp (arg1)) - arg1 = integer_zero_node, code = GE; + + case VAR_DECL: + +#if 0 + if (DECL_RTL (exp)->label) + bc_load_externaddr (DECL_RTL (exp)); +#endif + + if (DECL_EXTERNAL (exp)) + bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp), DECL_RTL (exp)->offset); else - code = unsignedp ? GTU : GT; + bc_load_localaddr (DECL_RTL (exp)); + + /* For variable-sized types: retrieve pointer */ + if (TYPE_SIZE (TREE_TYPE (exp)) + && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST) + bc_emit_instruction (loadP); + + /* If packed, also return offset and size */ + if (DECL_BIT_FIELD (exp)) + bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)), + TREE_INT_CST_LOW (DECL_SIZE (exp))); + break; - case GE_EXPR: - if (integer_onep (arg1)) - arg1 = integer_zero_node, code = unsignedp ? GTU : GT; - else - code = unsignedp ? GEU : GE; + + case STRING_CST: + { + rtx r; + + bc_emit_bytecode (constP); + r = output_constant_def (exp); + bc_emit_code_labelref (r->label, r->offset); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + } break; + default: - abort (); - } - /* Put a constant second. */ - if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) - { - tem = arg0; arg0 = arg1; arg1 = tem; - code = swap_condition (code); + abort(); + break; } - /* If this is an equality or inequality test of a single bit, we can - do this by shifting the bit being tested to the low-order bit and - masking the result with the constant 1. If the condition was EQ, - we xor it with 1. This does not require an scc insn and is faster - than an scc insn even if we have it. */ + /* Most lvalues don't have components. 
*/ + return (exp); +} - if ((code == NE || code == EQ) - && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) - && integer_pow2p (TREE_OPERAND (arg0, 1)) - && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT) + +/* Emit a type code to be used by the runtime support in handling + parameter passing. The type code consists of the machine mode + plus the minimal alignment shifted left 8 bits. */ + +tree +bc_runtime_type_code (type) + tree type; +{ + int val; + + switch (TREE_CODE (type)) { - tree inner = TREE_OPERAND (arg0, 0); - int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1), - NULL_RTX, VOIDmode, 0))); - int ops_unsignedp; + case VOID_TYPE: + case INTEGER_TYPE: + case REAL_TYPE: + case COMPLEX_TYPE: + case ENUMERAL_TYPE: + case POINTER_TYPE: + case RECORD_TYPE: + + val = TYPE_MODE (type) | TYPE_ALIGN (type) << 8; + break; + + case ERROR_MARK: + + val = 0; + break; + + default: - /* If INNER is a right shift of a constant and it plus BITNUM does - not overflow, adjust BITNUM and INNER. */ + abort (); + } + return build_int_2 (val, 0); +} - if (TREE_CODE (inner) == RSHIFT_EXPR - && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST - && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 - && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)) - < TYPE_PRECISION (type))) - { - bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); - inner = TREE_OPERAND (inner, 0); - } - /* If we are going to be able to omit the AND below, we must do our - operations as unsigned. If we must use the AND, we have a choice. - Normally unsigned is faster, but for some machines signed is. */ - ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1 -#ifdef BYTE_LOADS_SIGN_EXTEND - : 0 -#else - : 1 -#endif - ); +/* Generate constructor label */ +char * +bc_gen_constr_label () +{ + static int label_counter; + static char label[20]; - if (subtarget == 0 || GET_CODE (subtarget) != REG - || GET_MODE (subtarget) != operand_mode - || ! 
safe_from_p (subtarget, inner)) - subtarget = 0; + sprintf (label, "*LR%d", label_counter++); - op0 = expand_expr (inner, subtarget, VOIDmode, 0); + return (obstack_copy0 (&permanent_obstack, label, strlen (label))); +} - if (bitnum != 0) - op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0, - size_int (bitnum), target, ops_unsignedp); - if (GET_MODE (op0) != mode) - op0 = convert_to_mode (mode, op0, ops_unsignedp); +/* Evaluate constructor CONSTR and return pointer to it on level one. We + expand the constructor data as static data, and push a pointer to it. + The pointer is put in the pointer table and is retrieved by a constP + bytecode instruction. We then loop and store each constructor member in + the corresponding component. Finally, we return the original pointer on + the stack. */ - if ((code == EQ && ! invert) || (code == NE && invert)) - op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, - ops_unsignedp, OPTAB_LIB_WIDEN); +void +bc_expand_constructor (constr) + tree constr; +{ + char *l; + HOST_WIDE_INT ptroffs; + rtx constr_rtx; - /* Put the AND last so it can combine with more things. */ - if (bitnum != TYPE_PRECISION (type) - 1) - op0 = expand_and (op0, const1_rtx, target); + + /* Literal constructors are handled as constants, whereas + non-literals are evaluated and stored element by element + into the data segment. */ + + /* Allocate space in proper segment and push pointer to space on stack. + */ - return op0; - } + l = bc_gen_constr_label (); - /* Now see if we are likely to be able to do this. Return if not. */ - if (! can_compare_p (operand_mode)) - return 0; - icode = setcc_gen_code[(int) code]; - if (icode == CODE_FOR_nothing - || (only_cheap && insn_operand_mode[(int) icode][0] != mode)) + if (TREE_CONSTANT (constr)) { - /* We can only do this if it is one of the special cases that - can be handled without an scc insn. */ - if ((code == LT && integer_zerop (arg1)) - || (! 
only_cheap && code == GE && integer_zerop (arg1))) - ; - else if (BRANCH_COST >= 0 - && ! only_cheap && (code == NE || code == EQ) - && TREE_CODE (type) != REAL_TYPE - && ((abs_optab->handlers[(int) operand_mode].insn_code - != CODE_FOR_nothing) - || (ffs_optab->handlers[(int) operand_mode].insn_code - != CODE_FOR_nothing))) - ; - else - return 0; + text_section (); + + bc_emit_const_labeldef (l); + bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr))); } - - preexpand_calls (exp); - if (subtarget == 0 || GET_CODE (subtarget) != REG - || GET_MODE (subtarget) != operand_mode - || ! safe_from_p (subtarget, arg1)) - subtarget = 0; + else + { + data_section (); - op0 = expand_expr (arg0, subtarget, VOIDmode, 0); - op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + bc_emit_data_labeldef (l); + bc_output_data_constructor (constr); + } - if (target == 0) - target = gen_reg_rtx (mode); + + /* Add reference to pointer table and recall pointer to stack; + this code is common for both types of constructors: literals + and non-literals. */ - /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe - because, if the emit_store_flag does anything it will succeed and - OP0 and OP1 will not be used subsequently. */ + bc_emit_instruction (constP, (HOST_WIDE_INT) ptroffs = bc_define_pointer (l)); - result = emit_store_flag (target, code, - queued_subexp_p (op0) ? copy_rtx (op0) : op0, - queued_subexp_p (op1) ? copy_rtx (op1) : op1, - operand_mode, unsignedp, 1); + /* This is all that has to be done if it's a literal. */ + if (TREE_CONSTANT (constr)) + return; - if (result) + + /* At this point, we have the pointer to the structure on top of the stack. + Generate sequences of store_memory calls for the constructor. 
*/ + + /* constructor type is structure */ + if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE) { - if (invert) - result = expand_binop (mode, xor_optab, result, const1_rtx, - result, 0, OPTAB_LIB_WIDEN); - return result; + register tree elt; + + /* If the constructor has fewer fields than the structure, + clear the whole structure first. */ + + if (list_length (CONSTRUCTOR_ELTS (constr)) + != list_length (TYPE_FIELDS (TREE_TYPE (constr)))) + { + bc_emit_instruction (dup); + bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr))); + bc_emit_instruction (clearBLK); + } + + /* Store each element of the constructor into the corresponding + field of TARGET. */ + + for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt)) + { + register tree field = TREE_PURPOSE (elt); + register enum machine_mode mode; + int bitsize; + int bitpos; + int unsignedp; + + bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */; + mode = DECL_MODE (field); + unsignedp = TREE_UNSIGNED (field); + + bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field)); + + bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)), + /* The alignment of TARGET is + at least what its type requires. */ + VOIDmode, 0, + TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT, + int_size_in_bytes (TREE_TYPE (constr))); + } } + else + + /* Constructor type is array */ + if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE) + { + register tree elt; + register int i; + tree domain = TYPE_DOMAIN (TREE_TYPE (constr)); + int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain)); + int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain)); + tree elttype = TREE_TYPE (TREE_TYPE (constr)); + + /* If the constructor has fewer fields than the structure, + clear the whole structure first. 
*/ + + if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1) + { + bc_emit_instruction (dup); + bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr))); + bc_emit_instruction (clearBLK); + } + + + /* Store each element of the constructor into the corresponding + element of TARGET, determined by counting the elements. */ + + for (elt = CONSTRUCTOR_ELTS (constr), i = 0; + elt; + elt = TREE_CHAIN (elt), i++) + { + register enum machine_mode mode; + int bitsize; + int bitpos; + int unsignedp; + + mode = TYPE_MODE (elttype); + bitsize = GET_MODE_BITSIZE (mode); + unsignedp = TREE_UNSIGNED (elttype); + + bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)) + /* * TYPE_SIZE_UNIT (elttype) */ ); + + bc_store_field (elt, bitsize, bitpos, mode, + TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)), + /* The alignment of TARGET is + at least what its type requires. */ + VOIDmode, 0, + TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT, + int_size_in_bytes (TREE_TYPE (constr))); + } + + } +} - /* If this failed, we have to do this with set/compare/jump/set code. */ - if (target == 0 || GET_CODE (target) != REG - || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) - target = gen_reg_rtx (GET_MODE (target)); - emit_move_insn (target, invert ? const0_rtx : const1_rtx); - result = compare_from_rtx (op0, op1, code, unsignedp, - operand_mode, NULL_RTX, 0); - if (GET_CODE (result) == CONST_INT) - return (((result == const0_rtx && ! invert) - || (result != const0_rtx && invert)) - ? const0_rtx : const1_rtx); +/* Store the value of EXP (an expression tree) into member FIELD of + structure at address on stack, which has type TYPE, mode MODE and + occupies BITSIZE bits, starting BITPOS bits from the beginning of the + structure. - label = gen_label_rtx (); - if (bcc_gen_fctn[(int) code] == 0) - abort (); + ALIGN is the alignment that TARGET is known to have, measured in bytes. + TOTAL_SIZE is its size in bytes, or -1 if variable. 
*/ - emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); - emit_move_insn (target, invert ? const1_rtx : const0_rtx); - emit_label (label); +void +bc_store_field (field, bitsize, bitpos, mode, exp, type, + value_mode, unsignedp, align, total_size) + int bitsize, bitpos; + enum machine_mode mode; + tree field, exp, type; + enum machine_mode value_mode; + int unsignedp; + int align; + int total_size; +{ - return target; -} - -/* Generate a tablejump instruction (used for switch statements). */ + /* Expand expression and copy pointer */ + bc_expand_expr (exp); + bc_emit_instruction (over); -#ifdef HAVE_tablejump -/* INDEX is the value being switched on, with the lowest value - in the table already subtracted. - MODE is its expected mode (needed if INDEX is constant). - RANGE is the length of the jump table. - TABLE_LABEL is a CODE_LABEL rtx for the table itself. + /* If the component is a bit field, we cannot use addressing to access + it. Use bit-field techniques to store in it. */ - DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the - index value is out of range. */ + if (DECL_BIT_FIELD (field)) + { + bc_store_bit_field (bitpos, bitsize, unsignedp); + return; + } + else + /* Not bit field */ + { + HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT; + + /* Advance pointer to the desired member */ + if (offset) + bc_emit_instruction (addconstPSI, offset); + + /* Store */ + bc_store_memory (type, field); + } +} + +/* Store SI/SU in bitfield */ void -do_tablejump (index, mode, range, table_label, default_label) - rtx index, range, table_label, default_label; - enum machine_mode mode; +bc_store_bit_field (offset, size, unsignedp) + int offset, size, unsignedp; { - register rtx temp, vector; + /* Push bitfield offset and size */ + bc_push_offset_and_size (offset, size); - /* Do an unsigned comparison (in the proper mode) between the index - expression and the value which represents the length of the range. 
- Since we just finished subtracting the lower bound of the range - from the index expression, this comparison allows us to simultaneously - check that the original index expression value is both greater than - or equal to the minimum value of the range and less than or equal to - the maximum value of the range. */ + /* Store */ + bc_emit_instruction (sstoreBI); +} - emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0); - emit_jump_insn (gen_bltu (default_label)); - /* If index is in range, it must fit in Pmode. - Convert to Pmode so we can index with it. */ - if (mode != Pmode) - index = convert_to_mode (Pmode, index, 1); +/* Load SI/SU from bitfield */ +void +bc_load_bit_field (offset, size, unsignedp) + int offset, size, unsignedp; +{ + /* Push bitfield offset and size */ + bc_push_offset_and_size (offset, size); - /* Don't let a MEM slip thru, because then INDEX that comes - out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, - and break_out_memory_refs will go to work on it and mess it up. */ -#ifdef PIC_CASE_VECTOR_ADDRESS - if (flag_pic && GET_CODE (index) != REG) - index = copy_to_mode_reg (Pmode, index); -#endif + /* Load: sign-extend if signed, else zero-extend */ + bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI); +} - /* If flag_force_addr were to affect this address - it could interfere with the tricky assumptions made - about addresses that contain label-refs, - which may be valid only very near the tablejump itself. */ - /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the - GET_MODE_SIZE, because this indicates how large insns are. The other - uses should all be Pmode, because they are addresses. This code - could fail if addresses and insns are not the same size. 
*/ - index = gen_rtx (PLUS, Pmode, - gen_rtx (MULT, Pmode, index, - GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), - gen_rtx (LABEL_REF, Pmode, table_label)); -#ifdef PIC_CASE_VECTOR_ADDRESS - if (flag_pic) - index = PIC_CASE_VECTOR_ADDRESS (index); - else -#endif - index = memory_address_noforce (CASE_VECTOR_MODE, index); - temp = gen_reg_rtx (CASE_VECTOR_MODE); - vector = gen_rtx (MEM, CASE_VECTOR_MODE, index); - RTX_UNCHANGING_P (vector) = 1; - convert_move (temp, vector, 0); - emit_jump_insn (gen_tablejump (temp, table_label)); +/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS + (adjust stack pointer upwards), negative means add that number of + levels (adjust the stack pointer downwards). Only positive values + normally make sense. */ -#ifndef CASE_VECTOR_PC_RELATIVE - /* If we are generating PIC code or if the table is PC-relative, the - table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ - if (! flag_pic) - emit_barrier (); +void +bc_adjust_stack (nlevels) + int nlevels; +{ + switch (nlevels) + { + case 0: + break; + + case 2: + bc_emit_instruction (drop); + + case 1: + bc_emit_instruction (drop); + break; + + default: + + bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels); + stack_depth -= nlevels; + } + +#if defined (VALIDATE_STACK) + VALIDATE_STACK (); #endif } - -#endif /* HAVE_tablejump */ diff --git a/gcc/integrate.c b/gcc/integrate.c index a41e9b6df15..c5e04948476 100644 --- a/gcc/integrate.c +++ b/gcc/integrate.c @@ -32,6 +32,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. 
*/ #include "integrate.h" #include "real.h" #include "function.h" +#include "bytecode.h" #include "obstack.h" #define obstack_chunk_alloc xmalloc @@ -2850,9 +2851,16 @@ void output_inline_function (fndecl) tree fndecl; { - rtx head = DECL_SAVED_INSNS (fndecl); + rtx head; rtx last; + if (output_bytecode) + { + warning ("`inline' ignored for bytecode output"); + return; + } + + head = DECL_SAVED_INSNS (fndecl); current_function_decl = fndecl; /* This call is only used to initialize global variables. */ diff --git a/gcc/regclass.c b/gcc/regclass.c index 19603559724..47da72fc69c 100644 --- a/gcc/regclass.c +++ b/gcc/regclass.c @@ -32,6 +32,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #include "recog.h" #include "reload.h" #include "real.h" +#include "bytecode.h" #ifndef REGISTER_MOVE_COST #define REGISTER_MOVE_COST(x, y) 2 @@ -413,6 +414,13 @@ fix_register (name, fixed, call_used) { int i; + if (output_bytecode) + { + warning ("request to mark `%s' as %s ignored by bytecode compiler", + name, call_used ? "call-used" : "fixed"); + return; + } + /* Decode the name and update the primary form of the register info. */ diff --git a/gcc/rtl.h b/gcc/rtl.h index d6bfac837e5..945fbc8d22c 100644 --- a/gcc/rtl.h +++ b/gcc/rtl.h @@ -139,6 +139,22 @@ typedef struct rtx_def The number of operands and their types are controlled by the `code' field, according to rtl.def. */ rtunion fld[1]; + + /* The rest is used instead of the above if bytecode is being output */ + + /* For static or external objects. */ + char *label; + + /* From the named label, or the local variable pointer or the + argument pointer, depending on context. */ + + int offset; + + /* For goto labels inside bytecode functions. */ + struct bc_label *bc_label; + + /* A unique identifier */ + int uid; } *rtx; /* Add prototype support. 
*/ @@ -640,6 +656,7 @@ extern rtx gen_rtx PROTO((enum rtx_code, enum machine_mode, ...)); extern rtvec gen_rtvec PROTO((int, ...)); #else +extern rtx bc_gen_rtx (); extern rtx gen_rtx (); extern rtvec gen_rtvec (); #endif diff --git a/gcc/stmt.c b/gcc/stmt.c index c4e34c9b9c6..2b9ff9145dd 100644 --- a/gcc/stmt.c +++ b/gcc/stmt.c @@ -49,6 +49,13 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #include "obstack.h" #include "loop.h" #include "recog.h" +#include "machmode.h" + +#include "bytecode.h" +#include "bc-typecd.h" +#include "bc-opcode.h" +#include "bc-optab.h" +#include "bc-emit.h" #define obstack_chunk_alloc xmalloc #define obstack_chunk_free free @@ -181,13 +188,28 @@ static void emit_jump_if_reachable (); static int warn_if_unused_value (); static void expand_goto_internal (); +static void bc_expand_goto_internal (); static int expand_fixup (); +static void bc_expand_fixup (); void fixup_gotos (); +static void bc_fixup_gotos (); void free_temp_slots (); static void expand_cleanups (); static void expand_null_return_1 (); static int tail_recursion_args (); static void do_jump_if_equal (); +int bc_expand_exit_loop_if_false (); +void bc_expand_start_cond (); +void bc_expand_end_cond (); +void bc_expand_start_else (); +void bc_expand_end_bindings (); +void bc_expand_start_case (); +void bc_check_for_full_enumeration_handling (); +void bc_expand_end_case (); +void bc_expand_decl (); + +extern rtx bc_allocate_local (); +extern rtx bc_allocate_variable_array (); /* Stack of control and binding constructs we are currently inside. @@ -250,7 +272,8 @@ struct nesting /* Sequence number of this binding contour within the function, in order of entry. */ int block_start_count; - /* Nonzero => value to restore stack to on exit. */ + /* Nonzero => value to restore stack to on exit. Complemented by + bc_stack_level (see below) when generating bytecodes. */ rtx stack_level; /* The NOTE that starts this contour. 
Used by expand_goto to check whether the destination @@ -277,6 +300,8 @@ struct nesting struct label_chain *label_chain; /* Number of function calls seen, as of start of this block. */ int function_call_count; + /* Bytecode specific: stack level to restore stack to on exit. */ + int bc_stack_level; } block; /* For switch (C) or case (Pascal) statements, and also for dummies (see `expand_start_case_dummy'). */ @@ -285,6 +310,10 @@ struct nesting /* The insn after which the case dispatch should finally be emitted. Zero for a dummy. */ rtx start; + /* For bytecodes, the case table is in-lined right in the code. + A label is needed for skipping over this block. It is only + used when generating bytecodes. */ + rtx skip_label; /* A list of case labels, kept in ascending order by value as the list is built. During expand_end_case, this list may be rearranged into a @@ -425,6 +454,21 @@ struct goto_fixup time this goto was seen. The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */ tree cleanup_list_list; + + /* Bytecode specific members follow */ + + /* The label that this jump is jumping to, or 0 for break, continue + or return. 
*/ + struct bc_label *bc_target; + + /* The label we use for the fixup patch */ + struct bc_label *label; + + /* True (non-0) if fixup has been handled */ + int bc_handled:1; + + /* Like stack_level above, except refers to the interpreter stack */ + int bc_stack_level; }; static struct goto_fixup *goto_fixup_chain; @@ -514,11 +558,16 @@ restore_stmt_status (p) void emit_nop () { - rtx last_insn = get_last_insn (); - if (!optimize - && (GET_CODE (last_insn) == CODE_LABEL - || prev_real_insn (last_insn) == 0)) - emit_insn (gen_nop ()); + rtx last_insn; + + if (!output_bytecode) + { + last_insn = get_last_insn (); + if (!optimize + && (GET_CODE (last_insn) == CODE_LABEL + || prev_real_insn (last_insn) == 0)) + emit_insn (gen_nop ()); + } } /* Return the rtx-label that corresponds to a LABEL_DECL, @@ -555,9 +604,17 @@ void expand_computed_goto (exp) tree exp; { - rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0); - emit_queue (); - emit_indirect_jump (x); + if (output_bytecode) + { + bc_expand_expr (exp); + bc_emit_instruction (jumpP); + } + else + { + rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0); + emit_queue (); + emit_indirect_jump (x); + } } /* Handle goto statements and the labels that they can go to. */ @@ -579,6 +636,15 @@ expand_label (label) { struct label_chain *p; + if (output_bytecode) + { + if (! DECL_RTL (label)) + DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ()); + if (! bc_emit_bytecode_labeldef (DECL_RTL (label)->bc_label)) + error ("multiply defined label"); + return; + } + do_pending_stack_adjust (); emit_label (label_rtx (label)); if (DECL_NAME (label)) @@ -620,8 +686,16 @@ void expand_goto (label) tree label; { + tree context; + + if (output_bytecode) + { + expand_goto_internal (label, label_rtx (label), NULL_RTX); + return; + } + /* Check for a nonlocal goto to a containing function. 
*/ - tree context = decl_function_context (label); + context = decl_function_context (label); if (context != 0 && context != current_function_decl) { struct function *p = find_function_data (context); @@ -701,6 +775,16 @@ expand_goto_internal (body, label, last_insn) struct nesting *block; rtx stack_level = 0; + /* NOTICE! If a bytecode instruction other than `jump' is needed, + then the caller has to call bc_expand_goto_internal() + directly. This is rather an exceptional case, and there aren't + that many places where this is necessary. */ + if (output_bytecode) + { + expand_goto_internal (body, label, last_insn); + return; + } + if (GET_CODE (label) != CODE_LABEL) abort (); @@ -753,6 +837,77 @@ expand_goto_internal (body, label, last_insn) emit_jump (label); } +/* Generate a jump with OPCODE to the given bytecode LABEL which is + found within BODY. */ +static void +bc_expand_goto_internal (opcode, label, body) + enum bytecode_opcode opcode; + struct bc_label *label; + tree body; +{ + struct nesting *block; + int stack_level = -1; + + /* If the label is defined, adjust the stack as necessary. + If it's not defined, we have to push the reference on the + fixup list. */ + + if (label->defined) + { + + /* Find the innermost pending block that contains the label. + (Check containment by comparing bytecode uids.) Then restore the + outermost stack level within that block. */ + + for (block = block_stack; block; block = block->next) + { + if (block->data.block.first_insn->uid < label->uid) + break; + if (block->data.block.bc_stack_level) + stack_level = block->data.block.bc_stack_level; + + /* Execute the cleanups for blocks we are exiting. */ + if (block->data.block.cleanups != 0) + { + expand_cleanups (block->data.block.cleanups, NULL_TREE); + do_pending_stack_adjust (); + } + } + + /* Restore the stack level. If we need to adjust the stack, we + must do so after the jump, since the jump may depend on + what's on the stack. 
Thus, any stack-modifying conditional + jumps (these are the only ones that rely on what's on the + stack) go into the fixup list. */ + + if (stack_level >= 0 + && stack_depth != stack_level + && opcode != jump) + + bc_expand_fixup (opcode, label, stack_level); + else + { + if (stack_level >= 0) + bc_adjust_stack (stack_depth - stack_level); + + if (body && DECL_BIT_FIELD (body)) + error ("jump to `%s' invalidly jumps into binding contour", + IDENTIFIER_POINTER (DECL_NAME (body))); + + /* Emit immediate jump */ + bc_emit_bytecode (opcode); + bc_emit_bytecode_labelref (label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + } + } + else + /* Put goto in the fixup list */ + bc_expand_fixup (opcode, label, stack_level); +} + /* Generate if necessary a fixup for a goto whose target label in tree structure (if any) is TREE_LABEL and whose target in rtl is RTL_LABEL. @@ -884,6 +1039,37 @@ expand_fixup (tree_label, rtl_label, last_insn) return block != 0; } + +/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL. + Make the fixup restore the stack level to STACK_LEVEL. */ + +static void +bc_expand_fixup (opcode, label, stack_level) + enum bytecode_opcode opcode; + struct bc_label *label; + int stack_level; +{ + struct goto_fixup *fixup + = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup)); + + fixup->label = bc_get_bytecode_label (); + fixup->bc_target = label; + fixup->bc_stack_level = stack_level; + fixup->bc_handled = FALSE; + + fixup->next = goto_fixup_chain; + goto_fixup_chain = fixup; + + /* Insert a jump to the fixup code */ + bc_emit_bytecode (opcode); + bc_emit_bytecode_labelref (fixup->label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif +} + + /* When exiting a binding contour, process all pending gotos requiring fixups. THISBLOCK is the structure that describes the block being exited. STACK_LEVEL is the rtx for the stack level to restore exiting this contour. 
@@ -907,6 +1093,12 @@ fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in) { register struct goto_fixup *f, *prev; + if (output_bytecode) + { + bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in); + return; + } + /* F is the fixup we are considering; PREV is the previous one. */ /* We run this loop in two passes so that cleanups of exited blocks are run first, and blocks that are exited are marked so @@ -1039,6 +1231,72 @@ fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in) f->stack_level = stack_level; } } + + +/* When exiting a binding contour, process all pending gotos requiring fixups. + Note: STACK_DEPTH is not altered. + + The arguments are currently not used in the bytecode compiler, but we may need + them one day for languages other than C. + + THISBLOCK is the structure that describes the block being exited. + STACK_LEVEL is the rtx for the stack level to restore exiting this contour. + CLEANUP_LIST is a list of expressions to evaluate on exiting this contour. + FIRST_INSN is the insn that began this contour. + + Gotos that jump out of this contour must restore the + stack level and do the cleanups before actually jumping. + + DONT_JUMP_IN nonzero means report error there is a jump into this + contour from before the beginning of the contour. + This is also done if STACK_LEVEL is nonzero. */ + +static void +bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in) + struct nesting *thisblock; + int stack_level; + tree cleanup_list; + rtx first_insn; + int dont_jump_in; +{ + register struct goto_fixup *f, *prev; + int saved_stack_depth; + + /* F is the fixup we are considering; PREV is the previous one. */ + + for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next) + { + /* Test for a fixup that is inactive because it is already handled. */ + if (f->before_jump == 0) + { + /* Delete inactive fixup from the chain, if that is easy to do. 
*/ + if (prev) + prev->next = f->next; + } + + /* Emit code to restore the stack and continue */ + bc_emit_bytecode_labeldef (f->label); + + /* Save stack_depth across call, since bc_adjust_stack () will alter + the perceived stack depth via the instructions generated. */ + + if (f->bc_stack_level >= 0) + { + saved_stack_depth = stack_depth; + bc_adjust_stack (stack_depth - f->bc_stack_level); + stack_depth = saved_stack_depth; + } + + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (f->bc_target); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + } + + goto_fixup_chain = NULL; +} /* Generate RTL for an asm statement (explicit assembler code). BODY is a STRING_CST node containing the assembler code text, @@ -1048,6 +1306,12 @@ void expand_asm (body) tree body; { + if (output_bytecode) + { + error ("`asm' is illegal when generating bytecode"); + return; + } + if (TREE_CODE (body) == ADDR_EXPR) body = TREE_OPERAND (body, 0); @@ -1090,6 +1354,12 @@ expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line) /* The insn we have emitted. */ rtx insn; + if (output_bytecode) + { + error ("`asm' is illegal when generating bytecode"); + return; + } + /* Count the number of meaningful clobbered registers, ignoring what we would ignore later. */ nclobbers = 0; @@ -1310,6 +1580,22 @@ void expand_expr_stmt (exp) tree exp; { + if (output_bytecode) + { + int org_stack_depth = stack_depth; + + bc_expand_expr (exp); + + /* Restore stack depth */ + if (stack_depth < org_stack_depth) + abort (); + + bc_emit_instruction (drop); + + last_expr_type = TREE_TYPE (exp); + return; + } + /* If -W, warn about statements with no side effects, except for an explicit cast to void (e.g. for assert()), and except inside a ({...}) where they may be useful. 
*/ @@ -1459,10 +1745,17 @@ clear_last_expr () tree expand_start_stmt_expr () { + int momentary; + tree t; + + /* When generating bytecode just note down the stack depth */ + if (output_bytecode) + return (build_int_2 (stack_depth, 0)); + /* Make the RTL_EXPR node temporary, not momentary, so that rtl_expr_chain doesn't become garbage. */ - int momentary = suspend_momentary (); - tree t = make_node (RTL_EXPR); + momentary = suspend_momentary (); + t = make_node (RTL_EXPR); resume_momentary (momentary); start_sequence (); NO_DEFER_POP; @@ -1486,6 +1779,38 @@ tree expand_end_stmt_expr (t) tree t; { + if (output_bytecode) + { + int i; + tree t; + + + /* At this point, all expressions have been evaluated in order. + However, all expression values have been popped when evaluated, + which means we have to recover the last expression value. This is + the last value removed by means of a `drop' instruction. Instead + of adding code to inhibit dropping the last expression value, it + is here recovered by undoing the `drop'. Since `drop' is + equivalent to `adjstackSI [1]', it can be undone with `adjstackSI + [-1]'. */ + + bc_adjust_stack (-1); + + if (!last_expr_type) + last_expr_type = void_type_node; + + t = make_node (RTL_EXPR); + TREE_TYPE (t) = last_expr_type; + RTL_EXPR_RTL (t) = NULL; + RTL_EXPR_SEQUENCE (t) = NULL; + + /* Don't consider deleting this expr or containing exprs at tree level. 
*/ + TREE_THIS_VOLATILE (t) = 1; + + last_expr_type = 0; + return t; + } + OK_DEFER_POP; if (last_expr_type == 0) @@ -1849,7 +2174,10 @@ expand_start_cond (cond, exitflag) cond_stack = thiscond; nesting_stack = thiscond; - do_jump (cond, thiscond->data.cond.next_label, NULL_RTX); + if (output_bytecode) + bc_expand_start_cond (cond, exitflag); + else + do_jump (cond, thiscond->data.cond.next_label, NULL_RTX); } /* Generate RTL between then-clause and the elseif-clause @@ -1875,6 +2203,13 @@ expand_start_else () { if (cond_stack->data.cond.endif_label == 0) cond_stack->data.cond.endif_label = gen_label_rtx (); + + if (output_bytecode) + { + bc_expand_start_else (); + return; + } + emit_jump (cond_stack->data.cond.endif_label); emit_label (cond_stack->data.cond.next_label); cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */ @@ -1888,15 +2223,71 @@ expand_end_cond () { struct nesting *thiscond = cond_stack; - do_pending_stack_adjust (); - if (thiscond->data.cond.next_label) - emit_label (thiscond->data.cond.next_label); - if (thiscond->data.cond.endif_label) - emit_label (thiscond->data.cond.endif_label); + if (output_bytecode) + bc_expand_end_cond (); + else + { + do_pending_stack_adjust (); + if (thiscond->data.cond.next_label) + emit_label (thiscond->data.cond.next_label); + if (thiscond->data.cond.endif_label) + emit_label (thiscond->data.cond.endif_label); + } POPSTACK (cond_stack); last_expr_type = 0; } + + +/* Generate code for the start of an if-then. COND is the expression + whose truth is to be tested; if EXITFLAG is nonzero this conditional + is to be visible to exit_something. It is assumed that the caller + has pushed the previous context on the cond stack. 
*/ +void +bc_expand_start_cond (cond, exitflag) + tree cond; + int exitflag; +{ + struct nesting *thiscond = cond_stack; + + thiscond->data.case_stmt.nominal_type = cond; + bc_expand_expr (cond); + bc_emit_bytecode (jumpifnot); + bc_emit_bytecode_labelref (thiscond->exit_label->bc_label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif +} + +/* Generate the label for the end of an if with + no else- clause. */ +void +bc_expand_end_cond () +{ + struct nesting *thiscond = cond_stack; + + bc_emit_bytecode_labeldef (thiscond->exit_label->bc_label); +} + +/* Generate code for the start of the else- clause of + an if-then-else. */ +void +bc_expand_start_else () +{ + struct nesting *thiscond = cond_stack; + + thiscond->data.cond.endif_label = thiscond->exit_label; + thiscond->exit_label = gen_label_rtx (); + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (thiscond->exit_label->bc_label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + bc_emit_bytecode_labeldef (thiscond->data.cond.endif_label->bc_label); +} /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this loop should be exited by `exit_something'. This is a loop for which @@ -1923,6 +2314,12 @@ expand_start_loop (exit_flag) loop_stack = thisloop; nesting_stack = thisloop; + if (output_bytecode) + { + bc_emit_bytecode_labeldef (thisloop->data.loop.start_label->bc_label); + return thisloop; + } + do_pending_stack_adjust (); emit_queue (); emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG); @@ -1951,21 +2348,54 @@ expand_start_loop_continue_elsewhere (exit_flag) void expand_loop_continue_here () { + if (output_bytecode) + { + bc_emit_bytecode_labeldef (loop_stack->data.loop.continue_label->bc_label); + return; + } do_pending_stack_adjust (); emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT); emit_label (loop_stack->data.loop.continue_label); } +/* End a loop. 
*/ +static void +bc_expand_end_loop () +{ + struct nesting *thisloop = loop_stack; + + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (thisloop->data.loop.start_label->bc_label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + bc_emit_bytecode_labeldef (thisloop->exit_label->bc_label); + POPSTACK (loop_stack); + last_expr_type = 0; +} + + /* Finish a loop. Generate a jump back to the top and the loop-exit label. Pop the block off of loop_stack. */ void expand_end_loop () { - register rtx insn = get_last_insn (); - register rtx start_label = loop_stack->data.loop.start_label; + register rtx insn; + register rtx start_label; rtx last_test_insn = 0; int num_insns = 0; + + if (output_bytecode) + { + bc_expand_end_loop (); + return; + } + + insn = get_last_insn (); + start_label = loop_stack->data.loop.start_label; /* Mark the continue-point at the top of the loop if none elsewhere. */ if (start_label == loop_stack->data.loop.continue_label) @@ -2113,7 +2543,15 @@ expand_exit_loop_if_false (whichloop, cond) whichloop = loop_stack; if (whichloop == 0) return 0; - do_jump (cond, whichloop->data.loop.end_label, NULL_RTX); + if (output_bytecode) + { + bc_expand_expr (cond); + bc_expand_goto_internal (jumpifnot, + whichloop->exit_label->bc_label, NULL_RTX); + } + else + do_jump (cond, whichloop->data.loop.end_label, NULL_RTX); + return 1; } @@ -2176,6 +2614,12 @@ expand_null_return () struct nesting *block = block_stack; rtx last_insn = 0; + if (output_bytecode) + { + bc_emit_instruction (ret); + return; + } + /* Does any pending block have cleanups? */ while (block && block->data.block.cleanups == 0) @@ -2298,6 +2742,15 @@ expand_return (retval) int cleanups; struct nesting *block; + /* Bytecode returns are quite simple, just leave the result on the + arithmetic stack. */ + if (output_bytecode) + { + bc_expand_expr (retval); + bc_emit_instruction (ret); + return; + } + /* If function wants no value, give it none. 
*/ if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE) { @@ -2536,8 +2989,10 @@ expand_start_bindings (exit_flag) int exit_flag; { struct nesting *thisblock = ALLOC_NESTING (); + rtx note; - rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG); + if (!output_bytecode) + note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG); /* Make an entry on block_stack for the block we are entering. */ @@ -2580,8 +3035,11 @@ expand_start_bindings (exit_flag) block_stack = thisblock; nesting_stack = thisblock; - /* Make a new level for allocating stack slots. */ - push_temp_slots (); + if (!output_bytecode) + { + /* Make a new level for allocating stack slots. */ + push_temp_slots (); + } } /* Given a pointer to a BLOCK node, save a pointer to the most recently @@ -2614,6 +3072,12 @@ expand_end_bindings (vars, mark_ends, dont_jump_in) register struct nesting *thisblock = block_stack; register tree decl; + if (output_bytecode) + { + bc_expand_end_bindings (vars, mark_ends, dont_jump_in); + return; + } + if (warn_unused) for (decl = vars; decl; decl = TREE_CHAIN (decl)) if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL @@ -2830,6 +3294,35 @@ expand_end_bindings (vars, mark_ends, dont_jump_in) /* Pop the stack slot nesting and free any slots at this level. */ pop_temp_slots (); } + + +/* End a binding contour. + VARS is the chain of VAR_DECL nodes for the variables bound + in this contour. MARK_ENDS is nonzer if we should put a note + at the beginning and end of this binding contour. + DONT_JUMP_IN is nonzero if it is not valid to jump into this + contour. */ + +void +bc_expand_end_bindings (vars, mark_ends, dont_jump_in) + tree vars; + int mark_ends; + int dont_jump_in; +{ + struct nesting *thisbind = nesting_stack; + tree decl; + + if (warn_unused) + for (decl = vars; decl; decl = TREE_CHAIN (decl)) + if (! 
TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL) + warning_with_decl (decl, "unused variable `%s'"); + + bc_emit_bytecode_labeldef (thisbind->exit_label->bc_label); + + /* Pop block/bindings off stack */ + POPSTACK (nesting_stack); + POPSTACK (block_stack); +} /* Generate RTL for the automatic variable declaration DECL. (Other kinds of declarations are simply ignored if seen here.) @@ -2854,7 +3347,15 @@ expand_decl (decl) register tree decl; { struct nesting *thisblock = block_stack; - tree type = TREE_TYPE (decl); + tree type; + + if (output_bytecode) + { + bc_expand_decl (decl, 0); + return; + } + + type = TREE_TYPE (decl); /* Only automatic variables need any expansion done. Static and external variables, and external functions, @@ -3046,6 +3547,52 @@ expand_decl (decl) if (obey_regdecls) use_variable (DECL_RTL (decl)); } + + +/* Generate code for the automatic variable declaration DECL. For + most variables this just means we give it a stack offset. The + compiler sometimes emits cleanups without variables and we will + have to deal with those too. */ + +void +bc_expand_decl (decl, cleanup) + tree decl; + tree cleanup; +{ + tree type; + + if (!decl) + { + /* A cleanup with no variable. */ + if (!cleanup) + abort (); + + return; + } + + /* Only auto variables need any work. */ + if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl)) + return; + + type = TREE_TYPE (decl); + + if (type == error_mark_node) + DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0); + + else if (DECL_SIZE (decl) == 0) + + /* Variable with incomplete type. The stack offset herein will be + fixed later in expand_decl_init (). 
*/ + DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0); + + else if (TREE_CONSTANT (DECL_SIZE (decl))) + { + DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT, + DECL_ALIGN (decl)); + } + else + DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl)); +} /* Emit code to perform the initialization of a declaration DECL. */ @@ -3083,6 +3630,82 @@ expand_decl_init (decl) free_temp_slots (); } +/* Expand initialization for variable-sized types. Allocate array + using newlocalSI and set local variable, which is a pointer to the + storage. */ + +bc_expand_variable_local_init (decl) + tree decl; +{ + /* Evaluate size expression and coerce to SI */ + bc_expand_expr (DECL_SIZE (decl)); + + /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so + no coercion is necessary (?) */ + +/* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)), + TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */ + + /* Emit code to allocate array */ + bc_emit_instruction (newlocalSI); + + /* Store array pointer in local variable. This is the only instance + where we actually want the address of the pointer to the + variable-size block, rather than the pointer itself. We avoid + using expand_address() since that would cause the pointer to be + pushed rather than its address. Hence the hard-coded reference; + notice also that the variable is always local (no global + variable-size type variables). */ + + bc_load_localaddr (DECL_RTL (decl)); + bc_emit_instruction (storeP); +} + + +/* Emit code to initialize a declaration. */ +void +bc_expand_decl_init (decl) + tree decl; +{ + int org_stack_depth; + + /* Statical initializers are handled elsewhere */ + + if (TREE_STATIC (decl)) + return; + + /* Memory original stack depth */ + org_stack_depth = stack_depth; + + /* If the type is variable-size, we first create its space (we ASSUME + it CAN'T be static). 
We do this regardless of whether there's an + initializer assignment or not. */ + + if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) + bc_expand_variable_local_init (decl); + + /* Expand initializer assignment */ + if (DECL_INITIAL (decl) == error_mark_node) + { + enum tree_code code = TREE_CODE (TREE_TYPE (decl)); + + if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE + || code == POINTER_TYPE) + + expand_assignment (TREE_TYPE (decl), decl, + convert (TREE_TYPE (decl), integer_zero_node)); + } + else if (DECL_INITIAL (decl)) + expand_assignment (TREE_TYPE (decl), decl, DECL_INITIAL (decl)); + + /* Restore stack depth */ + if (org_stack_depth > stack_depth) + abort (); + + bc_adjust_stack (stack_depth - org_stack_depth); +} + + /* CLEANUP is an expression to be executed at exit from this binding contour; for example, in C++, it might call the destructor for this variable. @@ -3301,6 +3924,12 @@ expand_start_case (exit_flag, expr, type, printname) case_stack = thiscase; nesting_stack = thiscase; + if (output_bytecode) + { + bc_expand_start_case (thiscase, expr, type, printname); + return; + } + do_pending_stack_adjust (); /* Make sure case_stmt.start points to something that won't @@ -3311,6 +3940,32 @@ expand_start_case (exit_flag, expr, type, printname) thiscase->data.case_stmt.start = get_last_insn (); } + +/* Enter a case statement. It is assumed that the caller has pushed + the current context onto the case stack. */ +void +bc_expand_start_case (thiscase, expr, type, printname) + struct nesting *thiscase; + tree expr; + tree type; + char *printname; +{ + bc_expand_expr (expr); + bc_expand_conversion (TREE_TYPE (expr), type); + + /* For cases, the skip is a place we jump to that's emitted after + the size of the jump table is known. 
*/ + + thiscase->data.case_stmt.skip_label = gen_label_rtx (); + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (thiscase->data.case_stmt.skip_label->bc_label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif +} + + /* Start a "dummy case statement" within which case labels are invalid and are not connected to any larger real case statement. This can be used if you don't want to let a case statement jump @@ -3382,6 +4037,9 @@ pushcase (value, converter, label, duplicate) tree index_type; tree nominal_type; + if (output_bytecode) + return bc_pushcase (value, label); + /* Fail if not inside a real case statement. */ if (! (case_stack && case_stack->data.case_stmt.start)) return 1; @@ -3588,6 +4246,62 @@ pushcase_range (value1, value2, converter, label, duplicate) return 0; } + + +/* Accumulate one case or default label; VALUE is the value of the + case, or nil for a default label. If not currently inside a case, + return 1 and do nothing. If VALUE is a duplicate or overlaps, return + 2 and do nothing. If VALUE is out of range, return 3 and do nothing. + Return 0 on success. This function is a leftover from the earlier + bytecode compiler, which was based on gcc 1.37. It should be + merged into pushcase. */ + +int +bc_pushcase (value, label) + tree value; + tree label; +{ + struct nesting *thiscase = case_stack; + struct case_node *case_label, *new_label; + + if (! thiscase) + return 1; + + /* Fail if duplicate, overlap, or out of type range. */ + if (value) + { + value = convert (thiscase->data.case_stmt.nominal_type, value); + if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type)) + return 3; + + for (case_label = thiscase->data.case_stmt.case_list; + case_label->left; case_label = case_label->left) + if (! tree_int_cst_lt (case_label->left->high, value)) + break; + + if (case_label != thiscase->data.case_stmt.case_list + && ! tree_int_cst_lt (case_label->high, value) + || case_label->left && ! 
tree_int_cst_lt (value, case_label->left->low)) + return 2; + + new_label = (struct case_node *) oballoc (sizeof (struct case_node)); + new_label->low = new_label->high = copy_node (value); + new_label->code_label = label; + new_label->left = case_label->left; + + case_label->left = new_label; + thiscase->data.case_stmt.num_ranges++; + } + else + { + if (thiscase->data.case_stmt.default_label) + return 2; + thiscase->data.case_stmt.default_label = label; + } + + expand_label (label); + return 0; +} /* Called when the index of a switch statement is an enumerated type and there is no default label. @@ -3609,6 +4323,12 @@ check_for_full_enumeration_handling (type) register tree chain; int all_values = 1; + if (output_bytecode) + { + bc_check_for_full_enumeration_handling (type); + return; + } + /* The time complexity of this loop is currently O(N * M), with N being the number of members in the enumerated type, and M being the number of case expressions in the switch. */ @@ -3707,6 +4427,46 @@ check_for_full_enumeration_handling (type) } #endif /* 0 */ } + + +/* Check that all enumeration literals are covered by the case + expressions of a switch. Also warn if there are any cases + that are not elements of the enumerated type. */ +void +bc_check_for_full_enumeration_handling (type) + tree type; +{ + struct nesting *thiscase = case_stack; + struct case_node *c; + tree e; + + /* Check for enums not handled. */ + for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e)) + { + for (c = thiscase->data.case_stmt.case_list->left; + c && tree_int_cst_lt (c->high, TREE_VALUE (e)); + c = c->left) + ; + if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e)))) + warning ("enumerated value `%s' not handled in switch", + IDENTIFIER_POINTER (TREE_PURPOSE (e))); + } + + /* Check for cases not in the enumeration. 
*/ + for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left) + { + for (e = TYPE_VALUES (type); + e && !tree_int_cst_equal (c->low, TREE_VALUE (e)); + e = TREE_CHAIN (e)) + ; + if (! e) + warning ("case value `%d' not in enumerated type `%s'", + TREE_INT_CST_LOW (c->low), + IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE + ? TYPE_NAME (type) + : DECL_NAME (TYPE_NAME (type)))); + } +} /* Terminate a case (Pascal) or switch (C) statement in which ORIG_INDEX is the expression to be tested. @@ -3721,14 +4481,24 @@ expand_end_case (orig_index) register struct case_node *n; int count; rtx index; - rtx table_label = gen_label_rtx (); + rtx table_label; int ncases; rtx *labelvec; register int i; rtx before_case; register struct nesting *thiscase = case_stack; - tree index_expr = thiscase->data.case_stmt.index_expr; - int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr)); + tree index_expr; + int unsignedp; + + if (output_bytecode) + { + bc_expand_end_case (orig_index); + return; + } + + table_label = gen_label_rtx (); + index_expr = thiscase->data.case_stmt.index_expr; + unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr)); do_pending_stack_adjust (); @@ -4069,6 +4839,110 @@ expand_end_case (orig_index) free_temp_slots (); } + +/* Terminate a case statement. EXPR is the original index + expression. */ +void +bc_expand_end_case (expr) + tree expr; +{ + struct nesting *thiscase = case_stack; + enum bytecode_opcode opcode; + struct bc_label *jump_label; + struct case_node *c; + + bc_emit_bytecode (jump); + bc_emit_bytecode_labelref (thiscase->exit_label->bc_label); + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + /* Now that the size of the jump table is known, emit the actual + indexed jump instruction. */ + bc_emit_bytecode_labeldef (thiscase->data.case_stmt.skip_label->bc_label); + + opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode + ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? 
caseSU : caseSI + : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI; + + bc_emit_bytecode (opcode); + + /* Now emit the case instructions literal arguments, in order. + In addition to the value on the stack, it uses: + 1. The address of the jump table. + 2. The size of the jump table. + 3. The default label. */ + + jump_label = bc_get_bytecode_label (); + bc_emit_bytecode_labelref (jump_label); + bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges, + sizeof thiscase->data.case_stmt.num_ranges); + + if (thiscase->data.case_stmt.default_label) + bc_emit_bytecode_labelref (DECL_RTL (thiscase-> + data.case_stmt.default_label)->bc_label); + else + bc_emit_bytecode_labelref (thiscase->exit_label->bc_label); + + /* Output the jump table. */ + + bc_align_bytecode (3 /* PTR_ALIGN */); + bc_emit_bytecode_labeldef (jump_label); + + if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode) + for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left) + { + opcode = TREE_INT_CST_LOW (c->low); + bc_emit_bytecode_const ((char *) &opcode, sizeof opcode); + + opcode = TREE_INT_CST_LOW (c->high); + bc_emit_bytecode_const ((char *) &opcode, sizeof opcode); + + bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label); + } + else + if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode) + for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left) + { + bc_emit_bytecode_DI_const (c->low); + bc_emit_bytecode_DI_const (c->high); + + bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label); + } + else + /* Bad mode */ + abort (); + + + bc_emit_bytecode_labeldef (thiscase->exit_label->bc_label); + + /* Possibly issue enumeration warnings. 
*/ + + if (!thiscase->data.case_stmt.default_label + && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE + && TREE_CODE (expr) != INTEGER_CST + && warn_switch) + check_for_full_enumeration_handling (TREE_TYPE (expr)); + + +#ifdef DEBUG_PRINT_CODE + fputc ('\n', stderr); +#endif + + POPSTACK (case_stack); +} + + +/* Return unique bytecode ID. */ +int +bc_new_uid () +{ + static int bc_uid = 0; + + return (++bc_uid); +} + /* Generate code to jump to LABEL if OP1 and OP2 are equal. */ static void diff --git a/gcc/toplev.c b/gcc/toplev.c index 498610f84ae..94e428000a8 100644 --- a/gcc/toplev.c +++ b/gcc/toplev.c @@ -57,6 +57,9 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ #ifdef XCOFF_DEBUGGING_INFO #include "xcoffout.h" #endif + +#include "bytecode.h" +#include "bc-emit.h" #ifdef VMS /* The extra parameters substantially improve the I/O performance. */ @@ -211,6 +214,9 @@ int errorcount = 0; int warningcount = 0; int sorrycount = 0; +/* Flag to output bytecode instead of native assembler */ +int output_bytecode = 0; + /* Pointer to function to compute the name to use to print a declaration. */ char *(*decl_printable_name) (); @@ -515,6 +521,7 @@ struct { char *string; int *variable; int on_value;} f_options[] = {"inhibit-size-directive", &flag_inhibit_size_directive, 1}, {"verbose-asm", &flag_verbose_asm, 1}, {"gnu-linker", &flag_gnu_linker, 1} + {"bytecode", &output_bytecode, 1} }; /* Table of language-specific options. 
*/ @@ -885,11 +892,14 @@ void fatal_insn_not_found (insn) rtx insn; { - if (INSN_CODE (insn) < 0) - error ("internal error--unrecognizable insn:", 0); - else - error ("internal error--insn does not satisfy its constraints:", 0); - debug_rtx (insn); + if (!output_bytecode) + { + if (INSN_CODE (insn) < 0) + error ("internal error--unrecognizable insn:", 0); + else + error ("internal error--insn does not satisfy its constraints:", 0); + debug_rtx (insn); + } if (asm_out_file) fflush (asm_out_file); if (aux_info_file) @@ -1585,6 +1595,8 @@ compile_file (name) init_obstacks (); init_tree_codes (); init_lex (); + /* Some of these really don't need to be called when generating bytecode, + but the options would have to be parsed first to know that. -bson */ init_rtl (); init_emit_once (debug_info_level == DINFO_LEVEL_NORMAL || debug_info_level == DINFO_LEVEL_VERBOSE); @@ -1813,34 +1825,51 @@ compile_file (name) input_file_stack->next = 0; input_file_stack->name = input_filename; - ASM_FILE_START (asm_out_file); + if (!output_bytecode) + { + ASM_FILE_START (asm_out_file); + } - /* Output something to inform GDB that this compilation was by GCC. */ + /* Output something to inform GDB that this compilation was by GCC. Also + serves to tell GDB file consists of bytecodes. */ + if (output_bytecode) + fprintf (asm_out_file, "bc_gcc2_compiled.:\n"); + else + { #ifndef ASM_IDENTIFY_GCC - fprintf (asm_out_file, "gcc2_compiled.:\n"); + fprintf (asm_out_file, "gcc2_compiled.:\n"); #else - ASM_IDENTIFY_GCC (asm_out_file); + ASM_IDENTIFY_GCC (asm_out_file); #endif + } /* Output something to identify which front-end produced this file. */ #ifdef ASM_IDENTIFY_LANGUAGE ASM_IDENTIFY_LANGUAGE (asm_out_file); #endif -/* ??? Note: There used to be a conditional here - to call assemble_zeros without fail if DBX_DEBUGGING_INFO is defined. - This was to guarantee separation between gcc_compiled. and - the first function, for the sake of dbx on Suns. 
- However, having the extra zero here confused the Emacs - code for unexec, and might confuse other programs too. - Therefore, I took out that change. - In future versions we should find another way to solve - that dbx problem. -- rms, 23 May 93. */ - - /* Don't let the first function fall at the same address - as gcc_compiled., if profiling. */ - if (profile_flag || profile_block_flag) - assemble_zeros (UNITS_PER_WORD); + if (output_bytecode) + { + if (profile_flag || profile_block_flag) + error ("profiling not supported in bytecode compilation"); + } + else + { + /* ??? Note: There used to be a conditional here + to call assemble_zeros without fail if DBX_DEBUGGING_INFO is defined. + This was to guarantee separation between gcc_compiled. and + the first function, for the sake of dbx on Suns. + However, having the extra zero here confused the Emacs + code for unexec, and might confuse other programs too. + Therefore, I took out that change. + In future versions we should find another way to solve + that dbx problem. -- rms, 23 May 93. */ + + /* Don't let the first function fall at the same address + as gcc_compiled., if profiling. */ + if (profile_flag || profile_block_flag) + assemble_zeros (UNITS_PER_WORD); + } /* If dbx symbol table desired, initialize writing it and output the predefined types. */ @@ -1861,7 +1890,8 @@ compile_file (name) /* Initialize yet another pass. */ - init_final (main_input_filename); + if (!output_bytecode) + init_final (main_input_filename); start_time = get_run_time (); @@ -2031,11 +2061,14 @@ compile_file (name) /* Output some stuff at end of file if nec. 
*/ - end_final (main_input_filename); + if (!output_bytecode) + { + end_final (main_input_filename); #ifdef ASM_FILE_END - ASM_FILE_END (asm_out_file); + ASM_FILE_END (asm_out_file); #endif + } after_finish_compilation: @@ -2113,24 +2146,28 @@ compile_file (name) { fprintf (stderr,"\n"); print_time ("parse", parse_time); - print_time ("integration", integration_time); - print_time ("jump", jump_time); - print_time ("cse", cse_time); - print_time ("loop", loop_time); - print_time ("cse2", cse2_time); - print_time ("flow", flow_time); - print_time ("combine", combine_time); - print_time ("sched", sched_time); - print_time ("local-alloc", local_alloc_time); - print_time ("global-alloc", global_alloc_time); - print_time ("sched2", sched2_time); - print_time ("dbranch", dbr_sched_time); - print_time ("shorten-branch", shorten_branch_time); - print_time ("stack-reg", stack_reg_time); - print_time ("final", final_time); - print_time ("varconst", varconst_time); - print_time ("symout", symout_time); - print_time ("dump", dump_time); + + if (!output_bytecode) + { + print_time ("integration", integration_time); + print_time ("jump", jump_time); + print_time ("cse", cse_time); + print_time ("loop", loop_time); + print_time ("cse2", cse2_time); + print_time ("flow", flow_time); + print_time ("combine", combine_time); + print_time ("sched", sched_time); + print_time ("local-alloc", local_alloc_time); + print_time ("global-alloc", global_alloc_time); + print_time ("sched2", sched2_time); + print_time ("dbranch", dbr_sched_time); + print_time ("shorten-branch", shorten_branch_time); + print_time ("stack-reg", stack_reg_time); + print_time ("final", final_time); + print_time ("varconst", varconst_time); + print_time ("symout", symout_time); + print_time ("dump", dump_time); + } } } @@ -2236,6 +2273,9 @@ rest_of_compilation (decl) tree saved_arguments = 0; int failure = 0; + if (output_bytecode) + return; + /* If we are reconsidering an inline function at the end of compilation, 
skip the stuff for making it inline. */ @@ -3166,7 +3206,12 @@ main (argc, argv, envp) error ("Invalid option `%s'", argv[i]); } else if (!strcmp (str, "p")) - profile_flag = 1; + { + if (!output_bytecode) + profile_flag = 1; + else + error ("profiling not supported in bytecode compilation"); + } else if (!strcmp (str, "a")) { #if !defined (BLOCK_PROFILER) || !defined (FUNCTION_BLOCK_PROFILER) @@ -3325,6 +3370,18 @@ You Lose! You must define PREFERRED_DEBUGGING_TYPE! filename = argv[i]; } + /* Initialize for bytecode output. A good idea to do this as soon as + possible after the "-f" options have been parsed. */ + if (output_bytecode) + { +#ifndef TARGET_SUPPORTS_BYTECODE + /* Just die with a fatal error if not supported */ + fatal ("-fbytecode can not be used for this target"); +#else + bc_initialize (); +#endif + } + if (optimize == 0) { /* Inlining does not work if not optimizing, @@ -3398,10 +3455,14 @@ You Lose! You must define PREFERRED_DEBUGGING_TYPE! } /* Now that register usage is specified, convert it to HARD_REG_SETs. */ - init_reg_sets_1 (); + if (!output_bytecode) + init_reg_sets_1 (); compile_file (filename); + if (output_bytecode) + bc_write_file (stdout); + #ifndef OS2 #ifndef VMS if (flag_print_mem) diff --git a/gcc/varasm.c b/gcc/varasm.c index 66128bb9bef..7d16f36a2a3 100644 --- a/gcc/varasm.c +++ b/gcc/varasm.c @@ -38,6 +38,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. 
*/ #include "regs.h" #include "defaults.h" #include "real.h" +#include "bytecode.h" #include "obstack.h" @@ -96,9 +97,11 @@ void assemble_name (); int output_addressed_constants (); void output_constant (); void output_constructor (); +void output_byte_asm (); void text_section (); void readonly_data_section (); void data_section (); +static void bc_assemble_integer (); #ifdef EXTRA_SECTIONS static enum in_section {no_section, in_text, in_data, EXTRA_SECTIONS} in_section @@ -120,7 +123,11 @@ text_section () { if (in_section != in_text) { - fprintf (asm_out_file, "%s\n", TEXT_SECTION_ASM_OP); + if (output_bytecode) + bc_text (); + else + fprintf (asm_out_file, "%s\n", TEXT_SECTION_ASM_OP); + in_section = in_text; } } @@ -132,16 +139,21 @@ data_section () { if (in_section != in_data) { - if (flag_shared_data) + if (output_bytecode) + bc_data (); + else { + if (flag_shared_data) + { #ifdef SHARED_SECTION_ASM_OP - fprintf (asm_out_file, "%s\n", SHARED_SECTION_ASM_OP); + fprintf (asm_out_file, "%s\n", SHARED_SECTION_ASM_OP); #else - fprintf (asm_out_file, "%s\n", DATA_SECTION_ASM_OP); + fprintf (asm_out_file, "%s\n", DATA_SECTION_ASM_OP); #endif + } + else + fprintf (asm_out_file, "%s\n", DATA_SECTION_ASM_OP); } - else - fprintf (asm_out_file, "%s\n", DATA_SECTION_ASM_OP); in_section = in_data; } @@ -178,6 +190,16 @@ make_function_rtl (decl) { char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); + if (output_bytecode) + { + if (DECL_RTL (decl) == 0) + DECL_RTL (decl) = bc_gen_rtx (name, 0, (struct bc_label *) 0); + + /* Record that at least one function has been defined. */ + function_defined = 1; + return; + } + /* Rename a nested function to avoid conflicts. */ if (decl_function_context (decl) != 0 && DECL_INITIAL (decl) != 0 @@ -211,6 +233,48 @@ make_function_rtl (decl) function_defined = 1; } +/* Create the DECL_RTL for a declaration for a static or external + variable or static or external function. 
+ ASMSPEC, if not 0, is the string which the user specified + as the assembler symbol name. + TOP_LEVEL is nonzero if this is a file-scope variable. + This is never called for PARM_DECLs. */ +void +bc_make_decl_rtl (decl, asmspec, top_level) + tree decl; + char *asmspec; + int top_level; +{ + register char *name = TREE_STRING_POINTER (DECL_ASSEMBLER_NAME (decl)); + + if (DECL_RTL (decl) == 0) + { + /* Print an error message for register variables. */ + if (DECL_REGISTER (decl) && TREE_CODE (decl) == FUNCTION_DECL) + error ("function declared `register'"); + else if (DECL_REGISTER (decl)) + error ("global register variables not supported in the interpreter"); + + /* Handle ordinary static variables and functions. */ + if (DECL_RTL (decl) == 0) + { + /* Can't use just the variable's own name for a variable + whose scope is less than the whole file. + Concatenate a distinguishing number. */ + if (!top_level && !DECL_EXTERNAL (decl) && asmspec == 0) + { + char *label; + + ASM_FORMAT_PRIVATE_NAME (label, name, var_labelno); + name = obstack_copy0 (saveable_obstack, label, strlen (label)); + var_labelno++; + } + + DECL_RTL (decl) = bc_gen_rtx (name, 0, (struct bc_label *) 0); + } + } +} + /* Given NAME, a putative register name, discard any customary prefixes. 
*/ static char * @@ -301,7 +365,15 @@ make_decl_rtl (decl, asmspec, top_level) int top_level; { register char *name; - int reg_number = decode_reg_name (asmspec); + int reg_number; + + if (output_bytecode) + { + bc_make_decl_rtl (decl, asmspec, top_level); + return; + } + + reg_number = decode_reg_name (asmspec); if (DECL_ASSEMBLER_NAME (decl) != NULL_TREE) name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); @@ -465,6 +537,12 @@ void assemble_asm (string) tree string; { + if (output_bytecode) + { + error ("asm statements not allowed in interpreter"); + return; + } + app_enable (); if (TREE_CODE (string) == ADDR_EXPR) @@ -576,7 +654,12 @@ assemble_start_function (decl, fnname) /* Tell assembler to move to target machine's alignment for functions. */ align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT); if (align > 0) - ASM_OUTPUT_ALIGN (asm_out_file, align); + { + if (output_bytecode) + BC_OUTPUT_ALIGN (asm_out_file, align); + else + ASM_OUTPUT_ALIGN (asm_out_file, align); + } #ifdef ASM_OUTPUT_FUNCTION_PREFIX ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname); @@ -600,7 +683,10 @@ assemble_start_function (decl, fnname) { if (!first_global_object_name) STRIP_NAME_ENCODING (first_global_object_name, fnname); - ASM_GLOBALIZE_LABEL (asm_out_file, fnname); + if (output_bytecode) + BC_GLOBALIZE_LABEL (asm_out_file, fnname); + else + ASM_GLOBALIZE_LABEL (asm_out_file, fnname); } /* Do any machine/system dependent processing of the function name */ @@ -608,7 +694,10 @@ assemble_start_function (decl, fnname) ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl); #else /* Standard thing is just output label for the function. 
*/ - ASM_OUTPUT_LABEL (asm_out_file, fnname); + if (output_bytecode) + BC_OUTPUT_LABEL (asm_out_file, fnname); + else + ASM_OUTPUT_LABEL (asm_out_file, fnname); #endif /* ASM_DECLARE_FUNCTION_NAME */ } @@ -631,6 +720,12 @@ void assemble_zeros (size) int size; { + if (output_bytecode) + { + bc_emit_const_skip (size); + return; + } + #ifdef ASM_NO_SKIP_IN_TEXT /* The `space' pseudo in the text section outputs nop insns rather than 0s, so we must output 0s explicitly in the text section. */ @@ -664,7 +759,12 @@ assemble_zeros (size) else #endif if (size > 0) - ASM_OUTPUT_SKIP (asm_out_file, size); + { + if (output_bytecode) + BC_OUTPUT_SKIP (asm_out_file, size); + else + ASM_OUTPUT_SKIP (asm_out_file, size); + } } /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */ @@ -688,6 +788,12 @@ assemble_string (p, size) int pos = 0; int maximum = 2000; + if (output_bytecode) + { + bc_emit (p, size); + return; + } + /* If the string is very long, split it up. */ while (pos < size) @@ -696,7 +802,10 @@ assemble_string (p, size) if (thissize > maximum) thissize = maximum; - ASM_OUTPUT_ASCII (asm_out_file, p, thissize); + if (output_bytecode) + BC_OUTPUT_ASCII (asm_out_file, p, thissize); + else + ASM_OUTPUT_ASCII (asm_out_file, p, thissize); pos += thissize; p += thissize; @@ -725,6 +834,9 @@ assemble_variable (decl, top_level, at_end, dont_output_data) int reloc = 0; enum in_section saved_in_section; + if (output_bytecode) + return; + if (GET_CODE (DECL_RTL (decl)) == REG) { /* Do output symbol info for global register variables, but do nothing @@ -734,19 +846,22 @@ assemble_variable (decl, top_level, at_end, dont_output_data) return; TREE_ASM_WRITTEN (decl) = 1; + if (!output_bytecode) + { #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) - /* File-scope global variables are output here. */ - if ((write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG) - && top_level) - dbxout_symbol (decl, 0); + /* File-scope global variables are output here. 
*/ + if ((write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG) + && top_level) + dbxout_symbol (decl, 0); #endif #ifdef SDB_DEBUGGING_INFO - if (write_symbols == SDB_DEBUG && top_level - /* Leave initialized global vars for end of compilation; - see comment in compile_file. */ - && (TREE_PUBLIC (decl) == 0 || DECL_INITIAL (decl) == 0)) - sdbout_symbol (decl, 0); + if (write_symbols == SDB_DEBUG && top_level + /* Leave initialized global vars for end of compilation; + see comment in compile_file. */ + && (TREE_PUBLIC (decl) == 0 || DECL_INITIAL (decl) == 0)) + sdbout_symbol (decl, 0); #endif + } /* Don't output any DWARF debugging information for variables here. In the case of local variables, the information for them is output @@ -880,12 +995,17 @@ assemble_variable (decl, top_level, at_end, dont_output_data) ASM_OUTPUT_SHARED_COMMON (asm_out_file, name, size, rounded); else #endif + if (output_bytecode) + BC_OUTPUT_COMMON (asm_out_file, name, size, rounded); + else + { #ifdef ASM_OUTPUT_ALIGNED_COMMON - ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size, - DECL_ALIGN (decl)); + ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size, + DECL_ALIGN (decl)); #else - ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded); + ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded); #endif + } } else { @@ -894,12 +1014,17 @@ assemble_variable (decl, top_level, at_end, dont_output_data) ASM_OUTPUT_SHARED_LOCAL (asm_out_file, name, size, rounded); else #endif + if (output_bytecode) + BC_OUTPUT_LOCAL (asm_out_file, name, size, rounded); + else + { #ifdef ASM_OUTPUT_ALIGNED_LOCAL - ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, - DECL_ALIGN (decl)); + ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, + DECL_ALIGN (decl)); #else - ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); + ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); #endif + } } goto finish; } @@ -1017,14 +1142,22 @@ assemble_variable (decl, top_level, at_end, dont_output_data) DECL_ALIGN (decl) 
= align; if (align > BITS_PER_UNIT) - ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + { + if (output_bytecode) + BC_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + else + ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + } /* Do any machine/system dependent processing of the object. */ #ifdef ASM_DECLARE_OBJECT_NAME ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl); #else /* Standard thing is just output label for the object. */ - ASM_OUTPUT_LABEL (asm_out_file, name); + if (output_bytecode) + BC_OUTPUT_LABEL (asm_out_file, name); + else + ASM_OUTPUT_LABEL (asm_out_file, name); #endif /* ASM_DECLARE_OBJECT_NAME */ if (!dont_output_data) @@ -1110,6 +1243,55 @@ contains_pointers_p (type) } } +/* Output text storage for constructor CONSTR. Returns rtx of + storage. */ + +rtx +bc_output_constructor (constr) + tree constr; +{ + int i; + + /* Must always be a literal; non-literal constructors are handled + differently. */ + + if (!TREE_CONSTANT (constr)) + abort (); + + /* Always const */ + text_section (); + + /* Align */ + for (i = 0; TYPE_ALIGN (constr) >= BITS_PER_UNIT << (i + 1); i++); + if (i > 0) + BC_OUTPUT_ALIGN (asm_out_file, i); + + /* Output data */ + output_constant (constr, int_size_in_bytes (TREE_TYPE (constr))); +} + + +/* Create storage for constructor CONSTR. */ + +void +bc_output_data_constructor (constr) + tree constr; +{ + int i; + + /* Put in data section */ + data_section (); + + /* Align */ + for (i = 0; TYPE_ALIGN (constr) >= BITS_PER_UNIT << (i + 1); i++); + if (i > 0) + BC_OUTPUT_ALIGN (asm_out_file, i); + + /* The constructor is filled in at runtime. */ + BC_OUTPUT_SKIP (asm_out_file, int_size_in_bytes (TREE_TYPE (constr))); +} + + /* Output something to declare an external symbol to the assembler. (Most assemblers don't need this, so we normally output nothing.) Do nothing if DECL is not external. 
*/ @@ -1118,6 +1300,9 @@ void assemble_external (decl) tree decl; { + if (output_bytecode) + return; + #ifdef ASM_OUTPUT_EXTERNAL if (TREE_CODE_CLASS (TREE_CODE (decl)) == 'd' && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl)) @@ -1142,11 +1327,14 @@ assemble_external_libcall (fun) rtx fun; { #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL - /* Declare library function name external when first used, if nec. */ - if (! SYMBOL_REF_USED (fun)) + if (!output_bytecode) { - SYMBOL_REF_USED (fun) = 1; - ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun); + /* Declare library function name external when first used, if nec. */ + if (! SYMBOL_REF_USED (fun)) + { + SYMBOL_REF_USED (fun) = 1; + ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun); + } } #endif } @@ -1166,7 +1354,10 @@ void assemble_label (name) char *name; { - ASM_OUTPUT_LABEL (asm_out_file, name); + if (output_bytecode) + BC_OUTPUT_LABEL (asm_out_file, name); + else + ASM_OUTPUT_LABEL (asm_out_file, name); } /* Output to FILE a reference to the assembler name of a C-level name NAME. 
@@ -1181,9 +1372,19 @@ assemble_name (file, name) char *name; { if (name[0] == '*') - fputs (&name[1], file); + { + if (output_bytecode) + bc_emit_labelref (name); + else + fputs (&name[1], file); + } else - ASM_OUTPUT_LABELREF (file, name); + { + if (output_bytecode) + BC_OUTPUT_LABELREF (file, name); + else + ASM_OUTPUT_LABELREF (file, name); + } } /* Allocate SIZE bytes writable static space with a gensym name @@ -1214,12 +1415,21 @@ assemble_static_space (size) strlen (name) + 2); strcpy (namestring, name); - x = gen_rtx (SYMBOL_REF, Pmode, namestring); + if (output_bytecode) + x = bc_gen_rtx (namestring, 0, (struct bc_label *) 0); + else + x = gen_rtx (SYMBOL_REF, Pmode, namestring); + + if (output_bytecode) + BC_OUTPUT_LOCAL (asm_out_file, name, size, rounded); + else + { #ifdef ASM_OUTPUT_ALIGNED_LOCAL - ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT); + ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT); #else - ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); + ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); #endif + } return x; } @@ -1234,6 +1444,10 @@ assemble_trampoline_template () char *name; int align; + /* Shouldn't get here */ + if (output_bytecode) + abort (); + /* By default, put trampoline templates in read-only data section. 
*/ #ifdef TRAMPOLINE_SECTION @@ -1683,9 +1897,13 @@ decode_addr_const (exp, value) break; case LABEL_DECL: - x = gen_rtx (MEM, FUNCTION_MODE, - gen_rtx (LABEL_REF, VOIDmode, - label_rtx (TREE_OPERAND (exp, 0)))); + if (output_bytecode) + /* FIXME: this may not be correct, check it */ + x = bc_gen_rtx (TREE_STRING_POINTER (target), 0, (struct bc_label *) 0); + else + x = gen_rtx (MEM, FUNCTION_MODE, + gen_rtx (LABEL_REF, VOIDmode, + label_rtx (TREE_OPERAND (exp, 0)))); break; case REAL_CST: @@ -1699,9 +1917,12 @@ decode_addr_const (exp, value) abort (); } - if (GET_CODE (x) != MEM) - abort (); - x = XEXP (x, 0); + if (!output_bytecode) + { + if (GET_CODE (x) != MEM) + abort (); + x = XEXP (x, 0); + } value->base = x; value->offset = offset; @@ -2171,47 +2392,57 @@ output_constant_def (exp) to see if any of them describes EXP. If yes, the descriptor records the label number already assigned. */ - hash = const_hash (exp) % MAX_HASH_TABLE; - - for (desc = const_hash_table[hash]; desc; desc = desc->next) - if (compare_constant (exp, desc)) - { - found = desc->label; - break; - } - - if (found == 0) + if (!output_bytecode) { - /* No constant equal to EXP is known to have been output. - Make a constant descriptor to enter EXP in the hash table. - Assign the label number and record it in the descriptor for - future calls to this function to find. */ - - /* Create a string containing the label name, in LABEL. */ - ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno); - - desc = record_constant (exp); - desc->next = const_hash_table[hash]; - desc->label - = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label)); - const_hash_table[hash] = desc; + hash = const_hash (exp) % MAX_HASH_TABLE; + + for (desc = const_hash_table[hash]; desc; desc = desc->next) + if (compare_constant (exp, desc)) + { + found = desc->label; + break; + } + + if (found == 0) + { + /* No constant equal to EXP is known to have been output. 
+ Make a constant descriptor to enter EXP in the hash table. + Assign the label number and record it in the descriptor for + future calls to this function to find. */ + + /* Create a string containing the label name, in LABEL. */ + ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno); + + desc = record_constant (exp); + desc->next = const_hash_table[hash]; + desc->label + = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label)); + const_hash_table[hash] = desc; + } + else + { + /* Create a string containing the label name, in LABEL. */ + ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno); + } } - + /* We have a symbol name; construct the SYMBOL_REF and the MEM. */ push_obstacks_nochange (); if (TREE_PERMANENT (exp)) end_temporary_allocation (); - def = gen_rtx (SYMBOL_REF, Pmode, desc->label); - - TREE_CST_RTL (exp) - = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)), def); - RTX_UNCHANGING_P (TREE_CST_RTL (exp)) = 1; - if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE - || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE) - MEM_IN_STRUCT_P (TREE_CST_RTL (exp)) = 1; - + if (!output_bytecode) + { + def = gen_rtx (SYMBOL_REF, Pmode, desc->label); + + TREE_CST_RTL (exp) + = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)), def); + RTX_UNCHANGING_P (TREE_CST_RTL (exp)) = 1; + if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE + || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE) + MEM_IN_STRUCT_P (TREE_CST_RTL (exp)) = 1; + } pop_obstacks (); /* Optionally set flags or add text to the name to record information @@ -2283,7 +2514,12 @@ output_constant_def_contents (exp, reloc, labelno) #endif if (align > BITS_PER_UNIT) - ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + { + if (!output_bytecode) + ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + else + BC_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); + } /* Output the label itself. 
*/ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LC", labelno); @@ -2891,6 +3127,22 @@ output_addressed_constants (exp) } return reloc; } + + +/* Output assembler for byte constant */ +void +output_byte_asm (byte) + int byte; +{ + if (output_bytecode) + bc_emit_const ((char *) &byte, sizeof (char)); +#ifdef ASM_OUTPUT_BYTE + else + { + ASM_OUTPUT_BYTE (asm_out_file, byte); + } +#endif +} /* Output assembler code for constant EXP to FILE, with no label. This includes the pseudo-op such as ".int" or ".byte", and a newline. @@ -2925,7 +3177,10 @@ output_constant (exp, size) This means to fill the space with zeros. */ if (TREE_CODE (exp) == CONSTRUCTOR && CONSTRUCTOR_ELTS (exp) == 0) { - assemble_zeros (size); + if (output_bytecode) + bc_emit_const_skip (size); + else + assemble_zeros (size); return; } @@ -3005,6 +3260,101 @@ output_constant (exp, size) if (size > 0) assemble_zeros (size); } + + +/* Bytecode specific code to output assembler for integer. */ +void +bc_assemble_integer (exp, size) + tree exp; + int size; +{ + tree const_part; + tree addr_part; + tree tmp; + + /* FIXME: is this fold() business going to be as good as the + expand_expr() using EXPAND_SUM above in the RTL case? I + hate RMS. + FIXME: Copied as is from BC-GCC1; may need work. Don't hate. 
-bson */ + + exp = fold (exp); + + while (TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR) + exp = TREE_OPERAND (exp, 0); + if (TREE_CODE (exp) == INTEGER_CST) + { + const_part = exp; + addr_part = 0; + } + else if (TREE_CODE (exp) == PLUS_EXPR) + { + const_part = TREE_OPERAND (exp, 0); + while (TREE_CODE (const_part) == NOP_EXPR + || TREE_CODE (const_part) == CONVERT_EXPR) + const_part = TREE_OPERAND (const_part, 0); + addr_part = TREE_OPERAND (exp, 1); + while (TREE_CODE (addr_part) == NOP_EXPR + || TREE_CODE (addr_part) == CONVERT_EXPR) + addr_part = TREE_OPERAND (addr_part, 0); + if (TREE_CODE (const_part) != INTEGER_CST) + tmp = const_part, const_part = addr_part, addr_part = tmp; + if (TREE_CODE (const_part) != INTEGER_CST + || TREE_CODE (addr_part) != ADDR_EXPR) + abort (); /* FIXME: we really haven't considered + all the possible cases here. */ + } + else if (TREE_CODE (exp) == ADDR_EXPR) + { + const_part = integer_zero_node; + addr_part = exp; + } + else + abort (); /* FIXME: ditto previous. */ + + if (addr_part == 0) + { + if (size == 1) + { + char c = TREE_INT_CST_LOW (const_part); + bc_emit (&c, 1); + size -= 1; + } + else if (size == 2) + { + short s = TREE_INT_CST_LOW (const_part); + bc_emit ((char *) &s, 2); + size -= 2; + } + else if (size == 4) + { + int i = TREE_INT_CST_LOW (const_part); + bc_emit ((char *) &i, 4); + size -= 4; + } + else if (size == 8) + { +#if WORDS_BIG_ENDIAN + int i = TREE_INT_CST_HIGH (const_part); + bc_emit ((char *) &i, 4); + i = TREE_INT_CST_LOW (const_part); + bc_emit ((char *) &i, 4); +#else + int i = TREE_INT_CST_LOW (const_part); + bc_emit ((char *) &i, 4); + i = TREE_INT_CST_HIGH (const_part); + bc_emit ((char *) &i, 4); +#endif + size -= 8; + } + } + else + if (size == 4 + && TREE_CODE (TREE_OPERAND (addr_part, 0)) == VAR_DECL) + bc_emit_labelref (DECL_ASSEMBLER_NAME (TREE_OPERAND (addr_part, 0)), + TREE_INT_CST_LOW (const_part)); + else + abort (); /* FIXME: there may be more cases. 
*/ +} /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants). @@ -3083,7 +3433,10 @@ output_constructor (exp, size) if each element has the proper size. */ if ((field != 0 || index != 0) && bitpos != total_bytes) { - assemble_zeros (bitpos - total_bytes); + if (!output_bytecode) + assemble_zeros (bitpos - total_bytes); + else + bc_emit_const_skip (bitpos - total_bytes); total_bytes = bitpos; } @@ -3254,3 +3607,42 @@ output_constructor (exp, size) if (total_bytes < size) assemble_zeros (size - total_bytes); } + + +/* Output asm to handle ``#pragma weak'' */ +void +handle_pragma_weak (what, asm_out_file, name, value) + enum pragma_state what; + FILE *asm_out_file; + char *name, *value; +{ + if (what == ps_name || what == ps_value) + { + fprintf (asm_out_file, "\t%s\t", WEAK_ASM_OP); + + if (output_bytecode) + BC_OUTPUT_LABELREF (asm_out_file, name); + else + ASM_OUTPUT_LABELREF (asm_out_file, name); + + fputc ('\n', asm_out_file); + if (what == ps_value) + { + fprintf (asm_out_file, "\t%s\t", SET_ASM_OP); + if (output_bytecode) + BC_OUTPUT_LABELREF (asm_out_file, name); + else + ASM_OUTPUT_LABELREF (asm_out_file, name); + + fputc (',', asm_out_file); + if (output_bytecode) + BC_OUTPUT_LABELREF (asm_out_file, value); + else + ASM_OUTPUT_LABELREF (asm_out_file, value); + + fputc ('\n', asm_out_file); + } + } + else if (! (what == ps_done || what == ps_start)) + warning ("malformed `#pragma weak'"); +} -- 2.30.2