X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=gcc%2Ffunction.c;h=7d2d7e4abcaa4ee11222783391bfb6e0c554e7cd;hb=3a37ecec8934dc378bfce06d9ea2325a98159f43;hp=a0a3bc7526bcdc1048c64cf5fce1939b70c27c9d;hpb=558d2559432daceaf2bdb0a627dd02df1090d0e4;p=gcc.git

diff --git a/gcc/function.c b/gcc/function.c
index a0a3bc7526b..7d2d7e4abca 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -1,5 +1,5 @@
 /* Expands front end tree to back end RTL for GCC.
-   Copyright (C) 1987-2014 Free Software Foundation, Inc.
+   Copyright (C) 1987-2015 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -36,22 +36,43 @@ along with GCC; see the file COPYING3.  If not see
 #include "coretypes.h"
 #include "tm.h"
 #include "rtl-error.h"
+#include "hash-set.h"
+#include "machmode.h"
+#include "vec.h"
+#include "double-int.h"
+#include "input.h"
+#include "alias.h"
+#include "symtab.h"
+#include "wide-int.h"
+#include "inchash.h"
 #include "tree.h"
+#include "fold-const.h"
 #include "stor-layout.h"
 #include "varasm.h"
 #include "stringpool.h"
 #include "flags.h"
 #include "except.h"
+#include "hashtab.h"
+#include "hard-reg-set.h"
 #include "function.h"
+#include "rtl.h"
+#include "statistics.h"
+#include "real.h"
+#include "fixed-value.h"
+#include "insn-config.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "emit-rtl.h"
+#include "stmt.h"
 #include "expr.h"
+#include "insn-codes.h"
 #include "optabs.h"
 #include "libfuncs.h"
 #include "regs.h"
-#include "hard-reg-set.h"
-#include "insn-config.h"
 #include "recog.h"
 #include "output.h"
-#include "hashtab.h"
 #include "tm_p.h"
 #include "langhooks.h"
 #include "target.h"
@@ -60,11 +81,21 @@ along with GCC; see the file COPYING3.  If not see
 #include "gimplify.h"
 #include "tree-pass.h"
 #include "predict.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "cfgrtl.h"
+#include "cfganal.h"
+#include "cfgbuild.h"
+#include "cfgcleanup.h"
+#include "basic-block.h"
 #include "df.h"
 #include "params.h"
 #include "bb-reorder.h"
 #include "shrink-wrap.h"
 #include "toplev.h"
+#include "rtl-iter.h"
+#include "tree-chkp.h"
+#include "rtl-chkp.h"
 
 /* So we can assign to cfun in this file.  */
 #undef cfun
@@ -101,28 +132,36 @@ struct machine_function * (*init_machine_status) (void);
 struct function *cfun = 0;
 
 /* These hashes record the prologue and epilogue insns.  */
-static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
-  htab_t prologue_insn_hash;
-static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
-  htab_t epilogue_insn_hash;
+
+struct insn_cache_hasher : ggc_cache_hasher<rtx>
+{
+  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
+  static bool equal (rtx a, rtx b) { return a == b; }
+};
+
+static GTY((cache))
+  hash_table<insn_cache_hasher> *prologue_insn_hash;
+static GTY((cache))
+  hash_table<insn_cache_hasher> *epilogue_insn_hash;
 
-htab_t types_used_by_vars_hash = NULL;
+hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
 vec<tree, va_gc> *types_used_by_cur_var_decl;
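The hunk above replaces the old void*-keyed htab_t tables with the type-safe
hash_table template: a small hasher struct supplies the hash and equality
functions as static members, and GTY((cache)) replaces the if_marked hook.
A minimal sketch of how such a table is created and queried, modelled on the
record_insns/contains changes further down in this patch (my_insn_hash,
remember_insn and remembered_p are hypothetical names, not part of the patch):

    /* Table keyed on insn pointer identity, per insn_cache_hasher above.
       The real tables are GC-registered; this sketch omits the GTY marker.  */
    static hash_table<insn_cache_hasher> *my_insn_hash;

    static void
    remember_insn (rtx_insn *insn)
    {
      /* Create lazily, so a non-null table also means a non-empty one.  */
      if (!my_insn_hash)
        my_insn_hash = hash_table<insn_cache_hasher>::create_ggc (17);
      rtx *slot = my_insn_hash->find_slot (insn, INSERT);
      *slot = insn;
    }

    static bool
    remembered_p (rtx insn)
    {
      return my_insn_hash && my_insn_hash->find (insn) != NULL;
    }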
 /* Forward declarations.  */
 
 static struct temp_slot *find_temp_slot_from_address (rtx);
 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
-static void pad_below (struct args_size *, enum machine_mode, tree);
-static void reorder_blocks_1 (rtx, tree, vec<tree> *);
+static void pad_below (struct args_size *, machine_mode, tree);
+static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
 static int all_blocks (tree, tree *);
 static tree *get_block_vector (tree, int *);
 extern tree debug_find_var_in_block_tree (tree, tree);
 /* We always define `record_insns' even if it's not used so that we
    can always export `prologue_epilogue_contains'.  */
-static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
-static bool contains (const_rtx, htab_t);
+static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
+     ATTRIBUTE_UNUSED;
+static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
 static void prepare_function_start (void);
 static void do_clobber_return_reg (rtx, void *);
 static void do_use_return_reg (rtx, void *);
@@ -229,7 +268,7 @@ frame_offset_overflow (HOST_WIDE_INT offset, tree func)
 /* Return stack slot alignment in bits for TYPE and MODE.  */
 
 static unsigned int
-get_stack_local_alignment (tree type, enum machine_mode mode)
+get_stack_local_alignment (tree type, machine_mode mode)
 {
   unsigned int alignment;
 
@@ -333,7 +372,7 @@ add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
    We do not round to stack_boundary here.  */
 
 rtx
-assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
+assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
 		      int align, int kind)
 {
   rtx x, addr;
@@ -492,7 +531,7 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
 /* Wrap up assign_stack_local_1 with last parameter as false.  */
 
 rtx
-assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
+assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
 {
   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
 }
@@ -539,18 +578,24 @@ struct GTY(()) temp_slot {
   HOST_WIDE_INT full_size;
 };
 
-/* A table of addresses that represent a stack slot.  The table is a mapping
-   from address RTXen to a temp slot.  */
-static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
-static size_t n_temp_slots_in_use;
-
-/* Entry for the above hash table.  */
-struct GTY(()) temp_slot_address_entry {
+/* Entry for the below hash table.  */
+struct GTY((for_user)) temp_slot_address_entry {
   hashval_t hash;
   rtx address;
   struct temp_slot *temp_slot;
 };
 
+struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
+{
+  static hashval_t hash (temp_slot_address_entry *);
+  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
+};
+
+/* A table of addresses that represent a stack slot.  The table is a mapping
+   from address RTXen to a temp slot.  */
+static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
+static size_t n_temp_slots_in_use;
+
 /* Removes temporary slot TEMP from LIST.  */
 
 static void
@@ -633,21 +678,17 @@ temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
 }
 
 /* Return the hash value for an address -> temp slot mapping.  */
-static hashval_t
-temp_slot_address_hash (const void *p)
+hashval_t
+temp_address_hasher::hash (temp_slot_address_entry *t)
 {
-  const struct temp_slot_address_entry *t;
-  t = (const struct temp_slot_address_entry *) p;
   return t->hash;
 }
 
 /* Compare two address -> temp slot mapping entries.  */
-static int
-temp_slot_address_eq (const void *p1, const void *p2)
+bool
+temp_address_hasher::equal (temp_slot_address_entry *t1,
+			    temp_slot_address_entry *t2)
 {
-  const struct temp_slot_address_entry *t1, *t2;
-  t1 = (const struct temp_slot_address_entry *) p1;
-  t2 = (const struct temp_slot_address_entry *) p2;
   return exp_equiv_p (t1->address, t2->address, 0, true);
 }
 
@@ -655,24 +696,21 @@ static void
 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
 {
-  void **slot;
   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
   t->address = address;
   t->temp_slot = temp_slot;
   t->hash = temp_slot_address_compute_hash (t);
-  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
-  *slot = t;
+  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
 }
 
 /* Remove an address -> temp slot mapping entry if the temp slot is
    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
-static int
-remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
+int
+remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
 {
-  const struct temp_slot_address_entry *t;
-  t = (const struct temp_slot_address_entry *) *slot;
+  const struct temp_slot_address_entry *t = *slot;
   if (! t->temp_slot->in_use)
-    htab_clear_slot (temp_slot_address_table, slot);
+    temp_slot_address_table->clear_slot (slot);
   return 1;
 }
 
@@ -682,11 +720,10 @@ remove_unused_temp_slot_addresses (void)
 {
   /* Use quicker clearing if there aren't any active temp slots.  */
   if (n_temp_slots_in_use)
-    htab_traverse (temp_slot_address_table,
-		   remove_unused_temp_slot_addresses_1,
-		   NULL);
+    temp_slot_address_table->traverse
+      <void *, remove_unused_temp_slot_addresses_1> (NULL);
   else
-    htab_empty (temp_slot_address_table);
+    temp_slot_address_table->empty ();
 }
 
 /* Find the temp slot corresponding to the object at address X.  */
@@ -702,8 +739,7 @@ find_temp_slot_from_address (rtx x)
   tmp.address = x;
   tmp.temp_slot = NULL;
   tmp.hash = temp_slot_address_compute_hash (&tmp);
-  t = (struct temp_slot_address_entry *)
-    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
+  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
   if (t)
     return t->temp_slot;
 
@@ -745,7 +781,7 @@ find_temp_slot_from_address (rtx x)
    TYPE is the type that will be used for the stack slot.  */
 
 rtx
-assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
+assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
 			    tree type)
 {
   unsigned int align;
@@ -906,7 +942,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
    reuse.  First two arguments are same as in preceding function.  */
 
 rtx
-assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
+assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
 {
   return assign_stack_temp_for_type (mode, size, NULL_TREE);
 }
@@ -925,7 +961,7 @@ assign_temp (tree type_or_decl, int memory_required,
 	     int dont_promote ATTRIBUTE_UNUSED)
 {
   tree type, decl;
-  enum machine_mode mode;
+  machine_mode mode;
 #ifdef PROMOTE_MODE
   int unsignedp;
 #endif
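The traverse call in remove_unused_temp_slot_addresses above passes its
callback as a template argument rather than as a runtime function pointer,
which lets the compiler inline it.  A sketch of the same idiom
(flush_stale_entry is a hypothetical callback standing in for
remove_unused_temp_slot_addresses_1):

    /* Callback shape: pointer to the slot plus user data; return nonzero
       to keep iterating.  The slot type is value_type *, here
       temp_slot_address_entry **.  */
    int
    flush_stale_entry (temp_slot_address_entry **slot, void *)
    {
      if (!(*slot)->temp_slot->in_use)
        temp_slot_address_table->clear_slot (slot);
      return 1;
    }

    /* Template arguments name the user-data type and the callback.  */
    temp_slot_address_table->traverse <void *, flush_stale_entry> (NULL);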
@@ -1194,12 +1230,9 @@ init_temp_slots (void)
 
   /* Set up the table to map addresses to temp slots.  */
   if (! temp_slot_address_table)
-    temp_slot_address_table = htab_create_ggc (32,
-					       temp_slot_address_hash,
-					       temp_slot_address_eq,
-					       NULL);
+    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
   else
-    htab_empty (temp_slot_address_table);
+    temp_slot_address_table->empty ();
 }
 
 /* Functions and data structures to keep track of the values hard regs
@@ -1243,7 +1276,7 @@ get_hard_reg_initial_reg (rtx reg)
    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
 
 rtx
-get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
+get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
 {
   struct initial_value_struct *ivs;
   rtx rv;
@@ -1280,7 +1313,7 @@ get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
    the associated pseudo if so, otherwise return NULL.  */
 
 rtx
-has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
+has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
 {
   struct initial_value_struct *ivs;
   int i;
@@ -1300,7 +1333,7 @@ emit_initial_value_sets (void)
 {
   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
   int i;
-  rtx seq;
+  rtx_insn *seq;
 
   if (ivs == 0)
     return 0;
@@ -1431,57 +1464,60 @@ instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
   return new_rtx;
 }
 
-/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
-   Instantiate any virtual registers present inside of *LOC.  The expression
-   is simplified, as much as possible, but is not to be considered "valid"
-   in any sense implied by the target.  If any change is made, set CHANGED
-   to true.  */
+/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
+   registers present inside of *LOC.  The expression is simplified,
+   as much as possible, but is not to be considered "valid" in any sense
+   implied by the target.  Return true if any change is made.  */
 
-static int
-instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
+static bool
+instantiate_virtual_regs_in_rtx (rtx *loc)
 {
-  HOST_WIDE_INT offset;
-  bool *changed = (bool *) data;
-  rtx x, new_rtx;
-
-  x = *loc;
-  if (x == 0)
-    return 0;
-
-  switch (GET_CODE (x))
+  if (!*loc)
+    return false;
+  bool changed = false;
+  subrtx_ptr_iterator::array_type array;
+  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
     {
-    case REG:
-      new_rtx = instantiate_new_reg (x, &offset);
-      if (new_rtx)
+      rtx *loc = *iter;
+      if (rtx x = *loc)
 	{
-	  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
-	  if (changed)
-	    *changed = true;
-	}
-      return -1;
+	  rtx new_rtx;
+	  HOST_WIDE_INT offset;
+	  switch (GET_CODE (x))
+	    {
+	    case REG:
+	      new_rtx = instantiate_new_reg (x, &offset);
+	      if (new_rtx)
+		{
+		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
+		  changed = true;
+		}
+	      iter.skip_subrtxes ();
+	      break;
 
-    case PLUS:
-      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
-      if (new_rtx)
-	{
-	  XEXP (x, 0) = new_rtx;
-	  *loc = plus_constant (GET_MODE (x), x, offset, true);
-	  if (changed)
-	    *changed = true;
-	  return -1;
-	}
+	    case PLUS:
+	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
+	      if (new_rtx)
+		{
+		  XEXP (x, 0) = new_rtx;
+		  *loc = plus_constant (GET_MODE (x), x, offset, true);
+		  changed = true;
+		  iter.skip_subrtxes ();
+		  break;
+		}
 
-      /* FIXME -- from old code */
-      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
-	 we can commute the PLUS and SUBREG because pointers into the
-	 frame are well-behaved.
*/ - break; + /* FIXME -- from old code */ + /* If we have (plus (subreg (virtual-reg)) (const_int)), we know + we can commute the PLUS and SUBREG because pointers into the + frame are well-behaved. */ + break; - default: - break; + default: + break; + } + } } - - return 0; + return changed; } /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X @@ -1497,12 +1533,13 @@ safe_insn_predicate (int code, int operand, rtx x) registers present inside of insn. The result will be a valid insn. */ static void -instantiate_virtual_regs_in_insn (rtx insn) +instantiate_virtual_regs_in_insn (rtx_insn *insn) { HOST_WIDE_INT offset; int insn_code, i; bool any_change = false; - rtx set, new_rtx, x, seq; + rtx set, new_rtx, x; + rtx_insn *seq; /* There are some special cases to be handled first. */ set = single_set (insn); @@ -1517,7 +1554,7 @@ instantiate_virtual_regs_in_insn (rtx insn) { start_sequence (); - for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL); + instantiate_virtual_regs_in_rtx (&SET_SRC (set)); x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set), gen_int_mode (-offset, GET_MODE (new_rtx))); x = force_operand (x, new_rtx); @@ -1620,10 +1657,8 @@ instantiate_virtual_regs_in_insn (rtx insn) case MEM: { rtx addr = XEXP (x, 0); - bool changed = false; - for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed); - if (!changed) + if (!instantiate_virtual_regs_in_rtx (&addr)) continue; start_sequence (); @@ -1789,7 +1824,7 @@ instantiate_decl_rtl (rtx x) || REGNO (addr) > LAST_VIRTUAL_REGISTER))) return; - for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL); + instantiate_virtual_regs_in_rtx (&XEXP (x, 0)); } /* Helper for instantiate_decls called via walk_tree: Process all decls @@ -1898,7 +1933,7 @@ instantiate_decls (tree fndecl) static unsigned int instantiate_virtual_regs (void) { - rtx insn; + rtx_insn *insn; /* Compute the offsets to use for this function. */ in_arg_offset = FIRST_PARM_OFFSET (current_function_decl); @@ -1926,20 +1961,18 @@ instantiate_virtual_regs (void) || GET_CODE (PATTERN (insn)) == ASM_INPUT) continue; else if (DEBUG_INSN_P (insn)) - for_each_rtx (&INSN_VAR_LOCATION (insn), - instantiate_virtual_regs_in_rtx, NULL); + instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn)); else instantiate_virtual_regs_in_insn (insn); - if (INSN_DELETED_P (insn)) + if (insn->deleted ()) continue; - for_each_rtx (®_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL); + instantiate_virtual_regs_in_rtx (®_NOTES (insn)); /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ if (CALL_P (insn)) - for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn), - instantiate_virtual_regs_in_rtx, NULL); + instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn)); } /* Instantiate the virtual registers in the DECLs for debugging purposes. */ @@ -2011,9 +2044,14 @@ aggregate_value_p (const_tree exp, const_tree fntype) case CALL_EXPR: { tree fndecl = get_callee_fndecl (fntype); - fntype = (fndecl - ? TREE_TYPE (fndecl) - : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)))); + if (fndecl) + fntype = TREE_TYPE (fndecl); + else if (CALL_EXPR_FN (fntype)) + fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))); + else + /* For internal functions, assume nothing needs to be + returned in memory. 
*/ + return 0; } break; case FUNCTION_DECL: @@ -2094,6 +2132,14 @@ use_register_for_decl (const_tree decl) if (TREE_ADDRESSABLE (decl)) return false; + /* Decl is implicitly addressible by bound stores and loads + if it is an aggregate holding bounds. */ + if (chkp_function_instrumented_p (current_function_decl) + && TREE_TYPE (decl) + && !BOUNDED_P (decl) + && chkp_type_has_pointer (TREE_TYPE (decl))) + return false; + /* Only register-like things go in registers. */ if (DECL_MODE (decl) == BLKmode) return false; @@ -2124,7 +2170,7 @@ use_register_for_decl (const_tree decl) /* When not optimizing, disregard register keyword for variables with types containing methods, otherwise the methods won't be callable from the debugger. */ - if (TYPE_METHODS (TREE_TYPE (decl))) + if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl)))) return false; break; default: @@ -2137,7 +2183,7 @@ use_register_for_decl (const_tree decl) /* Return true if TYPE should be passed by invisible reference. */ bool -pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode, +pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode, tree type, bool named_arg) { if (type) @@ -2168,7 +2214,7 @@ pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode, copied instead of caller copied. */ bool -reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode, +reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode, tree type, bool named_arg) { if (type && TREE_ADDRESSABLE (type)) @@ -2190,8 +2236,8 @@ struct assign_parm_data_all struct args_size stack_args_size; tree function_result_decl; tree orig_fnargs; - rtx first_conversion_insn; - rtx last_conversion_insn; + rtx_insn *first_conversion_insn; + rtx_insn *last_conversion_insn; HOST_WIDE_INT pretend_args_size; HOST_WIDE_INT extra_pretend_bytes; int reg_parm_stack_space; @@ -2203,9 +2249,9 @@ struct assign_parm_data_one tree passed_type; rtx entry_parm; rtx stack_parm; - enum machine_mode nominal_mode; - enum machine_mode passed_mode; - enum machine_mode promoted_mode; + machine_mode nominal_mode; + machine_mode passed_mode; + machine_mode promoted_mode; struct locate_and_pad_arg_data locate; int partial; BOOL_BITFIELD named_arg : 1; @@ -2214,6 +2260,15 @@ struct assign_parm_data_one BOOL_BITFIELD loaded_in_reg : 1; }; +struct bounds_parm_data +{ + assign_parm_data_one parm_data; + tree bounds_parm; + tree ptr_parm; + rtx ptr_entry; + int bound_no; +}; + /* A subroutine of assign_parms. Initialize ALL. */ static void @@ -2325,6 +2380,23 @@ assign_parms_augmented_arg_list (struct assign_parm_data_all *all) fnargs.safe_insert (0, decl); all->function_result_decl = decl; + + /* If function is instrumented then bounds of the + passed structure address is the second argument. */ + if (chkp_function_instrumented_p (fndecl)) + { + decl = build_decl (DECL_SOURCE_LOCATION (fndecl), + PARM_DECL, get_identifier (".result_bnd"), + pointer_bounds_type_node); + DECL_ARG_TYPE (decl) = pointer_bounds_type_node; + DECL_ARTIFICIAL (decl) = 1; + DECL_NAMELESS (decl) = 1; + TREE_CONSTANT (decl) = 1; + + DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs); + DECL_CHAIN (all->orig_fnargs) = decl; + fnargs.safe_insert (1, decl); + } } /* If the target wants to split complex arguments into scalars, do so. 
*/ @@ -2343,7 +2415,7 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, struct assign_parm_data_one *data) { tree nominal_type, passed_type; - enum machine_mode nominal_mode, passed_mode, promoted_mode; + machine_mode nominal_mode, passed_mode, promoted_mode; int unsignedp; memset (data, 0, sizeof (*data)); @@ -2465,7 +2537,7 @@ assign_parm_find_entry_rtl (struct assign_parm_data_all *all, it came in a register so that REG_PARM_STACK_SPACE isn't skipped. In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 as it was the previous time. */ - in_regs = entry_parm != 0; + in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type); #ifdef STACK_PARMS_IN_REG_PARM_AREA in_regs = true; #endif @@ -2554,8 +2626,12 @@ static bool assign_parm_is_stack_parm (struct assign_parm_data_all *all, struct assign_parm_data_one *data) { + /* Bounds are never passed on the stack to keep compatibility + with not instrumented code. */ + if (POINTER_BOUNDS_TYPE_P (data->passed_type)) + return false; /* Trivially true if we've no incoming register. */ - if (data->entry_parm == NULL) + else if (data->entry_parm == NULL) ; /* Also true if we're partially in registers and partially not, since we've arranged to drop the entire argument on the stack. */ @@ -2662,13 +2738,14 @@ assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data) /* Handle calls that pass values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. */ if (GET_CODE (entry_parm) == PARALLEL) - emit_group_store (validize_mem (stack_parm), entry_parm, + emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm, data->passed_type, int_size_in_bytes (data->passed_type)); else { gcc_assert (data->partial % UNITS_PER_WORD == 0); - move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm), + move_block_from_reg (REGNO (entry_parm), + validize_mem (copy_rtx (stack_parm)), data->partial / UNITS_PER_WORD); } @@ -2837,7 +2914,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all, else gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD)); - mem = validize_mem (stack_parm); + mem = validize_mem (copy_rtx (stack_parm)); /* Handle values in multiple non-contiguous locations. */ if (GET_CODE (entry_parm) == PARALLEL) @@ -2857,7 +2934,7 @@ assign_parm_setup_block (struct assign_parm_data_all *all, that mode's store operation. */ else if (size <= UNITS_PER_WORD) { - enum machine_mode mode + machine_mode mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0); if (mode != BLKmode @@ -2938,7 +3015,7 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, { rtx parmreg, validated_mem; rtx equiv_stack_parm; - enum machine_mode promoted_nominal_mode; + machine_mode promoted_nominal_mode; int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm)); bool did_conversion = false; bool need_conversion, moved; @@ -2972,7 +3049,7 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, assign_parm_find_data_types and expand_expr_real_1. */ equiv_stack_parm = data->stack_parm; - validated_mem = validize_mem (data->entry_parm); + validated_mem = validize_mem (copy_rtx (data->entry_parm)); need_conversion = (data->nominal_mode != data->passed_mode || promoted_nominal_mode != data->promoted_mode); @@ -3017,7 +3094,8 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, && insn_operand_matches (icode, 1, op1)) { enum rtx_code code = unsignedp ? 
ZERO_EXTEND : SIGN_EXTEND; - rtx insn, insns, t = op1; + rtx_insn *insn, *insns; + rtx t = op1; HARD_REG_SET hardregs; start_sequence (); @@ -3036,9 +3114,9 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, } else t = op1; - insn = gen_extend_insn (op0, t, promoted_nominal_mode, - data->passed_mode, unsignedp); - emit_insn (insn); + rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode, + data->passed_mode, unsignedp); + emit_insn (pat); insns = get_insns (); moved = true; @@ -3093,7 +3171,7 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, /* The argument is already sign/zero extended, so note it into the subreg. */ SUBREG_PROMOTED_VAR_P (tempreg) = 1; - SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp); + SUBREG_PROMOTED_SET (tempreg, unsignedp); } /* TREE_USED gets set erroneously during expand_assignment. */ @@ -3171,13 +3249,14 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, && reg_mentioned_p (virtual_incoming_args_rtx, XEXP (data->stack_parm, 0))) { - rtx linsn = get_last_insn (); - rtx sinsn, set; + rtx_insn *linsn = get_last_insn (); + rtx_insn *sinsn; + rtx set; /* Mark complex types separately. */ if (GET_CODE (parmreg) == CONCAT) { - enum machine_mode submode + machine_mode submode = GET_MODE_INNER (GET_MODE (parmreg)); int regnor = REGNO (XEXP (parmreg, 0)); int regnoi = REGNO (XEXP (parmreg, 1)); @@ -3228,7 +3307,7 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, /* Conversion is required. */ rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); - emit_move_insn (tempreg, validize_mem (data->entry_parm)); + emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm))); push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); to_conversion = true; @@ -3265,8 +3344,8 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, set_mem_attributes (data->stack_parm, parm, 1); } - dest = validize_mem (data->stack_parm); - src = validize_mem (data->entry_parm); + dest = validize_mem (copy_rtx (data->stack_parm)); + src = validize_mem (copy_rtx (data->entry_parm)); if (MEM_P (src)) { @@ -3311,7 +3390,7 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, && targetm.calls.split_complex_arg (TREE_TYPE (parm))) { rtx tmp, real, imag; - enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm)); + machine_mode inner = GET_MODE_INNER (DECL_MODE (parm)); real = DECL_RTL (fnargs[i]); imag = DECL_RTL (fnargs[i + 1]); @@ -3361,6 +3440,123 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, } } +/* Load bounds of PARM from bounds table. */ +static void +assign_parm_load_bounds (struct assign_parm_data_one *data, + tree parm, + rtx entry, + unsigned bound_no) +{ + bitmap_iterator bi; + unsigned i, offs = 0; + int bnd_no = -1; + rtx slot = NULL, ptr = NULL; + + if (parm) + { + bitmap slots; + bitmap_obstack_initialize (NULL); + slots = BITMAP_ALLOC (NULL); + chkp_find_bound_slots (TREE_TYPE (parm), slots); + EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi) + { + if (bound_no) + bound_no--; + else + { + bnd_no = i; + break; + } + } + BITMAP_FREE (slots); + bitmap_obstack_release (NULL); + } + + /* We may have bounds not associated with any pointer. */ + if (bnd_no != -1) + offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT; + + /* Find associated pointer. */ + if (bnd_no == -1) + { + /* If bounds are not associated with any bounds, + then it is passed in a register or special slot. 
*/
+      gcc_assert (data->entry_parm);
+      ptr = const0_rtx;
+    }
+  else if (MEM_P (entry))
+    slot = adjust_address (entry, Pmode, offs);
+  else if (REG_P (entry))
+    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
+  else if (GET_CODE (entry) == PARALLEL)
+    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
+  else
+    gcc_unreachable ();
+  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
+							data->entry_parm);
+}
+
+/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
+
+static void
+assign_bounds (vec<bounds_parm_data> &bndargs,
+	       struct assign_parm_data_all &all)
+{
+  unsigned i, pass, handled = 0;
+  bounds_parm_data *pbdata;
+
+  if (!bndargs.exists ())
+    return;
+
+  /* We make a few passes to store input bounds.  First we handle bounds
+     passed in registers.  After that we load bounds passed in special
+     slots.  Finally we load bounds from the Bounds Table.  */
+  for (pass = 0; pass < 3; pass++)
+    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
+      {
+	/* Pass 0 => regs only.  */
+	if (pass == 0
+	    && (!pbdata->parm_data.entry_parm
+		|| GET_CODE (pbdata->parm_data.entry_parm) != REG))
+	  continue;
+	/* Pass 1 => slots only.  */
+	else if (pass == 1
+		 && (!pbdata->parm_data.entry_parm
+		     || GET_CODE (pbdata->parm_data.entry_parm) == REG))
+	  continue;
+	/* Pass 2 => BT only.  */
+	else if (pass == 2
+		 && pbdata->parm_data.entry_parm)
+	  continue;
+
+	if (!pbdata->parm_data.entry_parm
+	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
+	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
+				   pbdata->ptr_entry, pbdata->bound_no);
+
+	set_decl_incoming_rtl (pbdata->bounds_parm,
+			       pbdata->parm_data.entry_parm, false);
+
+	if (assign_parm_setup_block_p (&pbdata->parm_data))
+	  assign_parm_setup_block (&all, pbdata->bounds_parm,
+				   &pbdata->parm_data);
+	else if (pbdata->parm_data.passed_pointer
+		 || use_register_for_decl (pbdata->bounds_parm))
+	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
+				 &pbdata->parm_data);
+	else
+	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
+				   &pbdata->parm_data);
+
+	/* Count handled bounds to make sure we miss nothing.  */
+	handled++;
+      }
+
+  gcc_assert (handled == bndargs.length ());
+
+  bndargs.release ();
+}
+
 /* Assign RTL expressions to the function's parameters.  This may involve
    copying them into registers and using those registers as the DECL_RTL.  */
 
@@ -3370,7 +3566,11 @@ assign_parms (tree fndecl)
   struct assign_parm_data_all all;
   tree parm;
   vec<tree> fnargs;
-  unsigned i;
+  unsigned i, bound_no = 0;
+  tree last_arg = NULL;
+  rtx last_arg_entry = NULL;
+  vec<bounds_parm_data> bndargs = vNULL;
+  bounds_parm_data bdata;
 
   crtl->args.internal_arg_pointer = targetm.calls.internal_arg_pointer ();
 
@@ -3412,9 +3612,6 @@ assign_parms (tree fndecl)
 	    }
 	}
 
-      if (cfun->stdarg && !DECL_CHAIN (parm))
-	assign_parms_setup_varargs (&all, &data, false);
-
       /* Find out where the parameter arrives in this function.  */
       assign_parm_find_entry_rtl (&all, &data);
 
@@ -3424,7 +3621,15 @@ assign_parms (tree fndecl)
 	  assign_parm_find_stack_rtl (parm, &data);
 	  assign_parm_adjust_entry_rtl (&data);
 	}
-
+      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
+	{
+	  /* Remember where the last non-bounds arg was passed in case
+	     we have to load associated bounds for it from the Bounds
+	     Table.  */
+	  last_arg = parm;
+	  last_arg_entry = data.entry_parm;
+	  bound_no = 0;
+	}
       /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
 	{
@@ -3436,20 +3641,63 @@ assign_parms (tree fndecl)
       else
 	set_decl_incoming_rtl (parm, data.entry_parm, false);
 
+      /* Bounds should be loaded in the particular order to
+	 have registers allocated correctly.  Collect info about
+	 input bounds and load them later.  */
+      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
+	{
+	  /* Expect bounds in instrumented functions only.  */
+	  gcc_assert (chkp_function_instrumented_p (fndecl));
+
+	  bdata.parm_data = data;
+	  bdata.bounds_parm = parm;
+	  bdata.ptr_parm = last_arg;
+	  bdata.ptr_entry = last_arg_entry;
+	  bdata.bound_no = bound_no;
+	  bndargs.safe_push (bdata);
+	}
+      else
+	{
+	  assign_parm_adjust_stack_rtl (&data);
+
+	  if (assign_parm_setup_block_p (&data))
+	    assign_parm_setup_block (&all, parm, &data);
+	  else if (data.passed_pointer || use_register_for_decl (parm))
+	    assign_parm_setup_reg (&all, parm, &data);
+	  else
+	    assign_parm_setup_stack (&all, parm, &data);
+	}
+
+      if (cfun->stdarg && !DECL_CHAIN (parm))
+	{
+	  int pretend_bytes = 0;
+
+	  assign_parms_setup_varargs (&all, &data, false);
+
+	  if (chkp_function_instrumented_p (fndecl))
+	    {
+	      /* We expect this is the last parm.  Otherwise it is wrong
+		 to assign bounds right now.  */
+	      gcc_assert (i == (fnargs.length () - 1));
+	      assign_bounds (bndargs, all);
+	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
+	    }
+	}
+
       /* Update info on where next arg arrives in registers.  */
       targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
 					  data.passed_type, data.named_arg);
 
-      assign_parm_adjust_stack_rtl (&data);
-
-      if (assign_parm_setup_block_p (&data))
-	assign_parm_setup_block (&all, parm, &data);
-      else if (data.passed_pointer || use_register_for_decl (parm))
-	assign_parm_setup_reg (&all, parm, &data);
-      else
-	assign_parm_setup_stack (&all, parm, &data);
+      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
+	bound_no++;
     }
 
+  assign_bounds (bndargs, all);
+
   if (targetm.calls.split_complex_arg)
     assign_parms_unsplit_complex (&all, fnargs);
 
@@ -3465,7 +3713,7 @@ assign_parms (tree fndecl)
   if (DECL_RESULT (fndecl))
     {
       tree type = TREE_TYPE (DECL_RESULT (fndecl));
-      enum machine_mode mode = TYPE_MODE (type);
+      machine_mode mode = TYPE_MODE (type);
 
       if (mode != BLKmode
 	  && mode != VOIDmode
@@ -3523,15 +3771,16 @@ assign_parms (tree fndecl)
   crtl->args.size = CEIL_ROUND (crtl->args.size,
 				PARM_BOUNDARY / BITS_PER_UNIT);
 
-#ifdef ARGS_GROW_DOWNWARD
-  crtl->args.arg_offset_rtx
-    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
-       : expand_expr (size_diffop (all.stack_args_size.var,
-				   size_int (-all.stack_args_size.constant)),
-		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
-#else
-  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
-#endif
+  if (ARGS_GROW_DOWNWARD)
+    {
+      crtl->args.arg_offset_rtx
+	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
+	   : expand_expr (size_diffop (all.stack_args_size.var,
+				       size_int (-all.stack_args_size.constant)),
+			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
+    }
+  else
+    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
 
   /* See how many bytes, if any, of its args a function should try to pop
     on return.
*/ @@ -3570,6 +3819,10 @@ assign_parms (tree fndecl) real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), fndecl, true); + if (chkp_function_instrumented_p (fndecl)) + crtl->return_bnd + = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result), + fndecl, true); REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; /* The delay slot scheduler assumes that crtl->return_rtx holds the hard register containing the return value, not a @@ -3745,7 +3998,7 @@ gimplify_parameters (void) INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */ void -locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, +locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs, int reg_parm_stack_space, int partial, tree fndecl ATTRIBUTE_UNUSED, struct args_size *initial_offset_ptr, @@ -3818,68 +4071,71 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, if (crtl->preferred_stack_boundary < boundary) crtl->preferred_stack_boundary = boundary; -#ifdef ARGS_GROW_DOWNWARD - locate->slot_offset.constant = -initial_offset_ptr->constant; - if (initial_offset_ptr->var) - locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), - initial_offset_ptr->var); + if (ARGS_GROW_DOWNWARD) + { + locate->slot_offset.constant = -initial_offset_ptr->constant; + if (initial_offset_ptr->var) + locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), + initial_offset_ptr->var); - { - tree s2 = sizetree; - if (where_pad != none - && (!tree_fits_uhwi_p (sizetree) - || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) - s2 = round_up (s2, round_boundary / BITS_PER_UNIT); - SUB_PARM_SIZE (locate->slot_offset, s2); - } + { + tree s2 = sizetree; + if (where_pad != none + && (!tree_fits_uhwi_p (sizetree) + || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) + s2 = round_up (s2, round_boundary / BITS_PER_UNIT); + SUB_PARM_SIZE (locate->slot_offset, s2); + } + + locate->slot_offset.constant += part_size_in_regs; - locate->slot_offset.constant += part_size_in_regs; - - if (!in_regs || reg_parm_stack_space > 0) - pad_to_arg_alignment (&locate->slot_offset, boundary, - &locate->alignment_pad); - - locate->size.constant = (-initial_offset_ptr->constant - - locate->slot_offset.constant); - if (initial_offset_ptr->var) - locate->size.var = size_binop (MINUS_EXPR, - size_binop (MINUS_EXPR, - ssize_int (0), - initial_offset_ptr->var), - locate->slot_offset.var); - - /* Pad_below needs the pre-rounded size to know how much to pad - below. */ - locate->offset = locate->slot_offset; - if (where_pad == downward) - pad_below (&locate->offset, passed_mode, sizetree); - -#else /* !ARGS_GROW_DOWNWARD */ - if (!in_regs || reg_parm_stack_space > 0) - pad_to_arg_alignment (initial_offset_ptr, boundary, - &locate->alignment_pad); - locate->slot_offset = *initial_offset_ptr; + if (!in_regs || reg_parm_stack_space > 0) + pad_to_arg_alignment (&locate->slot_offset, boundary, + &locate->alignment_pad); + + locate->size.constant = (-initial_offset_ptr->constant + - locate->slot_offset.constant); + if (initial_offset_ptr->var) + locate->size.var = size_binop (MINUS_EXPR, + size_binop (MINUS_EXPR, + ssize_int (0), + initial_offset_ptr->var), + locate->slot_offset.var); + + /* Pad_below needs the pre-rounded size to know how much to pad + below. 
*/ + locate->offset = locate->slot_offset; + if (where_pad == downward) + pad_below (&locate->offset, passed_mode, sizetree); + + } + else + { + if (!in_regs || reg_parm_stack_space > 0) + pad_to_arg_alignment (initial_offset_ptr, boundary, + &locate->alignment_pad); + locate->slot_offset = *initial_offset_ptr; #ifdef PUSH_ROUNDING - if (passed_mode != BLKmode) - sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); + if (passed_mode != BLKmode) + sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); #endif - /* Pad_below needs the pre-rounded size to know how much to pad below - so this must be done before rounding up. */ - locate->offset = locate->slot_offset; - if (where_pad == downward) - pad_below (&locate->offset, passed_mode, sizetree); + /* Pad_below needs the pre-rounded size to know how much to pad below + so this must be done before rounding up. */ + locate->offset = locate->slot_offset; + if (where_pad == downward) + pad_below (&locate->offset, passed_mode, sizetree); - if (where_pad != none - && (!tree_fits_uhwi_p (sizetree) - || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) - sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT); + if (where_pad != none + && (!tree_fits_uhwi_p (sizetree) + || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) + sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT); - ADD_PARM_SIZE (locate->size, sizetree); + ADD_PARM_SIZE (locate->size, sizetree); - locate->size.constant -= part_size_in_regs; -#endif /* ARGS_GROW_DOWNWARD */ + locate->size.constant -= part_size_in_regs; + } #ifdef FUNCTION_ARG_OFFSET locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type); @@ -3923,11 +4179,11 @@ pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, tree offset = size_binop (PLUS_EXPR, ARGS_SIZE_TREE (*offset_ptr), sp_offset_tree); -#ifdef ARGS_GROW_DOWNWARD - tree rounded = round_down (offset, boundary / BITS_PER_UNIT); -#else - tree rounded = round_up (offset, boundary / BITS_PER_UNIT); -#endif + tree rounded; + if (ARGS_GROW_DOWNWARD) + rounded = round_down (offset, boundary / BITS_PER_UNIT); + else + rounded = round_up (offset, boundary / BITS_PER_UNIT); offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree); /* ARGS_SIZE_TREE includes constant term. */ @@ -3939,11 +4195,10 @@ pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, else { offset_ptr->constant = -sp_offset + -#ifdef ARGS_GROW_DOWNWARD - FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes); -#else - CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes); -#endif + (ARGS_GROW_DOWNWARD + ? 
FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
+	 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
+
       if (boundary > PARM_BOUNDARY)
 	alignment_pad->constant = offset_ptr->constant - save_constant;
     }
@@ -3951,7 +4206,7 @@ pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
 }
 
 static void
-pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
+pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
 {
   if (passed_mode != BLKmode)
     {
@@ -4155,9 +4410,10 @@ clear_block_marks (tree block)
 }
 
 static void
-reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
+reorder_blocks_1 (rtx_insn *insns, tree current_block,
+		  vec<tree> *p_block_stack)
 {
-  rtx insn;
+  rtx_insn *insn;
   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
 
   for (insn = insns; insn; insn = NEXT_INSN (insn))
@@ -4549,6 +4805,10 @@ allocate_struct_function (tree fndecl, bool abstract_p)
       /* ??? This could be set on a per-function basis by the front-end
          but is this worth the hassle?  */
       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
+      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
+
+      if (!profile_flag && !flag_instrument_function_entry_exit)
+	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
     }
 }
 
@@ -4574,7 +4834,7 @@ push_struct_function (tree fndecl)
 static void
 prepare_function_start (void)
 {
-  gcc_assert (!crtl->emit.x_last_insn);
+  gcc_assert (!get_last_insn ());
   init_temp_slots ();
   init_emit ();
   init_varasm_status ();
@@ -4605,6 +4865,29 @@ prepare_function_start (void)
   frame_pointer_needed = 0;
 }
 
+void
+push_dummy_function (bool with_decl)
+{
+  tree fn_decl, fn_type, fn_result_decl;
+
+  gcc_assert (!in_dummy_function);
+  in_dummy_function = true;
+
+  if (with_decl)
+    {
+      fn_type = build_function_type_list (void_type_node, NULL_TREE);
+      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
+			    fn_type);
+      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
+				   NULL_TREE, void_type_node);
+      DECL_RESULT (fn_decl) = fn_result_decl;
+    }
+  else
+    fn_decl = NULL_TREE;
+
+  push_struct_function (fn_decl);
+}
+
 /* Initialize the rtl expansion mechanism so that we can do simple things
    like generate sequences.  This is used to provide a context during global
    initialization of some passes.  You must call expand_dummy_function_end
@@ -4613,9 +4896,7 @@ prepare_function_start (void)
 void
 init_dummy_function_start (void)
 {
-  gcc_assert (!in_dummy_function);
-  in_dummy_function = true;
-  push_struct_function (NULL_TREE);
+  push_dummy_function (false);
   prepare_function_start ();
 }
 
@@ -4655,7 +4936,7 @@ void
 stack_protect_epilogue (void)
 {
   tree guard_decl = targetm.stack_protect_guard ();
-  rtx label = gen_label_rtx ();
+  rtx_code_label *label = gen_label_rtx ();
   rtx x, y, tmp;
 
   x = expand_normal (crtl->stack_protect_guard);
@@ -4686,7 +4967,7 @@ stack_protect_epilogue (void)
      except adding the prediction by hand.  */
   tmp = get_last_insn ();
   if (JUMP_P (tmp))
-    predict_insn_def (tmp, PRED_NORETURN, TAKEN);
+    predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
 
   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
   free_temp_slots ();
@@ -4795,6 +5076,14 @@ expand_function_start (tree subr)
 	    /* Set DECL_REGISTER flag so that expand_function_end will copy the
 	       result to the real return register(s).
*/ DECL_REGISTER (DECL_RESULT (subr)) = 1; + + if (chkp_function_instrumented_p (current_function_decl)) + { + tree return_type = TREE_TYPE (DECL_RESULT (subr)); + rtx bounds = targetm.calls.chkp_function_value_bounds (return_type, + subr, 1); + SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds); + } } /* Initialize rtx for parameters and local variables. @@ -4805,7 +5094,8 @@ expand_function_start (tree subr) if (cfun->static_chain_decl) { tree parm = cfun->static_chain_decl; - rtx local, chain, insn; + rtx local, chain; + rtx_insn *insn; local = gen_reg_rtx (Pmode); chain = targetm.calls.static_chain (current_function_decl, true); @@ -4879,6 +5169,13 @@ expand_function_start (tree subr) stack_check_probe_note = emit_note (NOTE_INSN_DELETED); } +void +pop_dummy_function (void) +{ + pop_cfun (); + in_dummy_function = false; +} + /* Undo the effects of init_dummy_function_start. */ void expand_dummy_function_end (void) @@ -4894,18 +5191,14 @@ expand_dummy_function_end (void) free_after_parsing (cfun); free_after_compilation (cfun); - pop_cfun (); - in_dummy_function = false; + pop_dummy_function (); } -/* Call DOIT for each hard register used as a return value from - the current function. */ +/* Helper for diddle_return_value. */ void -diddle_return_value (void (*doit) (rtx, void *), void *arg) +diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing) { - rtx outgoing = crtl->return_rtx; - if (! outgoing) return; @@ -4925,6 +5218,16 @@ diddle_return_value (void (*doit) (rtx, void *), void *arg) } } +/* Call DOIT for each hard register used as a return value from + the current function. */ + +void +diddle_return_value (void (*doit) (rtx, void *), void *arg) +{ + diddle_return_value_1 (doit, arg, crtl->return_bnd); + diddle_return_value_1 (doit, arg, crtl->return_rtx); +} + static void do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) { @@ -4977,9 +5280,9 @@ do_warn_unused_parameter (tree fn) /* Set the location of the insn chain starting at INSN to LOC. */ static void -set_insn_locations (rtx insn, int loc) +set_insn_locations (rtx_insn *insn, int loc) { - while (insn != NULL_RTX) + while (insn != NULL) { if (INSN_P (insn)) INSN_LOCATION (insn) = loc; @@ -5004,7 +5307,7 @@ expand_function_end (void) space for another stack frame. */ if (flag_stack_check == GENERIC_STACK_CHECK) { - rtx insn, seq; + rtx_insn *insn, *seq; for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) if (CALL_P (insn)) @@ -5157,8 +5460,8 @@ expand_function_end (void) If returning a structure PCC style, the caller also depends on this value. And cfun->returns_pcc_struct is not necessarily set. */ - if (cfun->returns_struct - || cfun->returns_pcc_struct) + if ((cfun->returns_struct || cfun->returns_pcc_struct) + && !targetm.calls.omit_struct_return_reg) { rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl)); tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); @@ -5260,7 +5563,7 @@ get_arg_pointer_save_area (void) generated stack slot may not be a valid memory address, so we have to check it and fix it if necessary. */ start_sequence (); - emit_move_insn (validize_mem (ret), + emit_move_insn (validize_mem (copy_rtx (ret)), crtl->args.internal_arg_pointer); seq = get_insns (); end_sequence (); @@ -5279,18 +5582,17 @@ get_arg_pointer_save_area (void) for the first time. 
*/
 
 static void
-record_insns (rtx insns, rtx end, htab_t *hashp)
+record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
 {
-  rtx tmp;
-  htab_t hash = *hashp;
+  rtx_insn *tmp;
+  hash_table<insn_cache_hasher> *hash = *hashp;
 
   if (hash == NULL)
-    *hashp = hash
-      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
+    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
 
   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
     {
-      void **slot = htab_find_slot (hash, tmp, INSERT);
+      rtx *slot = hash->find_slot (tmp, INSERT);
       gcc_assert (*slot == NULL);
       *slot = tmp;
     }
@@ -5303,18 +5605,18 @@ record_insns (rtx insns, rtx end, htab_t *hashp)
 void
 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
 {
-  htab_t hash;
-  void **slot;
+  hash_table<insn_cache_hasher> *hash;
+  rtx *slot;
 
   hash = epilogue_insn_hash;
-  if (!hash || !htab_find (hash, insn))
+  if (!hash || !hash->find (insn))
     {
       hash = prologue_insn_hash;
-      if (!hash || !htab_find (hash, insn))
+      if (!hash || !hash->find (insn))
 	return;
     }
 
-  slot = htab_find_slot (hash, copy, INSERT);
+  slot = hash->find_slot (copy, INSERT);
   gcc_assert (*slot == NULL);
   *slot = copy;
 }
@@ -5323,21 +5625,22 @@ maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
    we can be running after reorg, SEQUENCE rtl is possible.  */
 
 static bool
-contains (const_rtx insn, htab_t hash)
+contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
 {
   if (hash == NULL)
     return false;
 
   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     {
+      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
       int i;
-      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
-	if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
+      for (i = seq->len () - 1; i >= 0; i--)
+	if (hash->find (seq->element (i)))
 	  return true;
       return false;
     }
 
-  return htab_find (hash, insn) != NULL;
+  return hash->find (const_cast<rtx> (insn)) != NULL;
 }
 
 int
@@ -5350,22 +5653,21 @@ prologue_epilogue_contains (const_rtx insn)
   return 0;
 }
 
-#ifdef HAVE_return
 /* Insert use of return register before the end of BB.  */
 
 static void
 emit_use_return_register_into_block (basic_block bb)
 {
-  rtx seq, insn;
+  rtx seq;
+  rtx_insn *insn;
   start_sequence ();
   use_return_register ();
   seq = get_insns ();
   end_sequence ();
   insn = BB_END (bb);
-#ifdef HAVE_cc0
-  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
+  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
     insn = prev_cc0_setter (insn);
-#endif
+
   emit_insn_before (seq, insn);
 }
 
@@ -5376,12 +5678,10 @@ emit_use_return_register_into_block (basic_block bb)
 static rtx
 gen_return_pattern (bool simple_p)
 {
-#ifdef HAVE_simple_return
+  if (!HAVE_simple_return)
+    gcc_assert (!simple_p);
+
   return simple_p ? gen_simple_return () : gen_return ();
-#else
-  gcc_assert (!simple_p);
-  return gen_return ();
-#endif
 }
 
 /* Insert an appropriate return pattern at the end of block BB.  This
@@ -5399,12 +5699,11 @@ emit_return_into_block (bool simple_p, basic_block bb)
       gcc_assert (ANY_RETURN_P (pat));
       JUMP_LABEL (jump) = pat;
     }
-#endif
 
 /* Set JUMP_LABEL for a return insn.  */
 
 void
-set_return_jump_label (rtx returnjump)
+set_return_jump_label (rtx_insn *returnjump)
 {
   rtx pat = PATTERN (returnjump);
   if (GET_CODE (pat) == PARALLEL)
@@ -5415,10 +5714,9 @@ set_return_jump_label (rtx returnjump)
   JUMP_LABEL (returnjump) = ret_rtx;
 }
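From here on the patch converts `#ifdef HAVE_return' / `#ifdef HAVE_simple_return'
conditional compilation into ordinary `if (HAVE_...)' tests, as gen_return_pattern
above already shows.  That only works if the macro is always defined; the usual
compatibility shim, sketched here under the assumption that it sits near the top
of the file (it is not part of the quoted hunks), looks like:

    /* Targets lacking a simple_return pattern get a constant-false macro
       and an unreachable stub, so `if (HAVE_simple_return)' compiles
       everywhere and the dead arm is folded away by the compiler.  */
    #ifndef HAVE_simple_return
    #define HAVE_simple_return 0
    static inline rtx
    gen_simple_return ()
    {
      gcc_unreachable ();
    }
    #endif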
-#if defined (HAVE_return) || defined (HAVE_simple_return)
 /* Return true if there are any active insns between HEAD and TAIL.  */
 bool
-active_insn_between (rtx head, rtx tail)
+active_insn_between (rtx_insn *head, rtx_insn *tail)
 {
   while (tail)
     {
@@ -5453,7 +5751,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
 
   FOR_EACH_VEC_ELT (src_bbs, i, bb)
     {
-      rtx jump = BB_END (bb);
+      rtx_insn *jump = BB_END (bb);
 
       if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
 	continue;
@@ -5488,17 +5786,15 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
 	    dest = simple_return_rtx;
 	  else
 	    dest = ret_rtx;
-	  if (!redirect_jump (jump, dest, 0))
+	  if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
 	    {
-#ifdef HAVE_simple_return
-	      if (simple_p)
+	      if (HAVE_simple_return && simple_p)
 		{
 		  if (dump_file)
 		    fprintf (dump_file,
 			     "Failed to redirect bb %d branch.\n", bb->index);
 		  unconverted.safe_push (e);
 		}
-#endif
 	      continue;
 	    }
 
@@ -5513,15 +5809,13 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
 	}
       else
 	{
-#ifdef HAVE_simple_return
-	  if (simple_p)
+	  if (HAVE_simple_return && simple_p)
 	    {
 	      if (dump_file)
 		fprintf (dump_file,
 			 "Failed to redirect bb %d branch.\n", bb->index);
 	      unconverted.safe_push (e);
 	    }
-#endif
 	  continue;
 	}
 
@@ -5549,7 +5843,6 @@ emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
   exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
   return last_bb;
 }
-#endif
 
 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
@@ -5600,17 +5893,15 @@
    in a sibcall omit the sibcall_epilogue if the block is not in
    ANTIC.  */
 
-static void
+void
 thread_prologue_and_epilogue_insns (void)
 {
   bool inserted;
-#ifdef HAVE_simple_return
   vec<edge> unconverted_simple_returns = vNULL;
   bitmap_head bb_flags;
-#endif
-  rtx returnjump;
-  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
-  rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
+  rtx_insn *returnjump;
+  rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
+  rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
   edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
   edge_iterator ei;
 
@@ -5619,9 +5910,8 @@ thread_prologue_and_epilogue_insns (void)
   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   inserted = false;
-  seq = NULL_RTX;
-  epilogue_end = NULL_RTX;
-  returnjump = NULL_RTX;
+  epilogue_end = NULL;
+  returnjump = NULL;
 
   /* Can't deal with multiple successors of the entry block at the
      moment.  Function should always have at least one entry
@@ -5630,7 +5920,7 @@ thread_prologue_and_epilogue_insns (void)
   entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   orig_entry_edge = entry_edge;
 
-  split_prologue_seq = NULL_RTX;
+  split_prologue_seq = NULL;
   if (flag_split_stack
       && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
 	  == NULL))
@@ -5650,12 +5940,12 @@ thread_prologue_and_epilogue_insns (void)
 #endif
     }
 
-  prologue_seq = NULL_RTX;
+  prologue_seq = NULL;
 #ifdef HAVE_prologue
   if (HAVE_prologue)
     {
       start_sequence ();
-      seq = gen_prologue ();
+      rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
       emit_insn (seq);
 
       /* Insert an explicit USE for the frame pointer
@@ -5679,7 +5969,6 @@ thread_prologue_and_epilogue_insns (void)
     }
 #endif
 
-#ifdef HAVE_simple_return
   bitmap_initialize (&bb_flags, &bitmap_default_obstack);
 
   /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.
*/
   try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
-#endif
 
   if (split_prologue_seq != NULL_RTX)
     {
@@ -5712,14 +6000,11 @@ thread_prologue_and_epilogue_insns (void)
   exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
 
-#ifdef HAVE_simple_return
-  if (entry_edge != orig_entry_edge)
+  if (HAVE_simple_return && entry_edge != orig_entry_edge)
     exit_fallthru_edge
 	= get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
 					 &unconverted_simple_returns,
 					 &returnjump);
-#endif
 
-#ifdef HAVE_return
   if (HAVE_return)
     {
       if (exit_fallthru_edge == NULL)
@@ -5738,17 +6023,16 @@ thread_prologue_and_epilogue_insns (void)
 	    {
 	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
 	      epilogue_end = returnjump = BB_END (last_bb);
-#ifdef HAVE_simple_return
+
 	      /* Emitting the return may add a basic block.
 		 Fix bb_flags for the added block.  */
-	      if (last_bb != exit_fallthru_edge->src)
+	      if (HAVE_simple_return && last_bb != exit_fallthru_edge->src)
 		bitmap_set_bit (&bb_flags, last_bb->index);
-#endif
+
 	      goto epilogue_done;
 	    }
 	}
     }
-#endif
 
   /* A small fib -- epilogue is not yet completed, but we wish to re-use
      this marker for the splits of EH_RETURN patterns, and nothing else
@@ -5763,7 +6047,7 @@ thread_prologue_and_epilogue_insns (void)
      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
-      rtx prev, last, trial;
+      rtx_insn *prev, *last, *trial;
 
       if (e->flags & EDGE_FALLTHRU)
 	continue;
@@ -5787,12 +6071,11 @@ thread_prologue_and_epilogue_insns (void)
   if (exit_fallthru_edge == NULL)
     goto epilogue_done;
 
-#ifdef HAVE_epilogue
   if (HAVE_epilogue)
     {
       start_sequence ();
       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
-      seq = gen_epilogue ();
+      rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
       if (seq)
 	emit_jump_insn (seq);
@@ -5811,7 +6094,6 @@ thread_prologue_and_epilogue_insns (void)
       set_return_jump_label (returnjump);
     }
   else
-#endif
     {
       basic_block cur_bb;
@@ -5860,10 +6142,9 @@ epilogue_done:
 	}
     }
 
-#ifdef HAVE_simple_return
-  convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
-			    unconverted_simple_returns);
-#endif
+  if (HAVE_simple_return)
+    convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
+			      returnjump, unconverted_simple_returns);
 
 #ifdef HAVE_sibcall_epilogue
   /* Emit sibling epilogues before any sibling call sites.  */
   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
							      ei_safe_edge (ei));
							      )
     {
       basic_block bb = e->src;
-      rtx insn = BB_END (bb);
+      rtx_insn *insn = BB_END (bb);
       rtx ep_seq;
 
       if (!CALL_P (insn)
 	  || ! SIBLING_CALL_P (insn)
-#ifdef HAVE_simple_return
-	  || (entry_edge != orig_entry_edge
-	      && !bitmap_bit_p (&bb_flags, bb->index))
-#endif
-	  )
+	  || (HAVE_simple_return && (entry_edge != orig_entry_edge
				     && !bitmap_bit_p (&bb_flags, bb->index))))
 	{
 	  ei_next (&ei);
 	  continue;
 	}
@@ -5893,7 +6171,7 @@ epilogue_done:
       start_sequence ();
       emit_note (NOTE_INSN_EPILOGUE_BEG);
       emit_insn (ep_seq);
-      seq = get_insns ();
+      rtx_insn *seq = get_insns ();
       end_sequence ();
 
       /* Retain a map of the epilogue insns.  Used in life analysis to
@@ -5908,10 +6186,9 @@ epilogue_done:
     }
 #endif
 
-#ifdef HAVE_epilogue
   if (epilogue_end)
     {
-      rtx insn, next;
+      rtx_insn *insn, *next;
 
       /* Similarly, move any line notes that appear after the epilogue.
There is no need, however, to be quite so anal about the existence
	 of such notes.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
 	}
     }
-#endif
 
-#ifdef HAVE_simple_return
   bitmap_clear (&bb_flags);
-#endif
 
   /* Threading the prologue and epilogue changes the artificial refs
      in the entry and exit blocks.  */
@@ -5944,14 +6218,17 @@
 void
 reposition_prologue_and_epilogue_notes (void)
 {
-#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
-    || defined (HAVE_sibcall_epilogue)
+#if ! defined (HAVE_prologue) && ! defined (HAVE_sibcall_epilogue)
+  if (!HAVE_epilogue)
+    return;
+#endif
+
   /* Since the hash table is created on demand, the fact that it is
      non-null is a signal that it is non-empty.  */
   if (prologue_insn_hash != NULL)
     {
-      size_t len = htab_elements (prologue_insn_hash);
-      rtx insn, last = NULL, note = NULL;
+      size_t len = prologue_insn_hash->elements ();
+      rtx_insn *insn, *last = NULL, *note = NULL;
 
       /* Scan from the beginning until we reach the last prologue insn.  */
       /* ??? While we do have the CFG intact, there are two problems:
@@ -6002,7 +6279,7 @@ reposition_prologue_and_epilogue_notes (void)
 
       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
 	{
-	  rtx insn, first = NULL, note = NULL;
+	  rtx_insn *insn, *first = NULL, *note = NULL;
 	  basic_block bb = e->src;
 
 	  /* Scan from the beginning until we reach the first epilogue insn.  */
@@ -6042,7 +6319,6 @@ reposition_prologue_and_epilogue_notes (void)
 	    }
 	}
     }
-#endif /* HAVE_prologue or HAVE_epilogue */
 }
 
 /* Returns the name of function declared by FNDECL.  */
@@ -6087,14 +6363,10 @@ used_types_insert_helper (tree type, struct function *func)
 {
   if (type != NULL && func != NULL)
     {
-      void **slot;
-
       if (func->used_types_hash == NULL)
-	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
-						 htab_eq_pointer, NULL);
-      slot = htab_find_slot (func->used_types_hash, type, INSERT);
-      if (*slot == NULL)
-	*slot = type;
+	func->used_types_hash = hash_set<tree>::create_ggc (37);
+
+      func->used_types_hash->add (type);
     }
 }
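used_types_hash is now a hash_set<tree> rather than a raw htab_t, so the
insert-if-absent dance with find_slot disappears behind hash_set::add.
A minimal sketch of the API as used above (seen and note_type are
hypothetical names, not part of the patch):

    static GTY(()) hash_set<tree> *seen;

    static void
    note_type (tree type)
    {
      if (!seen)
        seen = hash_set<tree>::create_ggc (37);
      /* add returns true when TYPE was already present.  */
      if (!seen->add (type))
        {
          /* First occurrence of TYPE.  */
        }
    }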
@@ -6140,24 +6412,17 @@ hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
 
 /* Hash function of the types_used_by_vars_entry hash table.  */
 
 hashval_t
-types_used_by_vars_do_hash (const void *x)
+used_type_hasher::hash (types_used_by_vars_entry *entry)
 {
-  const struct types_used_by_vars_entry *entry =
-    (const struct types_used_by_vars_entry *) x;
-
   return hash_types_used_by_vars_entry (entry);
 }
 
 /* Equality function of the types_used_by_vars_entry hash table.  */
 
-int
-types_used_by_vars_eq (const void *x1, const void *x2)
+bool
+used_type_hasher::equal (types_used_by_vars_entry *e1,
+			 types_used_by_vars_entry *e2)
 {
-  const struct types_used_by_vars_entry *e1 =
-    (const struct types_used_by_vars_entry *) x1;
-  const struct types_used_by_vars_entry *e2 =
-    (const struct types_used_by_vars_entry *)x2;
-
   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
 }
 
@@ -6168,16 +6433,15 @@ types_used_by_var_decl_insert (tree type, tree var_decl)
 {
   if (type != NULL && var_decl != NULL)
     {
-      void **slot;
+      types_used_by_vars_entry **slot;
       struct types_used_by_vars_entry e;
       e.var_decl = var_decl;
       e.type = type;
       if (types_used_by_vars_hash == NULL)
-	types_used_by_vars_hash =
-	  htab_create_ggc (37, types_used_by_vars_do_hash,
-			   types_used_by_vars_eq, NULL);
-      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
-				       hash_types_used_by_vars_entry (&e), INSERT);
+	types_used_by_vars_hash
+	  = hash_table<used_type_hasher>::create_ggc (37);
+
+      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
       if (*slot == NULL)
 	{
 	  struct types_used_by_vars_entry *entry;
@@ -6332,7 +6596,7 @@ make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    asm ("": "=mr" (inout_2) : "0" (inout_2));  */
 
 static void
-match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
+match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
 {
   int i;
   bool changed = false;
@@ -6344,7 +6608,8 @@ match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
   memset (output_matched, 0, noutputs * sizeof (bool));
   for (i = 0; i < ninputs; i++)
     {
-      rtx input, output, insns;
+      rtx input, output;
+      rtx_insn *insns;
       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
       char *end;
       int match, j;
@@ -6433,6 +6698,15 @@ match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
   df_insn_rescan (insn);
 }
 
+/* Add the decl D to the local_decls list of FUN.  */
+
+void
+add_local_decl (struct function *fun, tree d)
+{
+  gcc_assert (TREE_CODE (d) == VAR_DECL);
+  vec_safe_push (fun->local_decls, d);
+}
+
 namespace {
 
 const pass_data pass_data_match_asm_constraints =
@@ -6464,7 +6738,8 @@ unsigned
 pass_match_asm_constraints::execute (function *fun)
 {
   basic_block bb;
-  rtx insn, pat, *p_sets;
+  rtx_insn *insn;
+  rtx pat, *p_sets;
   int noutputs;
 
   if (!crtl->has_asm_statement