+2008-04-18 Jan Hubicka <jh@suse.cz>
+
+ * except.c (dw2_size_of_call_site_table,
+ sjlj_size_of_call_site_table): Use vector API for call_site_record.
+
+ * cgraphbuild.c (build_cgraph_edges): Update.
+ * tree-pass.h: Update comment.
+ * final.c (leaf_function_p): Update.
+ (leaf_renumber_regs): Update.
+ (rest_of_clean_state): Update.
+ * omp-low.c (expand_omp_parallel): Update.
+ * ipa-reference.c (analyze_function): Update.
+ * reorg.c (find_end_label): Update.
+ (optimize_skip): Update.
+ (fill_simple_delay_slots): Update.
+ (fill_simple_delay_slots): Update.
+ (make_return_insns): Update.
+ (dbr_schedule): Update.
+ * gimple-low.c (record_vars_into): Update.
+ * cfgbuild.c (make_edges): Update.
+ * function.c (assign_stack_local): Update.
+ (assign_parm_adjust_stack_rtl): Update.
+ (locate_and_pad_parm): Update.
+ (allocate_struct_function): Do not initialize stack_alignment_needed
+ and preferred_stack_boundary here.
+ (stack_protect_prologue): Update.
+ (stack_protect_epilogue): Update.
+ (expand_function_start): Initialize stack_alignment_needed,
+ preferred_stack_boundary and max_jumptable_ents.
+ (expand_function_end): Update.
+ (free_after_compilation): Do not NULLify epilogue_delay_list.
+ * function.h (struct rtl_data): Add stack_protect_guard,
+ stack_alignment_needed,
+ preferred_stack_boundary, epilogue_delay_list.
+ (struct function): Remove value_histograms, stack_alignment_needed,
+ preferred_stack_boundary, epilogue_delay_list, max_jumptable_ents,
+ last_label_uid,
+ unexpanded_var_list, stack_protect_guard.
+ (current_function_epilogue_delay_list): Remove.
+ * ipa-type-escape.c (analyze_function): Update.
+ * gimplify.c (pop_gimplify_context): Update comment.
+ * calls.c (expand_call): Update.
+ (emit_library_call_value_1): Update.
+ * except.c (set_nothrow_function_flags): Update.
+ * cfgexpand.c (get_decl_align_unit): Update.
+ (create_stack_guard): Update.
+ (estimated_stack_frame_size): Update.
+ (expand_used_vars): Update.
+ (tree_expand_cfg): Free histograms earlier, init expansion variables.
+ * explow.c (allocate_dynamic_stack_space): Update.
+ * tree-ssa-live.c (remove_unused_locals): Update.
+ * varasm.c (mark_constant_pool): Update.
+ * tree-inline.c (remap_decls): Update.
+ (initialize_cfun): Update.
+ (declare_return_variable): Update.
+ (inline_forbidden_p): Update.
+ (expand_call_inline): Update.
+ (declare_inline_vars): Update.
+ (tree_function_versioning): Update.
+ * tree-flow.h (value_histograms): New.
+ (VALUE_HISTOGRAMS): New macro.
+ * basic-block.h (control_flow_graph): Add max_jumptable_ents,
+ last_label_uid.
+ * tree-cfg.c (set_bb_for_stmt): Update.
+ (replace_by_duplicate_decl): Update.
+ (move_block_to_fn): Update.
+ (new_label_mapper): Update.
+ (dump_function_to_file): Update.
+ * ipa-struct-reorg.c (build_data_structure): Update.
+ * cfgrtl.c (print_rtl_with_bb): Update.
+ * reload1.c (reload): Update.
+ (reload): Update.
+ * config/i386/i386.c (setup_incoming_varargs_64,
+ ix86_compute_frame_layout): Update.
+ * config/arc/arc.c (arc_output_function_epilogue): Update.
+
2008-04-18 Marius Strobl <marius@FreeBSD.org>
* gthr-posix.h (__gthread_active_p): Use the Solaris implementation
/* Number of basic blocks in the dominance tree. */
unsigned x_n_bbs_in_dom_tree[2];
+
+ /* Maximal number of entities in the single jumptable. Used to estimate
+ final flowgraph size. */
+ int max_jumptable_ents;
+
+ /* UIDs for LABEL_DECLs. */
+ int last_label_uid;
};
/* Defines for accessing the fields of the CFG structure for function FN. */
/* Ensure current function's preferred stack boundary is at least
what we need. We don't have to increase alignment for recursive
functions. */
- if (cfun->preferred_stack_boundary < preferred_stack_boundary
+ if (crtl->preferred_stack_boundary < preferred_stack_boundary
&& fndecl != current_function_decl)
- cfun->preferred_stack_boundary = preferred_stack_boundary;
+ crtl->preferred_stack_boundary = preferred_stack_boundary;
if (fndecl == current_function_decl)
cfun->recursive_call_emit = true;
if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
start_sequence ();
- if (pass == 0 && cfun->stack_protect_guard)
+ if (pass == 0 && crtl->stack_protect_guard)
stack_protect_epilogue ();
adjusted_args_size = args_size;
/* Ensure current function's preferred stack boundary is at least
what we need. */
- if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
- cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+ if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
+ crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* If this kind of value comes back in memory,
decide where in memory it should come back. */
/* Heavy use of computed goto in machine-generated code can lead to
nearly fully-connected CFGs. In that case we spend a significant
amount of time searching the edge lists for duplicates. */
- if (forced_labels || cfun->max_jumptable_ents > 100)
+ if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
edge_cache = sbitmap_alloc (last_basic_block);
/* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
if (align > PREFERRED_STACK_BOUNDARY)
align = PREFERRED_STACK_BOUNDARY;
- if (cfun->stack_alignment_needed < align)
- cfun->stack_alignment_needed = align;
+ if (crtl->stack_alignment_needed < align)
+ crtl->stack_alignment_needed = align;
return align / BITS_PER_UNIT;
}
TREE_THIS_VOLATILE (guard) = 1;
TREE_USED (guard) = 1;
expand_one_stack_var (guard);
- cfun->stack_protect_guard = guard;
+ crtl->stack_protect_guard = guard;
}
/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
init_vars_expansion (void)
{
tree t;
- /* Set TREE_USED on all variables in the unexpanded_var_list. */
- for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
+ /* Set TREE_USED on all variables in the local_decls. */
+ for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
TREE_USED (TREE_VALUE (t)) = 1;
/* Clear TREE_USED on all variables associated with a block scope. */
init_vars_expansion ();
- /* At this point all variables on the unexpanded_var_list with TREE_USED
+ /* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
- for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
+ for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
init_vars_expansion ();
- /* At this point all variables on the unexpanded_var_list with TREE_USED
+ /* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
- for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
+ for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
bool expand_now = false;
if (expand_now)
expand_one_var (var, true, true);
}
- cfun->unexpanded_var_list = NULL_TREE;
+ cfun->local_decls = NULL_TREE;
/* At this point, all variables within the block tree with TREE_USED
set are actually used by the optimized function. Lay them out. */
discover_nonconstant_array_refs ();
targetm.expand_to_rtl_hook ();
+ crtl->stack_alignment_needed = STACK_BOUNDARY;
+ crtl->preferred_stack_boundary = STACK_BOUNDARY;
+ cfun->cfg->max_jumptable_ents = 0;
+
/* Expand the variables recorded during gimple lowering. */
expand_used_vars ();
if (current_function_calls_alloca)
warning (OPT_Wstack_protector,
"not protecting local variables: variable length buffer");
- if (has_short_buffer && !cfun->stack_protect_guard)
+ if (has_short_buffer && !crtl->stack_protect_guard)
warning (OPT_Wstack_protector,
"not protecting function: no buffer at least %d bytes long",
(int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
/* Initialize the stack_protect_guard field. This must happen after the
call to __main (if any) so that the external decl is initialized. */
- if (cfun->stack_protect_guard)
+ if (crtl->stack_protect_guard)
stack_protect_prologue ();
/* Register rtl specific functions for cfg. */
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
bb = expand_gimple_basic_block (bb);
pointer_map_destroy (lab_rtx_for_bb);
+ free_histograms ();
construct_exit_block ();
set_curr_insn_block (DECL_INITIAL (current_function_decl));
/* After expanding, the return labels are no longer needed. */
return_label = NULL;
naked_return_label = NULL;
- free_histograms ();
/* Tag the blocks with a depth number so that change_scope can find
the common parent easily. */
set_block_levels (DECL_INITIAL (cfun->decl), 0);
free (in_bb_p);
}
- if (current_function_epilogue_delay_list != 0)
+ if (crtl->epilogue_delay_list != 0)
{
fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
- for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
+ for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
tmp_rtx = XEXP (tmp_rtx, 1))
print_rtl_single (outf, XEXP (tmp_rtx, 0));
}
}
/* Look for initializers of constant variables and private statics. */
- for (step = cfun->unexpanded_var_list;
+ for (step = cfun->local_decls;
step;
step = TREE_CHAIN (step))
{
static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
- rtx epilogue_delay = current_function_epilogue_delay_list;
+ rtx epilogue_delay = crtl->epilogue_delay_list;
int noepilogue = FALSE;
enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
We also may end up assuming that only 64bit values are stored in SSE
register let some floating point program work. */
if (ix86_preferred_stack_boundary >= BIGGEST_ALIGNMENT)
- cfun->stack_alignment_needed = BIGGEST_ALIGNMENT;
+ crtl->stack_alignment_needed = BIGGEST_ALIGNMENT;
save_area = frame_pointer_rtx;
set = get_varargs_alias_set ();
frame->nregs = ix86_nsaved_regs ();
total_size = size;
- stack_alignment_needed = cfun->stack_alignment_needed / BITS_PER_UNIT;
- preferred_alignment = cfun->preferred_stack_boundary / BITS_PER_UNIT;
+ stack_alignment_needed = crtl->stack_alignment_needed / BITS_PER_UNIT;
+ preferred_alignment = crtl->preferred_stack_boundary / BITS_PER_UNIT;
/* During reload iteration the amount of registers saved can change.
Recompute the value as needed. Do not recompute when amount of registers
}
}
- for (insn = current_function_epilogue_delay_list; insn;
+ for (insn = crtl->epilogue_delay_list; insn;
insn = XEXP (insn, 1))
if (can_throw_external (insn))
{
static int
dw2_size_of_call_site_table (void)
{
- int n = cfun->eh->call_site_data_used;
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record);
int size = n * (4 + 4 + 4);
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
size += size_of_uleb128 (cs->action);
}
static int
sjlj_size_of_call_site_table (void)
{
- int n = cfun->eh->call_site_data_used;
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record);
int size = 0;
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
size += size_of_uleb128 (INTVAL (cs->landing_pad));
size += size_of_uleb128 (cs->action);
}
/* We can't attempt to minimize alignment necessary, because we don't
know the final value of preferred_stack_boundary yet while executing
this code. */
- cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+ crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* We will need to ensure that the address we return is aligned to
BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
{
rtx temp, vector;
- if (INTVAL (range) > cfun->max_jumptable_ents)
- cfun->max_jumptable_ents = INTVAL (range);
+ if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
+ cfun->cfg->max_jumptable_ents = INTVAL (range);
/* Do an unsigned comparison (in the proper mode) between the index
expression and the value which represents the length of the range.
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
- for (link = current_function_epilogue_delay_list;
+ for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
{
for (insn = first; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
leaf_renumber_regs_insn (PATTERN (insn));
- for (insn = current_function_epilogue_delay_list;
+ for (insn = crtl->epilogue_delay_list;
insn;
insn = XEXP (insn, 1))
if (INSN_P (XEXP (insn, 0)))
if (targetm.binds_local_p (current_function_decl))
{
- int pref = cfun->preferred_stack_boundary;
- if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
- pref = cfun->stack_alignment_needed;
+ int pref = crtl->preferred_stack_boundary;
+ if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
+ pref = crtl->stack_alignment_needed;
cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
= pref;
}
f->machine = NULL;
f->cfg = NULL;
- f->epilogue_delay_list = NULL;
regno_reg_rtx = NULL;
}
\f
if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
- cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;
+ if (crtl->stack_alignment_needed < alignment * BITS_PER_UNIT)
+ crtl->stack_alignment_needed = alignment * BITS_PER_UNIT;
/* Calculate how many bytes the start of local variables is off from
stack alignment. */
/* If stack protection is in effect for this function, don't leave any
pointers in their passed stack slots. */
- else if (cfun->stack_protect_guard
+ else if (crtl->stack_protect_guard
&& (flag_stack_protect == 2
|| data->passed_pointer
|| POINTER_TYPE_P (data->nominal_type)))
calling function side. */
if (boundary > PREFERRED_STACK_BOUNDARY)
boundary = PREFERRED_STACK_BOUNDARY;
- if (cfun->stack_alignment_needed < boundary)
- cfun->stack_alignment_needed = boundary;
+ if (crtl->stack_alignment_needed < boundary)
+ crtl->stack_alignment_needed = boundary;
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
cfun = ggc_alloc_cleared (sizeof (struct function));
- cfun->stack_alignment_needed = STACK_BOUNDARY;
- cfun->preferred_stack_boundary = STACK_BOUNDARY;
-
current_function_funcdef_no = get_next_funcdef_no ();
cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
- cfun->stack_protect_guard is a local stack slot, so this skips
+ crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
- x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to copy from Y to X without leaking Y into a
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
- cfun->stack_protect_guard is a local stack slot, so this skips
+ crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
- x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to compare Y with X without leaking either into
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
- if (cfun->stack_protect_guard)
+ if (crtl->stack_protect_guard)
stack_protect_epilogue ();
/* If we had calls to alloca, and this machine needs
has_hard_reg_initial_val (see integrate.[hc]). */
struct initial_value_struct *hard_reg_initial_vals;
+ /* A variable living at the top of the frame that holds a known value.
+ Used for detecting stack clobbers. */
+ tree stack_protect_guard;
+
/* List (chain of EXPR_LIST) of labels heading the current handlers for
nonlocal gotos. */
rtx x_nonlocal_goto_handler_labels;
rtx x_naked_return_label;
/* List (chain of EXPR_LISTs) of all stack slots in this function.
- Made for the sake of unshare_all_crtl-> */
+ Made for the sake of unshare_all_rtl. */
rtx x_stack_slot_list;
/* Place after which to insert the tail_recursion_label if we need one. */
/* Current nesting level for temporaries. */
int x_temp_slot_level;
+ /* The largest alignment of slot allocated on the stack. */
+ unsigned int stack_alignment_needed;
+
+ /* Preferred alignment of the end of stack frame. */
+ unsigned int preferred_stack_boundary;
+
+ /* For reorg. */
+
+ /* If some insns can be deferred to the delay slots of the epilogue, the
+ delay list for them is recorded here. */
+ rtx epilogue_delay_list;
};
#define return_label (crtl->x_return_label)
/* Function sequence number for profiling, debugging, etc. */
int funcdef_no;
+ /* List of function local variables, functions, types and constants. */
+ tree local_decls;
+
/* For md files. */
/* tm.h can use this to store whatever it likes. */
struct machine_function * GTY ((maybe_undef)) machine;
- /* The largest alignment of slot allocated on the stack. */
- unsigned int stack_alignment_needed;
-
- /* Preferred alignment of the end of stack frame. */
- unsigned int preferred_stack_boundary;
-
/* Language-specific code can use this to store whatever it likes. */
struct language_function * language;
/* Used types hash table. */
htab_t GTY ((param_is (union tree_node))) used_types_hash;
- /* For reorg. */
-
- /* If some insns can be deferred to the delay slots of the epilogue, the
- delay list for them is recorded here. */
- rtx epilogue_delay_list;
-
- /* Maximal number of entities in the single jumptable. Used to estimate
- final flowgraph size. */
- int max_jumptable_ents;
-
- /* UIDs for LABEL_DECLs. */
- int last_label_uid;
-
/* Line number of the end of the function. */
location_t function_end_locus;
- /* The variables unexpanded so far. */
- tree unexpanded_var_list;
-
- /* A variable living at the top of the frame that holds a known value.
- Used for detecting stack clobbers. */
- tree stack_protect_guard;
-
/* Properties used by the pass manager. */
unsigned int curr_properties;
unsigned int last_verified;
#define current_function_limit_stack (cfun->limit_stack)
#define current_function_uses_pic_offset_table (cfun->uses_pic_offset_table)
#define current_function_uses_const_pool (cfun->uses_const_pool)
-#define current_function_epilogue_delay_list (cfun->epilogue_delay_list)
#define current_function_has_nonlocal_label (cfun->has_nonlocal_label)
#define current_function_saves_all_registers (cfun->saves_all_registers)
#define current_function_has_nonlocal_goto (cfun->has_nonlocal_goto)
continue;
/* Record the variable. */
- cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, var,
+ cfun->local_decls);
}
if (fn != current_function_decl)
/* Tear down a context for the gimplifier. If BODY is non-null, then
put the temporaries into the outer BIND_EXPR. Otherwise, put them
- in the unexpanded_var_list. */
+ in the local_decls. */
void
pop_gimplify_context (tree body)
if (DECL_STRUCT_FUNCTION (decl))
{
tree step;
- for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
+ for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
step;
step = TREE_CHAIN (step))
{
add_structure (type);
/* Check function local variables. */
- for (var_list = fn->unexpanded_var_list; var_list;
+ for (var_list = fn->local_decls; var_list;
var_list = TREE_CHAIN (var_list))
{
var = TREE_VALUE (var_list);
if (DECL_STRUCT_FUNCTION (decl))
{
tree step;
- for (step = DECL_STRUCT_FUNCTION (decl)->unexpanded_var_list;
+ for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
step;
step = TREE_CHAIN (step))
{
/* Declare local variables needed in CHILD_CFUN. */
block = DECL_INITIAL (child_fn);
- BLOCK_VARS (block) = list2chain (child_cfun->unexpanded_var_list);
+ BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
DECL_SAVED_TREE (child_fn) = bb_stmt_list (single_succ (entry_bb));
/* Reset DECL_CONTEXT on function arguments. */
/* If we allocated another stack slot, redo elimination bookkeeping. */
if (starting_frame_size != get_frame_size ())
continue;
- if (starting_frame_size && cfun->stack_alignment_needed)
+ if (starting_frame_size && crtl->stack_alignment_needed)
{
/* If we have a stack frame, we must align it now. The
stack size may be a part of the offset computation for
stack frame when none is needed should
STARTING_FRAME_OFFSET not be already aligned to
STACK_BOUNDARY. */
- assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
+ assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
if (starting_frame_size != get_frame_size ())
continue;
}
epilogue has filled delay-slots; we would have to try and
move the delay-slot fillers to the delay-slots for the new
return insn or in front of the new return insn. */
- if (current_function_epilogue_delay_list == NULL
+ if (crtl->epilogue_delay_list == NULL
&& HAVE_return)
{
/* The return we make may have delay slots too. */
In both of these cases, inverting the jump and annulling the delay
slot give the same effect in fewer insns. */
if ((next_trial == next_active_insn (JUMP_LABEL (insn))
- && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
+ && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
|| (next_trial != 0
&& JUMP_P (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
The only thing we can do is scan backwards from the end of the
function. If we did this in a previous pass, it is incorrect to do it
again. */
- if (current_function_epilogue_delay_list)
+ if (crtl->epilogue_delay_list)
return;
slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
/* Here as well we are searching backward, so put the
insns we find on the head of the list. */
- current_function_epilogue_delay_list
+ crtl->epilogue_delay_list
= gen_rtx_INSN_LIST (VOIDmode, trial,
- current_function_epilogue_delay_list);
+ crtl->epilogue_delay_list);
mark_end_of_function_resources (trial, 1);
update_block (trial, trial);
delete_related_insns (trial);
delay slot filler insns. It is also unknown whether such a
transformation would actually be profitable. Note that the existing
code only cares for branches with (some) filled delay slots. */
- if (current_function_epilogue_delay_list != NULL)
+ if (crtl->epilogue_delay_list != NULL)
return;
#endif
{
rtx link;
- for (link = current_function_epilogue_delay_list;
+ for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
INSN_LOCATOR (XEXP (link, 0)) = 0;
if (uid == -1)
{
unsigned old_len = VEC_length (basic_block, label_to_block_map);
- LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
+ LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
if (old_len <= (unsigned) uid)
{
unsigned new_len = 3 * uid / 2;
if (SSA_VAR_P (t))
{
new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
- f->unexpanded_var_list
- = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
+ f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
}
else
{
gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
- if (uid >= dest_cfun->last_label_uid)
- dest_cfun->last_label_uid = uid + 1;
+ if (uid >= dest_cfun->cfg->last_label_uid)
+ dest_cfun->cfg->last_label_uid = uid + 1;
}
else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
TREE_OPERAND (stmt, 0) =
m->base.from = decl;
m->to = create_artificial_label ();
LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
- if (LABEL_DECL_UID (m->to) >= cfun->last_label_uid)
- cfun->last_label_uid = LABEL_DECL_UID (m->to) + 1;
+ if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
+ cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
gcc_assert (*slot == NULL);
/* When GIMPLE is lowered, the variables are no longer available in
BIND_EXPRs, so display them separately. */
- if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
+ if (cfun && cfun->decl == fn && cfun->local_decls)
{
ignore_topmost_bind = true;
fprintf (file, "{\n");
- for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
+ for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
{
var = TREE_VALUE (vars);
{
tree new_var;
- /* We can not chain the local static declarations into the unexpanded_var_list
+ /* We can not chain the local static declarations into the local_decls
as we can't duplicate them or break one decl rule. Go ahead and link
- them into unexpanded_var_list. */
+ them into local_decls. */
if (!auto_var_in_fn_p (old_var, id->src_fn)
&& !DECL_EXTERNAL (old_var))
{
- cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, old_var,
+ cfun->local_decls);
continue;
}
*new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
new_cfun->funcdef_no = get_next_funcdef_no ();
VALUE_HISTOGRAMS (new_cfun) = NULL;
- new_cfun->unexpanded_var_list = NULL;
+ new_cfun->local_decls = NULL;
new_cfun->cfg = NULL;
new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
}
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
- DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
+ DECL_STRUCT_FUNCTION (caller)->local_decls
= tree_cons (NULL_TREE, var,
- DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
+ DECL_STRUCT_FUNCTION (caller)->local_decls);
/* Do not have the rest of GCC warn about this variable as it should
not be visible to the user. */
goto egress;
}
- for (step = fun->unexpanded_var_list; step; step = TREE_CHAIN (step))
+ for (step = fun->local_decls; step; step = TREE_CHAIN (step))
{
tree decl = TREE_VALUE (step);
if (TREE_CODE (decl) == VAR_DECL
copy_body (id, bb->count, bb->frequency, bb, return_block);
/* Add local vars in this inlined callee to caller. */
- t_step = id->src_cfun->unexpanded_var_list;
+ t_step = id->src_cfun->local_decls;
for (; t_step; t_step = TREE_CHAIN (t_step))
{
var = TREE_VALUE (t_step);
if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
- cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, var,
+ cfun->local_decls);
else
- cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
+ cfun->local_decls);
}
/* Clean up. */
{
DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
- cfun->unexpanded_var_list =
- tree_cons (NULL_TREE, t,
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
}
if (block)
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (id.dst_fn);
- if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
+ if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
/* Add local vars. */
- for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
+ for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
t_step; t_step = TREE_CHAIN (t_step))
{
tree var = TREE_VALUE (t_step);
if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
- cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
- cfun->unexpanded_var_list);
+ cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
else
- cfun->unexpanded_var_list =
+ cfun->local_decls =
tree_cons (NULL_TREE, remap_decl (var, &id),
- cfun->unexpanded_var_list);
+ cfun->local_decls);
}
/* Copy the Function's body. */
#define TODO_update_ssa_only_virtuals (1 << 14)
/* Some passes leave unused local variables that can be removed from
- cfun->unexpanded_var_list. This reduces the size of dump files and
- the memory footprint for VAR_DECLs. */
+ cfun->local_decls. This reduces the size of dump files
+ and the memory footprint for VAR_DECLs. */
#define TODO_remove_unused_locals (1 << 15)
/* Internally used for the first in a sequence of passes. It is set
}
}
- /* Remove unmarked local vars from unexpanded_var_list. */
- for (cell = &cfun->unexpanded_var_list; *cell; )
+ /* Remove unmarked local vars from local_decls. */
+ for (cell = &cfun->local_decls; *cell; )
{
tree var = TREE_VALUE (*cell);
cell = &TREE_CHAIN (*cell);
}
- /* Remove unmarked global vars from unexpanded_var_list. */
+ /* Remove unmarked global vars from local_decls. */
if (global_unused_vars != NULL)
{
- for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
+ for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
{
tree var = TREE_VALUE (t);
mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
}
- for (cell = &cfun->unexpanded_var_list; *cell; )
+ for (cell = &cfun->local_decls; *cell; )
{
tree var = TREE_VALUE (*cell);
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
mark_constants (insn);
- for (link = current_function_epilogue_delay_list;
+ for (link = crtl->epilogue_delay_list;
link;
link = XEXP (link, 1))
mark_constants (XEXP (link, 0));