/* Structure for saving state for a nested function.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
#include "tree.h"
#include "hashtab.h"
-#include "varray.h"
+#include "vecprim.h"
+#include "tm.h" /* For CUMULATIVE_ARGS. */
+#include "hard-reg-set.h"
/* Stack of pending (incomplete) sequences saved by `start_sequence'.
Each element describes one pending sequence.
};
-/* Indexed by pseudo register number, gives the rtx for that pseudo.
- Allocated in parallel with regno_pointer_align.
- FIXME: We could put it into emit_status struct, but gengtype is not able to deal
- with length attribute nested in top level structures. */
+/* Indexed by register number, gives an rtx for that register (and only
+ that register). For pseudo registers, it is the unique rtx for
+ that pseudo. For hard registers, it is an rtx of the mode specified
+ by reg_raw_mode.
+
+ FIXME: We could put it into emit_status struct, but gengtype is not
+ able to deal with length attribute nested in top level structures. */
extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;
/* RTL representation of exception handling. */
struct GTY(()) rtl_eh {
- rtx filter;
- rtx exc_ptr;
-
- int built_landing_pads;
-
rtx ehr_stackadj;
rtx ehr_handler;
rtx ehr_label;
rtx sjlj_fc;
rtx sjlj_exit_after;
- VEC(tree,gc) *ttype_data;
- varray_type ehspec_data;
- varray_type action_record_data;
+ VEC(uchar,gc) *action_record_data;
VEC(call_site_record,gc) *call_site_record[2];
};
struct temp_slot;
typedef struct temp_slot *temp_slot_p;
struct call_site_record_d;
+struct dw_fde_struct;
DEF_VEC_P(temp_slot_p);
DEF_VEC_ALLOC_P(temp_slot_p,gc);
DEF_VEC_P(ipa_opt_pass);
DEF_VEC_ALLOC_P(ipa_opt_pass,heap);
-enum function_frequency {
- /* This function most likely won't be executed at all.
- (set only when profile feedback is available or via function attribute). */
- FUNCTION_FREQUENCY_UNLIKELY_EXECUTED,
- /* The default value. */
- FUNCTION_FREQUENCY_NORMAL,
- /* Optimize this function hard
- (set only when profile feedback is available or via function attribute). */
- FUNCTION_FREQUENCY_HOT
-};
-
struct GTY(()) varasm_status {
/* If we're using a per-function constant pool, this is it. */
struct rtx_constant_pool *pool;
const char *cold_section_label;
const char *hot_section_end_label;
const char *cold_section_end_label;
+};
- /* String to be used for name of cold text sections, via
- targetm.asm_out.named_section. */
+/* Describe an empty area of space in the stack frame. These can be chained
+ into a list; this is used to keep track of space wasted for alignment
+ reasons. */
+struct GTY(()) frame_space
+{
+ struct frame_space *next;
- const char *unlikely_text_section_name;
+ HOST_WIDE_INT start;
+ HOST_WIDE_INT length;
};
/* Datastructures maintained for currently processed function in RTL form. */
Made for the sake of unshare_all_rtl. */
rtx x_stack_slot_list;
+ /* List of empty areas in the stack frame. */
+ struct frame_space *frame_space_list;
+
/* Place after which to insert the tail_recursion_label if we need one. */
rtx x_stack_check_probe_note;
/* The stack alignment estimated before reload, with consideration of
following factors:
1. Alignment of local stack variables (max_used_stack_slot_alignment)
- 2. Alignment requirement to call other functions
+ 2. Alignment requirement to call other functions
(preferred_stack_boundary)
3. Alignment of non-local stack variables but might be spilled in
local stack. */
/* Nonzero if function being compiled has nonlocal gotos to parent
function. */
bool has_nonlocal_goto;
-
+
/* Nonzero if function being compiled has an asm statement. */
bool has_asm_statement;
TREE_NOTHROW (current_function_decl) it is set even for overwritable
function where currently compiled version of it is nothrow. */
bool nothrow;
+
+ /* True if we performed shrink-wrapping for the current function. */
+ bool shrink_wrapped;
+
+ /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
+ asm. Unlike regs_ever_live, elements of this array corresponding
+ to eliminable regs (like the frame pointer) are set if an asm
+ sets them. */
+ HARD_REG_SET asm_clobbers;
};
#define return_label (crtl->x_return_label)
want to do differently. */
#define crtl (&x_rtl)
+/* Per-function record of stack space consumption, hung off
+   cfun->su and read through the current_function_* accessor macros.
+   NOTE(review): the allocation site of this record is not visible in
+   this chunk — confirm it is always set before the accessors run.  */
+struct GTY(()) stack_usage
+{
+  /* # of bytes of static stack space allocated by the function. */
+  HOST_WIDE_INT static_stack_size;
+
+  /* # of bytes of dynamic stack space allocated by the function. This is
+  meaningful only if has_unbounded_dynamic_stack_size is zero. */
+  HOST_WIDE_INT dynamic_stack_size;
+
+  /* # of bytes of space pushed onto the stack after the prologue. If
+  !ACCUMULATE_OUTGOING_ARGS, it contains the outgoing arguments. */
+  int pushed_stack_size;
+
+  /* Nonzero if the amount of stack space allocated dynamically cannot
+  be bounded at compile-time. */
+  unsigned int has_unbounded_dynamic_stack_size : 1;
+};
+
+/* Convenience accessors for the stack_usage record of the function
+   currently being compiled (cfun->su).  Only meaningful when cfun->su
+   has been allocated.  */
+#define current_function_static_stack_size (cfun->su->static_stack_size)
+#define current_function_dynamic_stack_size (cfun->su->dynamic_stack_size)
+#define current_function_pushed_stack_size (cfun->su->pushed_stack_size)
+#define current_function_has_unbounded_dynamic_stack_size \
+  (cfun->su->has_unbounded_dynamic_stack_size)
+/* True if the function allocates any dynamic stack space at all,
+   whether of known or unbounded size.  */
+#define current_function_allocates_dynamic_stack_space \
+  (current_function_dynamic_stack_size != 0 \
+   || current_function_has_unbounded_dynamic_stack_size)
+
/* This structure can save all the important global and static variables
describing the status of the current function. */
/* The loops in this function. */
struct loops *x_current_loops;
+ /* The stack usage of this function. */
+ struct stack_usage *su;
+
/* Value histograms attached to particular statements. */
htab_t GTY((skip)) value_histograms;
tree static_chain_decl;
/* An expression that contains the non-local goto save area. The first
- word is the saved frame pointer and the second is the saved stack
+ word is the saved frame pointer and the second is the saved stack
pointer. */
tree nonlocal_goto_save_area;
- /* List of function local variables, functions, types and constants. */
- tree local_decls;
+ /* Vector of function local variables, functions, types and constants. */
+ VEC(tree,gc) *local_decls;
/* For md files. */
/* Used types hash table. */
htab_t GTY ((param_is (union tree_node))) used_types_hash;
+ /* Dwarf2 Frame Description Entry, containing the Call Frame Instructions
+ used for unwinding. Only set when either dwarf2 unwinding or dwarf2
+ debugging is enabled. */
+ struct dw_fde_struct *fde;
+
/* Last statement uid. */
int last_stmt_uid;
unsigned int curr_properties;
unsigned int last_verified;
- /* Interprocedural passes scheduled to have their transform functions
- applied next time we execute local pass on them. We maintain it
- per-function in order to allow IPA passes to introduce new functions. */
- VEC(ipa_opt_pass,heap) * GTY((skip)) ipa_transforms_to_apply;
-
/* Non-null if the function does something that would prevent it from
being copied; this applies to both versioning and inlining. Set to
a string describing the reason for failure. */
function. */
unsigned int va_list_fpr_size : 8;
- /* How commonly executed the function is. Initialized during branch
- probabilities pass. */
- ENUM_BITFIELD (function_frequency) function_frequency : 2;
-
/* Nonzero if function being compiled can call setjmp. */
unsigned int calls_setjmp : 1;
from nested functions. */
unsigned int has_nonlocal_label : 1;
- /* Nonzero if we've set cannot_be_copied_reason. I.e. if
+ /* Nonzero if we've set cannot_be_copied_reason. I.e. if
(cannot_be_copied_set && !cannot_be_copied_reason), the function
can in fact be copied. */
unsigned int cannot_be_copied_set : 1;
/* Nonzero if current function uses stdarg.h or equivalent. */
unsigned int stdarg : 1;
- /* Nonzero if the back-end should not keep track of expressions that
- determine the size of variable-sized objects. Normally, such
- expressions are saved away, and then expanded when the next
- function is started. For example, if a parameter has a
- variable-sized type, then the size of the parameter is computed
- when the function body is entered. However, some front-ends do
- not desire this behavior. */
- unsigned int dont_save_pending_sizes_p : 1;
-
unsigned int after_inlining : 1;
unsigned int always_inline_functions_inlined : 1;
+ /* Nonzero if function being compiled can throw synchronous non-call
+ exceptions. */
+ unsigned int can_throw_non_call_exceptions : 1;
+
/* Fields below this point are not set for abstract functions; see
allocate_struct_function. */
unsigned int is_thunk : 1;
};
+/* Add the decl D to the local_decls list of FUN. */
+
+static inline void
+add_local_decl (struct function *fun, tree d)
+{
+  VEC_safe_push (tree, gc, fun->local_decls, d);
+}
+
+/* Iterate over FUN->local_decls, binding the vector index to I and
+   each decl to D; visits elements in reverse order (newest first), per
+   FOR_EACH_VEC_ELT_REVERSE.  */
+#define FOR_EACH_LOCAL_DECL(FUN, I, D) \
+  FOR_EACH_VEC_ELT_REVERSE (tree, (FUN)->local_decls, I, D)
+
/* If va_list_[gf]pr_size is set to this, it means we don't know how
many units need to be saved. */
#define VA_LIST_MAX_GPR_SIZE 255
/* Nonzero if at least one trampoline has been created. */
extern int trampolines_created;
+/* Entry type of types_used_by_vars_hash: pairs a type referenced in a
+   global variable's initializer with the DECL of that variable.  */
+struct GTY(()) types_used_by_vars_entry {
+  /* A type referenced by the variable's initializer.  */
+  tree type;
+  /* The global variable whose initializer references TYPE.  */
+  tree var_decl;
+};
+
+/* Hash table recording the relationship between a global variable
+   and the types it references in its initializer. The key of the
+   entry is a referenced type, and the value is the DECL of the global
+   variable. types_used_by_vars_do_hash and types_used_by_vars_eq below are
+   the hash and equality functions to use for this hash table. */
+extern GTY((param_is (struct types_used_by_vars_entry))) htab_t
+ types_used_by_vars_hash;
+
+/* Hash function for types_used_by_vars_hash.  */
+hashval_t types_used_by_vars_do_hash (const void*);
+/* Equality function for types_used_by_vars_hash entries.  */
+int types_used_by_vars_eq (const void *, const void *);
+/* Record that the initializer of VAR_DECL references TYPE.  */
+void types_used_by_var_decl_insert (tree type, tree var_decl);
+
+/* During parsing of a global variable, this vector contains the types
+   referenced by the global variable. */
+extern GTY(()) VEC(tree,gc) *types_used_by_cur_var_decl;
+
+
/* cfun shouldn't be set directly; use one of these functions instead. */
extern void set_cfun (struct function *new_cfun);
extern void push_cfun (struct function *new_cfun);
extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);
+extern tree block_chainon (tree, tree);
/* Return size needed for stack frame based on slots so far allocated.
This size counts from zero. It is not rounded to STACK_BOUNDARY;
extern void used_types_insert (tree);
extern int get_next_funcdef_no (void);
+extern int get_last_funcdef_no (void);
+
+#ifdef HAVE_simple_return
+extern bool requires_stack_frame_p (rtx, HARD_REG_SET, HARD_REG_SET);
+#endif
+
+/* In predict.c */
+extern bool optimize_function_for_size_p (struct function *);
+extern bool optimize_function_for_speed_p (struct function *);
+
#endif /* GCC_FUNCTION_H */