* tree-ssa-ccp.c (prop_value_d): Rename to ...
(ccp_prop_value_t): ... this one to avoid ODR violation; update uses.
* ipa-prop.c (struct type_change_info): Rename to ...
(prop_type_change_info): ... this; update uses.
* ggc-page.c (globals): Rename to ...
(ggc_globals): ... this; update uses.
* tree-ssa-loop-im.c (mem_ref): Rename to ...
(im_mem_ref): ... this; update uses.
* ggc-common.c (loc_descriptor): Rename to ...
(ggc_loc_descriptor): ... this; update uses.
* lra-eliminations.c (elim_table): Rename to ...
(lra_elim_table): ... this; update uses.
* bitmap.c (output_info): Rename to ...
(bitmap_output_info): ... this; update uses.
* gcse.c (expr): Rename to ...
(gcse_expr): ... this; update uses.
(occr): Rename to ...
(gcse_occr): ... this; update uses.
* tree-ssa-copy.c (prop_value_d): Rename to ...
(prop_value_t): ... this.
* predict.c (block_info_def): Rename to ...
(block_info): ... this; update uses.
(edge_info_def): Rename to ...
(edge_prob_info): ... this; update uses.
* profile.c (bb_info): Rename to ...
(bb_profile_info): ... this; update uses.
* alloc-pool.c (output_info): Rename to ...
(pool_output_info): ... this; update uses.
* ipa-cp.c (topo_info): Rename to ...
(ipa_topo_info): ... this; update uses.
* tree-nrv.c (nrv_data): Rename to ...
(nrv_data_t): ... this; update uses.
* ipa-split.c (bb_info): Rename to ...
(split_bb_info): ... this one; update uses.
* profile.h (edge_info): Rename to ...
(edge_profile_info): ... this one; update uses.
* dse.c (bb_info): Rename to ...
(dse_bb_info): ... this one; update uses.
* cprop.c (occr): Rename to ...
(cprop_occr): ... this one; update uses.
(expr): Rename to ...
(cprop_expr): ... this one; update uses.
From-SVN: r215480
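Background, as a minimal sketch (not taken from the patch: the member lists
below are simplified, and `void *insn' stands in for rtx_insn * so the
snippets are self-contained): GCC is now built as C++, and several files
defined unrelated file-local types under the same global name. For instance,
both gcse.c and cprop.c defined a `struct occr':

/* File 1, gcse.c -- one translation unit.  */
struct occr
{
  struct occr *next;   /* Next occurrence of this expression.  */
  void *insn;          /* The insn that computes the expression.  */
  char deleted_p;      /* gcse.c tracks extra per-occurrence flags,  */
  char copied_p;       /* so its layout differs from cprop.c's.  */
};

/* File 2, cprop.c -- another translation unit.  */
struct occr
{
  struct occr *next;   /* Same name at global scope, different members:  */
  void *insn;          /* in C++ this violates the One Definition Rule.  */
};

In C each translation unit simply gets its own type, but in C++ two differing
definitions of `::occr' across translation units violate the One Definition
Rule, which is undefined behavior and is the kind of mismatch LTO's ODR
verification warns about. Prefixing each file's types (gcse_occr, cprop_occr,
and so on) makes every definition unique at global scope, which is what the
renames below do.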
+2014-09-22 Jan Hubicka <hubicka@ucw.cz>
+
+ * tree-ssa-ccp.c (prop_value_d): Rename to ...
+ (ccp_prop_value_t): ... this one to avoid ODR violation; update uses.
+ * ipa-prop.c (struct type_change_info): Rename to ...
+ (prop_type_change_info): ... this; update uses.
+ * ggc-page.c (globals): Rename to ...
+ (ggc_globals): ... this; update uses.
+ * tree-ssa-loop-im.c (mem_ref): Rename to ...
+ (im_mem_ref): ... this; update uses.
+ * ggc-common.c (loc_descriptor): Rename to ...
+ (ggc_loc_descriptor): ... this; update uses.
+ * lra-eliminations.c (elim_table): Rename to ...
+ (lra_elim_table): ... this; update uses.
+ * bitmap.c (output_info): Rename to ...
+ (bitmap_output_info): ... this; update uses.
+ * gcse.c (expr): Rename to ...
+ (gcse_expr): ... this; update uses.
+ (occr): Rename to ...
+ (gcse_occr): ... this; update uses.
+ * tree-ssa-copy.c (prop_value_d): Rename to ...
+ (prop_value_t): ... this.
+ * predict.c (block_info_def): Rename to ...
+ (block_info): ... this; update uses.
+ (edge_info_def): Rename to ...
+ (edge_prob_info): ... this; update uses.
+ * profile.c (bb_info): Rename to ...
+ (bb_profile_info): ... this; update uses.
+ * alloc-pool.c (output_info): Rename to ...
+ (pool_output_info): ... this; update uses.
+ * ipa-cp.c (topo_info): Rename to ...
+ (ipa_topo_info): ... this; update uses.
+ * tree-nrv.c (nrv_data): Rename to ...
+ (nrv_data_t): ... this; update uses.
+ * ipa-split.c (bb_info): Rename to ...
+ (split_bb_info): ... this one; update uses.
+ * profile.h (edge_info): Rename to ...
+ (edge_profile_info): ... this one; update uses.
+ * dse.c (bb_info): Rename to ...
+ (dse_bb_info): ... this one; update uses.
+ * cprop.c (occr): Rename to ...
+ (cprop_occr): ... this one; update uses.
+ (expr): Rename to ...
+ (cprop_expr): ... this one; update uses.
+
2014-09-22 Jason Merrill <jason@redhat.com>
* Makefile.in (check-parallel-%): Add @.
/* Output per-alloc_pool statistics. */
/* Used to accumulate statistics about alloc_pool sizes. */
-struct output_info
+struct pool_output_info
{
unsigned long total_created;
unsigned long total_allocated;
bool
print_alloc_pool_statistics (const char *const &name,
const alloc_pool_descriptor &d,
- struct output_info *i)
+ struct pool_output_info *i)
{
if (d.allocated)
{
void
dump_alloc_pool_statistics (void)
{
- struct output_info info;
+ struct pool_output_info info;
if (! GATHER_STATISTICS)
return;
fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
info.total_created = 0;
info.total_allocated = 0;
- alloc_pool_hash->traverse <struct output_info *,
+ alloc_pool_hash->traverse <struct pool_output_info *,
print_alloc_pool_statistics> (&info);
fprintf (stderr, "--------------------------------------------------------------------------------------------------------------\n");
fprintf (stderr, "%-22s %7lu %10lu\n",
/* Used to accumulate statistics about bitmap sizes. */
-struct output_info
+struct bitmap_output_info
{
uint64_t size;
uint64_t count;
/* Called via hash_table::traverse. Output bitmap descriptor pointed out by
SLOT and update statistics. */
int
-print_statistics (bitmap_descriptor_d **slot, output_info *i)
+print_statistics (bitmap_descriptor_d **slot, bitmap_output_info *i)
{
bitmap_descriptor d = *slot;
char s[4096];
void
dump_bitmap_statistics (void)
{
- struct output_info info;
+ struct bitmap_output_info info;
if (! GATHER_STATISTICS)
return;
fprintf (stderr, "---------------------------------------------------------------------------------\n");
info.count = 0;
info.size = 0;
- bitmap_desc_hash->traverse <output_info *, print_statistics> (&info);
+ bitmap_desc_hash->traverse <bitmap_output_info *, print_statistics> (&info);
fprintf (stderr, "---------------------------------------------------------------------------------\n");
fprintf (stderr,
"%-41s %9"PRId64" %15"PRId64"\n",
There is one per basic block. If a pattern appears more than once the
last appearance is used. */
-struct occr
+struct cprop_occr
{
/* Next occurrence of this expression. */
- struct occr *next;
+ struct cprop_occr *next;
/* The insn that computes the expression. */
rtx_insn *insn;
};
-typedef struct occr *occr_t;
+typedef struct cprop_occr *occr_t;
/* Hash table entry for assignment expressions. */
-struct expr
+struct cprop_expr
{
/* The expression (DEST := SRC). */
rtx dest;
/* Index in the available expression bitmaps. */
int bitmap_index;
/* Next entry with the same hash. */
- struct expr *next_same_hash;
+ struct cprop_expr *next_same_hash;
/* List of available occurrences in basic blocks in the function.
An "available occurrence" is one that is the last occurrence in the
basic block and whose operands are not modified by following statements
in the basic block [including this insn]. */
- struct occr *avail_occr;
+ struct cprop_occr *avail_occr;
};
/* Hash table for copy propagation expressions.
{
/* The table itself.
This is an array of `set_hash_table_size' elements. */
- struct expr **table;
+ struct cprop_expr **table;
/* Size of the hash table, in elements. */
unsigned int size;
{
bool found = false;
unsigned int hash;
- struct expr *cur_expr, *last_expr = NULL;
- struct occr *cur_occr;
+ struct cprop_expr *cur_expr, *last_expr = NULL;
+ struct cprop_occr *cur_occr;
hash = hash_mod (REGNO (dest), table->size);
if (! found)
{
- cur_expr = GOBNEW (struct expr);
- bytes_used += sizeof (struct expr);
+ cur_expr = GOBNEW (struct cprop_expr);
+ bytes_used += sizeof (struct cprop_expr);
if (table->table[hash] == NULL)
/* This is the first pattern that hashed to this index. */
table->table[hash] = cur_expr;
else
{
/* First occurrence of this expression in this basic block. */
- cur_occr = GOBNEW (struct occr);
- bytes_used += sizeof (struct occr);
+ cur_occr = GOBNEW (struct cprop_occr);
+ bytes_used += sizeof (struct cprop_occr);
cur_occr->insn = insn;
cur_occr->next = cur_expr->avail_occr;
cur_expr->avail_occr = cur_occr;
{
int i;
/* Flattened out table, so it's printed in proper order. */
- struct expr **flat_table;
+ struct cprop_expr **flat_table;
unsigned int *hash_val;
- struct expr *expr;
+ struct cprop_expr *expr;
- flat_table = XCNEWVEC (struct expr *, table->n_elems);
+ flat_table = XCNEWVEC (struct cprop_expr *, table->n_elems);
hash_val = XNEWVEC (unsigned int, table->n_elems);
for (i = 0; i < (int) table->size; i++)
Making it an odd number is simplest for now.
??? Later take some measurements. */
table->size |= 1;
- n = table->size * sizeof (struct expr *);
- table->table = XNEWVAR (struct expr *, n);
+ n = table->size * sizeof (struct cprop_expr *);
+ table->table = XNEWVAR (struct cprop_expr *, n);
}
/* Free things allocated by alloc_hash_table. */
{
/* Initialize count of number of entries in hash table. */
table->n_elems = 0;
- memset (table->table, 0, table->size * sizeof (struct expr *));
+ memset (table->table, 0, table->size * sizeof (struct cprop_expr *));
compute_hash_table_work (table);
}
/* Lookup REGNO in the set TABLE. The result is a pointer to the
table entry, or NULL if not found. */
-static struct expr *
+static struct cprop_expr *
lookup_set (unsigned int regno, struct hash_table_d *table)
{
unsigned int hash = hash_mod (regno, table->size);
- struct expr *expr;
+ struct cprop_expr *expr;
expr = table->table[hash];
/* Return the next entry for REGNO in list EXPR. */
-static struct expr *
-next_set (unsigned int regno, struct expr *expr)
+static struct cprop_expr *
+next_set (unsigned int regno, struct cprop_expr *expr)
{
do
expr = expr->next_same_hash;
for (i = 0; i < table->size; i++)
{
- struct expr *expr;
+ struct cprop_expr *expr;
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
{
int indx = expr->bitmap_index;
df_ref def;
- struct occr *occr;
+ struct cprop_occr *occr;
/* For each definition of the destination pseudo-reg, the expression
is killed in the block where the definition is. */
/* Find a set of REGNOs that are available on entry to INSN's block. Return
NULL if no such set is found. */
-static struct expr *
+static struct cprop_expr *
find_avail_set (int regno, rtx_insn *insn)
{
/* SET1 contains the last set found that can be returned to the caller for
use in a substitution. */
- struct expr *set1 = 0;
+ struct cprop_expr *set1 = 0;
/* Loops are not possible here. To get a loop we would need two sets
available at the start of the block containing INSN. i.e. we would
while (1)
{
rtx src;
- struct expr *set = lookup_set (regno, &set_hash_table);
+ struct cprop_expr *set = lookup_set (regno, &set_hash_table);
/* Find a set that is available at the start of the block
which contains INSN. */
rtx reg_used = reg_use_table[i];
unsigned int regno = REGNO (reg_used);
rtx src;
- struct expr *set;
+ struct cprop_expr *set;
/* If the register has already been set in this block, there's
nothing we can do. */
block BB. Return NULL if no such set is found. Based heavily upon
find_avail_set. */
-static struct expr *
+static struct cprop_expr *
find_bypass_set (int regno, int bb)
{
- struct expr *result = 0;
+ struct cprop_expr *result = 0;
for (;;)
{
rtx src;
- struct expr *set = lookup_set (regno, &set_hash_table);
+ struct cprop_expr *set = lookup_set (regno, &set_hash_table);
while (set)
{
rtx reg_used = reg_use_table[i];
unsigned int regno = REGNO (reg_used);
basic_block dest, old_dest;
- struct expr *set;
+ struct cprop_expr *set;
rtx src, new_rtx;
set = find_bypass_set (regno, e->src->index);
static insn_info_t active_local_stores;
static int active_local_stores_len;
-struct bb_info
+struct dse_bb_info
{
/* Pointer to the insn info for the last insn in the block. These
bitmap regs_live;
};
-typedef struct bb_info *bb_info_t;
+typedef struct dse_bb_info *bb_info_t;
static alloc_pool bb_info_pool;
/* Table to hold all bb_infos. */
sizeof (struct insn_info), 100);
bb_info_pool
= create_alloc_pool ("bb_info_pool",
- sizeof (struct bb_info), 100);
+ sizeof (struct dse_bb_info), 100);
rtx_group_info_pool
= create_alloc_pool ("rtx_group_info_pool",
sizeof (struct group_info), 100);
insn_info_t ptr;
bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
- memset (bb_info, 0, sizeof (struct bb_info));
+ memset (bb_info, 0, sizeof (struct dse_bb_info));
bitmap_set_bit (all_blocks, bb->index);
bb_info->regs_live = regs_live;
/* Hash table of expressions. */
-struct expr
+struct gcse_expr
{
/* The expression. */
rtx expr;
/* Index in the available expression bitmaps. */
int bitmap_index;
/* Next entry with the same hash. */
- struct expr *next_same_hash;
+ struct gcse_expr *next_same_hash;
/* List of anticipatable occurrences in basic blocks in the function.
An "anticipatable occurrence" is one that is the first occurrence in the
basic block, the operands are not modified in the basic block prior
to the occurrence and the output is not used between the start of
the block and the occurrence. */
- struct occr *antic_occr;
+ struct gcse_occr *antic_occr;
/* List of available occurrences in basic blocks in the function.
An "available occurrence" is one that is the last occurrence in the
basic block and the operands are not modified by following statements in
the basic block [including this insn]. */
- struct occr *avail_occr;
+ struct gcse_occr *avail_occr;
/* Non-null if the computation is PRE redundant.
The value is the newly created pseudo-reg to record a copy of the
expression in all the places that reach the redundant copy. */
There is one per basic block. If a pattern appears more than once the
last appearance is used [or first for anticipatable expressions]. */
-struct occr
+struct gcse_occr
{
/* Next occurrence of this expression. */
- struct occr *next;
+ struct gcse_occr *next;
/* The insn that computes the expression. */
rtx_insn *insn;
/* Nonzero if this [anticipatable] occurrence has been deleted. */
char copied_p;
};
-typedef struct occr *occr_t;
+typedef struct gcse_occr *occr_t;
/* Expression hash tables.
Each hash table is an array of buckets.
[one could build a mapping table without holes afterwards though].
Someday I'll perform the computation and figure it out. */
-struct hash_table_d
+struct gcse_hash_table_d
{
/* The table itself.
This is an array of `expr_hash_table_size' elements. */
- struct expr **table;
+ struct gcse_expr **table;
/* Size of the hash table, in elements. */
unsigned int size;
};
/* Expression hash table. */
-static struct hash_table_d expr_hash_table;
+static struct gcse_hash_table_d expr_hash_table;
/* This is a list of expressions which are MEMs and will be used by load
or store motion.
struct ls_expr
{
- struct expr * expr; /* Gcse expression reference for LM. */
+ struct gcse_expr * expr; /* Gcse expression reference for LM. */
rtx pattern; /* Pattern of this mem. */
rtx pattern_regs; /* List of registers mentioned by the mem. */
rtx_insn_list *loads; /* INSN list of loads seen. */
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
-static void hash_scan_insn (rtx_insn *, struct hash_table_d *);
-static void hash_scan_set (rtx, rtx_insn *, struct hash_table_d *);
-static void hash_scan_clobber (rtx, rtx_insn *, struct hash_table_d *);
-static void hash_scan_call (rtx, rtx_insn *, struct hash_table_d *);
+static void hash_scan_insn (rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_set (rtx, rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_clobber (rtx, rtx_insn *, struct gcse_hash_table_d *);
+static void hash_scan_call (rtx, rtx_insn *, struct gcse_hash_table_d *);
static int want_to_gcse_p (rtx, int *);
static int oprs_unchanged_p (const_rtx, const rtx_insn *, int);
static int oprs_anticipatable_p (const_rtx, const rtx_insn *);
static int oprs_available_p (const_rtx, const rtx_insn *);
static void insert_expr_in_table (rtx, enum machine_mode, rtx_insn *, int, int,
- int, struct hash_table_d *);
+ int, struct gcse_hash_table_d *);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
-static void compute_hash_table (struct hash_table_d *);
-static void alloc_hash_table (struct hash_table_d *);
-static void free_hash_table (struct hash_table_d *);
-static void compute_hash_table_work (struct hash_table_d *);
-static void dump_hash_table (FILE *, const char *, struct hash_table_d *);
+static void compute_hash_table (struct gcse_hash_table_d *);
+static void alloc_hash_table (struct gcse_hash_table_d *);
+static void free_hash_table (struct gcse_hash_table_d *);
+static void compute_hash_table_work (struct gcse_hash_table_d *);
+static void dump_hash_table (FILE *, const char *, struct gcse_hash_table_d *);
static void compute_transp (const_rtx, int, sbitmap *);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
- struct hash_table_d *);
+ struct gcse_hash_table_d *);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static struct edge_list *compute_pre_data (void);
-static int pre_expr_reaches_here_p (basic_block, struct expr *,
+static int pre_expr_reaches_here_p (basic_block, struct gcse_expr *,
basic_block);
-static void insert_insn_end_basic_block (struct expr *, basic_block);
-static void pre_insert_copy_insn (struct expr *, rtx_insn *);
+static void insert_insn_end_basic_block (struct gcse_expr *, basic_block);
+static void pre_insert_copy_insn (struct gcse_expr *, rtx_insn *);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (struct edge_list *);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
-static int should_hoist_expr_to_dom (basic_block, struct expr *, basic_block,
+static int should_hoist_expr_to_dom (basic_block, struct gcse_expr *, basic_block,
sbitmap, int, int *, enum reg_class,
int *, bitmap, rtx_insn *);
static int hoist_code (void);
static enum reg_class get_regno_pressure_class (int regno, int *nregs);
static enum reg_class get_pressure_class_and_nregs (rtx_insn *insn, int *nregs);
static int one_code_hoisting_pass (void);
-static rtx_insn *process_insert_insn (struct expr *);
-static int pre_edge_insert (struct edge_list *, struct expr **);
-static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
+static rtx_insn *process_insert_insn (struct gcse_expr *);
+static int pre_edge_insert (struct edge_list *, struct gcse_expr **);
+static int pre_expr_reaches_here_p_work (basic_block, struct gcse_expr *,
basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
-static void update_ld_motion_stores (struct expr *);
+static void update_ld_motion_stores (struct gcse_expr *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx_insn *);
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
- struct hash_table_d *table)
+ struct gcse_hash_table_d *table)
{
unsigned int i;
for (i = 0; i < table->size; i++)
{
- struct expr *expr;
+ struct gcse_expr *expr;
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
{
int indx = expr->bitmap_index;
- struct occr *occr;
+ struct gcse_occr *occr;
/* The expression is transparent in this block if it is not killed.
We start by assuming all are transparent [none are killed], and
static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx_insn *insn,
int antic_p,
- int avail_p, int max_distance, struct hash_table_d *table)
+ int avail_p, int max_distance, struct gcse_hash_table_d *table)
{
int found, do_not_record_p;
unsigned int hash;
- struct expr *cur_expr, *last_expr = NULL;
- struct occr *antic_occr, *avail_occr;
+ struct gcse_expr *cur_expr, *last_expr = NULL;
+ struct gcse_occr *antic_occr, *avail_occr;
hash = hash_expr (x, mode, &do_not_record_p, table->size);
if (! found)
{
- cur_expr = GOBNEW (struct expr);
- bytes_used += sizeof (struct expr);
+ cur_expr = GOBNEW (struct gcse_expr);
+ bytes_used += sizeof (struct gcse_expr);
if (table->table[hash] == NULL)
/* This is the first pattern that hashed to this index. */
table->table[hash] = cur_expr;
else
{
/* First occurrence of this expression in this basic block. */
- antic_occr = GOBNEW (struct occr);
- bytes_used += sizeof (struct occr);
+ antic_occr = GOBNEW (struct gcse_occr);
+ bytes_used += sizeof (struct gcse_occr);
antic_occr->insn = insn;
antic_occr->next = cur_expr->antic_occr;
antic_occr->deleted_p = 0;
else
{
/* First occurrence of this expression in this basic block. */
- avail_occr = GOBNEW (struct occr);
- bytes_used += sizeof (struct occr);
+ avail_occr = GOBNEW (struct gcse_occr);
+ bytes_used += sizeof (struct gcse_occr);
avail_occr->insn = insn;
avail_occr->next = cur_expr->avail_occr;
avail_occr->deleted_p = 0;
/* Scan SET present in INSN and add an entry to the hash TABLE. */
static void
-hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table)
+hash_scan_set (rtx set, rtx_insn *insn, struct gcse_hash_table_d *table)
{
rtx src = SET_SRC (set);
rtx dest = SET_DEST (set);
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
- struct hash_table_d *table ATTRIBUTE_UNUSED)
+ struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
{
/* Currently nothing to do. */
}
static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx_insn *insn ATTRIBUTE_UNUSED,
- struct hash_table_d *table ATTRIBUTE_UNUSED)
+ struct gcse_hash_table_d *table ATTRIBUTE_UNUSED)
{
/* Currently nothing to do. */
}
/* Process INSN and add hash table entries as appropriate. */
static void
-hash_scan_insn (rtx_insn *insn, struct hash_table_d *table)
+hash_scan_insn (rtx_insn *insn, struct gcse_hash_table_d *table)
{
rtx pat = PATTERN (insn);
int i;
/* Dump the hash table TABLE to file FILE under the name NAME. */
static void
-dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
+dump_hash_table (FILE *file, const char *name, struct gcse_hash_table_d *table)
{
int i;
/* Flattened out table, so it's printed in proper order. */
- struct expr **flat_table;
+ struct gcse_expr **flat_table;
unsigned int *hash_val;
- struct expr *expr;
+ struct gcse_expr *expr;
- flat_table = XCNEWVEC (struct expr *, table->n_elems);
+ flat_table = XCNEWVEC (struct gcse_expr *, table->n_elems);
hash_val = XNEWVEC (unsigned int, table->n_elems);
for (i = 0; i < (int) table->size; i++)
TABLE is the table computed. */
static void
-compute_hash_table_work (struct hash_table_d *table)
+compute_hash_table_work (struct gcse_hash_table_d *table)
{
int i;
It is used to determine the number of buckets to use. */
static void
-alloc_hash_table (struct hash_table_d *table)
+alloc_hash_table (struct gcse_hash_table_d *table)
{
int n;
Making it an odd number is simplest for now.
??? Later take some measurements. */
table->size |= 1;
- n = table->size * sizeof (struct expr *);
- table->table = GNEWVAR (struct expr *, n);
+ n = table->size * sizeof (struct gcse_expr *);
+ table->table = GNEWVAR (struct gcse_expr *, n);
}
/* Free things allocated by alloc_hash_table. */
static void
-free_hash_table (struct hash_table_d *table)
+free_hash_table (struct gcse_hash_table_d *table)
{
free (table->table);
}
/* Compute the expression hash table TABLE. */
static void
-compute_hash_table (struct hash_table_d *table)
+compute_hash_table (struct gcse_hash_table_d *table)
{
/* Initialize count of number of entries in hash table. */
table->n_elems = 0;
- memset (table->table, 0, table->size * sizeof (struct expr *));
+ memset (table->table, 0, table->size * sizeof (struct gcse_expr *));
compute_hash_table_work (table);
}
prune_expressions (bool pre_p)
{
sbitmap prune_exprs;
- struct expr *expr;
+ struct gcse_expr *expr;
unsigned int ui;
basic_block bb;
the closest such expression. */
static int
-pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
+pre_expr_reaches_here_p_work (basic_block occr_bb, struct gcse_expr *expr,
basic_block bb, char *visited)
{
edge pred;
memory allocated for that function is returned. */
static int
-pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
+pre_expr_reaches_here_p (basic_block occr_bb, struct gcse_expr *expr, basic_block bb)
{
int rval;
char *visited = XCNEWVEC (char, last_basic_block_for_fn (cfun));
/* Generate RTL to copy an EXPR to its `reaching_reg' and return it. */
static rtx_insn *
-process_insert_insn (struct expr *expr)
+process_insert_insn (struct gcse_expr *expr)
{
rtx reg = expr->reaching_reg;
/* Copy the expression to make sure we don't have any sharing issues. */
This is used by both the PRE and code hoisting. */
static void
-insert_insn_end_basic_block (struct expr *expr, basic_block bb)
+insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb)
{
rtx_insn *insn = BB_END (bb);
rtx_insn *new_insn;
the expressions fully redundant. */
static int
-pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
+pre_edge_insert (struct edge_list *edge_list, struct gcse_expr **index_map)
{
int e, i, j, num_edges, set_size, did_insert = 0;
sbitmap *inserted;
j++, insert >>= 1)
if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
{
- struct expr *expr = index_map[j];
- struct occr *occr;
+ struct gcse_expr *expr = index_map[j];
+ struct gcse_occr *occr;
/* Now look at each deleted occurrence of this expression. */
for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
MEM <- reaching_reg. */
static void
-pre_insert_copy_insn (struct expr *expr, rtx_insn *insn)
+pre_insert_copy_insn (struct gcse_expr *expr, rtx_insn *insn)
{
rtx reg = expr->reaching_reg;
int regno = REGNO (reg);
pre_insert_copies (void)
{
unsigned int i, added_copy;
- struct expr *expr;
- struct occr *occr;
- struct occr *avail;
+ struct gcse_expr *expr;
+ struct gcse_occr *occr;
+ struct gcse_occr *avail;
/* For each available expression in the table, copy the result to
`reaching_reg' if the expression reaches a deleted one.
{
unsigned int i;
int changed;
- struct expr *expr;
- struct occr *occr;
+ struct gcse_expr *expr;
+ struct gcse_occr *occr;
changed = 0;
for (i = 0; i < expr_hash_table.size; i++)
{
unsigned int i;
int did_insert, changed;
- struct expr **index_map;
- struct expr *expr;
+ struct gcse_expr **index_map;
+ struct gcse_expr *expr;
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
- index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
+ index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
paths. */
static int
-should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
+should_hoist_expr_to_dom (basic_block expr_bb, struct gcse_expr *expr,
basic_block bb, sbitmap visited, int distance,
int *bb_size, enum reg_class pressure_class,
int *nregs, bitmap hoisted_bbs, rtx_insn *from)
\f
/* Find occurrence in BB. */
-static struct occr *
-find_occr_in_bb (struct occr *occr, basic_block bb)
+static struct gcse_occr *
+find_occr_in_bb (struct gcse_occr *occr, basic_block bb)
{
/* Find the right occurrence of this expression. */
while (occr && BLOCK_FOR_INSN (occr->insn) != bb)
unsigned int dom_tree_walk_index;
vec<basic_block> domby;
unsigned int i, j, k;
- struct expr **index_map;
- struct expr *expr;
+ struct gcse_expr **index_map;
+ struct gcse_expr *expr;
int *to_bb_head;
int *bb_size;
int changed = 0;
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
- index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
+ index_map = XCNEWVEC (struct gcse_expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
int nregs = 0;
enum reg_class pressure_class = NO_REGS;
/* Current expression. */
- struct expr *expr = index_map[i];
+ struct gcse_expr *expr = index_map[i];
/* Number of occurrences of EXPR that can be hoisted to BB. */
int hoistable = 0;
/* Occurrences reachable from BB. */
while (ptr != NULL)
{
- struct expr * expr;
+ struct gcse_expr * expr;
/* Delete if entry has been made invalid. */
if (! ptr->invalid)
break;
}
else
- expr = (struct expr *) 0;
+ expr = (struct gcse_expr *) 0;
if (expr)
{
correct value in the reaching register for the loads. */
static void
-update_ld_motion_stores (struct expr * expr)
+update_ld_motion_stores (struct gcse_expr * expr)
{
struct ls_expr * mem_ptr;
}
/* Datastructure used to store per-call-site statistics. */
-struct loc_descriptor
+struct ggc_loc_descriptor
{
const char *file;
int line;
/* Hash table helper. */
-struct loc_desc_hasher : typed_noop_remove <loc_descriptor>
+struct ggc_loc_desc_hasher : typed_noop_remove <ggc_loc_descriptor>
{
- typedef loc_descriptor value_type;
- typedef loc_descriptor compare_type;
+ typedef ggc_loc_descriptor value_type;
+ typedef ggc_loc_descriptor compare_type;
static inline hashval_t hash (const value_type *);
static inline bool equal (const value_type *, const compare_type *);
};
inline hashval_t
-loc_desc_hasher::hash (const value_type *d)
+ggc_loc_desc_hasher::hash (const value_type *d)
{
return htab_hash_pointer (d->function) | d->line;
}
inline bool
-loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
+ggc_loc_desc_hasher::equal (const value_type *d, const compare_type *d2)
{
return (d->file == d2->file && d->line == d2->line
&& d->function == d2->function);
}
/* Hashtable used for statistics. */
-static hash_table<loc_desc_hasher> *loc_hash;
+static hash_table<ggc_loc_desc_hasher> *loc_hash;
-struct ptr_hash_entry
+struct ggc_ptr_hash_entry
{
void *ptr;
- struct loc_descriptor *loc;
+ struct ggc_loc_descriptor *loc;
size_t size;
};
/* Helper for ptr_hash table. */
-struct ptr_hash_hasher : typed_noop_remove <ptr_hash_entry>
+struct ptr_hash_hasher : typed_noop_remove <ggc_ptr_hash_entry>
{
- typedef ptr_hash_entry value_type;
+ typedef ggc_ptr_hash_entry value_type;
typedef void compare_type;
static inline hashval_t hash (const value_type *);
static inline bool equal (const value_type *, const compare_type *);
static hash_table<ptr_hash_hasher> *ptr_hash;
/* Return descriptor for given call site, create new one if needed. */
-static struct loc_descriptor *
+static struct ggc_loc_descriptor *
make_loc_descriptor (const char *name, int line, const char *function)
{
- struct loc_descriptor loc;
- struct loc_descriptor **slot;
+ struct ggc_loc_descriptor loc;
+ struct ggc_loc_descriptor **slot;
loc.file = name;
loc.line = line;
loc.function = function;
if (!loc_hash)
- loc_hash = new hash_table<loc_desc_hasher> (10);
+ loc_hash = new hash_table<ggc_loc_desc_hasher> (10);
slot = loc_hash->find_slot (&loc, INSERT);
if (*slot)
return *slot;
- *slot = XCNEW (struct loc_descriptor);
+ *slot = XCNEW (struct ggc_loc_descriptor);
(*slot)->file = name;
(*slot)->line = line;
(*slot)->function = function;
ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
const char *name, int line, const char *function)
{
- struct loc_descriptor *loc = make_loc_descriptor (name, line, function);
- struct ptr_hash_entry *p = XNEW (struct ptr_hash_entry);
- ptr_hash_entry **slot;
+ struct ggc_loc_descriptor *loc = make_loc_descriptor (name, line, function);
+ struct ggc_ptr_hash_entry *p = XNEW (struct ggc_ptr_hash_entry);
+ ggc_ptr_hash_entry **slot;
p->ptr = ptr;
p->loc = loc;
/* Helper function for prune_overhead_list. See if SLOT is still marked and
remove it from hashtable if it is not. */
int
-ggc_prune_ptr (ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
+ggc_prune_ptr (ggc_ptr_hash_entry **slot, void *b ATTRIBUTE_UNUSED)
{
- struct ptr_hash_entry *p = *slot;
+ struct ggc_ptr_hash_entry *p = *slot;
if (!ggc_marked_p (p->ptr))
{
p->loc->collected += p->size;
void
ggc_free_overhead (void *ptr)
{
- ptr_hash_entry **slot
+ ggc_ptr_hash_entry **slot
= ptr_hash->find_slot_with_hash (ptr, htab_hash_pointer (ptr), NO_INSERT);
- struct ptr_hash_entry *p;
+ struct ggc_ptr_hash_entry *p;
/* The pointer might be not found if a PCH read happened between allocation
and ggc_free () call. FIXME: account memory properly in the presence of
PCH. */
if (!slot)
return;
- p = (struct ptr_hash_entry *) *slot;
+ p = (struct ggc_ptr_hash_entry *) *slot;
p->loc->freed += p->size;
ptr_hash->clear_slot (slot);
free (p);
static int
final_cmp_statistic (const void *loc1, const void *loc2)
{
- const struct loc_descriptor *const l1 =
- *(const struct loc_descriptor *const *) loc1;
- const struct loc_descriptor *const l2 =
- *(const struct loc_descriptor *const *) loc2;
+ const struct ggc_loc_descriptor *const l1 =
+ *(const struct ggc_loc_descriptor *const *) loc1;
+ const struct ggc_loc_descriptor *const l2 =
+ *(const struct ggc_loc_descriptor *const *) loc2;
long diff;
diff = ((long)(l1->allocated + l1->overhead - l1->freed) -
(l2->allocated + l2->overhead - l2->freed));
static int
cmp_statistic (const void *loc1, const void *loc2)
{
- const struct loc_descriptor *const l1 =
- *(const struct loc_descriptor *const *) loc1;
- const struct loc_descriptor *const l2 =
- *(const struct loc_descriptor *const *) loc2;
+ const struct ggc_loc_descriptor *const l1 =
+ *(const struct ggc_loc_descriptor *const *) loc1;
+ const struct ggc_loc_descriptor *const l2 =
+ *(const struct ggc_loc_descriptor *const *) loc2;
long diff;
diff = ((long)(l1->allocated + l1->overhead - l1->freed - l1->collected) -
}
/* Collect array of the descriptors from hashtable. */
-static struct loc_descriptor **loc_array;
+static struct ggc_loc_descriptor **loc_array;
int
-ggc_add_statistics (loc_descriptor **slot, int *n)
+ggc_add_statistics (ggc_loc_descriptor **slot, int *n)
{
loc_array[*n] = *slot;
(*n)++;
ggc_force_collect = true;
ggc_collect ();
- loc_array = XCNEWVEC (struct loc_descriptor *,
+ loc_array = XCNEWVEC (struct ggc_loc_descriptor *,
loc_hash->elements_with_deleted ());
fprintf (stderr, "-------------------------------------------------------\n");
fprintf (stderr, "\n%-48s %10s %10s %10s %10s %10s\n",
final ? final_cmp_statistic : cmp_statistic);
for (i = 0; i < nentries; i++)
{
- struct loc_descriptor *d = loc_array[i];
+ struct ggc_loc_descriptor *d = loc_array[i];
allocated += d->allocated;
times += d->times;
freed += d->freed;
}
for (i = 0; i < nentries; i++)
{
- struct loc_descriptor *d = loc_array[i];
+ struct ggc_loc_descriptor *d = loc_array[i];
if (d->allocated)
{
const char *s1 = d->file;
#endif
/* The rest of the global variables. */
-static struct globals
+static struct ggc_globals
{
/* The Nth element in this array is a page with objects of size 2^N.
If there are any pages with free objects, they will be at the
/* Arrays representing a topological ordering of call graph nodes and a stack
of nodes used during constant propagation. */
-struct topo_info
+struct ipa_topo_info
{
struct cgraph_node **order;
struct cgraph_node **stack;
/* Allocate the arrays in TOPO and topologically sort the nodes into order. */
static void
-build_toporder_info (struct topo_info *topo)
+build_toporder_info (struct ipa_topo_info *topo)
{
topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
TOPO. */
static void
-free_toporder_info (struct topo_info *topo)
+free_toporder_info (struct ipa_topo_info *topo)
{
ipa_free_postorder_info ();
free (topo->order);
/* Add NODE to the stack in TOPO, unless it is already there. */
static inline void
-push_node_to_stack (struct topo_info *topo, struct cgraph_node *node)
+push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
if (info->node_enqueued)
is empty. */
static struct cgraph_node *
-pop_node_from_stack (struct topo_info *topo)
+pop_node_from_stack (struct ipa_topo_info *topo)
{
if (topo->stack_top)
{
connected components. */
static void
-propagate_constants_topo (struct topo_info *topo)
+propagate_constants_topo (struct ipa_topo_info *topo)
{
int i;
interprocedurally. */
static void
-ipcp_propagate_stage (struct topo_info *topo)
+ipcp_propagate_stage (struct ipa_topo_info *topo)
{
struct cgraph_node *node;
TOPO and make specialized clones if deemed beneficial. */
static void
-ipcp_decision_stage (struct topo_info *topo)
+ipcp_decision_stage (struct ipa_topo_info *topo)
{
int i;
{
struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
struct cgraph_edge_hook_list *edge_removal_hook_holder;
- struct topo_info topo;
+ struct ipa_topo_info topo;
ipa_check_create_node_params ();
ipa_check_create_edge_args ();
/* Structure to be passed in between detect_type_change and
check_stmt_for_type_change. */
-struct type_change_info
+struct prop_type_change_info
{
/* Offset into the object where there is the virtual method pointer we are
looking for. */
identified, return the type. Otherwise return NULL_TREE. */
static tree
-extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
+extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
HOST_WIDE_INT offset, size, max_size;
tree lhs, rhs, base, binfo;
detect_type_change to check whether a particular statement may modify
the virtual table pointer, and if possible also determine the new type of
the (sub-)object. It stores its result into DATA, which points to a
- type_change_info structure. */
+ prop_type_change_info structure. */
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
gimple stmt = SSA_NAME_DEF_STMT (vdef);
- struct type_change_info *tci = (struct type_change_info *) data;
+ struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
if (stmt_may_be_vtbl_ptr_store (stmt))
{
gimple call, struct ipa_jump_func *jfunc,
HOST_WIDE_INT offset)
{
- struct type_change_info tci;
+ struct prop_type_change_info tci;
ao_ref ao;
bool entry_reached = false;
{
unsigned int size;
unsigned int time;
-} bb_info;
+} split_bb_info;
-static vec<bb_info> bb_info_vec;
+static vec<split_bb_info> bb_info_vec;
/* Description of split point. */
/* This structure is used to record information about hard register
eliminations. */
-struct elim_table
+struct lra_elim_table
{
/* Hard register number to be eliminated. */
int from;
of eliminating a register in favor of another. If there is more
than one way of eliminating a particular register, the most
preferred should be specified first. */
-static struct elim_table *reg_eliminate = 0;
+static struct lra_elim_table *reg_eliminate = 0;
/* This is an intermediate structure to initialize the table. It has
exactly the members provided by ELIMINABLE_REGS. */
static void
print_elim_table (FILE *f)
{
- struct elim_table *ep;
+ struct lra_elim_table *ep;
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
fprintf (f, "%s eliminate %d to %d (offset=" HOST_WIDE_INT_PRINT_DEC
VALUE. Setup FRAME_POINTER_NEEDED if elimination from frame
pointer to stack pointer is not possible anymore. */
static void
-setup_can_eliminate (struct elim_table *ep, bool value)
+setup_can_eliminate (struct lra_elim_table *ep, bool value)
{
ep->can_eliminate = ep->prev_can_eliminate = value;
if (! value
or NULL if none. The elimination table may contain more than
one elimination for the same hard register, but this map specifies
the one that we are currently using. */
-static struct elim_table *elimination_map[FIRST_PSEUDO_REGISTER];
+static struct lra_elim_table *elimination_map[FIRST_PSEUDO_REGISTER];
/* When an eliminable hard register becomes not eliminable, we use the
following special structure to restore original offsets for the
register. */
-static struct elim_table self_elim_table;
+static struct lra_elim_table self_elim_table;
/* Offsets should be used to restore original offsets for eliminable
hard register which just became not eliminable. Zero,
setup_elimination_map (void)
{
int i;
- struct elim_table *ep;
+ struct lra_elim_table *ep;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
elimination_map[i] = NULL;
int
lra_get_elimination_hard_regno (int hard_regno)
{
- struct elim_table *ep;
+ struct lra_elim_table *ep;
if (hard_regno < 0 || hard_regno >= FIRST_PSEUDO_REGISTER)
return hard_regno;
/* Return elimination which will be used for hard reg REG, NULL
otherwise. */
-static struct elim_table *
+static struct lra_elim_table *
get_elimination (rtx reg)
{
int hard_regno;
- struct elim_table *ep;
+ struct lra_elim_table *ep;
HOST_WIDE_INT offset;
lra_assert (REG_P (reg));
bool subst_p, bool update_p, bool full_p)
{
enum rtx_code code = GET_CODE (x);
- struct elim_table *ep;
+ struct lra_elim_table *ep;
rtx new_rtx;
int i, j;
const char *fmt;
mark_not_eliminable (rtx x, enum machine_mode mem_mode)
{
enum rtx_code code = GET_CODE (x);
- struct elim_table *ep;
+ struct lra_elim_table *ep;
int i, j;
const char *fmt;
int i;
rtx substed_operand[MAX_RECOG_OPERANDS];
rtx orig_operand[MAX_RECOG_OPERANDS];
- struct elim_table *ep;
+ struct lra_elim_table *ep;
rtx plus_src, plus_cst_src;
lra_insn_recog_data_t id;
struct lra_static_insn_data *static_id;
update_reg_eliminate (bitmap insns_with_changed_offsets)
{
bool prev, result;
- struct elim_table *ep, *ep1;
+ struct lra_elim_table *ep, *ep1;
HARD_REG_SET temp_hard_reg_set;
/* Clear self elimination offsets. */
static void
init_elim_table (void)
{
- struct elim_table *ep;
+ struct lra_elim_table *ep;
#ifdef ELIMINABLE_REGS
bool value_p;
const struct elim_table_1 *ep1;
#endif
if (!reg_eliminate)
- reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
+ reg_eliminate = XCNEWVEC (struct lra_elim_table, NUM_ELIMINABLE_REGS);
memset (self_elim_offsets, 0, sizeof (self_elim_offsets));
/* Initiate member values which will be never changed. */
bool stop_to_sp_elimination_p;
basic_block bb;
rtx_insn *insn;
- struct elim_table *ep;
+ struct lra_elim_table *ep;
init_elim_table ();
FOR_EACH_BB_FN (bb, cfun)
lra_eliminate_reg_if_possible (rtx *loc)
{
int regno;
- struct elim_table *ep;
+ struct lra_elim_table *ep;
lra_assert (REG_P (*loc));
if ((regno = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
unsigned int uid;
bitmap_head insns_with_changed_offsets;
bitmap_iterator bi;
- struct elim_table *ep;
+ struct lra_elim_table *ep;
gcc_assert (! final_p || ! first_p);
/* This is used to carry information about basic blocks. It is
attached to the AUX field of the standard CFG block. */
-typedef struct block_info_def
+struct block_info
{
/* Estimated frequency of execution of basic_block. */
sreal frequency;
/* Number of predecessors we need to visit first. */
int npredecessors;
-} *block_info;
+};
/* Similar information for edges. */
-typedef struct edge_info_def
+struct edge_prob_info
{
/* In case edge is a loopback edge, the probability edge will be reached
in case header is. Estimated number of iterations of the loop can be
sreal back_edge_prob;
/* True if the edge is a loopback edge in the natural loop. */
unsigned int back_edge:1;
-} *edge_info;
+};
-#define BLOCK_INFO(B) ((block_info) (B)->aux)
-#define EDGE_INFO(E) ((edge_info) (E)->aux)
+#define BLOCK_INFO(B) ((block_info *) (B)->aux)
+#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
/* Helper function for estimate_bb_frequencies.
Propagate the frequencies in blocks marked in
REG_BR_PROB_BASE;
/* Set up block info for each basic block. */
- alloc_aux_for_blocks (sizeof (struct block_info_def));
- alloc_aux_for_edges (sizeof (struct edge_info_def));
+ alloc_aux_for_blocks (sizeof (block_info));
+ alloc_aux_for_edges (sizeof (edge_prob_info));
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
{
edge e;
#define PROFILE_H
/* Additional information about edges. */
-struct edge_info
+struct edge_profile_info
{
unsigned int count_valid:1;
unsigned int ignore:1;
};
-#define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
+#define EDGE_INFO(e) ((struct edge_profile_info *) (e)->aux)
/* Smoothes the initial assigned basic block and edge counts using
a minimum cost flow algorithm. */
This is basically a generic equivalent to the C++ front-end's
Named Return Value optimization. */
-struct nrv_data
+struct nrv_data_t
{
/* This is the temporary (a VAR_DECL) which appears in all of
this function's RETURN_EXPR statements. */
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
- struct nrv_data *dp = (struct nrv_data *) wi->info;
+ struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;
/* No need to walk into types. */
if (TYPE_P (*tp))
tree found = NULL;
basic_block bb;
gimple_stmt_iterator gsi;
- struct nrv_data data;
+ struct nrv_data_t data;
/* If this function does not return an aggregate type in memory, then
there is nothing to do. */
VARYING
} ccp_lattice_t;
-struct prop_value_d {
+struct ccp_prop_value_t {
/* Lattice value. */
ccp_lattice_t lattice_val;
widest_int mask;
};
-typedef struct prop_value_d prop_value_t;
-
/* Array of propagated constant values. After propagation,
CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
the constant is held in an SSA name representing a memory store
(i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
memory reference used to store (i.e., the LHS of the assignment
doing the store). */
-static prop_value_t *const_val;
+static ccp_prop_value_t *const_val;
static unsigned n_const_val;
-static void canonicalize_value (prop_value_t *);
+static void canonicalize_value (ccp_prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
static void
-dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
+dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
switch (val.lattice_val)
{
/* Print lattice value VAL to stderr. */
-void debug_lattice_value (prop_value_t val);
+void debug_lattice_value (ccp_prop_value_t val);
DEBUG_FUNCTION void
-debug_lattice_value (prop_value_t val)
+debug_lattice_value (ccp_prop_value_t val)
{
dump_lattice_value (stderr, "", val);
fprintf (stderr, "\n");
4- Initial values of variables that are not GIMPLE registers are
considered VARYING. */
-static prop_value_t
+static ccp_prop_value_t
get_default_value (tree var)
{
- prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
+ ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
gimple stmt;
stmt = SSA_NAME_DEF_STMT (var);
/* Get the constant value associated with variable VAR. */
-static inline prop_value_t *
+static inline ccp_prop_value_t *
get_value (tree var)
{
- prop_value_t *val;
+ ccp_prop_value_t *val;
if (const_val == NULL
|| SSA_NAME_VERSION (var) >= n_const_val)
static inline tree
get_constant_value (tree var)
{
- prop_value_t *val;
+ ccp_prop_value_t *val;
if (TREE_CODE (var) != SSA_NAME)
{
if (is_gimple_min_invariant (var))
static inline void
set_value_varying (tree var)
{
- prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+ ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
val->lattice_val = VARYING;
val->value = NULL_TREE;
For other constants, make sure to drop TREE_OVERFLOW. */
static void
-canonicalize_value (prop_value_t *val)
+canonicalize_value (ccp_prop_value_t *val)
{
enum machine_mode mode;
tree type;
/* Return whether the lattice transition is valid. */
static bool
-valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
+valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
/* Lattice transitions must always be monotonically increasing in
value. */
value is different from VAR's previous value. */
static bool
-set_lattice_value (tree var, prop_value_t new_val)
+set_lattice_value (tree var, ccp_prop_value_t new_val)
{
/* We can deal with old UNINITIALIZED values just fine here. */
- prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
+ ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
canonicalize_value (&new_val);
return false;
}
-static prop_value_t get_value_for_expr (tree, bool);
-static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
+static ccp_prop_value_t get_value_for_expr (tree, bool);
+static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
tree, const widest_int &, const widest_int &,
tree, const widest_int &, const widest_int &);
from VAL. */
static widest_int
-value_to_wide_int (prop_value_t val)
+value_to_wide_int (ccp_prop_value_t val)
{
if (val.value
&& TREE_CODE (val.value) == INTEGER_CST)
/* Return the value for the address expression EXPR based on alignment
information. */
-static prop_value_t
+static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
tree type = TREE_TYPE (expr);
- prop_value_t val;
+ ccp_prop_value_t val;
unsigned HOST_WIDE_INT bitpos;
unsigned int align;
return constant bits extracted from alignment information for
invariant addresses. */
-static prop_value_t
+static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
- prop_value_t val;
+ ccp_prop_value_t val;
if (TREE_CODE (expr) == SSA_NAME)
{
all_undefined_operands = true;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
{
- prop_value_t *val = get_value (use);
+ ccp_prop_value_t *val = get_value (use);
if (val->lattice_val == UNDEFINED)
has_undefined_operand = true;
basic_block bb;
n_const_val = num_ssa_names;
- const_val = XCNEWVEC (prop_value_t, n_const_val);
+ const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
/* Initialize simulation flags for PHI nodes and statements. */
FOR_EACH_BB_FN (bb, cfun)
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
- prop_value_t *val;
+ ccp_prop_value_t *val;
unsigned int tem, align;
if (!name
*/
static void
-ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
+ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
if (val1->lattice_val == UNDEFINED)
{
{
/* When not equal addresses are involved try meeting for
alignment. */
- prop_value_t tem = *val2;
+ ccp_prop_value_t tem = *val2;
if (TREE_CODE (val1->value) == ADDR_EXPR)
*val1 = get_value_for_expr (val1->value, true);
if (TREE_CODE (val2->value) == ADDR_EXPR)
ccp_visit_phi_node (gimple phi)
{
unsigned i;
- prop_value_t *old_val, new_val;
+ ccp_prop_value_t *old_val, new_val;
if (dump_file && (dump_flags & TDF_DETAILS))
{
if (e->flags & EDGE_EXECUTABLE)
{
tree arg = gimple_phi_arg (phi, i)->def;
- prop_value_t arg_val = get_value_for_expr (arg, false);
+ ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
ccp_lattice_meet (&new_val, &arg_val);
/* Return the propagation value when applying the operation CODE to
the value RHS yielding type TYPE. */
-static prop_value_t
+static ccp_prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
- prop_value_t rval = get_value_for_expr (rhs, true);
+ ccp_prop_value_t rval = get_value_for_expr (rhs, true);
widest_int value, mask;
- prop_value_t val;
+ ccp_prop_value_t val;
if (rval.lattice_val == UNDEFINED)
return rval;
/* Return the propagation value when applying the operation CODE to
the values RHS1 and RHS2 yielding type TYPE. */
-static prop_value_t
+static ccp_prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
- prop_value_t r1val = get_value_for_expr (rhs1, true);
- prop_value_t r2val = get_value_for_expr (rhs2, true);
+ ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
+ ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
widest_int value, mask;
- prop_value_t val;
+ ccp_prop_value_t val;
if (r1val.lattice_val == UNDEFINED
|| r2val.lattice_val == UNDEFINED)
is false, for alloc_aligned attribute ATTR is non-NULL and
ALLOC_ALIGNED is true. */
-static prop_value_t
-bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
+static ccp_prop_value_t
+bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
bool alloc_aligned)
{
tree align, misalign = NULL_TREE, type;
unsigned HOST_WIDE_INT aligni, misaligni = 0;
- prop_value_t alignval;
+ ccp_prop_value_t alignval;
widest_int value, mask;
- prop_value_t val;
+ ccp_prop_value_t val;
if (attr == NULL_TREE)
{
/* Evaluate statement STMT.
Valid only for assignments, calls, conditionals, and switches. */
-static prop_value_t
+static ccp_prop_value_t
evaluate_stmt (gimple stmt)
{
- prop_value_t val;
+ ccp_prop_value_t val;
tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant = false;
{
case GIMPLE_COND:
{
- prop_value_t val;
+ ccp_prop_value_t val;
/* Statement evaluation will handle type mismatches in constants
more gracefully than the final propagation. This allows us to
fold more conditionals here. */
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
- prop_value_t val;
+ ccp_prop_value_t val;
enum ssa_prop_result retval;
tree lhs = gimple_get_lhs (stmt);
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
- prop_value_t val;
+ ccp_prop_value_t val;
basic_block block;
block = gimple_bb (stmt);
Mark them VARYING. */
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
{
- prop_value_t v = { VARYING, NULL_TREE, -1 };
+ ccp_prop_value_t v = { VARYING, NULL_TREE, -1 };
set_lattice_value (def, v);
}
When visiting a statement or PHI node the lattice value for an
SSA name can transition from UNDEFINED to COPY to VARYING. */
-struct prop_value_d {
+struct prop_value_t {
/* Copy-of value. */
tree value;
};
-typedef struct prop_value_d prop_value_t;
static prop_value_t *copy_of;
static unsigned n_copy_of;