+2008-06-25  Kaveh R. Ghazi  <ghazi@caip.rutgers.edu>
+
+ * ra.h (add_neighbor): Fix -Wc++-compat and/or -Wcast-qual
+ warnings.
+ * recog.c (check_asm_operands, validate_change_1): Likewise.
+ * reg-stack.c (check_asm_stack_operands, subst_asm_stack_regs):
+ Likewise.
+ * regclass.c (regclass, som_hash, som_eq, record_subregs_of_mode,
+ cannot_change_mode_set_regs, invalid_mode_change_p): Likewise.
+ * regmove.c (reg_is_remote_constant_p): Likewise.
+ * regrename.c (regrename_optimize, scan_rtx_reg,
+ kill_clobbered_value, kill_set_value, kill_autoinc_value):
+ Likewise.
+ * regstat.c (regstat_init_n_sets_and_refs, regstat_compute_ri,
+ regstat_compute_calls_crossed): Likewise.
+ * reload1.c (init_reload, new_insn_chain,
+ has_nonexceptional_receiver, reload, copy_reloads,
+ calculate_needs_all_insns, init_elim_table): Likewise.
+ * rtl-factoring.c (compute_rtx_cost, fill_hash_bucket): Likewise.
+ * rtl.c (shallow_copy_rtx_stat): Likewise.
+ * rtlanal.c (parms_set): Likewise.
+ * sbitmap.c (sbitmap_alloc, sbitmap_alloc_with_popcount,
+ sbitmap_resize, sbitmap_vector_alloc): Likewise.
+ * sched-ebb.c (earliest_block_with_similiar_load,
+ add_deps_for_risky_insns): Likewise.
+ * sched-rgn.c (find_rgns, gather_region_statistics, extend_rgns,
+ schedule_region): Likewise.
+ * see.c (eq_descriptor_pre_extension,
+ hash_descriptor_pre_extension, hash_del_pre_extension,
+ eq_descriptor_properties, hash_descriptor_properties,
+ hash_del_properties, see_seek_pre_extension_expr,
+ see_initialize_data_structures, see_print_register_properties,
+ see_print_pre_extension_expr, see_delete_merged_def_extension,
+ see_delete_unmerged_def_extension, see_emit_use_extension,
+ see_pre_delete_extension, see_map_extension, see_commit_changes,
+ see_analyze_merged_def_local_prop,
+ see_analyze_unmerged_def_local_prop, see_analyze_use_local_prop,
+ see_set_prop_merged_def, see_set_prop_unmerged_def,
+ see_set_prop_unmerged_use, see_print_one_extension,
+ see_merge_one_use_extension, see_merge_one_def_extension,
+ see_store_reference_and_extension, see_update_uses_relevancy,
+ see_update_defs_relevancy): Likewise.
+ * statistics.c (hash_statistics_hash, hash_statistics_eq,
+ hash_statistics_free, curr_statistics_hash): Likewise.
+ * stmt.c (parse_output_constraint, decl_overlaps_hard_reg_set_p,
+ expand_asm_operands, expand_return, case_bit_test_cmp,
+ expand_case): Likewise.
+ * stor-layout.c (start_record_layout): Likewise.
+ * stringpool.c (ggc_alloc_string, gt_pch_n_S,
+ gt_pch_save_stringpool): Likewise.
+ * tree-data-ref.c (hash_stmt_vertex_info,
+ have_similar_memory_accesses_1, ref_base_address_1): Likewise.
+ * tree-ssa-phiopt.c (name_to_bb_hash): Likewise.
+
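Background on the recurring fix: C allows implicit conversion from
void * to any object pointer type, so the old calls to alloca, xmalloc,
htab_find and friends compiled silently; C++ does not, and -Wc++-compat
flags every such site.  A minimal sketch of the difference (node_t is
hypothetical):

    void *p = xmalloc (sizeof (node_t));
    node_t *a = p;             /* Accepted in C, an error in C++.  */
    node_t *b = (node_t *) p;  /* Valid in both languages.  */

Where possible the patch prefers the typed allocation macros (XNEWVEC,
XALLOCAVEC, XOBNEW, ...) over bare casts, since they keep the sizeof
and the cast in one place.  The -Wcast-qual sites instead get const
added to local pointers, or CONST_CAST where a qualifier genuinely has
to be stripped.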
if (adjlist == NULL || adjlist->index == ADJACENCY_VEC_LENGTH)
{
- adjacency_t *new = pool_alloc (adjacency_pool);
+ adjacency_t *new = (adjacency_t *) pool_alloc (adjacency_pool);
new->index = 0;
new->next = adjlist;
adjlist = new;
if (noperands == 0)
return 1;
- operands = alloca (noperands * sizeof (rtx));
- constraints = alloca (noperands * sizeof (char *));
+ operands = XALLOCAVEC (rtx, noperands);
+ constraints = XALLOCAVEC (const char *, noperands);
decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
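XALLOCAVEC is the stack-allocation form; its libiberty.h definition is
essentially:

    /* Allocate N objects of type T on the stack, with the result
       already cast to T *.  */
    #define XALLOCAVEC(T, N)  ((T *) alloca (sizeof (T) * (N)))

Besides silencing -Wc++-compat, it removes the hand-written sizeof
that the open-coded alloca calls could get wrong.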
else
changes_allocated *= 2;
- changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
+ changes = XRESIZEVEC (change_t, changes, changes_allocated);
}
changes[num_changes].object = object;
if (GET_CODE (body) == PARALLEL)
{
- clobber_reg = alloca (XVECLEN (body, 0) * sizeof (rtx));
+ clobber_reg = XALLOCAVEC (rtx, XVECLEN (body, 0));
for (i = 0; i < XVECLEN (body, 0); i++)
if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
for (i = 0, note = REG_NOTES (insn); note; note = XEXP (note, 1))
i++;
- note_reg = alloca (i * sizeof (rtx));
- note_loc = alloca (i * sizeof (rtx *));
- note_kind = alloca (i * sizeof (enum reg_note));
+ note_reg = XALLOCAVEC (rtx, i);
+ note_loc = XALLOCAVEC (rtx *, i);
+ note_kind = XALLOCAVEC (enum reg_note, i);
n_notes = 0;
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (GET_CODE (body) == PARALLEL)
{
- clobber_reg = alloca (XVECLEN (body, 0) * sizeof (rtx));
- clobber_loc = alloca (XVECLEN (body, 0) * sizeof (rtx *));
+ clobber_reg = XALLOCAVEC (rtx, XVECLEN (body, 0));
+ clobber_loc = XALLOCAVEC (rtx *, XVECLEN (body, 0));
for (i = 0; i < XVECLEN (body, 0); i++)
if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
init_recog ();
- reg_renumber = xmalloc (max_regno * sizeof (short));
+ reg_renumber = XNEWVEC (short, max_regno);
reg_pref = XCNEWVEC (struct reg_pref, max_regno);
memset (reg_renumber, -1, max_regno * sizeof (short));
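The heap-side macros follow the same pattern; from libiberty.h,
approximately:

    #define XNEW(T)             ((T *) xmalloc (sizeof (T)))
    #define XCNEW(T)            ((T *) xcalloc (1, sizeof (T)))
    #define XNEWVEC(T, N)       ((T *) xmalloc (sizeof (T) * (N)))
    #define XCNEWVEC(T, N)      ((T *) xcalloc ((N), sizeof (T)))
    #define XRESIZEVEC(T, P, N) ((T *) xrealloc ((void *) (P), \
                                                 sizeof (T) * (N)))

so "reg_renumber = XNEWVEC (short, max_regno);" is the old xmalloc
call with the cast and the element size made explicit.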
static hashval_t
som_hash (const void *x)
{
- const struct subregs_of_mode_node *a = x;
+ const struct subregs_of_mode_node *const a =
+ (const struct subregs_of_mode_node *) x;
return a->block;
}
static int
som_eq (const void *x, const void *y)
{
- const struct subregs_of_mode_node *a = x;
- const struct subregs_of_mode_node *b = y;
+ const struct subregs_of_mode_node *const a =
+ (const struct subregs_of_mode_node *) x;
+ const struct subregs_of_mode_node *const b =
+ (const struct subregs_of_mode_node *) y;
return a->block == b->block;
}
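The hashtab callbacks show the -Wcast-qual half of the patch: the
incoming const void * must be cast to a pointer-to-const, never to a
plain pointer, or the cast would silently strip the qualifier.  A
sketch of the shape (struct foo and its key field are hypothetical):

    static hashval_t
    foo_hash (const void *x)
    {
      /* Casting to "const struct foo *" keeps -Wcast-qual quiet;
         the second const, on the pointer itself, is extra safety.  */
      const struct foo *const f = (const struct foo *) x;
      return f->key;
    }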
dummy.block = regno & -8;
slot = htab_find_slot_with_hash (subregs_of_mode, &dummy,
dummy.block, INSERT);
- node = *slot;
+ node = (struct subregs_of_mode_node *) *slot;
if (node == NULL)
{
node = XCNEW (struct subregs_of_mode_node);
gcc_assert (subregs_of_mode);
dummy.block = regno & -8;
- node = htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
+ node = (struct subregs_of_mode_node *)
+ htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
if (node == NULL)
return;
gcc_assert (subregs_of_mode);
dummy.block = regno & -8;
- node = htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
+ node = (struct subregs_of_mode_node *)
+ htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
if (node == NULL)
return false;
if (!reg_set_in_bb)
{
max_reg_computed = max = max_reg_num ();
- reg_set_in_bb = xcalloc (max, sizeof (*reg_set_in_bb));
+ reg_set_in_bb = XCNEWVEC (basic_block, max);
FOR_EACH_BB (bb)
FOR_BB_INSNS (bb, p)
memset (tick, 0, sizeof tick);
gcc_obstack_init (&rename_obstack);
- first_obj = obstack_alloc (&rename_obstack, 0);
+ first_obj = XOBNEWVAR (&rename_obstack, char, 0);
FOR_EACH_BB (bb)
{
{
if (type == OP_OUT)
{
- struct du_chain *this
- = obstack_alloc (&rename_obstack, sizeof (struct du_chain));
+ struct du_chain *this = XOBNEW (&rename_obstack, struct du_chain);
this->next_use = 0;
this->next_chain = open_chains;
this->loc = loc;
be replaced with, terminate the chain. */
if (cl != NO_REGS)
{
- this = obstack_alloc (&rename_obstack, sizeof (struct du_chain));
+ this = XOBNEW (&rename_obstack, struct du_chain);
this->next_use = 0;
this->next_chain = (*p)->next_chain;
this->loc = loc;
static void
kill_clobbered_value (rtx x, const_rtx set, void *data)
{
- struct value_data *vd = data;
+ struct value_data *const vd = (struct value_data *) data;
if (GET_CODE (set) == CLOBBER)
kill_value (x, vd);
}
static void
kill_set_value (rtx x, const_rtx set, void *data)
{
- struct value_data *vd = data;
+ struct value_data *const vd = (struct value_data *) data;
if (GET_CODE (set) != CLOBBER)
{
kill_value (x, vd);
kill_autoinc_value (rtx *px, void *data)
{
rtx x = *px;
- struct value_data *vd = data;
+ struct value_data *const vd = (struct value_data *) data;
if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
{
df_grow_reg_info ();
gcc_assert (!regstat_n_sets_and_refs);
- regstat_n_sets_and_refs = xmalloc (max_regno * sizeof (struct regstat_n_sets_and_refs_t));
+ regstat_n_sets_and_refs = XNEWVEC (struct regstat_n_sets_and_refs_t, max_regno);
for (i = 0; i < max_regno; i++)
{
setjmp_crosses = BITMAP_ALLOC (&df_bitmap_obstack);
max_regno = max_reg_num ();
reg_info_p_size = max_regno;
- reg_info_p = xcalloc (max_regno, sizeof (struct reg_info_t));
+ reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
FOR_EACH_BB (bb)
{
timevar_push (TV_REG_STATS);
max_regno = max_reg_num ();
reg_info_p_size = max_regno;
- reg_info_p = xcalloc (max_regno, sizeof (struct reg_info_t));
+ reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
FOR_EACH_BB (bb)
{
/* Initialize obstack for our rtl allocation. */
gcc_obstack_init (&reload_obstack);
- reload_startobj = obstack_alloc (&reload_obstack, 0);
+ reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
INIT_REG_SET (&spilled_pseudos);
INIT_REG_SET (&pseudos_counted);
if (unused_insn_chains == 0)
{
- c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
+ c = XOBNEW (&reload_obstack, struct insn_chain);
INIT_REG_SET (&c->live_throughout);
INIT_REG_SET (&c->dead_or_set);
}
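The obstack wrappers are analogous; libiberty.h defines them roughly
as:

    #define XOBNEW(O, T)       ((T *) obstack_alloc ((O), sizeof (T)))
    #define XOBNEWVEC(O, T, N) ((T *) obstack_alloc ((O), \
                                                     sizeof (T) * (N)))
    #define XOBNEWVAR(O, T, S) ((T *) obstack_alloc ((O), (S)))

XOBNEWVAR with a size of 0, as in the first_obj and reload_startobj
changes, preserves the old idiom of allocating an empty object purely
to bookmark the obstack for a later obstack_free.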
return true;
/* First determine which blocks can reach exit via normal paths. */
- tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
FOR_EACH_BB (bb)
bb->flags &= ~BB_REACHABLE;
failure = 0;
- reload_firstobj = obstack_alloc (&reload_obstack, 0);
+ reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
/* Make sure that the last insn in the chain
is not something that needs reloading. */
{
save_call_clobbered_regs ();
/* That might have allocated new insn_chain structures. */
- reload_firstobj = obstack_alloc (&reload_obstack, 0);
+ reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
}
calculate_needs_all_insns (global);
copy_reloads (struct insn_chain *chain)
{
chain->n_reloads = n_reloads;
- chain->rld = obstack_alloc (&reload_obstack,
- n_reloads * sizeof (struct reload));
+ chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
- reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
+ reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
}
/* Walk the chain of insns, and determine for each whether it needs reloads
something_needs_elimination = 0;
- reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
+ reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
for (chain = reload_insn_chain; chain != 0; chain = next)
{
rtx insn = chain->insn;
#endif
if (!reg_eliminate)
- reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
+ reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
/* Does this function require a frame pointer? */
tmp_bucket.hash = compute_hash (insn);
/* Select the hash group. */
- bucket = htab_find (hash_buckets, &tmp_bucket);
+ bucket = (p_hash_bucket) htab_find (hash_buckets, &tmp_bucket);
if (bucket)
{
tmp_elem.insn = insn;
/* Select the insn. */
- elem = htab_find (bucket->seq_candidates, &tmp_elem);
+ elem = (p_hash_elem) htab_find (bucket->seq_candidates, &tmp_elem);
/* If INSN is parsed the cost will be the cached length. */
if (elem)
tmp_bucket.hash = compute_hash (insn);
/* Select the hash group. */
- bucket = htab_find (hash_buckets, &tmp_bucket);
+ bucket = (p_hash_bucket) htab_find (hash_buckets, &tmp_bucket);
if (!bucket)
{
{
const unsigned int size = rtx_size (orig);
rtx const copy = (rtx) ggc_alloc_zone_pass_stat (size, &rtl_zone);
- return memcpy (copy, orig, size);
+ return (rtx) memcpy (copy, orig, size);
}
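memcpy returns its destination as void *, so returning its result
directly needs a cast even though "copy" already has type rtx.  A
cast-free alternative (a sketch, not what the patch does) would split
the statement:

    memcpy (copy, orig, size);
    return copy;

The one-line cast keeps the original code shape instead.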
\f
/* Nonzero when we are generating CONCATs. */
static void
parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
- struct parms_set_data *d = data;
+ struct parms_set_data *const d = (struct parms_set_data *) data;
if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
&& TEST_HARD_REG_BIT (d->regs, REGNO (x)))
{
bytes = size * sizeof (SBITMAP_ELT_TYPE);
amt = (sizeof (struct simple_bitmap_def)
+ bytes - sizeof (SBITMAP_ELT_TYPE));
- bmap = xmalloc (amt);
+ bmap = (sbitmap) xmalloc (amt);
bmap->n_bits = n_elms;
bmap->size = size;
bmap->popcount = NULL;
sbitmap_alloc_with_popcount (unsigned int n_elms)
{
sbitmap const bmap = sbitmap_alloc (n_elms);
- bmap->popcount = xmalloc (bmap->size * sizeof (unsigned char));
+ bmap->popcount = XNEWVEC (unsigned char, bmap->size);
return bmap;
}
{
amt = (sizeof (struct simple_bitmap_def)
+ bytes - sizeof (SBITMAP_ELT_TYPE));
- bmap = xrealloc (bmap, amt);
+ bmap = (sbitmap) xrealloc (bmap, amt);
if (bmap->popcount)
- bmap->popcount = xrealloc (bmap->popcount,
- size * sizeof (unsigned char));
+ bmap->popcount = XRESIZEVEC (unsigned char, bmap->popcount, size);
}
if (n_elms > bmap->n_bits)
}
amt = vector_bytes + (n_vecs * elm_bytes);
- bitmap_vector = xmalloc (amt);
+ bitmap_vector = (sbitmap *) xmalloc (amt);
for (i = 0, offset = vector_bytes; i < n_vecs; i++, offset += elm_bytes)
{
/* insn2 not guaranteed to be a 1 base reg load. */
continue;
- for (bb = last_block; bb; bb = bb->aux)
+ for (bb = last_block; bb; bb = (basic_block) bb->aux)
if (insn2_block == bb)
break;
bb = earliest_block_with_similiar_load (last_block, insn);
if (bb)
{
- bb = bb->aux;
+ bb = (basic_block) bb->aux;
if (!bb)
break;
prev = BB_END (bb);
/* Maintain the invariant that bb->aux is clear after use. */
while (last_block)
{
- bb = last_block->aux;
+ bb = (basic_block) last_block->aux;
last_block->aux = NULL;
last_block = bb;
}
extend_regions_p = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS) > 0;
if (extend_regions_p)
{
- degree1 = xmalloc (last_basic_block * sizeof (int));
+ degree1 = XNEWVEC (int, last_basic_block);
extended_rgn_header = sbitmap_alloc (last_basic_block);
sbitmap_zero (extended_rgn_header);
}
if (nr_blocks > a_sz)
{
- a = xrealloc (a, nr_blocks * sizeof (*a));
+ a = XRESIZEVEC (int, a, nr_blocks);
do
a[a_sz++] = 0;
while (a_sz != nr_blocks);
max_iter = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS);
- max_hdr = xmalloc (last_basic_block * sizeof (*max_hdr));
+ max_hdr = XNEWVEC (int, last_basic_block);
- order = xmalloc (last_basic_block * sizeof (*order));
+ order = XNEWVEC (int, last_basic_block);
post_order_compute (order, false, false);
for (i = nblocks - 1; i >= 0; i--)
current_blocks = RGN_BLOCKS (rgn);
/* See comments in add_block1, for what reasons we allocate +1 element. */
- ebb_head = xrealloc (ebb_head, (current_nr_blocks + 1) * sizeof (*ebb_head));
+ ebb_head = XRESIZEVEC (int, ebb_head, current_nr_blocks + 1);
for (bb = 0; bb <= current_nr_blocks; bb++)
ebb_head[bb] = current_blocks + bb;
static int
eq_descriptor_pre_extension (const void *p1, const void *p2)
{
- const struct see_pre_extension_expr *extension1 = p1;
- const struct see_pre_extension_expr *extension2 = p2;
+ const struct see_pre_extension_expr *const extension1 =
+ (const struct see_pre_extension_expr *) p1;
+ const struct see_pre_extension_expr *const extension2 =
+ (const struct see_pre_extension_expr *) p2;
rtx set1 = single_set (extension1->se_insn);
rtx set2 = single_set (extension2->se_insn);
rtx rhs1, rhs2;
static hashval_t
hash_descriptor_pre_extension (const void *p)
{
- const struct see_pre_extension_expr *extension = p;
+ const struct see_pre_extension_expr *const extension =
+ (const struct see_pre_extension_expr *) p;
rtx set = single_set (extension->se_insn);
rtx rhs;
static void
hash_del_pre_extension (void *p)
{
- struct see_pre_extension_expr *extension = p;
+ struct see_pre_extension_expr *const extension =
+ (struct see_pre_extension_expr *) p;
struct see_occr *curr_occr = extension->antic_occr;
struct see_occr *next_occr = NULL;
static int
eq_descriptor_properties (const void *p1, const void *p2)
{
- const struct see_register_properties *curr_prop1 = p1;
- const struct see_register_properties *curr_prop2 = p2;
+ const struct see_register_properties *const curr_prop1 =
+ (const struct see_register_properties *) p1;
+ const struct see_register_properties *const curr_prop2 =
+ (const struct see_register_properties *) p2;
return curr_prop1->regno == curr_prop2->regno;
}
static hashval_t
hash_descriptor_properties (const void *p)
{
- const struct see_register_properties *curr_prop = p;
+ const struct see_register_properties *const curr_prop =
+ (const struct see_register_properties *) p;
return curr_prop->regno;
}
static void
hash_del_properties (void *p)
{
- struct see_register_properties *curr_prop = p;
+ struct see_register_properties *const curr_prop =
+ (struct see_register_properties *) p;
free (curr_prop);
}
/* This is the first time this extension instruction is encountered. Store
it in the hash. */
{
- (*slot_pre_exp) = xmalloc (sizeof (struct see_pre_extension_expr));
+ (*slot_pre_exp) = XNEW (struct see_pre_extension_expr);
(*slot_pre_exp)->se_insn = extension;
(*slot_pre_exp)->bitmap_index =
(htab_elements (see_pre_extension_hash) - 1);
}
/* Allocate web entries array for the union-find data structure. */
- def_entry = xcalloc (defs_num, sizeof (struct web_entry));
- use_entry = xcalloc (uses_num, sizeof (struct web_entry));
+ def_entry = XCNEWVEC (struct web_entry, defs_num);
+ use_entry = XCNEWVEC (struct web_entry, uses_num);
/* Allocate an array of splay trees.
One splay tree for each basic block. */
- see_bb_splay_ar = xcalloc (last_bb, sizeof (splay_tree));
+ see_bb_splay_ar = XCNEWVEC (splay_tree, last_bb);
/* Allocate an array of hashes.
One hash for each basic block. */
- see_bb_hash_ar = xcalloc (last_bb, sizeof (htab_t));
+ see_bb_hash_ar = XCNEWVEC (htab_t, last_bb);
/* Allocate the extension hash. It will hold the extensions that we want
to PRE. */
static int
see_print_register_properties (void **slot, void *b ATTRIBUTE_UNUSED)
{
- struct see_register_properties *prop = *slot;
+ const struct see_register_properties *const prop =
+ (const struct see_register_properties *) *slot;
gcc_assert (prop);
fprintf (dump_file, "Property found for register %d\n", prop->regno);
static int
see_print_pre_extension_expr (void **slot, void *b ATTRIBUTE_UNUSED)
{
- struct see_pre_extension_expr *pre_extension = *slot;
+ const struct see_pre_extension_expr *const pre_extension =
+ (const struct see_pre_extension_expr *) *slot;
gcc_assert (pre_extension
&& pre_extension->se_insn
static int
see_delete_merged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
if (dump_file)
{
static int
see_delete_unmerged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
if (dump_file)
{
static int
see_emit_use_extension (void **slot, void *b)
{
- rtx use_se = *slot;
+ rtx use_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
if (INSN_DELETED_P (use_se))
static int
see_pre_delete_extension (void **slot, void *b ATTRIBUTE_UNUSED)
{
- struct see_pre_extension_expr *expr = *slot;
+ struct see_pre_extension_expr *const expr =
+ (struct see_pre_extension_expr *) *slot;
struct see_occr *occr;
int indx = expr->bitmap_index;
static int
see_map_extension (void **slot, void *b)
{
- struct see_pre_extension_expr *expr = *slot;
- struct see_pre_extension_expr **index_map =
+ struct see_pre_extension_expr *const expr =
+ (struct see_pre_extension_expr *) *slot;
+ struct see_pre_extension_expr **const index_map =
(struct see_pre_extension_expr **) b;
index_map[expr->bitmap_index] = expr;
bool did_insert = false;
int i;
- index_map = xcalloc (pre_extension_num,
- sizeof (struct see_pre_extension_expr *));
+ index_map = XCNEWVEC (struct see_pre_extension_expr *, pre_extension_num);
if (dump_file)
fprintf (dump_file,
static int
see_analyze_merged_def_local_prop (void **slot, void *b)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
rtx ref = curr_ref_s->insn;
struct see_pre_extension_expr *extension_expr;
/* Set the available bit. */
SET_BIT (comp[bb_num], indx);
/* Record the available occurrence. */
- curr_occr = xmalloc (sizeof (struct see_occr));
+ curr_occr = XNEW (struct see_occr);
curr_occr->next = NULL;
curr_occr->insn = def_se;
curr_occr->block_num = bb_num;
static int
see_analyze_unmerged_def_local_prop (void **slot, void *b)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
rtx ref = curr_ref_s->insn;
struct see_pre_extension_expr *extension_expr;
see_analyze_use_local_prop (void **slot, void *b)
{
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
- rtx use_se = *slot;
+ rtx use_se = (rtx) *slot;
rtx ref = curr_ref_s->insn;
rtx dest_extension_reg = see_get_extension_reg (use_se, 1);
struct see_pre_extension_expr *extension_expr;
/* Set the anticipatable bit. */
SET_BIT (antloc[bb_num], indx);
/* Record the anticipatable occurrence. */
- curr_occr = xmalloc (sizeof (struct see_occr));
+ curr_occr = XNEW (struct see_occr);
curr_occr->next = NULL;
curr_occr->insn = use_se;
curr_occr->block_num = bb_num;
/* Set the available bit. */
SET_BIT (comp[bb_num], indx);
/* Record the available occurrence. */
- curr_occr = xmalloc (sizeof (struct see_occr));
+ curr_occr = XNEW (struct see_occr);
curr_occr->next = NULL;
curr_occr->insn = use_se;
curr_occr->block_num = bb_num;
/* Reset the killed bit. */
RESET_BIT (ae_kill[bb_num], indx);
/* Record the available occurrence. */
- curr_occr = xmalloc (sizeof (struct see_occr));
+ curr_occr = XNEW (struct see_occr);
curr_occr->next = NULL;
curr_occr->insn = use_se;
curr_occr->block_num = bb_num;
static int
see_set_prop_merged_def (void **slot, void *b)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
rtx insn = curr_ref_s->insn;
rtx dest_extension_reg = see_get_extension_reg (def_se, 1);
else
{
/* Property doesn't exist yet. */
- curr_prop = xmalloc (sizeof (struct see_register_properties));
+ curr_prop = XNEW (struct see_register_properties);
curr_prop->regno = REGNO (dest_extension_reg);
curr_prop->last_def = ref_luid;
curr_prop->first_se_before_any_def = -1;
static int
see_set_prop_unmerged_def (void **slot, void *b)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
rtx insn = curr_ref_s->insn;
rtx dest_extension_reg = see_get_extension_reg (def_se, 1);
else
{
/* Property doesn't exist yet. */
- curr_prop = xmalloc (sizeof (struct see_register_properties));
+ curr_prop = XNEW (struct see_register_properties);
curr_prop->regno = REGNO (dest_extension_reg);
curr_prop->last_def = ref_luid;
curr_prop->first_se_before_any_def = -1;
static int
see_set_prop_unmerged_use (void **slot, void *b)
{
- rtx use_se = *slot;
+ rtx use_se = (rtx) *slot;
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
rtx insn = curr_ref_s->insn;
rtx dest_extension_reg = see_get_extension_reg (use_se, 1);
else
{
/* Property doesn't exist yet. Create a new one. */
- curr_prop = xmalloc (sizeof (struct see_register_properties));
+ curr_prop = XNEW (struct see_register_properties);
curr_prop->regno = REGNO (dest_extension_reg);
curr_prop->last_def = -1;
curr_prop->first_se_before_any_def = ref_luid;
static int
see_print_one_extension (void **slot, void *b ATTRIBUTE_UNUSED)
{
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
gcc_assert (def_se && INSN_P (def_se));
print_rtl_single (dump_file, def_se);
see_merge_one_use_extension (void **slot, void *b)
{
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
- rtx use_se = *slot;
+ rtx use_se = (rtx) *slot;
rtx ref = curr_ref_s->merged_insn
? curr_ref_s->merged_insn : curr_ref_s->insn;
rtx merged_ref_next = curr_ref_s->merged_insn
see_merge_one_def_extension (void **slot, void *b)
{
struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
- rtx def_se = *slot;
+ rtx def_se = (rtx) *slot;
/* If the original insn was already merged with an extension before,
take the merged one. */
rtx ref = curr_ref_s->merged_insn
tree. */
if (!stn)
{
- ref_s = xmalloc (sizeof (struct see_ref_s));
+ ref_s = XNEW (struct see_ref_s);
ref_s->luid = DF_INSN_LUID (ref_insn);
ref_s->insn = ref_insn;
ref_s->merged_insn = NULL;
}
DF_REF_ID (ref) = index;
- curr_entry_extra_info = xmalloc (sizeof (struct see_entry_extra_info));
+ curr_entry_extra_info = XNEW (struct see_entry_extra_info);
curr_entry_extra_info->relevancy = et;
curr_entry_extra_info->local_relevancy = et;
use_entry[index].extra_info = curr_entry_extra_info;
unsigned int index)
{
struct see_entry_extra_info *curr_entry_extra_info
- = xmalloc (sizeof (struct see_entry_extra_info));
+ = XNEW (struct see_entry_extra_info);
curr_entry_extra_info->relevancy = et;
curr_entry_extra_info->local_relevancy = et;
static hashval_t
hash_statistics_hash (const void *p)
{
- statistics_counter_t *c = (statistics_counter_t *)p;
+ const statistics_counter_t *const c = (const statistics_counter_t *)p;
return htab_hash_string (c->id) + c->val;
}
static int
hash_statistics_eq (const void *p, const void *q)
{
- statistics_counter_t *c1 = (statistics_counter_t *)p;
- statistics_counter_t *c2 = (statistics_counter_t *)q;
+ const statistics_counter_t *const c1 = (const statistics_counter_t *)p;
+ const statistics_counter_t *const c2 = (const statistics_counter_t *)q;
return c1->val == c2->val && strcmp (c1->id, c2->id) == 0;
}
static void
hash_statistics_free (void *p)
{
- free ((void *)((statistics_counter_t *)p)->id);
+ free (CONST_CAST (char *, ((statistics_counter_t *)p)->id));
free (p);
}
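CONST_CAST is the sanctioned way to strip a qualifier that really must
go: the id field is const char * because the table never modifies it,
but free wants a mutable pointer.  system.h builds CONST_CAST on top of
CONST_CAST2 so the conversion avoids a bare qualifier-dropping cast;
the exact expansion varies by compiler.  Usage sketch:

    const char *id = xstrdup ("name");
    ...
    free (CONST_CAST (char *, id));  /* Deliberately drop const.  */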
if (idx >= nr_statistics_hashes)
{
- statistics_hashes = xrealloc (statistics_hashes,
- (idx + 1) * sizeof (htab_t));
+ statistics_hashes = XRESIZEVEC (struct htab *, statistics_hashes,
+ idx + 1);
memset (statistics_hashes + nr_statistics_hashes, 0,
(idx + 1 - nr_statistics_hashes) * sizeof (htab_t));
nr_statistics_hashes = idx + 1;
*p, operand_num);
/* Make a copy of the constraint. */
- buf = alloca (c_len + 1);
+ buf = XALLOCAVEC (char, c_len + 1);
strcpy (buf, constraint);
/* Swap the first character and the `=' or `+'. */
buf[p - constraint] = buf[0];
void *data)
{
tree decl = *declp;
- const HARD_REG_SET *regs = data;
+ const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
if (TREE_CODE (decl) == VAR_DECL)
{
tree t;
int i;
/* Vector of RTX's of evaluated output operands. */
- rtx *output_rtx = alloca (noutputs * sizeof (rtx));
- int *inout_opnum = alloca (noutputs * sizeof (int));
- rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
- enum machine_mode *inout_mode
- = alloca (noutputs * sizeof (enum machine_mode));
- const char **constraints
- = alloca ((noutputs + ninputs) * sizeof (const char *));
+ rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
+ int *inout_opnum = XALLOCAVEC (int, noutputs);
+ rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
+ enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
+ const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
int old_generating_concat_p = generating_concat_p;
/* An ASM with no outputs needs to be treated as volatile, for now. */
int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
unsigned int bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
- rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
+ rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
rtx result_val = expand_normal (retval_rhs);
enum machine_mode tmpmode, result_reg_mode;
static int
case_bit_test_cmp (const void *p1, const void *p2)
{
- const struct case_bit_test *d1 = p1;
- const struct case_bit_test *d2 = p2;
+ const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
+ const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
if (d2->bits != d1->bits)
return d2->bits - d1->bits;
/* Get table of labels to jump to, in order of case index. */
ncases = tree_low_cst (range, 0) + 1;
- labelvec = alloca (ncases * sizeof (rtx));
+ labelvec = XALLOCAVEC (rtx, ncases);
memset (labelvec, 0, ncases * sizeof (rtx));
for (n = case_list; n; n = n->right)
record_layout_info
start_record_layout (tree t)
{
- record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
+ record_layout_info rli = XNEW (struct record_layout_info_s);
rli->t = t;
if (length == 1 && ISDIGIT (contents[0]))
return digit_string (contents[0] - '0');
- result = ggc_alloc (length + 1);
+ result = GGC_NEWVAR (char, length + 1);
memcpy (result, contents, length + 1);
return (const char *) result;
}
void
gt_pch_n_S (const void *x)
{
- gt_pch_note_object ((void *)x, (void *)x, >_pch_p_S,
- gt_types_enum_last);
+ gt_pch_note_object (CONST_CAST (void *, x), CONST_CAST (void *, x),
+ >_pch_p_S, gt_types_enum_last);
}
\f
/* Handle saving and restoring the string pool for PCH. */
void
gt_pch_save_stringpool (void)
{
- spd = ggc_alloc (sizeof (*spd));
+ spd = GGC_NEW (struct string_pool_data);
spd->nslots = ident_hash->nslots;
spd->nelements = ident_hash->nelements;
- spd->entries = ggc_alloc (sizeof (spd->entries[0]) * spd->nslots);
+ spd->entries = GGC_NEWVEC (struct ht_identifier *, spd->nslots);
memcpy (spd->entries, ident_hash->entries,
spd->nslots * sizeof (spd->entries[0]));
}
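The GC-side macros mirror the libiberty ones; ggc.h defines them
approximately as:

    #define GGC_NEW(T)        ((T *) ggc_alloc (sizeof (T)))
    #define GGC_NEWVEC(T, N)  ((T *) ggc_alloc (sizeof (T) * (N)))
    #define GGC_NEWVAR(T, S)  ((T *) ggc_alloc ((S)))

GGC_NEWVAR is the escape hatch for a size that is not a simple
multiple of sizeof (T), as in the string allocation above.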
static hashval_t
hash_stmt_vertex_info (const void *elt)
{
- struct rdg_vertex_info *rvi = (struct rdg_vertex_info *) elt;
- tree stmt = rvi->stmt;
+ const struct rdg_vertex_info *const rvi =
+ (const struct rdg_vertex_info *) elt;
+ const_tree stmt = rvi->stmt;
return htab_hash_pointer (stmt);
}
static int
have_similar_memory_accesses_1 (const void *s1, const void *s2)
{
- return have_similar_memory_accesses ((tree) s1, (tree) s2);
+ return have_similar_memory_accesses (CONST_CAST_TREE ((const_tree) s1),
+ CONST_CAST_TREE ((const_tree) s2));
}
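CONST_CAST_TREE specializes the same mechanism for trees; tree.h
defines it essentially as

    #define CONST_CAST_TREE(X)  CONST_CAST2 (tree, const_tree, (X))

letting a const void * hashtab element be recovered as a mutable tree
where the callee's prototype predates const_tree.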
/* Helper function for the hashtab. */
static hashval_t
ref_base_address_1 (const void *s)
{
- tree stmt = (tree) s;
+ tree stmt = CONST_CAST_TREE ((const_tree) s);
unsigned i;
VEC (data_ref_loc, heap) *refs;
data_ref_loc *ref;
static hashval_t
name_to_bb_hash (const void *p)
{
- tree n = ((struct name_to_bb *)p)->ssa_name;
- return htab_hash_pointer (n) ^ ((struct name_to_bb *)p)->store;
+ const_tree n = ((const struct name_to_bb *)p)->ssa_name;
+ return htab_hash_pointer (n) ^ ((const struct name_to_bb *)p)->store;
}
/* The equality function of *P1 and *P2. SSA_NAMEs are shared, so