+2008-06-25  Kaveh R. Ghazi  <ghazi@caip.rutgers.edu>
+
+ * alias.c (record_alias_subset, init_alias_analysis): Fix
+ -Wc++-compat and/or -Wcast-qual warnings.
+ * attribs.c (lookup_attribute_spec): Likewise.
+ * bb-reorder.c (find_traces, rotate_loop, find_traces_1_round,
+ copy_bb, connect_traces,
+ find_rarely_executed_basic_blocks_and_crossing_edges): Likewise.
+ * bt-load.c (find_btr_def_group, add_btr_def, new_btr_user,
+ note_btr_set, migrate_btr_defs): Likewise.
+ * builtins.c (result_vector, expand_builtin_memcpy,
+ expand_builtin_mempcpy_args, expand_builtin_strncpy,
+ builtin_memset_read_str, expand_builtin_printf,
+ fold_builtin_memchr, rewrite_call_expr, fold_builtin_printf):
+ Likewise.
+ * caller-save.c (mark_set_regs): Likewise.
+ * calls.c (expand_call, emit_library_call_value_1): Likewise.
+ * cgraph.c (cgraph_edge): Likewise.
+ * combine.c (likely_spilled_retval_1): Likewise.
+ * coverage.c (htab_counts_entry_hash, htab_counts_entry_eq,
+ htab_counts_entry_del, get_coverage_counts): Likewise.
+ * cselib.c (new_elt_list, new_elt_loc_list, entry_and_rtx_equal_p,
+ new_cselib_val): Likewise.
+ * dbgcnt.c (dbg_cnt_process_opt): Likewise.
+ * dbxout.c (dbxout_init, dbxout_type, output_used_types_helper):
+ Likewise.
+ * df-core.c (df_compact_blocks): Likewise.
+ * df-problems.c (df_grow_bb_info, df_chain_create): Likewise.
+ * df-scan.c (df_grow_reg_info, df_ref_create,
+ df_insn_create_insn_record, df_insn_rescan, df_notes_rescan,
+ df_ref_compare, df_ref_create_structure, df_bb_refs_record,
+ df_record_entry_block_defs, df_record_exit_block_uses,
+ df_bb_verify): Likewise.
+ * df.h (DF_REF_EXTRACT_WIDTH_CONST, DF_REF_EXTRACT_OFFSET_CONST,
+ DF_REF_EXTRACT_MODE_CONST): New.
+ * dominance.c (get_immediate_dominator, get_dominated_by,
+ nearest_common_dominator, root_of_dom_tree,
+ iterate_fix_dominators, first_dom_son, next_dom_son): Fix
+ -Wc++-compat and/or -Wcast-qual warnings.
+ * dse.c (clear_alias_set_lookup, get_group_info, gen_rtx_MEM,
+ record_store, replace_read, check_mem_read_rtx, scan_insn,
+ dse_step1, dse_record_singleton_alias_set): Likewise.
+ * dwarf2asm.c (dw2_force_const_mem): Likewise.
+
2008-06-25  Kaveh R. Ghazi  <ghazi@caip.rutgers.edu>

* dwarf2out.c (new_cfi, queue_reg_save, dwarf2out_begin_prologue,
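
The recurring pattern in this patch: ISO C silently converts the void *
returned by malloc/xrealloc/ggc_alloc/alloca/pool_alloc to any object
pointer type, but C++ does not, so building these files with -Wc++-compat
(or a C++ compiler) flags every bare allocation call.  The fix is either a
typed allocation macro that carries the element type, or an explicit cast
at the call site.  Below is a minimal standalone sketch of the typed-macro
idea; the SKETCH_* names are hypothetical stand-ins for libiberty's
XNEWVEC/XRESIZEVEC, whose real definitions live in include/libiberty.h.

#include <stdlib.h>
#include <string.h>

#define SKETCH_XNEWVEC(T, N)       ((T *) malloc (sizeof (T) * (N)))
#define SKETCH_XRESIZEVEC(T, P, N) ((T *) realloc ((P), sizeof (T) * (N)))

int
main (void)
{
  /* Before: v = xrealloc (v, n * sizeof (int));  -- relies on the
     implicit void * conversion that C++ rejects.  After: the cast is
     spelled once, inside the macro, and the element type is visible
     at the call site.  */
  int *v = SKETCH_XNEWVEC (int, 8);
  v = SKETCH_XRESIZEVEC (int, v, 16);
  memset (v, 0, 16 * sizeof (int));
  free (v);
  return 0;
}
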
{
/* Create an entry for the SUPERSET, so that we have a place to
attach the SUBSET. */
- superset_entry = ggc_alloc (sizeof (struct alias_set_entry));
+ superset_entry = GGC_NEW (struct alias_set_entry);
superset_entry->alias_set = superset;
superset_entry->children
= splay_tree_new_ggc (splay_tree_compare_ints);
timevar_push (TV_ALIAS_ANALYSIS);
reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
- reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx));
- reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool));
+ reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
+ reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
/* If we have memory allocated from the previous run, use it. */
if (old_reg_base_value)
attr.str = IDENTIFIER_POINTER (name);
attr.length = IDENTIFIER_LENGTH (name);
extract_attribute_substring (&attr);
- return htab_find_with_hash (attribute_hash, &attr,
- substring_hash (attr.str, attr.length));
+ return (const struct attribute_spec *)
+ htab_find_with_hash (attribute_hash, &attr,
+ substring_hash (attr.str, attr.length));
}
\f
/* Process the attributes listed in ATTRIBUTES and install them in *NODE,
basic_block bb;
fprintf (dump_file, "Trace %d (round %d): ", i + 1,
traces[i].round + 1);
- for (bb = traces[i].first; bb != traces[i].last; bb = bb->aux)
+ for (bb = traces[i].first; bb != traces[i].last; bb = (basic_block) bb->aux)
fprintf (dump_file, "%d [%d] ", bb->index, bb->frequency);
fprintf (dump_file, "%d [%d]\n", bb->index, bb->frequency);
}
}
}
}
- bb = bb->aux;
+ bb = (basic_block) bb->aux;
}
while (bb != back_edge->dest);
the trace. */
if (back_edge->dest == trace->first)
{
- trace->first = best_bb->aux;
+ trace->first = (basic_block) best_bb->aux;
}
else
{
for (prev_bb = trace->first;
prev_bb->aux != back_edge->dest;
- prev_bb = prev_bb->aux)
+ prev_bb = (basic_block) prev_bb->aux)
;
prev_bb->aux = best_bb->aux;
fibheapkey_t key;
edge_iterator ei;
- bb = fibheap_extract_min (*heap);
+ bb = (basic_block) fibheap_extract_min (*heap);
bbd[bb->index].heap = NULL;
bbd[bb->index].node = NULL;
new_size = MAX (last_basic_block, new_bb->index + 1);
new_size = GET_ARRAY_SIZE (new_size);
- bbd = xrealloc (bbd, new_size * sizeof (bbro_basic_block_data));
+ bbd = XRESIZEVEC (bbro_basic_block_data, bbd, new_size);
for (i = array_size; i < new_size; i++)
{
bbd[i].start_of_trace = -1;
basic_block bb;
fprintf (dump_file, "Final order:\n");
- for (bb = traces[0].first; bb; bb = bb->aux)
+ for (bb = traces[0].first; bb; bb = (basic_block) bb->aux)
fprintf (dump_file, "%d ", bb->index);
fprintf (dump_file, "\n");
fflush (dump_file);
if (i == *max_idx)
{
*max_idx *= 2;
- *crossing_edges = xrealloc (*crossing_edges,
- (*max_idx) * sizeof (edge));
+ *crossing_edges = XRESIZEVEC (edge, *crossing_edges, *max_idx);
}
(*crossing_edges)[i++] = e;
}
if (!this_group)
{
- this_group = obstack_alloc (&migrate_btrl_obstack,
- sizeof (struct btr_def_group_s));
+ this_group = XOBNEW (&migrate_btrl_obstack, struct btr_def_group_s);
this_group->src = def_src;
this_group->members = NULL;
this_group->next = *all_btr_def_groups;
btr_def_group *all_btr_def_groups)
{
btr_def this
- = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_def_s));
+ = XOBNEW (&migrate_btrl_obstack, struct btr_def_s);
this->bb = bb;
this->luid = insn_luid;
this->insn = insn;
usep = NULL;
}
use = usep ? *usep : NULL_RTX;
- user = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_user_s));
+ user = XOBNEW (&migrate_btrl_obstack, struct btr_user_s);
user->bb = bb;
user->luid = insn_luid;
user->insn = insn;
static void
note_btr_set (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
- defs_uses_info *info = data;
+ defs_uses_info *info = (defs_uses_info *) data;
int regno, end_regno;
if (!REG_P (dest))
first_btr = reg;
}
- btrs_live = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
- btrs_live_at_end = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
+ btrs_live = XCNEWVEC (HARD_REG_SET, n_basic_blocks);
+ btrs_live_at_end = XCNEWVEC (HARD_REG_SET, n_basic_blocks);
build_btr_def_use_webs (all_btr_defs);
while (!fibheap_empty (all_btr_defs))
{
- btr_def def = fibheap_extract_min (all_btr_defs);
+ btr_def def = (btr_def) fibheap_extract_min (all_btr_defs);
int min_cost = -fibheap_min_key (all_btr_defs);
if (migrate_btr_def (def, min_cost))
{
int regno, size, align, nelts;
enum machine_mode mode;
rtx reg, mem;
- rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+ rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
size = nelts = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
&& GET_CODE (len_rtx) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align, false, 0);
+ CONST_CAST (char *, src_str),
+ dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
&& GET_CODE (len_rtx) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = get_memory_rtx (dest, len);
set_mem_align (dest_mem, dest_align);
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align,
- false, endp);
+ CONST_CAST (char *, src_str),
+ dest_align, false, endp);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
if (!p || dest_align == 0 || !host_integerp (len, 1)
|| !can_store_by_pieces (tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false))
+ CONST_CAST (char *, p),
+ dest_align, false))
return NULL_RTX;
dest_mem = get_memory_rtx (dest, len);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false, 0);
+ CONST_CAST (char *, p), dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
enum machine_mode mode)
{
const char *c = (const char *) data;
- char *p = alloca (GET_MODE_SIZE (mode));
+ char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
memset (p, *c, GET_MODE_SIZE (mode));
if (size == 1)
return (rtx) data;
- p = alloca (size);
+ p = XALLOCAVEC (char, size);
memset (p, 1, size);
coeff = c_readstr (p, mode);
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, fmt_str, len - 1);
newstr[len - 1] = 0;
arg = build_string_literal (len, newstr);
if (target_char_cast (arg2, &c))
return NULL_TREE;
- r = memchr (p1, c, tree_low_cst (len, 1));
+ r = (char *) memchr (p1, c, tree_low_cst (len, 1));
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
int i, j;
va_list ap;
- buffer = alloca (nargs * sizeof (tree));
+ buffer = XALLOCAVEC (tree, nargs);
va_start (ap, n);
for (i = 0; i < n; i++)
buffer[i] = va_arg (ap, tree);
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, str, len - 1);
newstr[len - 1] = 0;
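
The builtins.c and calls.c hunks above all make the same move for stack
allocation: alloca also returns void *, so each bare call is wrapped in a
typed macro.  A compilable sketch of the idea, with SKETCH_XALLOCAVEC
standing in for libiberty's XALLOCAVEC (the real definition differs in
detail):

#include <alloca.h>
#include <stdio.h>
#include <string.h>

#define SKETCH_XALLOCAVEC(T, N) ((T *) alloca (sizeof (T) * (N)))

int
main (void)
{
  size_t len = sizeof "hello\n";
  /* Before: char *newstr = alloca (len);  After: the element type and
     count are explicit, and the void * conversion is gone.  */
  char *newstr = SKETCH_XALLOCAVEC (char, len);
  memcpy (newstr, "hello\n", len - 1);
  newstr[len - 1] = '\0';
  fputs (newstr, stdout);
  return 0;
}
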
mark_set_regs (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
int regno, endregno, i;
- HARD_REG_SET *this_insn_sets = data;
+ HARD_REG_SET *this_insn_sets = (HARD_REG_SET *) data;
if (GET_CODE (reg) == SUBREG)
{
n_named_args = num_actuals;
/* Make a vector to hold all the information about each arg. */
- args = alloca (num_actuals * sizeof (struct arg_data));
+ args = XALLOCAVEC (struct arg_data, num_actuals);
memset (args, 0, num_actuals * sizeof (struct arg_data));
/* Build up entries in the ARGS array, compute the size of the
of the full argument passing conventions to limit complexity here since
library functions shouldn't have many args. */
- argvec = alloca ((nargs + 1) * sizeof (struct arg));
+ argvec = XALLOCAVEC (struct arg, nargs + 1);
memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
int n = 0;
if (node->call_site_hash)
- return htab_find_with_hash (node->call_site_hash, call_stmt,
- htab_hash_pointer (call_stmt));
+ return (struct cgraph_edge *)
+ htab_find_with_hash (node->call_site_hash, call_stmt,
+ htab_hash_pointer (call_stmt));
/* This loop may turn out to be performance problem. In such case adding
hashtables into call nodes with very many edges is probably best
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
- struct likely_spilled_retval_info *info = data;
+ struct likely_spilled_retval_info *const info =
+ (struct likely_spilled_retval_info *) data;
unsigned regno, nregs;
unsigned new_mask;
static hashval_t
htab_counts_entry_hash (const void *of)
{
- const counts_entry_t *entry = of;
+ const counts_entry_t *const entry = (const counts_entry_t *) of;
return entry->ident * GCOV_COUNTERS + entry->ctr;
}
static int
htab_counts_entry_eq (const void *of1, const void *of2)
{
- const counts_entry_t *entry1 = of1;
- const counts_entry_t *entry2 = of2;
+ const counts_entry_t *const entry1 = (const counts_entry_t *) of1;
+ const counts_entry_t *const entry2 = (const counts_entry_t *) of2;
return entry1->ident == entry2->ident && entry1->ctr == entry2->ctr;
}
static void
htab_counts_entry_del (void *of)
{
- counts_entry_t *entry = of;
+ counts_entry_t *const entry = (counts_entry_t *) of;
free (entry->counts);
free (entry);
elt.ident = current_function_funcdef_no + 1;
elt.ctr = counter;
- entry = htab_find (counts_hash, &elt);
+ entry = (counts_entry_t *) htab_find (counts_hash, &elt);
if (!entry)
{
warning (0, "no coverage for function %qs found", IDENTIFIER_POINTER
new_elt_list (struct elt_list *next, cselib_val *elt)
{
struct elt_list *el;
- el = pool_alloc (elt_list_pool);
+ el = (struct elt_list *) pool_alloc (elt_list_pool);
el->next = next;
el->elt = elt;
return el;
new_elt_loc_list (struct elt_loc_list *next, rtx loc)
{
struct elt_loc_list *el;
- el = pool_alloc (elt_loc_list_pool);
+ el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
el->next = next;
el->loc = loc;
el->setting_insn = cselib_current_insn;
{
struct elt_loc_list *l;
const cselib_val *const v = (const cselib_val *) entry;
- rtx x = (rtx) x_arg;
+ rtx x = CONST_CAST_RTX ((const_rtx) x_arg);
enum machine_mode mode = GET_MODE (x);
gcc_assert (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
static inline cselib_val *
new_cselib_val (unsigned int value, enum machine_mode mode)
{
- cselib_val *e = pool_alloc (cselib_val_pool);
+ cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
gcc_assert (value);
precisely when we can have VALUE RTXen (when cselib is active)
so we don't need to put them in garbage collected memory.
??? Why should a VALUE be an RTX in the first place? */
- e->val_rtx = pool_alloc (value_pool);
+ e->val_rtx = (rtx) pool_alloc (value_pool);
memset (e->val_rtx, 0, RTX_HDR_SIZE);
PUT_CODE (e->val_rtx, VALUE);
PUT_MODE (e->val_rtx, mode);
if (next == NULL || *next != 0)
{
- char *buffer = alloca (arg - start + 2);
+ char *buffer = XALLOCAVEC (char, arg - start + 2);
sprintf (buffer, "%*c", (int)(1 + (arg - start)), '^');
error ("Can not find a valid counter:value pair:");
error ("-fdbg-cnt=%s", start);
const char *mapped_name;
typevec_len = 100;
- typevec = ggc_calloc (typevec_len, sizeof typevec[0]);
+ typevec = GGC_CNEWVEC (struct typeinfo, typevec_len);
/* stabstr_ob contains one string, which will be just fine with
1-byte alignment. */
if (next_type_number == typevec_len)
{
- typevec
- = ggc_realloc (typevec, (typevec_len * 2 * sizeof typevec[0]));
+ typevec = GGC_RESIZEVEC (struct typeinfo, typevec, typevec_len * 2);
memset (typevec + typevec_len, 0, typevec_len * sizeof typevec[0]);
typevec_len *= 2;
}
static int
output_used_types_helper (void **slot, void *data)
{
- tree type = *slot;
- VEC(tree, heap) **types_p = data;
+ tree type = (tree) *slot;
+ VEC(tree, heap) **types_p = (VEC(tree, heap) **) data;
if ((TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE
void **problem_temps;
int size = last_basic_block * sizeof (void *);
bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
- problem_temps = xmalloc (size);
+ problem_temps = XNEWVAR (void *, size);
for (p = 0; p < df->num_problems_defined; p++)
{
if (dflow->block_info_size < new_size)
{
new_size += new_size / 4;
- dflow->block_info = xrealloc (dflow->block_info,
- new_size *sizeof (void*));
+ dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size);
memset (dflow->block_info + dflow->block_info_size, 0,
(new_size - dflow->block_info_size) *sizeof (void *));
dflow->block_info_size = new_size;
df_chain_create (struct df_ref *src, struct df_ref *dst)
{
struct df_link *head = DF_REF_CHAIN (src);
- struct df_link *link = pool_alloc (df_chain->block_pool);
+ struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
DF_REF_CHAIN (src) = link;
link->next = head;
if (df->regs_size < new_size)
{
new_size += new_size / 4;
- df->def_regs = xrealloc (df->def_regs,
- new_size *sizeof (struct df_reg_info*));
- df->use_regs = xrealloc (df->use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->eq_use_regs = xrealloc (df->eq_use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->def_info.begin = xrealloc (df->def_info.begin,
- new_size *sizeof (int));
- df->def_info.count = xrealloc (df->def_info.count,
- new_size *sizeof (int));
- df->use_info.begin = xrealloc (df->use_info.begin,
- new_size *sizeof (int));
- df->use_info.count = xrealloc (df->use_info.count,
- new_size *sizeof (int));
+ df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
+ df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
+ df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
+ new_size);
+ df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
+ df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
+ df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
+ df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
df->regs_size = new_size;
}
{
struct df_reg_info *reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->def_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->use_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->eq_use_regs[i] = reg_info;
df->def_info.begin[i] = 0;
{
if (ref_info->refs_size < new_size)
{
- ref_info->refs = xrealloc (ref_info->refs,
- new_size *sizeof (struct df_ref *));
+ ref_info->refs = XRESIZEVEC (struct df_ref *, ref_info->refs, new_size);
memset (ref_info->refs + ref_info->refs_size, 0,
(new_size - ref_info->refs_size) *sizeof (struct df_ref *));
ref_info->refs_size = new_size;
if (DF_INSN_SIZE () < new_size)
{
new_size += new_size / 4;
- df->insns = xrealloc (df->insns,
- new_size *sizeof (struct df_insn_info *));
+ df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
memset (df->insns + df->insns_size, 0,
(new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
DF_INSN_SIZE () = new_size;
ref_rec = *ref_rec_ptr;
if (count)
{
- ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
+ ref_rec = XRESIZEVEC (struct df_ref *, ref_rec, count+2);
*ref_rec_ptr = ref_rec;
ref_rec[count] = ref;
ref_rec[count+1] = NULL;
insn_rec = DF_INSN_INFO_GET (insn);
if (!insn_rec)
{
- insn_rec = pool_alloc (problem_data->insn_pool);
+ insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
DF_INSN_INFO_SET (insn, insn_rec);
}
memset (insn_rec, 0, sizeof (struct df_insn_info));
struct df_insn_info *insn_info = NULL;
basic_block bb = BLOCK_FOR_INSN (insn);
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if ((!df) || (!INSN_P (insn)))
return false;
unsigned int num_deleted;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
df_ref_chain_delete (insn_info->eq_uses);
at ref1. */
if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
{
- if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
- return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
- if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
- if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
- return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
+ if (DF_REF_EXTRACT_OFFSET_CONST (ref1) != DF_REF_EXTRACT_OFFSET_CONST (ref2))
+ return DF_REF_EXTRACT_OFFSET_CONST (ref1) - DF_REF_EXTRACT_OFFSET_CONST (ref2);
+ if (DF_REF_EXTRACT_WIDTH_CONST (ref1) != DF_REF_EXTRACT_WIDTH_CONST (ref2))
+ return DF_REF_EXTRACT_WIDTH_CONST (ref1) - DF_REF_EXTRACT_WIDTH_CONST (ref2);
+ if (DF_REF_EXTRACT_MODE_CONST (ref1) != DF_REF_EXTRACT_MODE_CONST (ref2))
+ return DF_REF_EXTRACT_MODE_CONST (ref1) - DF_REF_EXTRACT_MODE_CONST (ref2);
}
return 0;
}
if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
{
- this_ref = pool_alloc (problem_data->ref_extract_pool);
+ this_ref = (struct df_ref *) pool_alloc (problem_data->ref_extract_pool);
DF_REF_EXTRACT_WIDTH (this_ref) = width;
DF_REF_EXTRACT_OFFSET (this_ref) = offset;
DF_REF_EXTRACT_MODE (this_ref) = mode;
}
else
- this_ref = pool_alloc (problem_data->ref_pool);
+ this_ref = (struct df_ref *) pool_alloc (problem_data->ref_pool);
DF_REF_ID (this_ref) = -1;
DF_REF_REG (this_ref) = reg;
DF_REF_REGNO (this_ref) = regno;
ref_flags |= DF_REF_PARTIAL;
ref_flags |= DF_REF_MW_HARDREG;
- hardreg = pool_alloc (problem_data->mw_reg_pool);
+ hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
hardreg->type = ref_type;
hardreg->flags = ref_flags;
hardreg->mw_reg = reg;
int luid = 0;
struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if (!df)
return;
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
+ collection_rec.def_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
+ collection_rec.use_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
gcc_assert (bb_info);
/* The following two macros may only be applied if one of
DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT is true. */
#define DF_REF_EXTRACT_WIDTH(REF) (((struct df_ref_extract *)(REF))->width)
+#define DF_REF_EXTRACT_WIDTH_CONST(REF) (((const struct df_ref_extract *)(REF))->width)
#define DF_REF_EXTRACT_OFFSET(REF) (((struct df_ref_extract *)(REF))->offset)
+#define DF_REF_EXTRACT_OFFSET_CONST(REF) (((const struct df_ref_extract *)(REF))->offset)
#define DF_REF_EXTRACT_MODE(REF) (((struct df_ref_extract *)(REF))->mode)
+#define DF_REF_EXTRACT_MODE_CONST(REF) (((const struct df_ref_extract *)(REF))->mode)
/* Macros to determine the reference type. */
#define DF_REF_REG_DEF_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_DEF)
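
Why the parallel _CONST accessors: df_ref_compare receives its arguments
effectively read-only, and casting a const-qualified pointer to a plain
struct df_ref_extract * just to read a field is exactly what -Wcast-qual
complains about.  A standalone sketch with hypothetical names
(ref_extract_sketch is not the real struct):

#include <stdio.h>

struct ref_extract_sketch
{
  int width;
};

/* Writable accessor: fine when REF is non-const.  */
#define EXTRACT_WIDTH(REF) \
  (((struct ref_extract_sketch *) (REF))->width)
/* Read-only accessor: keeps the const qualifier, so no -Wcast-qual.  */
#define EXTRACT_WIDTH_CONST(REF) \
  (((const struct ref_extract_sketch *) (REF))->width)

static int
compare_widths (const void *ref1, const void *ref2)
{
  /* EXTRACT_WIDTH (ref1) would cast away const here and warn.  */
  return EXTRACT_WIDTH_CONST (ref1) - EXTRACT_WIDTH_CONST (ref2);
}

int
main (void)
{
  struct ref_extract_sketch a = { 4 }, b = { 8 };
  printf ("%d\n", compare_widths (&a, &b));
  return 0;
}
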
if (!node->father)
return NULL;
- return node->father->data;
+ return (basic_block) node->father->data;
}
/* Set the immediate dominator of the block possibly removing
if (!son)
return NULL;
- VEC_safe_push (basic_block, heap, bbs, son->data);
+ VEC_safe_push (basic_block, heap, bbs, (basic_block) son->data);
for (ason = son->right, n = 1; ason != son; ason = ason->right)
- VEC_safe_push (basic_block, heap, bbs, ason->data);
+ VEC_safe_push (basic_block, heap, bbs, (basic_block) ason->data);
return bbs;
}
if (!bb2)
return bb1;
- return et_nca (bb1->dom[dir_index], bb2->dom[dir_index])->data;
+ return (basic_block) et_nca (bb1->dom[dir_index], bb2->dom[dir_index])->data;
}
static basic_block
root_of_dom_tree (enum cdi_direction dir, basic_block bb)
{
- return et_root (bb->dom[dom_convert_dir_to_idx (dir)])->data;
+ return (basic_block) et_root (bb->dom[dom_convert_dir_to_idx (dir)])->data;
}
/* See the comment in iterate_fix_dominators. Finds the immediate dominators
dom_i = (size_t) *pointer_map_contains (map, dom);
/* Do not include parallel edges to G. */
- if (bitmap_bit_p (g->vertices[dom_i].data, i))
+ if (bitmap_bit_p ((bitmap) g->vertices[dom_i].data, i))
continue;
- bitmap_set_bit (g->vertices[dom_i].data, i);
+ bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i);
add_edge (g, dom_i, i);
}
}
unsigned int dir_index = dom_convert_dir_to_idx (dir);
struct et_node *son = bb->dom[dir_index]->son;
- return son ? son->data : NULL;
+ return (basic_block) (son ? son->data : NULL);
}
/* Returns the next dominance son after BB in the dominator or postdominator
unsigned int dir_index = dom_convert_dir_to_idx (dir);
struct et_node *next = bb->dom[dir_index]->right;
- return next->father->son == next ? NULL : next->data;
+ return (basic_block) (next->father->son == next ? NULL : next->data);
}
/* Return dominance availability for dominance info DIR. */
slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
gcc_assert (*slot);
- return *slot;
+ return (struct clear_alias_mode_holder *) *slot;
}
{
if (!clear_alias_group)
{
- clear_alias_group = gi = pool_alloc (rtx_group_info_pool);
+ clear_alias_group = gi =
+ (group_info_t) pool_alloc (rtx_group_info_pool);
memset (gi, 0, sizeof (struct group_info));
gi->id = rtx_group_next_id++;
gi->store1_n = BITMAP_ALLOC (NULL);
if (gi == NULL)
{
- *slot = gi = pool_alloc (rtx_group_info_pool);
+ *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
gi->rtx_base = base;
gi->id = rtx_group_next_id++;
gi->base_mem = gen_rtx_MEM (QImode, base);
if (clear_alias_group->offset_map_size_p < spill_alias_set)
clear_alias_group->offset_map_size_p = spill_alias_set;
- store_info = pool_alloc (rtx_store_info_pool);
+ store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
if (dump_file)
fprintf (dump_file, " processing spill store %d(%s)\n",
group_info_t group
= VEC_index (group_info_t, rtx_group_vec, group_id);
- store_info = pool_alloc (rtx_store_info_pool);
+ store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
set_usage_bits (group, offset, width);
if (dump_file)
insn_info->stack_pointer_based = true;
insn_info->contains_cselib_groups = true;
- store_info = pool_alloc (cse_store_info_pool);
+ store_info = (store_info_t) pool_alloc (cse_store_info_pool);
group_id = -1;
if (dump_file)
if (validate_change (read_insn->insn, loc, read_reg, 0))
{
- deferred_change_t deferred_change = pool_alloc (deferred_change_pool);
+ deferred_change_t deferred_change =
+ (deferred_change_t) pool_alloc (deferred_change_pool);
/* Insert this right before the store insn where it will be safe
from later insns that might change it before the read. */
else
width = GET_MODE_SIZE (GET_MODE (mem));
- read_info = pool_alloc (read_info_pool);
+ read_info = (read_info_t) pool_alloc (read_info_pool);
read_info->group_id = group_id;
read_info->mem = mem;
read_info->alias_set = spill_alias_set;
scan_insn (bb_info_t bb_info, rtx insn)
{
rtx body;
- insn_info_t insn_info = pool_alloc (insn_info_pool);
+ insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
int mems_found = 0;
memset (insn_info, 0, sizeof (struct insn_info));
FOR_ALL_BB (bb)
{
insn_info_t ptr;
- bb_info_t bb_info = pool_alloc (bb_info_pool);
+ bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
memset (bb_info, 0, sizeof (struct bb_info));
bitmap_set_bit (all_blocks, bb->index);
slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
gcc_assert (*slot == NULL);
- *slot = entry = pool_alloc (clear_alias_mode_pool);
+ *slot = entry =
+ (struct clear_alias_mode_holder *) pool_alloc (clear_alias_mode_pool);
entry->alias_set = alias_set;
entry->mode = mode;
}
if (public && USE_LINKONCE_INDIRECT)
{
- char *ref_name = alloca (strlen (str) + sizeof "DW.ref.");
+ char *ref_name = XALLOCAVEC (char, strlen (str) + sizeof "DW.ref.");
sprintf (ref_name, "DW.ref.%s", str);
id = get_identifier (ref_name);
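
Finally, CONST_CAST as used in the builtins.c and cselib.c hunks: the
point of the macro is to strip a const qualifier visibly and, under GCC,
without tripping -Wcast-qual, which only fires on cast expressions.  One
way to do that is the cast-to-union extension; the sketch below is
illustrative and simplified relative to the real macros in gcc/system.h.

#include <stdio.h>

/* Strip qualifiers by punning through a union instead of a cast.
   FROMTYPE must be spelled out: "const char *" is pointer-to-const,
   which "const TYPE" with TYPE = char * would not produce.  */
#define SKETCH_CONST_CAST2(TOTYPE, FROMTYPE, X) \
  ((__extension__ (union { FROMTYPE _q; TOTYPE _nq; }) (X))._nq)

/* A callback-style API that takes void * even though it only reads.  */
static void
print_str (void *data)
{
  fputs ((const char *) data, stdout);
}

int
main (void)
{
  const char *src_str = "hello\n";
  /* Before: print_str ((void *) src_str);  -- a -Wcast-qual warning.  */
  print_str (SKETCH_CONST_CAST2 (char *, const char *, src_str));
  return 0;
}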