+2016-08-27 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
+
+ * cfgbuild.c (make_edges): Adjust.
+ * cfgrtl.c (can_delete_label_p): Likewise.
+ * dwarf2cfi.c (create_trace_edges): Likewise.
+ * except.c (sjlj_emit_dispatch_table): Likewise.
+ * function.h (struct expr_status): Make x_forced_labels a vector.
+ * jump.c (rebuild_jump_labels_1): Adjust.
+ * reload1.c (set_initial_label_offsets): Likewise.
+ * stmt.c (force_label_rtx): Likewise.
+ (expand_label): Likewise.
+
2016-08-27 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* haifa-sched.c (fix_recovery_deps): Make ready_list a vector.
/* Heavy use of computed goto in machine-generated code can lead to
nearly fully-connected CFGs. In that case we spend a significant
amount of time searching the edge lists for duplicates. */
- if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
+ if (!vec_safe_is_empty (forced_labels)
+ || cfun->cfg->max_jumptable_ents > 100)
edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));
/* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
everything on the forced_labels list. */
else if (computed_jump_p (insn))
{
- for (rtx_insn_list *x = forced_labels; x; x = x->next ())
- make_label_edge (edge_cache, bb, x->insn (), EDGE_ABNORMAL);
+ rtx_insn *insn;
+ unsigned int i;
+ FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
+ make_label_edge (edge_cache, bb, insn, EDGE_ABNORMAL);
}
/* Returns create an exit out. */
return (!LABEL_PRESERVE_P (label)
/* User declared labels must be preserved. */
&& LABEL_NAME (label) == 0
- && !in_insn_list_p (forced_labels, label));
+ && !vec_safe_contains<rtx_insn *> (forced_labels,
+ const_cast<rtx_code_label *> (label)));
}
/* Delete INSN by patching it out. */
}
else if (computed_jump_p (insn))
{
- for (rtx_insn_list *lab = forced_labels; lab; lab = lab->next ())
- maybe_record_trace_start (lab->insn (), insn);
+ rtx_insn *temp;
+ unsigned int i;
+ FOR_EACH_VEC_SAFE_ELT (forced_labels, i, temp)
+ maybe_record_trace_start (temp, insn);
}
else if (returnjump_p (insn))
;
label on the nonlocal_goto_label list. Since we're modeling these
CFG edges more exactly, we can use the forced_labels list instead. */
LABEL_PRESERVE_P (dispatch_label) = 1;
- forced_labels
- = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
+ vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif
/* Load up exc_ptr and filter values from the function context. */
rtx x_apply_args_value;
/* List of labels that must never be deleted. */
- rtx_insn_list *x_forced_labels;
+ vec<rtx_insn *, va_gc> *x_forced_labels;
};
typedef struct call_site_record_d *call_site_record;
static void
rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
- rtx_insn_list *insn;
-
timevar_push (TV_REBUILD_JUMP);
init_label_info (f);
mark_all_labels (f);
count doesn't drop to zero. */
if (count_forced)
- for (insn = forced_labels; insn; insn = insn->next ())
- if (LABEL_P (insn->insn ()))
- LABEL_NUSES (insn->insn ())++;
+ {
+ rtx_insn *insn;
+ unsigned int i;
+ FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
+ if (LABEL_P (insn))
+ LABEL_NUSES (insn)++;
+ }
timevar_pop (TV_REBUILD_JUMP);
}
{
memset (offsets_known_at, 0, num_labels);
- for (rtx_insn_list *x = forced_labels; x; x = x->next ())
- if (x->insn ())
- set_label_offsets (x->insn (), NULL, 1);
+ unsigned int i;
+ rtx_insn *insn;
+ FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
+ set_label_offsets (insn, NULL, 1);
for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
if (x->insn ())
gcc_assert (function);
- forced_labels = gen_rtx_INSN_LIST (VOIDmode, ref, forced_labels);
+ vec_safe_push (forced_labels, ref);
return ref;
}
}
if (FORCED_LABEL (label))
- forced_labels = gen_rtx_INSN_LIST (VOIDmode, label_r, forced_labels);
+ vec_safe_push<rtx_insn *> (forced_labels, label_r);
if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
maybe_set_first_label_num (label_r);