+2014-09-15 David Malcolm <dmalcolm@redhat.com>
+
+ * config/arc/arc-protos.h (arc_attr_type): Strengthen param from
+ rtx to rtx_insn *.
+ (arc_sets_cc_p): Likewise.
+ * config/arc/arc.c (arc_print_operand): Use methods of
+ "final_sequence" for clarity, and to enable strengthening of
+ locals "jump" and "delay" from rtx to rtx_insn *.
+ (arc_adjust_insn_length): Strengthen local "prev" from rtx to
+ rtx_insn *; use method of rtx_sequence for typesafety.
+ (arc_get_insn_variants): Use insn method of rtx_sequence for
+ typesafety.
+ (arc_pad_return): Likewise.
+ (arc_attr_type): Strengthen param from rtx to rtx_insn *.
+ (arc_sets_cc_p): Likewise. Also, convert a GET_CODE check to a
+ dyn_cast to rtx_sequence *, using insn method for typesafety.
+ * config/arc/arc.h (ADJUST_INSN_LENGTH): Add checked casts to
+ rtx_sequence * and use insn method when invoking get_attr_length.
+ * config/bfin/bfin.c (type_for_anomaly): Strengthen param from rtx
+ to rtx_insn *. Replace a GET_CODE check with a dyn_cast to
+ rtx_sequence *, introducing a local "seq", using its insn method
+	from typesafety and clarity.  [sic: should read "for typesafety and clarity" to match the phrasing used elsewhere in this entry]
+ (add_sched_insns_for_speculation): Strengthen local "next" from
+ rtx to rtx_insn *.
+ * config/c6x/c6x.c (get_insn_side): Likewise for param "insn".
+ (predicate_insn): Likewise.
+ * config/cris/cris-protos.h (cris_notice_update_cc): Likewise for
+ second param.
+ * config/cris/cris.c (cris_notice_update_cc): Likewise.
+ * config/epiphany/epiphany-protos.h
+ (extern void epiphany_insert_mode_switch_use): Likewise for param
+ "insn".
+ (get_attr_sched_use_fpu): Likewise for param.
+ * config/epiphany/epiphany.c (epiphany_insert_mode_switch_use):
+ Likewise for param "insn".
+ * config/epiphany/mode-switch-use.c (insert_uses): Likewise for
+ param "insn" of "target_insert_mode_switch_use" callback.
+ * config/frv/frv.c (frv_insn_unit): Likewise for param "insn".
+ (frv_issues_to_branch_unit_p): Likewise.
+ (frv_pack_insn_p): Likewise.
+ (frv_compare_insns): Strengthen locals "insn1" and "insn2" from
+ const rtx * (i.e. mutable rtx_def * const *) to
+ rtx_insn * const *.
+ * config/i386/i386-protos.h (standard_sse_constant_opcode):
+ Strengthen first param from rtx to rtx_insn *.
+ (output_fix_trunc): Likewise.
+ * config/i386/i386.c (standard_sse_constant_opcode): Likewise.
+ (output_fix_trunc): Likewise.
+ (core2i7_first_cycle_multipass_filter_ready_try): Likewise for
+ local "insn".
+ (min_insn_size): Likewise for param "insn".
+ (get_mem_group): Likewise.
+ (is_cmp): Likewise.
+ (get_insn_path): Likewise.
+ (get_insn_group): Likewise.
+ (count_num_restricted): Likewise.
+ (fits_dispatch_window): Likewise.
+ (add_insn_window): Likewise.
+ (add_to_dispatch_window): Likewise.
+ (debug_insn_dispatch_info_file): Likewise.
+ * config/m32c/m32c-protos.h (m32c_output_compare): Likewise for
+ first param.
+ * config/m32c/m32c.c (m32c_compare_redundant): Likewise for param
+ "cmp" and local "prev".
+ (m32c_output_compare): Likewise for param "insn".
+ * config/m32r/predicates.md (define_predicate "small_insn_p"): Add
+ a checked cast to rtx_insn * on "op" after we know it's an INSN_P.
+ (define_predicate "large_insn_p"): Likewise.
+ * config/m68k/m68k-protos.h (m68k_sched_attr_size): Strengthen
+ param from rtx to rtx_insn *.
+ (attr_op_mem m68k_sched_attr_op_mem): Likewise.
+ * config/m68k/m68k.c (sched_get_attr_size_int): Likewise.
+ (m68k_sched_attr_size): Likewise.
+ (sched_get_opxy_mem_type): Likewise for param "insn".
+ (m68k_sched_attr_op_mem): Likewise.
+ (sched_mem_operand_p): Likewise.
+ * config/mep/mep-protos.h (mep_multi_slot): Likewise for param.
+ * config/mep/mep.c (mep_multi_slot): Likewise.
+ * config/mips/mips-protos.h (mips_output_sync_loop): Likewise for
+ first param.
+ (mips_sync_loop_insns): Likewise.
+ * config/mips/mips.c (mips_print_operand_punctuation): Use insn
+ method of "final_sequence" for typesafety.
+ (mips_process_sync_loop): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (mips_output_sync_loop): Likewise.
+ (mips_sync_loop_insns): Likewise.
+ (mips_74k_agen_init): Likewise.
+ (mips_sched_init): Use NULL rather than NULL_RTX when working with
+ insns.
+ * config/nds32/nds32-fp-as-gp.c (nds32_symbol_load_store_p):
+ Strengthen param "insn" from rtx to rtx_insn *.
+ * config/nds32/nds32.c (nds32_target_alignment): Likewise for
+ local "insn".
+ * config/pa/pa-protos.h (pa_insn_refs_are_delayed): Likewise for
+ param.
+ * config/pa/pa.c (pa_output_function_epilogue): Likewise for local
+ "insn". Use method of rtx_sequence for typesafety.
+ (branch_to_delay_slot_p): Strengthen param "insn" from rtx to
+ rtx_insn *.
+ (branch_needs_nop_p): Likewise.
+ (use_skip_p): Likewise.
+ (pa_insn_refs_are_delayed): Likewise.
+ * config/rl78/rl78.c (rl78_propogate_register_origins): Likewise
+ for locals "insn", "ninsn".
+ * config/rs6000/rs6000.c (is_microcoded_insn): Likewise for param
+ "insn".
+ (is_cracked_insn): Likewise.
+ (is_branch_slot_insn): Likewise.
+ (is_nonpipeline_insn): Likewise.
+ (insn_terminates_group_p): Likewise.
+ (insn_must_be_first_in_group): Likewise.
+ (insn_must_be_last_in_group): Likewise.
+ (force_new_group): Likewise for param "next_insn".
+ * config/s390/s390.c (s390_get_sched_attrmask): Likewise for param
+ "insn".
+ (s390_sched_score): Likewise.
+ * config/sh/sh-protos.h (output_branch): Likewise for param 2.
+ (rtx sfunc_uses_reg): Likewise for sole param.
+ * config/sh/sh.c (sh_print_operand): Use insn method of
+ final_sequence for typesafety.
+ (output_branch): Strengthen param "insn" from rtx to rtx_insn *.
+ Use insn method of final_sequence for typesafety.
+ (sfunc_uses_reg): Strengthen param "insn" from rtx to rtx_insn *.
+ * config/sparc/sparc-protos.h (eligible_for_call_delay): Likewise
+ for param.
+ (eligible_for_return_delay): Likewise.
+ (eligible_for_sibcall_delay): Likewise.
+ * config/sparc/sparc.c (eligible_for_call_delay): Likewise.
+ (eligible_for_return_delay): Likewise.
+ (eligible_for_sibcall_delay): Likewise.
+ * config/stormy16/stormy16-protos.h
+ (xstormy16_output_cbranch_hi): Likewise for final param.
+ (xstormy16_output_cbranch_si): Likewise.
+	* config/stormy16/stormy16.c (xstormy16_output_cbranch_hi): Likewise.
+ (xstormy16_output_cbranch_si): Likewise.
+ * config/v850/v850-protos.h (notice_update_cc): Likewise.
+ * config/v850/v850.c (notice_update_cc): Likewise.
+
+ * final.c (get_attr_length_1): Strengthen param "insn" and param
+ of "fallback_fn" from rtx to rtx_insn *, eliminating a checked cast.
+ (get_attr_length): Strengthen param "insn" from rtx to rtx_insn *.
+ (get_attr_min_length): Likewise.
+ (shorten_branches): Likewise for signature of locals "length_fun"
+ and "inner_length_fun". Introduce local rtx_sequence * "seqn"
+ from a checked cast and use its methods for clarity and to enable
+ strengthening local "inner_insn" from rtx to rtx_insn *.
+ * genattr.c (gen_attr): When writing out the prototypes of the
+ various generated "get_attr_" functions, strengthen the params of
+ the non-const functions from rtx to rtx_insn *.
+ Similarly, strengthen the params of insn_default_length,
+ insn_min_length, insn_variable_length_p, insn_current_length.
+ (main): Similarly, strengthen the param of num_delay_slots,
+ internal_dfa_insn_code, insn_default_latency, bypass_p,
+ insn_latency, min_issue_delay, print_reservation,
+ insn_has_dfa_reservation_p and of the "internal_dfa_insn_code" and
+ "insn_default_latency" callbacks. Rename hook_int_rtx_unreachable
+ to hook_int_rtx_insn_unreachable.
+ * genattrtab.c (write_attr_get): When writing out the generated
+ "get_attr_" functions, strengthen the param "insn" from rtx to
+ rtx_insn *, eliminating a checked cast.
+ (make_automaton_attrs): When writing out prototypes of
+ "internal_dfa_insn_code_", "insn_default_latency_" functions
+ and the "internal_dfa_insn_code" and "insn_default_latency"
+	callbacks, strengthen their params from rtx to rtx_insn *.
+ * genautomata.c (output_internal_insn_code_evaluation): When
+ writing out code, add a checked cast from rtx to rtx_insn * when
+ invoking DFA_INSN_CODE_FUNC_NAME aka dfa_insn_code.
+ (output_dfa_insn_code_func): Strengthen param of generated
+ function "dfa_insn_code_enlarge" from rtx to rtx_insn *.
+ (output_trans_func): Likewise for generated function
+ "state_transition".
+ (output_internal_insn_latency_func): When writing out generated
+ function "internal_insn_latency", rename params from "insn" and
+ "insn2" to "insn_or_const0" and "insn2_or_const0". Reintroduce
+ locals "insn" and "insn2" as rtx_insn * with checked casts once
+ we've proven that we're not dealing with const0_rtx.
+ (output_insn_latency_func): Strengthen param of generated
+ function "insn_latency" from rtx to rtx_insn *.
+ (output_print_reservation_func): Likewise for generated function
+ "print_reservation".
+ (output_insn_has_dfa_reservation_p): Likewise for generated
+ function "insn_has_dfa_reservation_p".
+ * hooks.c (hook_int_rtx_unreachable): Rename to...
+ (hook_int_rtx_insn_unreachable): ...this, and strengthen param
+ from rtx to rtx_insn *.
+ * hooks.h (hook_int_rtx_unreachable): Likewise.
+ (extern int hook_int_rtx_insn_unreachable): Likewise.
+ * output.h (get_attr_length): Strengthen param from rtx to rtx_insn *.
+ (get_attr_min_length): Likewise.
+ * recog.c (get_enabled_alternatives): Likewise.
+ * recog.h (alternative_mask get_enabled_alternatives): Likewise.
+ * reorg.c (find_end_label): Introduce local rtx "pat" and
+ strengthen local "insn" from rtx to rtx_insn *.
+ (redundant_insn): Use insn method of "seq" rather than element for
+ typesafety; strengthen local "control" from rtx to rtx_insn *.
+ * resource.c (mark_referenced_resources): Add checked cast to
+ rtx_insn * within INSN/JUMP_INSN case.
+ (mark_set_resources): Likewise.
+ * sel-sched.c (estimate_insn_cost): Strengthen param "insn" from
+ rtx to rtx_insn *.
+
2014-09-15 David Malcolm <dmalcolm@redhat.com>
* config/rs6000/rs6000.c (rs6000_loop_align_max_skip): Strengthen
extern int arc_verify_short (rtx_insn *insn, int unalign, int);
extern const char *arc_short_long (rtx_insn *insn, const char *, const char *);
extern rtx arc_regno_use_in (unsigned int, rtx);
-extern int arc_attr_type (rtx);
+extern int arc_attr_type (rtx_insn *);
extern bool arc_scheduling_not_expected (void);
-extern bool arc_sets_cc_p (rtx insn);
+extern bool arc_sets_cc_p (rtx_insn *insn);
extern int arc_label_align (rtx label);
extern bool arc_need_delay (rtx_insn *insn);
extern bool arc_text_label (rtx_insn *insn);
/* Unconditional branches / branches not depending on condition codes.
This could also be a CALL_INSN.
Output the appropriate delay slot suffix. */
- if (final_sequence && XVECLEN (final_sequence, 0) != 1)
+ if (final_sequence && final_sequence->len () != 1)
{
- rtx jump = XVECEXP (final_sequence, 0, 0);
- rtx delay = XVECEXP (final_sequence, 0, 1);
+ rtx_insn *jump = final_sequence->insn (0);
+ rtx_insn *delay = final_sequence->insn (1);
/* For TARGET_PAD_RETURN we might have grabbed the delay insn. */
if (INSN_DELETED_P (delay))
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
&& get_attr_type (insn) == TYPE_RETURN)
{
- rtx prev = prev_active_insn (insn);
+ rtx_insn *prev = prev_active_insn (insn);
if (!prev || !(prev = prev_active_insn (prev))
|| ((NONJUMP_INSN_P (prev)
&& GET_CODE (PATTERN (prev)) == SEQUENCE)
- ? CALL_ATTR (XVECEXP (PATTERN (prev), 0, 0), NON_SIBCALL)
+ ? CALL_ATTR (as_a <rtx_sequence *> (PATTERN (prev))->insn (0),
+ NON_SIBCALL)
: CALL_ATTR (prev, NON_SIBCALL)))
return len + 4;
}
entire SEQUENCE. */
rtx_insn *inner;
if (TARGET_UPSIZE_DBR
- && get_attr_length (XVECEXP (pat, 0, 1)) <= 2
+ && get_attr_length (pat->insn (1)) <= 2
&& (((type = get_attr_type (inner = pat->insn (0)))
== TYPE_UNCOND_BRANCH)
|| type == TYPE_BRANCH)
rtx_insn *prev = prev_active_insn (insn);
if (prev && arc_next_active_insn (prev, 0) == insn
&& ((NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
- ? CALL_ATTR (XVECEXP (PATTERN (prev), 0, 0), NON_SIBCALL)
+ ? CALL_ATTR (as_a <rtx_sequence *> (PATTERN (prev))->insn (0),
+ NON_SIBCALL)
: (CALL_ATTR (prev, NON_SIBCALL)
&& NEXT_INSN (PREV_INSN (prev)) == prev)))
force_target = true;
}
if (!prev
|| ((NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
- ? CALL_ATTR (XVECEXP (PATTERN (prev), 0, 0), NON_SIBCALL)
+ ? CALL_ATTR (as_a <rtx_sequence *> (PATTERN (prev))->insn (0),
+ NON_SIBCALL)
: CALL_ATTR (prev, NON_SIBCALL)))
{
if (want_long)
INSN can't have attributes. */
int
-arc_attr_type (rtx insn)
+arc_attr_type (rtx_insn *insn)
{
if (NONJUMP_INSN_P (insn)
? (GET_CODE (PATTERN (insn)) == USE
/* Return true if insn sets the condition codes. */
bool
-arc_sets_cc_p (rtx insn)
+arc_sets_cc_p (rtx_insn *insn)
{
- if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
+ if (NONJUMP_INSN_P (insn))
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
+ insn = seq->insn (seq->len () - 1);
return arc_attr_type (insn) == TYPE_COMPARE;
}
((LENGTH) \
= (GET_CODE (PATTERN (X)) == SEQUENCE \
? ((LENGTH) \
- + arc_adjust_insn_length (as_a <rtx_sequence *> (PATTERN (X))->insn (0), \
- get_attr_length (XVECEXP (PATTERN (X), \
- 0, 0)), \
- true) \
- - get_attr_length (XVECEXP (PATTERN (X), 0, 0)) \
- + arc_adjust_insn_length (as_a <rtx_sequence *> (PATTERN (X))->insn (1), \
- get_attr_length (XVECEXP (PATTERN (X), \
- 0, 1)), \
- true) \
- - get_attr_length (XVECEXP (PATTERN (X), 0, 1))) \
+ + arc_adjust_insn_length ( \
+ as_a <rtx_sequence *> (PATTERN (X))->insn (0), \
+ get_attr_length (as_a <rtx_sequence *> (PATTERN (X))->insn (0)), \
+ true) \
+ - get_attr_length (as_a <rtx_sequence *> (PATTERN (X))->insn (0)) \
+ + arc_adjust_insn_length ( \
+ as_a <rtx_sequence *> (PATTERN (X))->insn (1), \
+ get_attr_length (as_a <rtx_sequence *> (PATTERN (X))->insn (1)), \
+ true) \
+ - get_attr_length (as_a <rtx_sequence *> (PATTERN (X))->insn (1))) \
: arc_adjust_insn_length ((X), (LENGTH), false)))
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C,STR) ((C) == '`')
SEQUENCEs. */
static enum attr_type
-type_for_anomaly (rtx insn)
+type_for_anomaly (rtx_insn *insn)
{
rtx pat = PATTERN (insn);
- if (GET_CODE (pat) == SEQUENCE)
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
{
enum attr_type t;
- t = get_attr_type (XVECEXP (pat, 0, 1));
+ t = get_attr_type (seq->insn (1));
if (t == TYPE_MCLD)
return t;
- t = get_attr_type (XVECEXP (pat, 0, 2));
+ t = get_attr_type (seq->insn (2));
if (t == TYPE_MCLD)
return t;
return TYPE_MCST;
&& (cbranch_predicted_taken_p (insn)))
{
rtx target = JUMP_LABEL (insn);
- rtx next = next_real_insn (target);
+ rtx_insn *next = next_real_insn (target);
if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
&& get_attr_type (next) == TYPE_STALL)
/* Compute the side of the machine used by INSN, which reserves UNITS.
This must match the reservations in the scheduling description. */
static int
-get_insn_side (rtx insn, enum attr_units units)
+get_insn_side (rtx_insn *insn, enum attr_units units)
{
if (units == UNITS_D_ADDR)
return (get_attr_addr_regfile (insn) == ADDR_REGFILE_A ? 0 : 1);
already has that predicate. If DOIT is true, also perform the
modification. */
static bool
-predicate_insn (rtx insn, rtx cond, bool doit)
+predicate_insn (rtx_insn *insn, rtx cond, bool doit)
{
int icode;
if (cond == NULL_RTX)
extern bool cris_simple_epilogue (void);
#ifdef RTX_CODE
extern const char *cris_op_str (rtx);
-extern void cris_notice_update_cc (rtx, rtx);
+extern void cris_notice_update_cc (rtx, rtx_insn *);
extern bool cris_reload_address_legitimized (rtx, enum machine_mode, int, int, int);
extern int cris_side_effect_mode_ok (enum rtx_code, rtx *, int, int,
int, int, int);
check-cc-attribute methods. */
void
-cris_notice_update_cc (rtx exp, rtx insn)
+cris_notice_update_cc (rtx exp, rtx_insn *insn)
{
enum attr_cc attrval = get_attr_cc (insn);
extern void emit_set_fp_mode (int entity, int mode, int prev_mode,
HARD_REG_SET regs_live);
#endif
-extern void epiphany_insert_mode_switch_use (rtx insn, int, int);
+extern void epiphany_insert_mode_switch_use (rtx_insn *insn, int, int);
extern void epiphany_expand_set_fp_mode (rtx *operands);
extern int epiphany_mode_needed (int entity, rtx_insn *insn);
extern int epiphany_mode_after (int entity, int last_mode, rtx_insn *insn);
can't / won't include that. In particular:
PR other/55523: gencondmd file includes / dependencies are messed up,
it uses peephole2 predicates without having all the necessary headers. */
-extern int get_attr_sched_use_fpu (rtx);
+extern int get_attr_sched_use_fpu (rtx_insn *);
}
void
-epiphany_insert_mode_switch_use (rtx insn,
+epiphany_insert_mode_switch_use (rtx_insn *insn,
int entity ATTRIBUTE_UNUSED,
int mode ATTRIBUTE_UNUSED)
{
static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
int e;
- void (*target_insert_mode_switch_use) (rtx insn, int, int)
+ void (*target_insert_mode_switch_use) (rtx_insn *insn, int, int)
= TARGET_INSERT_MODE_SWITCH_USE;
for (e = N_ENTITIES - 1; e >= 0; e--)
static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
static rtx frv_ifcvt_load_value (rtx, rtx);
static int frv_acc_group_1 (rtx *, void *);
-static unsigned int frv_insn_unit (rtx);
-static bool frv_issues_to_branch_unit_p (rtx);
+static unsigned int frv_insn_unit (rtx_insn *);
+static bool frv_issues_to_branch_unit_p (rtx_insn *);
static int frv_cond_flags (rtx);
static bool frv_regstate_conflict_p (regstate_t, regstate_t);
static int frv_registers_conflict_p_1 (rtx *, void *);
static void frv_start_packet (void);
static void frv_start_packet_block (void);
static void frv_finish_packet (void (*) (void));
-static bool frv_pack_insn_p (rtx);
+static bool frv_pack_insn_p (rtx_insn *);
static void frv_add_insn_to_packet (rtx_insn *);
static void frv_insert_nop_in_packet (rtx_insn *);
static bool frv_for_each_packet (void (*) (void));
type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
static unsigned int
-frv_insn_unit (rtx insn)
+frv_insn_unit (rtx_insn *insn)
{
enum attr_type type;
/* Return true if INSN issues to a branch unit. */
static bool
-frv_issues_to_branch_unit_p (rtx insn)
+frv_issues_to_branch_unit_p (rtx_insn *insn)
{
return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
}
the DFA state on success. */
static bool
-frv_pack_insn_p (rtx insn)
+frv_pack_insn_p (rtx_insn *insn)
{
/* See if the packet is already as long as it can be. */
if (frv_packet.num_insns == frv_packet.issue_rate)
static int
frv_compare_insns (const void *first, const void *second)
{
- const rtx *const insn1 = (rtx const *) first,
- *const insn2 = (rtx const *) second;
+ rtx_insn * const *insn1 = (rtx_insn * const *) first;
+ rtx_insn * const *insn2 = (rtx_insn * const *) second;
return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
}
extern const char *standard_80387_constant_opcode (rtx);
extern rtx standard_80387_constant_rtx (int);
extern int standard_sse_constant_p (rtx);
-extern const char *standard_sse_constant_opcode (rtx, rtx);
+extern const char *standard_sse_constant_opcode (rtx_insn *, rtx);
extern bool symbolic_reference_mentioned_p (rtx);
extern bool extended_reg_mentioned_p (rtx);
extern bool x86_extended_QIreg_mentioned_p (rtx_insn *);
extern const char *output_set_got (rtx, rtx);
extern const char *output_387_binary_op (rtx, rtx*);
extern const char *output_387_reg_move (rtx, rtx*);
-extern const char *output_fix_trunc (rtx, rtx*, bool);
+extern const char *output_fix_trunc (rtx_insn *, rtx*, bool);
extern const char *output_fp_compare (rtx, rtx*, bool, bool);
extern const char *output_adjust_stack_and_probe (rtx);
extern const char *output_probe_stack_range (rtx, rtx);
the constant X. */
const char *
-standard_sse_constant_opcode (rtx insn, rtx x)
+standard_sse_constant_opcode (rtx_insn *insn, rtx x)
{
switch (standard_sse_constant_p (x))
{
operand may be [SDX]Fmode. */
const char *
-output_fix_trunc (rtx insn, rtx *operands, bool fisttp)
+output_fix_trunc (rtx_insn *insn, rtx *operands, bool fisttp)
{
int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
int dimode_p = GET_MODE (operands[0]) == DImode;
data->ifetch_block_n_insns = 0;
}
-static int min_insn_size (rtx);
+static int min_insn_size (rtx_insn *);
/* Filter out insns from ready_try that the core will not be able to issue
on current cycle due to decoder. */
{
while (n_ready--)
{
- rtx insn;
+ rtx_insn *insn;
int insn_size;
if (ready_try[n_ready])
99% of cases. */
static int
-min_insn_size (rtx insn)
+min_insn_size (rtx_insn *insn)
{
int l = 0, len;
/* Get dispatch group of insn. */
static enum dispatch_group
-get_mem_group (rtx insn)
+get_mem_group (rtx_insn *insn)
{
enum attr_memory memory;
/* Return true if insn is a compare instruction. */
static bool
-is_cmp (rtx insn)
+is_cmp (rtx_insn *insn)
{
enum attr_type type;
/* Return single or double path for instructions. */
static enum insn_path
-get_insn_path (rtx insn)
+get_insn_path (rtx_insn *insn)
{
enum attr_amdfam10_decode path = get_attr_amdfam10_decode (insn);
/* Return insn dispatch group. */
static enum dispatch_group
-get_insn_group (rtx insn)
+get_insn_group (rtx_insn *insn)
{
enum dispatch_group group = get_mem_group (insn);
if (group)
window WINDOW_LIST. */
static int
-count_num_restricted (rtx insn, dispatch_windows *window_list)
+count_num_restricted (rtx_insn *insn, dispatch_windows *window_list)
{
enum dispatch_group group = get_insn_group (insn);
int imm_size;
last window scheduled. */
static bool
-fits_dispatch_window (rtx insn)
+fits_dispatch_window (rtx_insn *insn)
{
dispatch_windows *window_list = dispatch_window_list;
dispatch_windows *window_list_next = dispatch_window_list->next;
dispatch window WINDOW_LIST. */
static void
-add_insn_window (rtx insn, dispatch_windows *window_list, int num_uops)
+add_insn_window (rtx_insn *insn, dispatch_windows *window_list, int num_uops)
{
int byte_len = min_insn_size (insn);
int num_insn = window_list->num_insn;
the window exceed allowable, it allocates a new window. */
static void
-add_to_dispatch_window (rtx insn)
+add_to_dispatch_window (rtx_insn *insn)
{
int byte_len;
dispatch_windows *window_list;
/* Print INSN dispatch information to FILE. */
DEBUG_FUNCTION static void
-debug_insn_dispatch_info_file (FILE *file, rtx insn)
+debug_insn_dispatch_info_file (FILE *file, rtx_insn *insn)
{
int byte_len;
enum insn_path path;
int m32c_limit_reload_class (enum machine_mode, int);
int m32c_modes_tieable_p (enum machine_mode, enum machine_mode);
bool m32c_mov_ok (rtx *, enum machine_mode);
-char * m32c_output_compare (rtx, rtx *);
+char * m32c_output_compare (rtx_insn *, rtx *);
int m32c_prepare_move (rtx *, enum machine_mode);
int m32c_prepare_shift (rtx *, int, int);
int m32c_reg_ok_for_base_p (rtx, int);
/* Returns true if a compare insn is redundant because it would only
set flags that are already set correctly. */
static bool
-m32c_compare_redundant (rtx cmp, rtx *operands)
+m32c_compare_redundant (rtx_insn *cmp, rtx *operands)
{
int flags_needed;
int pflags;
- rtx prev, pp, next;
+ rtx_insn *prev;
+ rtx pp, next;
rtx op0, op1;
#if DEBUG_CMP
int prev_icode, i;
the compare is redundant, else a normal pattern is returned. Thus,
the assembler output says where the compare would have been. */
char *
-m32c_output_compare (rtx insn, rtx *operands)
+m32c_output_compare (rtx_insn *insn, rtx *operands)
{
static char templ[] = ";cmp.b\t%1,%0";
/* ^ 5 */
if (! INSN_P (op))
return 0;
- return get_attr_length (op) == 2;
+ return get_attr_length (as_a <rtx_insn *> (op)) == 2;
})
;; Return true if op is an integer constant, less than or equal to
if (! INSN_P (op))
return 0;
- return get_attr_length (op) != 2;
+ return get_attr_length (as_a <rtx_insn *> (op)) != 2;
})
;; Returns 1 if OP is an acceptable operand for seth/add3.
extern enum attr_opx_type m68k_sched_attr_opx_type (rtx_insn *, int);
extern enum attr_opy_type m68k_sched_attr_opy_type (rtx_insn *, int);
-extern enum attr_size m68k_sched_attr_size (rtx);
-extern enum attr_op_mem m68k_sched_attr_op_mem (rtx);
+extern enum attr_size m68k_sched_attr_size (rtx_insn *);
+extern enum attr_op_mem m68k_sched_attr_op_mem (rtx_insn *);
#endif /* HAVE_ATTR_cpu */
#endif /* RTX_CODE */
/* Return size of INSN as int. */
static int
-sched_get_attr_size_int (rtx insn)
+sched_get_attr_size_int (rtx_insn *insn)
{
int size;
/* Return size of INSN as attribute enum value. */
enum attr_size
-m68k_sched_attr_size (rtx insn)
+m68k_sched_attr_size (rtx_insn *insn)
{
switch (sched_get_attr_size_int (insn))
{
/* Return operand X or Y (depending on OPX_P) of INSN,
if it is a MEM, or NULL overwise. */
static enum attr_op_type
-sched_get_opxy_mem_type (rtx insn, bool opx_p)
+sched_get_opxy_mem_type (rtx_insn *insn, bool opx_p)
{
if (opx_p)
{
/* Implement op_mem attribute. */
enum attr_op_mem
-m68k_sched_attr_op_mem (rtx insn)
+m68k_sched_attr_op_mem (rtx_insn *insn)
{
enum attr_op_type opx;
enum attr_op_type opy;
/* Return true, if X or Y (depending on OPX_P) operand of INSN
is a MEM. */
static bool
-sched_mem_operand_p (rtx insn, bool opx_p)
+sched_mem_operand_p (rtx_insn *insn, bool opx_p)
{
switch (sched_get_opxy_mem_type (insn, opx_p))
{
extern bool mep_split_mov (rtx *, int);
extern bool mep_vliw_mode_match (rtx);
extern bool mep_vliw_jmp_match (rtx);
-extern bool mep_multi_slot (rtx);
+extern bool mep_multi_slot (rtx_insn *);
extern bool mep_legitimate_address (enum machine_mode, rtx, int);
extern int mep_legitimize_address (rtx *, rtx, enum machine_mode);
extern int mep_legitimize_reload_address (rtx *, enum machine_mode, int, /*enum reload_type*/ int, int);
}
bool
-mep_multi_slot (rtx x)
+mep_multi_slot (rtx_insn *x)
{
return get_attr_slot (x) == SLOT_MULTI;
}
extern const char *mips_output_order_conditional_branch (rtx_insn *, rtx *,
bool);
extern const char *mips_output_sync (void);
-extern const char *mips_output_sync_loop (rtx, rtx *);
-extern unsigned int mips_sync_loop_insns (rtx, rtx *);
+extern const char *mips_output_sync_loop (rtx_insn *, rtx *);
+extern unsigned int mips_sync_loop_insns (rtx_insn *, rtx *);
extern const char *mips_output_division (const char *, rtx *);
extern const char *mips_output_probe_stack_range (rtx, rtx);
extern unsigned int mips_hard_regno_nregs (int, enum machine_mode);
/* If the delay slot instruction is short, then use the
compact version. */
if (final_sequence == 0
- || get_attr_length (XVECEXP (final_sequence, 0, 1)) == 2)
+ || get_attr_length (final_sequence->insn (1)) == 2)
putc ('s', file);
break;
sequence for it. */
static void
-mips_process_sync_loop (rtx insn, rtx *operands)
+mips_process_sync_loop (rtx_insn *insn, rtx *operands)
{
rtx at, mem, oldval, newval, inclusive_mask, exclusive_mask;
rtx required_oldval, insn1_op2, tmp1, tmp2, tmp3, cmp;
the operands given by OPERANDS. */
const char *
-mips_output_sync_loop (rtx insn, rtx *operands)
+mips_output_sync_loop (rtx_insn *insn, rtx *operands)
{
mips_process_sync_loop (insn, operands);
which has the operands given by OPERANDS. */
unsigned int
-mips_sync_loop_insns (rtx insn, rtx *operands)
+mips_sync_loop_insns (rtx_insn *insn, rtx *operands)
{
mips_process_sync_loop (insn, operands);
return mips_multi_num_insns;
resets to TYPE_UNKNOWN state. */
static void
-mips_74k_agen_init (rtx insn)
+mips_74k_agen_init (rtx_insn *insn)
{
if (!insn || CALL_P (insn) || JUMP_P (insn))
mips_last_74k_agen_insn = TYPE_UNKNOWN;
{
mips_macc_chains_last_hilo = 0;
vr4130_last_insn = 0;
- mips_74k_agen_init (NULL_RTX);
+ mips_74k_agen_init (NULL);
/* When scheduling for Loongson2, branch instructions go to ALU1,
therefore basic block is most likely to start with round-robin counter
/* Return true if is load/store with SYMBOL_REF addressing mode
and memory mode is SImode. */
static bool
-nds32_symbol_load_store_p (rtx insn)
+nds32_symbol_load_store_p (rtx_insn *insn)
{
rtx mem_src = NULL_RTX;
int
nds32_target_alignment (rtx label)
{
- rtx insn;
+ rtx_insn *insn;
if (optimize_size)
return 0;
extern enum direction pa_function_arg_padding (enum machine_mode, const_tree);
#endif
#endif /* ARGS_SIZE_RTX */
-extern int pa_insn_refs_are_delayed (rtx);
+extern int pa_insn_refs_are_delayed (rtx_insn *);
extern rtx pa_get_deferred_plabel (rtx);
#endif /* RTX_CODE */
static void
pa_output_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
- rtx insn = get_last_insn ();
+ rtx_insn *insn = get_last_insn ();
bool extra_nop;
/* pa_expand_epilogue does the dirty work now. We just need
/* If it is a sequence, then look inside. */
if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))-> insn (0);
/* If insn is a CALL_INSN, then it must be a call to a volatile
function (otherwise there would be epilogue insns). */
it branches into the delay slot. Otherwise, return FALSE. */
static bool
-branch_to_delay_slot_p (rtx insn)
+branch_to_delay_slot_p (rtx_insn *insn)
{
rtx jump_insn;
when this occurs. */
static bool
-branch_needs_nop_p (rtx insn)
+branch_needs_nop_p (rtx_insn *insn)
{
rtx jump_insn;
to a mis-predicted branch when we fall through. */
static bool
-use_skip_p (rtx insn)
+use_skip_p (rtx_insn *insn)
{
rtx jump_insn = next_active_insn (JUMP_LABEL (insn));
filter out things it will not accept -- SEQUENCE, USE and CLOBBER insns
in particular. */
int
-pa_insn_refs_are_delayed (rtx insn)
+pa_insn_refs_are_delayed (rtx_insn *insn)
{
return ((NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != SEQUENCE
int origins[FIRST_PSEUDO_REGISTER];
int age[FIRST_PSEUDO_REGISTER];
int i;
- rtx insn, ninsn = NULL_RTX;
+ rtx_insn *insn, *ninsn = NULL;
rtx pat;
reset_origins (origins, age);
static int rs6000_debug_address_cost (rtx, enum machine_mode, addr_space_t,
bool);
static int rs6000_debug_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
-static bool is_microcoded_insn (rtx);
-static bool is_nonpipeline_insn (rtx);
-static bool is_cracked_insn (rtx);
+static bool is_microcoded_insn (rtx_insn *);
+static bool is_nonpipeline_insn (rtx_insn *);
+static bool is_cracked_insn (rtx_insn *);
static bool is_load_insn (rtx, rtx *);
static bool is_store_insn (rtx, rtx *);
static bool set_to_load_agen (rtx_insn *,rtx_insn *);
-static bool insn_terminates_group_p (rtx , enum group_termination);
-static bool insn_must_be_first_in_group (rtx);
-static bool insn_must_be_last_in_group (rtx);
+static bool insn_terminates_group_p (rtx_insn *, enum group_termination);
+static bool insn_must_be_first_in_group (rtx_insn *);
+static bool insn_must_be_last_in_group (rtx_insn *);
static void altivec_init_builtins (void);
static tree builtin_function_type (enum machine_mode, enum machine_mode,
enum machine_mode, enum machine_mode,
Return false otherwise. */
static bool
-is_microcoded_insn (rtx insn)
+is_microcoded_insn (rtx_insn *insn)
{
if (!insn || !NONDEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
by the processor (and therefore occupies 2 issue slots). */
static bool
-is_cracked_insn (rtx insn)
+is_cracked_insn (rtx_insn *insn)
{
if (!insn || !NONDEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
the branch slot. */
static bool
-is_branch_slot_insn (rtx insn)
+is_branch_slot_insn (rtx_insn *insn)
{
if (!insn || !NONDEBUG_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
/* Return true if the instruction is nonpipelined on the Cell. */
static bool
-is_nonpipeline_insn (rtx insn)
+is_nonpipeline_insn (rtx_insn *insn)
{
enum attr_type type;
if (!insn || !NONDEBUG_INSN_P (insn)
the first insn in the group it belongs to). */
static bool
-insn_terminates_group_p (rtx insn, enum group_termination which_group)
+insn_terminates_group_p (rtx_insn *insn, enum group_termination which_group)
{
bool first, last;
static bool
-insn_must_be_first_in_group (rtx insn)
+insn_must_be_first_in_group (rtx_insn *insn)
{
enum attr_type type;
}
static bool
-insn_must_be_last_in_group (rtx insn)
+insn_must_be_last_in_group (rtx_insn *insn)
{
enum attr_type type;
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
- rtx next_insn, bool *group_end, int can_issue_more,
+ rtx_insn *next_insn, bool *group_end, int can_issue_more,
int *group_count)
{
rtx nop;
#define S390_OOO_SCHED_ATTR_MASK_GROUPALONE 0x8
static unsigned int
-s390_get_sched_attrmask (rtx insn)
+s390_get_sched_attrmask (rtx_insn *insn)
{
unsigned int mask = 0;
better. The score is calculated from the OOO scheduling attributes
of INSN and the scheduling state s390_sched_state. */
static int
-s390_sched_score (rtx insn)
+s390_sched_score (rtx_insn *insn)
{
unsigned int mask = s390_get_sched_attrmask (insn);
int score = 0;
/* Declare functions defined in sh.c and used in templates. */
-extern const char *output_branch (int, rtx, rtx *);
+extern const char *output_branch (int, rtx_insn *, rtx *);
extern const char *output_ieee_ccmpeq (rtx_insn *, rtx *);
extern const char *output_branchy_insn (enum rtx_code, const char *,
rtx_insn *, rtx *);
extern const char *output_movepcrel (rtx, rtx[], enum machine_mode);
extern const char *output_far_jump (rtx_insn *, rtx);
-extern rtx sfunc_uses_reg (rtx);
+extern rtx sfunc_uses_reg (rtx_insn *);
extern int barrier_align (rtx_insn *);
extern int sh_loop_align (rtx_insn *);
extern bool fp_zero_operand (rtx);
case '.':
if (final_sequence
- && ! INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))
- && get_attr_length (XVECEXP (final_sequence, 0, 1)))
+ && ! INSN_ANNULLED_BRANCH_P (final_sequence->insn (0))
+ && get_attr_length (final_sequence->insn (1)))
fprintf (stream, ASSEMBLER_DIALECT ? "/s" : ".s");
break;
case ',':
/* Output code for ordinary branches. */
const char *
-output_branch (int logic, rtx insn, rtx *operands)
+output_branch (int logic, rtx_insn *insn, rtx *operands)
{
switch (get_attr_length (insn))
{
place for it is after the label. final will do that by default. */
if (final_sequence
- && ! INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))
- && get_attr_length (XVECEXP (final_sequence, 0, 1)))
+ && ! INSN_ANNULLED_BRANCH_P (final_sequence->insn (0))
+ && get_attr_length (final_sequence->insn (1)))
{
asm_fprintf (asm_out_file, "\tb%s%ss\t%LLF%d\n", logic ? "f" : "t",
ASSEMBLER_DIALECT ? "/" : ".", label);
register is not used anywhere else in this instruction - except as the
destination of a set, return this register; else, return 0. */
rtx
-sfunc_uses_reg (rtx insn)
+sfunc_uses_reg (rtx_insn *insn)
{
int i;
rtx pattern, part, reg_part, reg;
extern rtx widen_mem_for_ldd_peep (rtx, rtx, enum machine_mode);
extern int empty_delay_slot (rtx_insn *);
extern int emit_cbcond_nop (rtx);
-extern int eligible_for_call_delay (rtx);
-extern int eligible_for_return_delay (rtx);
-extern int eligible_for_sibcall_delay (rtx);
+extern int eligible_for_call_delay (rtx_insn *);
+extern int eligible_for_return_delay (rtx_insn *);
+extern int eligible_for_sibcall_delay (rtx_insn *);
extern int emit_move_sequence (rtx, enum machine_mode);
extern int fp_sethi_p (rtx);
extern int fp_mov_p (rtx);
/* Return nonzero if TRIAL can go into the call delay slot. */
int
-eligible_for_call_delay (rtx trial)
+eligible_for_call_delay (rtx_insn *trial)
{
rtx pat;
/* Return nonzero if TRIAL can go into the function return's delay slot. */
int
-eligible_for_return_delay (rtx trial)
+eligible_for_return_delay (rtx_insn *trial)
{
int regno;
rtx pat;
/* Return nonzero if TRIAL can go into the sibling call's delay slot. */
int
-eligible_for_sibcall_delay (rtx trial)
+eligible_for_sibcall_delay (rtx_insn *trial)
{
rtx pat;
#ifdef RTX_CODE
extern void xstormy16_emit_cbranch (enum rtx_code, rtx, rtx, rtx);
-extern char *xstormy16_output_cbranch_hi (rtx, const char *, int, rtx);
-extern char *xstormy16_output_cbranch_si (rtx, const char *, int, rtx);
+extern char *xstormy16_output_cbranch_hi (rtx, const char *, int, rtx_insn *);
+extern char *xstormy16_output_cbranch_si (rtx, const char *, int, rtx_insn *);
extern void xstormy16_expand_casesi (rtx, rtx, rtx, rtx, rtx);
extern void xstormy16_output_addr_vec (FILE *, rtx, rtx);
INSN is the insn. */
char *
-xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
+xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
+ rtx_insn *insn)
{
static char string[64];
int need_longbranch = (op != NULL_RTX
INSN is the insn. */
char *
-xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
+xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
+ rtx_insn *insn)
{
static char string[64];
int need_longbranch = get_attr_length (insn) >= 8;
#ifdef RTX_CODE
extern rtx v850_return_addr (int);
extern const char *output_move_single (rtx *);
-extern void notice_update_cc (rtx, rtx);
+extern void notice_update_cc (rtx, rtx_insn *);
extern char * construct_save_jarl (rtx);
extern char * construct_restore_jr (rtx);
#ifdef HAVE_MACHINE_MODES
/* Update the condition code from the insn. */
void
-notice_update_cc (rtx body, rtx insn)
+notice_update_cc (rtx body, rtx_insn *insn)
{
switch (get_attr_cc (insn))
{
get its actual length. Otherwise, use FALLBACK_FN to calculate the
length. */
static int
-get_attr_length_1 (rtx uncast_insn, int (*fallback_fn) (rtx))
+get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
{
- rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
rtx body;
int i;
int length = 0;
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its maximum length. */
int
-get_attr_length (rtx insn)
+get_attr_length (rtx_insn *insn)
{
return get_attr_length_1 (insn, insn_default_length);
}
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its minimum length. */
int
-get_attr_min_length (rtx insn)
+get_attr_min_length (rtx_insn *insn)
{
return get_attr_length_1 (insn, insn_min_length);
}
#endif /* CASE_VECTOR_SHORTEN_MODE */
/* Compute initial lengths, addresses, and varying flags for each insn. */
- int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
+ int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
for (insn_current_address = 0, insn = first;
insn != 0;
#else
const_delay_slots = 0;
#endif
- int (*inner_length_fun) (rtx)
+ int (*inner_length_fun) (rtx_insn *)
= const_delay_slots ? length_fun : insn_default_length;
/* Inside a delay slot sequence, we do not do any branch shortening
if the shortening could change the number of delay slots
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
+ rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
int i;
body = PATTERN (insn);
new_length = 0;
- for (i = 0; i < XVECLEN (body, 0); i++)
+ for (i = 0; i < seqn->len (); i++)
{
- rtx inner_insn = XVECEXP (body, 0, i);
+ rtx_insn *inner_insn = seqn->insn (i);
int inner_uid = INSN_UID (inner_insn);
int inner_length;
/* If numeric attribute, don't need to write an enum. */
if (GET_CODE (attr) == DEFINE_ENUM_ATTR)
printf ("extern enum %s get_attr_%s (%s);\n\n",
- XSTR (attr, 1), XSTR (attr, 0), (is_const ? "void" : "rtx"));
+ XSTR (attr, 1), XSTR (attr, 0),
+ (is_const ? "void" : "rtx_insn *"));
else
{
p = XSTR (attr, 1);
if (*p == '\0')
printf ("extern int get_attr_%s (%s);\n", XSTR (attr, 0),
- (is_const ? "void" : "rtx"));
+ (is_const ? "void" : "rtx_insn *"));
else
printf ("extern enum attr_%s get_attr_%s (%s);\n\n",
- XSTR (attr, 0), XSTR (attr, 0), (is_const ? "void" : "rtx"));
+ XSTR (attr, 0), XSTR (attr, 0),
+ (is_const ? "void" : "rtx_insn *"));
}
/* If `length' attribute, write additional function definitions and define
{
puts ("\
extern void shorten_branches (rtx_insn *);\n\
-extern int insn_default_length (rtx);\n\
-extern int insn_min_length (rtx);\n\
-extern int insn_variable_length_p (rtx);\n\
-extern int insn_current_length (rtx);\n\n\
+extern int insn_default_length (rtx_insn *);\n\
+extern int insn_min_length (rtx_insn *);\n\
+extern int insn_variable_length_p (rtx_insn *);\n\
+extern int insn_current_length (rtx_insn *);\n\n\
#include \"insn-addr.h\"\n");
}
}
{
if (! have_delay)
{
- printf ("extern int num_delay_slots (rtx);\n");
+ printf ("extern int num_delay_slots (rtx_insn *);\n");
printf ("extern int eligible_for_delay (rtx_insn *, int, rtx_insn *, int);\n\n");
printf ("extern int const_num_delay_slots (rtx_insn *);\n\n");
have_delay = 1;
printf (" and insn_default_latency. */\n");
printf ("extern void init_sched_attrs (void);\n\n");
printf ("/* Internal insn code number used by automata. */\n");
- printf ("extern int (*internal_dfa_insn_code) (rtx);\n\n");
+ printf ("extern int (*internal_dfa_insn_code) (rtx_insn *);\n\n");
printf ("/* Insn latency time defined in define_insn_reservation. */\n");
- printf ("extern int (*insn_default_latency) (rtx);\n\n");
+ printf ("extern int (*insn_default_latency) (rtx_insn *);\n\n");
}
else
{
printf ("#define init_sched_attrs() do { } while (0)\n\n");
printf ("/* Internal insn code number used by automata. */\n");
- printf ("extern int internal_dfa_insn_code (rtx);\n\n");
+ printf ("extern int internal_dfa_insn_code (rtx_insn *);\n\n");
printf ("/* Insn latency time defined in define_insn_reservation. */\n");
- printf ("extern int insn_default_latency (rtx);\n\n");
+ printf ("extern int insn_default_latency (rtx_insn *);\n\n");
}
printf ("/* Return nonzero if there is a bypass for given insn\n");
printf (" which is a data producer. */\n");
- printf ("extern int bypass_p (rtx);\n\n");
+ printf ("extern int bypass_p (rtx_insn *);\n\n");
printf ("/* Insn latency time on data consumed by the 2nd insn.\n");
printf (" Use the function if bypass_p returns nonzero for\n");
printf (" the 1st insn. */\n");
- printf ("extern int insn_latency (rtx_insn *, rtx_insn *);\n\n");
+ printf ("extern int insn_latency (rtx, rtx);\n\n");
printf ("/* Maximal insn latency time possible of all bypasses for this insn.\n");
printf (" Use the function if bypass_p returns nonzero for\n");
printf (" the 1st insn. */\n");
printf (" implementation may require much memory. */\n");
printf ("extern int state_alts (state_t, rtx);\n");
printf ("#endif\n\n");
- printf ("extern int min_issue_delay (state_t, rtx);\n");
+ printf ("extern int min_issue_delay (state_t, rtx_insn *);\n");
printf ("/* The following function returns nonzero if no one insn\n");
printf (" can be issued in current DFA state. */\n");
printf ("extern int state_dead_lock_p (state_t);\n");
printf ("/* The following function outputs reservations for given\n");
printf (" insn as they are described in the corresponding\n");
printf (" define_insn_reservation. */\n");
- printf ("extern void print_reservation (FILE *, rtx);\n");
+ printf ("extern void print_reservation (FILE *, rtx_insn *);\n");
printf ("\n#if CPU_UNITS_QUERY\n");
printf ("/* The following function returns code of functional unit\n");
printf (" with given name (see define_cpu_unit). */\n");
printf ("#endif\n\n");
printf ("/* The following function returns true if insn\n");
printf (" has a dfa reservation. */\n");
- printf ("extern bool insn_has_dfa_reservation_p (rtx);\n\n");
+ printf ("extern bool insn_has_dfa_reservation_p (rtx_insn *);\n\n");
printf ("/* Clean insn code cache. It should be called if there\n");
printf (" is a chance that condition value in a\n");
printf (" define_insn_reservation will be changed after\n");
/* We make an exception here to provide stub definitions for
insn_*_length* / get_attr_enabled functions. */
puts ("#if !HAVE_ATTR_length\n"
- "extern int hook_int_rtx_unreachable (rtx);\n"
- "#define insn_default_length hook_int_rtx_unreachable\n"
- "#define insn_min_length hook_int_rtx_unreachable\n"
- "#define insn_variable_length_p hook_int_rtx_unreachable\n"
- "#define insn_current_length hook_int_rtx_unreachable\n"
+ "extern int hook_int_rtx_insn_unreachable (rtx_insn *);\n"
+ "#define insn_default_length hook_int_rtx_insn_unreachable\n"
+ "#define insn_min_length hook_int_rtx_insn_unreachable\n"
+ "#define insn_variable_length_p hook_int_rtx_insn_unreachable\n"
+ "#define insn_current_length hook_int_rtx_insn_unreachable\n"
"#include \"insn-addr.h\"\n"
"#endif\n"
"#if !HAVE_ATTR_enabled\n"
/* If the attribute name starts with a star, the remainder is the name of
the subroutine to use, instead of `get_attr_...'. */
if (attr->name[0] == '*')
- fprintf (outf, "%s (rtx uncast_insn ATTRIBUTE_UNUSED)\n", &attr->name[1]);
+ fprintf (outf, "%s (rtx_insn *insn ATTRIBUTE_UNUSED)\n", &attr->name[1]);
else if (attr->is_const == 0)
- fprintf (outf, "get_attr_%s (rtx uncast_insn ATTRIBUTE_UNUSED)\n", attr->name);
+ fprintf (outf, "get_attr_%s (rtx_insn *insn ATTRIBUTE_UNUSED)\n", attr->name);
else
{
fprintf (outf, "get_attr_%s (void)\n", attr->name);
fprintf (outf, "{\n");
- if (attr->name[0] == '*' || attr->is_const == 0)
- fprintf (outf, " rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);\n");
-
/* Find attributes that are worth caching in the conditions. */
cached_attr_count = 0;
attrs_seen_more_than_once = 0;
continue;
gcc_assert (GET_CODE (val->value) == CONST_STRING);
fprintf (dfa_file,
- "extern int internal_dfa_insn_code_%s (rtx);\n",
+ "extern int internal_dfa_insn_code_%s (rtx_insn *);\n",
XSTR (val->value, 0));
}
fprintf (dfa_file, "\n");
continue;
gcc_assert (GET_CODE (val->value) == CONST_STRING);
fprintf (latency_file,
- "extern int insn_default_latency_%s (rtx);\n",
+ "extern int insn_default_latency_%s (rtx_insn *);\n",
XSTR (val->value, 0));
}
fprintf (latency_file, "\n");
continue;
gcc_assert (GET_CODE (val->value) == CONST_STRING);
fprintf (attr_file,
- "extern int internal_dfa_insn_code_%s (rtx);\n"
- "extern int insn_default_latency_%s (rtx);\n",
+ "extern int internal_dfa_insn_code_%s (rtx_insn *);\n"
+ "extern int insn_default_latency_%s (rtx_insn *);\n",
XSTR (val->value, 0), XSTR (val->value, 0));
}
fprintf (attr_file, "\n");
- fprintf (attr_file, "int (*internal_dfa_insn_code) (rtx);\n");
- fprintf (attr_file, "int (*insn_default_latency) (rtx);\n");
+ fprintf (attr_file, "int (*internal_dfa_insn_code) (rtx_insn *);\n");
+ fprintf (attr_file, "int (*insn_default_latency) (rtx_insn *);\n");
fprintf (attr_file, "\n");
fprintf (attr_file, "void\n");
fprintf (attr_file, "init_sched_attrs (void)\n");
insn_code_name, COLLAPSE_NDFA_VALUE_NAME);
}
fprintf (output_file, "\n else\n {\n");
- fprintf (output_file, " %s = %s (%s);\n", insn_code_name,
- DFA_INSN_CODE_FUNC_NAME, insn_name);
+ fprintf (output_file,
+ " %s = %s (as_a <rtx_insn *> (%s));\n",
+ insn_code_name, DFA_INSN_CODE_FUNC_NAME, insn_name);
fprintf (output_file, " if (%s > %s)\n return %d;\n }\n",
insn_code_name, ADVANCE_CYCLE_VALUE_NAME, code);
}
DFA_INSN_CODES_LENGTH_VARIABLE_NAME,
DFA_INSN_CODES_VARIABLE_NAME);
fprintf (output_file, "\
-static inline int\n%s (rtx %s)\n\
+static inline int\n%s (rtx_insn *%s)\n\
{\n\
int uid = INSN_UID (%s);\n\
int %s;\n\n",
static void
output_min_issue_delay_func (void)
{
- fprintf (output_file, "int\n%s (%s %s, rtx %s)\n",
+ fprintf (output_file, "int\n%s (%s %s, rtx_insn *%s)\n",
MIN_ISSUE_DELAY_FUNC_NAME, STATE_TYPE_NAME, STATE_NAME,
INSN_PARAMETER_NAME);
fprintf (output_file, "{\n int %s;\n", INTERNAL_INSN_CODE_NAME);
decl_t decl;
struct bypass_decl *bypass;
- fprintf (output_file, "static int\n%s (int %s ATTRIBUTE_UNUSED,\n\tint %s ATTRIBUTE_UNUSED,\n\trtx_insn *%s ATTRIBUTE_UNUSED,\n\trtx_insn *%s ATTRIBUTE_UNUSED)\n",
+ fprintf (output_file, "static int\n%s (int %s ATTRIBUTE_UNUSED,\n\tint %s ATTRIBUTE_UNUSED,\n\trtx %s ATTRIBUTE_UNUSED,\n\trtx %s ATTRIBUTE_UNUSED)\n",
INTERNAL_INSN_LATENCY_FUNC_NAME, INTERNAL_INSN_CODE_NAME,
- INTERNAL_INSN2_CODE_NAME, INSN_PARAMETER_NAME,
- INSN2_PARAMETER_NAME);
+ INTERNAL_INSN2_CODE_NAME, "insn_or_const0",
+ "insn2_or_const0");
fprintf (output_file, "{\n");
if (DECL_INSN_RESERV (advance_cycle_insn_decl)->insn_num == 0)
INTERNAL_INSN_CODE_NAME, ADVANCE_CYCLE_VALUE_NAME,
INTERNAL_INSN2_CODE_NAME, ADVANCE_CYCLE_VALUE_NAME);
+ /* We've now rejected the case that
+ INTERNAL_INSN_CODE_NAME >= ADVANCE_CYCLE_VALUE_NAME
+ i.e. that
+ insn_code >= DFA__ADVANCE_CYCLE,
+ and similarly for insn2_code. */
+ fprintf (output_file,
+ " /* Within output_internal_insn_code_evaluation, the generated\n"
+ " code sets \"code\" to NDFA__COLLAPSE for const0_rtx, and\n"
+ " NDFA__COLLAPSE > DFA__ADVANCE_CYCLE. Hence we can't be\n"
+ " dealing with const0_rtx instances at this point. */\n");
+ if (collapse_flag)
+ fprintf (output_file,
+ " gcc_assert (NDFA__COLLAPSE > DFA__ADVANCE_CYCLE);\n");
+ fprintf (output_file,
+ (" gcc_assert (insn_or_const0 != const0_rtx);\n"
+ " rtx_insn *%s ATTRIBUTE_UNUSED = safe_as_a <rtx_insn *> (insn_or_const0);\n"),
+ INSN_PARAMETER_NAME);
+ fprintf (output_file,
+ (" gcc_assert (insn2_or_const0 != const0_rtx);\n"
+ " rtx_insn *%s ATTRIBUTE_UNUSED = safe_as_a <rtx_insn *> (insn2_or_const0);\n"),
+ INSN2_PARAMETER_NAME);
+
fprintf (output_file, " switch (%s)\n {\n", INTERNAL_INSN_CODE_NAME);
for (i = 0; i < description->decls_num; i++)
if (description->decls[i]->mode == dm_insn_reserv
static void
output_insn_latency_func (void)
{
- fprintf (output_file, "int\n%s (rtx_insn *%s, rtx_insn *%s)\n",
+ fprintf (output_file, "int\n%s (rtx %s, rtx %s)\n",
INSN_LATENCY_FUNC_NAME, INSN_PARAMETER_NAME, INSN2_PARAMETER_NAME);
fprintf (output_file, "{\n int %s, %s;\n",
INTERNAL_INSN_CODE_NAME, INTERNAL_INSN2_CODE_NAME);
int i, j;
fprintf (output_file,
- "void\n%s (FILE *%s, rtx %s ATTRIBUTE_UNUSED)\n{\n",
+ "void\n%s (FILE *%s, rtx_insn *%s ATTRIBUTE_UNUSED)\n{\n",
PRINT_RESERVATION_FUNC_NAME, FILE_PARAMETER_NAME,
INSN_PARAMETER_NAME);
output_insn_has_dfa_reservation_p (void)
{
fprintf (output_file,
- "bool\n%s (rtx %s ATTRIBUTE_UNUSED)\n{\n",
+ "bool\n%s (rtx_insn *%s ATTRIBUTE_UNUSED)\n{\n",
INSN_HAS_DFA_RESERVATION_P_FUNC_NAME,
INSN_PARAMETER_NAME);
}
int
-hook_int_rtx_unreachable (rtx)
+hook_int_rtx_insn_unreachable (rtx_insn *)
{
gcc_unreachable ();
}
extern int hook_int_const_tree_const_tree_1 (const_tree, const_tree);
extern int hook_int_rtx_0 (rtx);
extern int hook_int_rtx_1 (rtx);
-extern int hook_int_rtx_unreachable (rtx);
+extern int hook_int_rtx_insn_unreachable (rtx_insn *);
extern int hook_int_rtx_bool_0 (rtx, bool);
extern int hook_int_rtx_mode_as_bool_0 (rtx, enum machine_mode, addr_space_t,
bool);
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its maximum length. */
-extern int get_attr_length (rtx);
+extern int get_attr_length (rtx_insn *);
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its minimum length. */
-extern int get_attr_min_length (rtx);
+extern int get_attr_min_length (rtx_insn *);
/* Make a pass over all insns and compute their actual lengths by shortening
any branches of variable length if possible. */
depend on things like the values of operands. */
alternative_mask
-get_enabled_alternatives (rtx insn)
+get_enabled_alternatives (rtx_insn *insn)
{
/* Quick exit for asms and for targets that don't use the "enabled"
attribute. */
#define this_target_recog (&default_target_recog)
#endif
-alternative_mask get_enabled_alternatives (rtx);
+alternative_mask get_enabled_alternatives (rtx_insn *);
void recog_init ();
#endif
if (HAVE_return)
{
/* The return we make may have delay slots too. */
- rtx insn = gen_return ();
- insn = emit_jump_insn (insn);
+ rtx pat = gen_return ();
+ rtx_insn *insn = emit_jump_insn (pat);
set_return_jump_label (insn);
emit_barrier ();
if (num_delay_slots (insn) > 0)
correctly. */
#ifdef INSN_SETS_ARE_DELAYED
- if (INSN_SETS_ARE_DELAYED (seq->element (0)))
+ if (INSN_SETS_ARE_DELAYED (seq->insn (0)))
return 0;
#endif
#ifdef INSN_REFERENCES_ARE_DELAYED
- if (INSN_REFERENCES_ARE_DELAYED (seq->element (0)))
+ if (INSN_REFERENCES_ARE_DELAYED (seq->insn (0)))
return 0;
#endif
if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
{
bool annul_p = false;
- rtx control = seq->element (0);
+ rtx_insn *control = seq->insn (0);
/* If this is a CALL_INSN and its delay slots, it is hard to track
the resource needs properly, so give up. */
#ifdef INSN_REFERENCES_ARE_DELAYED
if (! include_delayed_effects
- && INSN_REFERENCES_ARE_DELAYED (x))
+ && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
return;
#endif
#ifdef INSN_SETS_ARE_DELAYED
if (mark_type != MARK_SRC_DEST_CALL
- && INSN_SETS_ARE_DELAYED (x))
+ && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
return;
#endif
/* Estimate the cost of issuing INSN on DFA state STATE. */
static int
-estimate_insn_cost (rtx insn, state_t state)
+estimate_insn_cost (rtx_insn *insn, state_t state)
{
static state_t temp = NULL;
int cost;