+2004-07-08  Jerry Quinn  <jlquinn@optonline.net>
+
+ * alias.c (nonlocal_mentioned_p, nonlocal_referenced_p,
+	nonlocal_set_p, init_alias_analysis): Use LABEL_P, JUMP_P, CALL_P,
+ NONJUMP_INSN_P, INSN_P, NOTE_P, BARRIER_P.
+ * bb-reorder.c (mark_bb_for_unlikely_executed_section,
+ add_labels_and_missing_jumps, find_jump_block,
+ fix_crossing_unconditional_branches, add_reg_crossing_jump_notes):
+ Likewise.
+ * bt-load.c (btr_referenced_p, compute_defs_uses_and_gen,
+ link_btr_uses, move_btr_def): Likewise.
+ * builtins.c (expand_builtin_longjmp, expand_builtin_nonlocal_goto,
+ expand_builtin_expect_jump): Likewise.
+ * caller-save.c (save_call_clobbered_regs, insert_one_insn): Likewise.
+ * calls.c (expand_call, emit_library_call_value_1): Likewise.
+ * cfganal.c (forwarder_block_p): Likewise.
+ * cfgbuild.c (inside_basic_block_p, count_basic_blocks,
+ make_label_edge, rtl_make_eh_edge, make_edges, find_basic_blocks_1,
+ find_bb_boundaries): Likewise.
+ * cfgcleanup.c (try_simplify_condjump, try_forward_edges,
+ merge_blocks_move_predecessor_nojumps,
+ merge_blocks_move_successor_nojumps, insns_match_p,
+ flow_find_cross_jump, outgoing_edges_match, try_crossjump_to_edge,
+ try_optimize_cfg): Likewise.
+ * cfgexpand.c (expand_block, construct_exit_block): Likewise.
+ * cfglayout.c (skip_insns_after_block, label_for_bb,
+ record_effective_endpoints, insn_locators_initialize,
+ fixup_reorder_chain, update_unlikely_executed_notes): Likewise.
+	* cfgloopmanip.c (create_loop_notes): Likewise.
+ * cfgrtl.c (delete_insn, delete_insn_chain,
+ create_basic_block_structure, rtl_delete_block, free_bb_for_insn,
+ update_bb_for_insn, rtl_merge_blocks, rtl_can_merge_blocks,
+ block_label, try_redirect_by_replacing_jump, last_loop_beg_note,
+ redirect_branch_edge, force_nonfallthru_and_redirect,
+ rtl_tidy_fallthru_edge, back_edge_of_syntactic_loop_p,
+ rtl_split_edge, commit_one_edge_insertion, print_rtl_with_bb,
+ update_br_prob_note, rtl_verify_flow_info_1, rtl_verify_flow_info,
+ purge_dead_edges, cfg_layout_redirect_edge_and_branch,
+ cfg_layout_delete_block, cfg_layout_can_merge_blocks_p,
+ cfg_layout_merge_blocks, rtl_block_ends_with_call_p,
+ need_fake_edge_p, rtl_flow_call_edges_add): Likewise.
+ * combine.c (combine_instructions, can_combine_p, try_combine,
+ find_split_point, record_dead_and_set_regs, reg_dead_at_p,
+ distribute_notes, distribute_links, insn_cuid): Likewise.
+ * cse.c (fold_rtx, cse_insn, cse_around_loop,
+ invalidate_skipped_block, cse_set_around_loop,
+ cse_end_of_basic_block, cse_main, cse_basic_block,
+ cse_condition_code_reg): Likewise.
+ * cselib.c (cselib_process_insn): Likewise.
+ * ddg.c (create_ddg): Likewise.
+ * df.c (df_insn_refs_record, df_bb_rd_local_compute, df_insns_modify):
+ Likewise.
+ * dwarf2out.c (dwarf2out_stack_adjust, dwarf2out_frame_debug,
+ gen_label_die, dwarf2out_var_location): Likewise.
+ * emit-rtl.c (get_first_nonnote_insn, get_last_nonnote_insn,
+ next_insn, previous_insn, next_nonnote_insn, prev_nonnote_insn,
+ last_call_insn, active_insn_p, next_label, prev_label,
+ link_cc0_insns, next_cc0_user, try_split, add_insn_after,
+ add_insn_before, remove_insn, add_function_usage_to,
+ reorder_insns, find_line_note, remove_unnecessary_notes,
+ emit_insn_after_1, classify_insn): Likewise.
+ * except.c (convert_from_eh_region_ranges_1, emit_to_new_bb_before,
+ connect_post_landing_pads, sjlj_mark_call_sites,
+ sjlj_emit_function_enter, sjlj_emit_function_exit, reachable_handlers,
+ can_throw_internal, can_throw_external, set_nothrow_function_flags,
+ convert_to_eh_region_ranges): Likewise.
+ * explow.c (optimize_save_area_alloca): Likewise.
+ * expr.c (expand_expr_real): Likewise.
+ * final.c (insn_current_reference_address, compute_alignments,
+ shorten_branches, final, scan_ahead_for_unlikely_executed_note,
+ final_scan_insn, output_asm_label, leaf_function_p): Likewise.
+ * flow.c (first_insn_after_basic_block_note, delete_dead_jumptables,
+ propagate_block_delete_insn, propagate_one_insn,
+ init_propagate_block_info, propagate_block, libcall_dead_p,
+ mark_set_1, attempt_auto_inc, find_auto_inc, try_pre_increment):
+ Likewise.
+ * function.c (instantiate_virtual_regs, reorder_blocks_1,
+ expand_function_start, expand_function_end, contains,
+ thread_prologue_and_epilogue_insns,
+ reposition_prologue_and_epilogue_notes): Likewise.
+ * gcse.c (constprop_register, bypass_conditional_jumps,
+ insert_insn_end_bb, gcse_after_reload): Likewise.
+ * genemit.c (gen_expand, gen_split): Likewise.
+ * genpeep.c (gen_peephole, main): Likewise.
+ * global.c (build_insn_chain): Likewise.
+ * graph.c (node_data, print_rtl_graph_with_bb): Likewise.
+ * haifa-sched.c (unlink_other_notes, unlink_line_notes,
+ get_block_head_tail, no_real_insns_p, rm_line_notes, save_line_notes,
+ restore_line_notes, rm_redundant_line_notes, rm_other_notes,
+ ok_for_early_queue_removal, set_priorities, sched_init): Likewise.
+ * ifcvt.c (count_bb_insns, first_active_insn, last_active_insn,
+ cond_exec_process_insns, end_ifcvt_sequence, noce_process_if_block,
+ merge_if_block, block_jumps_and_fallthru_p, find_if_block,
+ dead_or_predicable): Likewise.
+ * integrate.c (try_constants): Likewise.
+ * jump.c (rebuild_jump_labels, cleanup_barriers,
+ purge_line_number_notes, init_label_info, mark_all_labels,
+ squeeze_notes, get_label_before, get_label_after,
+ reversed_comparison_code_parts, simplejump_p, pc_set,
+ returnjump_p, onlyjump_p, follow_jumps, mark_jump_label,
+ delete_barrier, delete_prior_computation, delete_computation,
+ delete_related_insns, delete_for_peephole, redirect_jump):
+ Likewise.
+ * lcm.c (optimize_mode_switching): Likewise.
+ * local-alloc.c (validate_equiv_mem, update_equiv_regs, block_alloc):
+ Likewise.
+ * loop-doloop.c (doloop_valid_p, doloop_optimize): Likewise.
+ * loop-invariant.c (find_exits, find_invariants_bb): Likewise.
+ * loop-iv.c (simplify_using_assignment): Likewise.
+ * loop.c (compute_luids, loop_optimize, scan_loop, libcall_other_reg,
+ libcall_benefit, skip_consec_insns, move_movables, prescan_loop,
+ find_and_verify_loops, labels_in_range_p, for_each_insn_in_loop,
+ loop_bivs_init_find, strength_reduce, check_insn_for_bivs,
+ check_insn_for_givs, check_final_value, update_giv_derive,
+ basic_induction_var, product_cheap_p, check_dbra_loop,
+ loop_insn_first_p, last_use_this_basic_block,
+ canonicalize_condition, get_condition, loop_regs_scan, load_mems,
+ try_copy_prop, LOOP_BLOCK_NUM, loop_dump_aux): Likewise.
+ * modulo-sched.c (doloop_register_get, find_line_note, sms_schedule,
+ sms_schedule_by_order): Likewise.
+ * optabs.c (emit_no_conflict_block, emit_libcall_block): Likewise.
+ * postreload.c (reload_cse_simplify_operands, reload_combine,
+ reload_cse_move2add): Likewise.
+ * predict.c (can_predict_insn_p, estimate_probability,
+ expected_value_to_br_prob, process_note_predictions): Likewise.
+ * print-rtl.c (print_rtx, print_rtl, print_rtl_single): Likewise.
+ * profile.c (branch_prob): Likewise.
+ * ra-build.c (live_out_1, livethrough_conflicts_bb,
+ detect_webs_set_in_cond_jump): Likewise.
+ * ra-debug.c (ra_print_rtx_object, ra_debug_insns,
+ ra_print_rtl_with_bb): Likewise.
+ * ra-rewrite.c (insert_stores, rewrite_program2): Likewise.
+ * recog.c (next_insn_tests_no_inequality, find_single_use,
+ split_all_insns, peephole2_optimize, if_test_bypass_p): Likewise.
+ * reg-stack.c (next_flags_user, record_label_references,
+ emit_swap_insn, swap_rtx_condition, subst_stack_regs,
+ compensate_edge, convert_regs_1): Likewise.
+ * regclass.c (scan_one_insn): Likewise.
+ * regmove.c (optimize_reg_copy_1, optimize_reg_copy_2, fixup_match_2,
+ regmove_optimize, fixup_match_1, single_set_for_csa,
+ combine_stack_adjustments_for_block): Likewise.
+ * regrename.c (build_def_use, copyprop_hardreg_forward_1): Likewise.
+ * reload.c (find_reloads, find_reloads_address_1, subst_reloads,
+ find_equiv_reg): Likewise.
+ * reload1.c (reload, calculate_needs_all_insns, set_label_offsets,
+ reload_as_needed, emit_input_reload_insns, do_output_reload,
+ delete_output_reload, delete_address_reloads_1, fixup_abnormal_edges):
+ Likewise.
+ * reorg.c (find_end_label, emit_delay_sequence,
+ delete_from_delay_slot, delete_scheduled_jump, optimize_skip,
+ get_jump_flags, rare_destination, mostly_true_jump,
+ try_merge_delay_insns, redundant_insn, own_thread_p,
+ fill_simple_delay_slots, fill_slots_from_thread,
+ fill_eager_delay_slots, relax_delay_slots, make_return_insns,
+ dbr_schedule): Likewise.
+ * resource.c (find_basic_block, next_insn_no_annul,
+ find_dead_or_set_registers, mark_target_live_regs): Likewise.
+ * rtl.h (RTX_PREV): Likewise.
+ * rtlanal.c (global_reg_mentioned_p, no_labels_between_p,
+ no_jumps_between_p, reg_used_between_p, reg_referenced_between_p,
+ reg_set_p, find_last_value, dead_or_set_regno_p, find_reg_fusage,
+ find_regno_fusage, pure_call_p, replace_label, rtx_referenced_p_1,
+ tablejump_p, computed_jump_p, insns_safe_to_move_p,
+ find_first_parameter_load, can_hoist_insn_p): Likewise.
+ * sched-deps.c (get_condition, add_dependence, sched_analyze_2,
+ sched_analyze_insn, sched_analyze, add_forward_dependence): Likewise.
+ * sched-ebb.c (fix_basic_block_boundaries, add_deps_for_risky_insns,
+ schedule_ebbs): Likewise.
+ * sched-rgn.c (is_cfg_nonregular, find_conditional_protection,
+ is_conditionally_protected, can_schedule_ready_p,
+ add_branch_dependences, debug_dependencies): Likewise.
+ * stmt.c (emit_nop, expand_start_case, emit_jump_if_reachable):
+ Likewise.
+ * unroll.c (unroll_loop, copy_loop_body, back_branch_in_range_p,
+ reg_dead_after_loop, loop_find_equiv_value, loop_iterations,
+ set_dominates_use, ujump_to_loop_cont): Likewise.
+ * var-tracking.c (prologue_stack_adjust, vt_initialize): Likewise.
+ * varasm.c (output_constant_pool_1): Likewise.
+
2004-07-08  Zdenek Dvorak  <rakdver@atrey.karlin.mff.cuni.cz>

	* tree-scalar-evolution.c: New file.
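
The hunks that follow apply the substitution described in the log entry
above.  For reference, the predicate macros the patch switches to are
one-line wrappers around GET_CODE tests in rtl.h; the sketch below is
paraphrased from rtl.h of this period rather than quoted from the patch
(INSN_P's exact definition in this era is believed equivalent to the
disjunction shown):

    /* Predicates for the rtx code of an element of the insn chain.  */
    #define NOTE_P(X)          (GET_CODE (X) == NOTE)
    #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)
    #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
    #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
    /* A "real" insn: an INSN, a JUMP_INSN, or a CALL_INSN.  */
    #define INSN_P(X)          (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))

Each hunk therefore rewrites GET_CODE (x) == CALL_INSN as CALL_P (x) and
GET_CODE (x) != CALL_INSN as !CALL_P (x), and likewise for the other
codes, with no change in behavior.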
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
}
}
}
- else if (GET_CODE (insn) == NOTE
+ else if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
copying_arguments = false;
}
for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
cur_insn = NEXT_INSN (cur_insn))
- if (GET_CODE (cur_insn) != NOTE
- && GET_CODE (cur_insn) != CODE_LABEL)
+ if (!NOTE_P (cur_insn)
+ && !LABEL_P (cur_insn))
{
insert_insn = cur_insn;
break;
if (src && (src != ENTRY_BLOCK_PTR))
{
- if (GET_CODE (BB_END (src)) != JUMP_INSN)
+ if (!JUMP_P (BB_END (src)))
/* bb just falls through. */
{
/* make sure there's only one successor */
only one executable instruction, which is an unconditional jump.
If so, we can use it. */
- if (GET_CODE (BB_HEAD (src)) == CODE_LABEL)
+ if (LABEL_P (BB_HEAD (src)))
for (insn = BB_HEAD (src);
!INSN_P (insn) && insn != NEXT_INSN (BB_END (src));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn)
&& insn == BB_END (src)
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& !any_condjump_p (insn))
{
source_bb = src;
/* Check to see if bb ends in a crossing (unconditional) jump. At
this point, no crossing jumps should be conditional. */
- if (GET_CODE (last_insn) == JUMP_INSN
+ if (JUMP_P (last_insn)
&& succ->crossing_edge)
{
rtx label2, table;
cur_insn = NEXT_INSN (cur_insn))
{
BLOCK_FOR_INSN (cur_insn) = cur_bb;
- if (GET_CODE (cur_insn) == JUMP_INSN)
+ if (JUMP_P (cur_insn))
jump_insn = cur_insn;
}
FOR_EACH_BB (bb)
for (e = bb->succ; e; e = e->succ_next)
if (e->crossing_edge
- && GET_CODE (BB_END (e->src)) == JUMP_INSN)
+ && JUMP_P (BB_END (e->src)))
REG_NOTES (BB_END (e->src)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
NULL_RTX,
REG_NOTES (BB_END
{
rtx set;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn)))
{
rtx dest = SET_DEST (set);
user->next = info.users_this_bb;
info.users_this_bb = user;
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
HARD_REG_SET *clobbered = &call_used_reg_set;
HARD_REG_SET call_saved;
IOR_HARD_REG_SET (btrs_live_at_end[i], tmp);
can_throw = 1;
}
- if (can_throw || GET_CODE (insn) == JUMP_INSN)
+ if (can_throw || JUMP_P (insn))
{
int regno;
sbitmap_free (reaching_defs_of_reg);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
int regno;
combine_btr_defs (def, btrs_live_in_range);
btr = def->btr;
add_btr_to_live_range (def);
- if (GET_CODE (insp) == CODE_LABEL)
+ if (LABEL_P (insp))
insp = NEXT_INSN (insp);
/* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now. Some
optimizations can result in insp being both first and last insn of
for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
if (insp == BB_HEAD (b))
abort ();
- if (GET_CODE (insp) == JUMP_INSN || can_throw_internal (insp))
+ if (JUMP_P (insp) || can_throw_internal (insp))
insp = PREV_INSN (insp);
}
{
if (insn == last)
abort ();
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
REG_NOTES (insn));
break;
}
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (CALL_P (insn))
break;
}
}
non-local goto. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
const0_rtx, REG_NOTES (insn));
break;
}
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (CALL_P (insn))
break;
}
ret = get_insns ();
drop_through_label = get_last_insn ();
- if (drop_through_label && GET_CODE (drop_through_label) == NOTE)
+ if (drop_through_label && NOTE_P (drop_through_label))
drop_through_label = prev_nonnote_insn (drop_through_label);
- if (drop_through_label && GET_CODE (drop_through_label) != CODE_LABEL)
+ if (drop_through_label && !LABEL_P (drop_through_label))
drop_through_label = NULL_RTX;
end_sequence ();
{
rtx next = NEXT_INSN (insn);
- if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
+ if (JUMP_P (insn) && any_condjump_p (insn))
{
rtx ifelse = SET_SRC (pc_set (insn));
rtx then_dest = XEXP (ifelse, 1);
/* Otherwise check where we drop through. */
else if (else_dest == pc_rtx)
{
- if (next && GET_CODE (next) == NOTE)
+ if (next && NOTE_P (next))
next = next_nonnote_insn (next);
- if (next && GET_CODE (next) == JUMP_INSN
+ if (next && JUMP_P (next)
&& any_uncondjump_p (next))
temp = XEXP (SET_SRC (pc_set (next)), 0);
else
}
else if (then_dest == pc_rtx)
{
- if (next && GET_CODE (next) == NOTE)
+ if (next && NOTE_P (next))
next = next_nonnote_insn (next);
- if (next && GET_CODE (next) == JUMP_INSN
+ if (next && JUMP_P (next)
&& any_uncondjump_p (next))
temp = XEXP (SET_SRC (pc_set (next)), 0);
else
if (n_regs_saved)
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (TEST_HARD_REG_BIT (hard_regs_saved, regno))
- regno += insert_restore (chain, GET_CODE (insn) == JUMP_INSN,
+ regno += insert_restore (chain, JUMP_P (insn),
regno, MOVE_MAX_WORDS, save_mode);
}
}
isn't a problem. We do, however, assume here that CALL_INSNs don't
reference CC0. Guard against non-INSN's like CODE_LABEL. */
- if ((GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
+ if ((NONJUMP_INSN_P (insn) || JUMP_P (insn))
&& before_p
&& reg_referenced_p (cc0_rtx, PATTERN (insn)))
chain = chain->prev, insn = chain->insn;
/* Expansion of block moves possibly introduced a loop that may
not appear inside libcall block. */
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
failed = true;
if (failed)
than just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
- while (GET_CODE (last) != CALL_INSN)
+ while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
- while (GET_CODE (last) != CALL_INSN)
+ while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
return false;
return (!INSN_P (insn)
- || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
+ || (JUMP_P (insn) && simplejump_p (insn))
|| !flow_active_insn_p (insn));
}
case CODE_LABEL:
/* Avoid creating of basic block for jumptables. */
return (NEXT_INSN (insn) == 0
- || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+ || !JUMP_P (NEXT_INSN (insn))
|| (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
&& GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
{
/* Code labels and barriers cause the current basic block to be
terminated at the previous real insn. */
- if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
+ if ((LABEL_P (insn) || BARRIER_P (insn))
&& saw_insn)
count++, saw_insn = false;
static void
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
{
- if (GET_CODE (label) != CODE_LABEL)
+ if (!LABEL_P (label))
abort ();
/* If the label was never emitted, this insn is junk, but avoid a
void
rtl_make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
{
- int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
+ int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
rtx handlers, i;
handlers = reachable_handlers (insn);
int force_fallthru = 0;
edge e;
- if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
+ if (LABEL_P (BB_HEAD (bb))
&& LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
break;
}
while (insn
- && GET_CODE (insn) == NOTE
+ && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
insn = NEXT_INSN (insn);
next = NEXT_INSN (insn);
- if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
+ if ((LABEL_P (insn) || BARRIER_P (insn))
&& head)
{
prev = create_basic_block_structure (head, end, bb_note, prev);
if (insn == BB_END (bb))
return;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
insn = NEXT_INSN (insn);
/* Scan insn chain and try to find new basic block boundaries. */
insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (insn == BB_END (jump_block))
{
rtx insn = (target->succ->flags & EDGE_FALLTHRU
? BB_HEAD (target) : prev_nonnote_insn (BB_END (target)));
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
insn = NEXT_INSN (insn);
- for (; insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
+ for (; insn && !LABEL_P (insn) && !INSN_P (insn);
insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
break;
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
break;
/* Do not clean up branches to just past the end of a loop
recognition of some patterns. */
insn = PREV_INSN (BB_HEAD (target));
- if (insn && GET_CODE (insn) == NOTE
+ if (insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
break;
}
return;
barrier = next_nonnote_insn (BB_END (a));
- if (GET_CODE (barrier) != BARRIER)
+ if (!BARRIER_P (barrier))
abort ();
delete_insn (barrier);
/* There had better have been a barrier there. Delete it. */
barrier = NEXT_INSN (BB_END (b));
- if (barrier && GET_CODE (barrier) == BARRIER)
+ if (barrier && BARRIER_P (barrier))
delete_insn (barrier);
/* Move block and loop notes out of the chain so that we do not
??? We take the simple route for now and assume that if they're
equal, they were constructed identically. */
- if (GET_CODE (i1) == CALL_INSN
+ if (CALL_P (i1)
&& (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
CALL_INSN_FUNCTION_USAGE (i2))
|| SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)))
while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1)))
last1 = PREV_INSN (last1);
- if (last1 != BB_HEAD (bb1) && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
+ if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
last1 = PREV_INSN (last1);
while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2)))
last2 = PREV_INSN (last2);
- if (last2 != BB_HEAD (bb2) && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
+ if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
last2 = PREV_INSN (last2);
*f1 = last1;
unconditional jump, or a fake edge to exit. */
if (bb1->succ && !bb1->succ->succ_next
&& (bb1->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
- && (GET_CODE (BB_END (bb1)) != JUMP_INSN || simplejump_p (BB_END (bb1))))
+ && (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
return (bb2->succ && !bb2->succ->succ_next
&& (bb2->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
- && (GET_CODE (BB_END (bb2)) != JUMP_INSN || simplejump_p (BB_END (bb2))));
+ && (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));
/* Match conditional jumps - this may get tricky when fallthru and branch
edges are crossed. */
/* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
/* Skip possible basic block header. */
- if (GET_CODE (newpos1) == CODE_LABEL)
+ if (LABEL_P (newpos1))
newpos1 = NEXT_INSN (newpos1);
- if (GET_CODE (newpos1) == NOTE)
+ if (NOTE_P (newpos1))
newpos1 = NEXT_INSN (newpos1);
redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
if (b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& !(b->pred->flags & EDGE_COMPLEX)
- && GET_CODE (BB_HEAD (b)) == CODE_LABEL
+ && LABEL_P (BB_HEAD (b))
/* If the previous block ends with a branch to this
block, we can't delete the label. Normally this
is a condjump that is yet to be simplified, but
some element going to the same place as the
default (fallthru). */
&& (b->pred->src == ENTRY_BLOCK_PTR
- || GET_CODE (BB_END (b->pred->src)) != JUMP_INSN
+ || !JUMP_P (BB_END (b->pred->src))
|| ! label_is_jump_target_p (BB_HEAD (b),
BB_END (b->pred->src))))
{
if (!(mode & CLEANUP_CFGLAYOUT)
&& b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
- && GET_CODE (BB_HEAD (b)) != CODE_LABEL
+ && !LABEL_P (BB_HEAD (b))
&& FORWARDER_BLOCK_P (b)
/* Note that forwarder_block_p true ensures that
there is a successor for this block. */
else if (!(mode & CLEANUP_CFGLAYOUT)
/* If the jump insn has side effects,
we can't kill the edge. */
- && (GET_CODE (BB_END (b)) != JUMP_INSN
+ && (!JUMP_P (BB_END (b))
|| (reload_completed
? simplejump_p (BB_END (b))
: (onlyjump_p (BB_END (b))
/* Java emits line number notes in the top of labels.
??? Make this go away once line number notes are obsoleted. */
BB_HEAD (bb) = NEXT_INSN (last);
- if (GET_CODE (BB_HEAD (bb)) == NOTE)
+ if (NOTE_P (BB_HEAD (bb)))
BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
bsi_next (&bsi);
note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
expand_expr (else_exp, const0_rtx, VOIDmode, 0);
BB_END (bb) = last;
- if (GET_CODE (BB_END (bb)) == BARRIER)
+ if (BARRIER_P (BB_END (bb)))
BB_END (bb) = PREV_INSN (BB_END (bb));
update_bb_for_insn (bb);
new_edge = make_edge (new_bb, dest, 0);
new_edge->probability = REG_BR_PROB_BASE;
new_edge->count = new_bb->count;
- if (GET_CODE (BB_END (new_bb)) == BARRIER)
+ if (BARRIER_P (BB_END (new_bb)))
BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
update_bb_for_insn (new_bb);
expand_expr_stmt (stmt);
for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
{
- if (GET_CODE (last) == CALL_INSN && SIBLING_CALL_P (last))
+ if (CALL_P (last) && SIBLING_CALL_P (last))
{
edge e;
int probability = 0;
after the sibcall (to perform the function return). These confuse the
find_sub_basic_blocks code, so we need to get rid of these. */
last = NEXT_INSN (last);
- if (GET_CODE (last) != BARRIER)
+ if (!BARRIER_P (last))
abort ();
while (NEXT_INSN (last))
{
/* For instance an sqrt builtin expander expands an if with a
sibcall in the then arm and a label for the `else`. */
- if (GET_CODE (NEXT_INSN (last)) == CODE_LABEL)
+ if (LABEL_P (NEXT_INSN (last)))
break;
delete_insn (NEXT_INSN (last));
}
/* Find the block tail. The last insn in the block is the insn
before a barrier and/or table jump insn. */
last = get_last_insn ();
- if (GET_CODE (last) == BARRIER)
+ if (BARRIER_P (last))
last = PREV_INSN (last);
if (JUMP_TABLE_DATA_P (last))
last = PREV_INSN (PREV_INSN (last));
end = get_last_insn ();
if (head == end)
return;
- while (NEXT_INSN (head) && GET_CODE (NEXT_INSN (head)) == NOTE)
+ while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
head = NEXT_INSN (head);
exit_block = create_basic_block (NEXT_INSN (head), end, EXIT_BLOCK_PTR->prev_bb);
exit_block->frequency = EXIT_BLOCK_PTR->frequency;
case CODE_LABEL:
if (NEXT_INSN (insn)
- && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && JUMP_P (NEXT_INSN (insn))
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
{
for (insn = last_insn; insn != BB_END (bb); insn = prev)
{
prev = PREV_INSN (insn);
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_LOOP_END:
{
rtx label = BB_HEAD (bb);
- if (GET_CODE (label) != CODE_LABEL)
+ if (!LABEL_P (label))
{
if (dump_file)
fprintf (dump_file, "Emitting label for block %d\n", bb->index);
for (insn = get_insns ();
insn
- && GET_CODE (insn) == NOTE
+ && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
insn = NEXT_INSN (insn))
continue;
epilogue_locator = loc;
if (active_insn_p (insn))
INSN_LOCATOR (insn) = loc;
- else if (GET_CODE (insn) == NOTE)
+ else if (NOTE_P (insn))
{
switch (NOTE_LINE_NUMBER (insn))
{
e_taken = e;
bb_end_insn = BB_END (bb);
- if (GET_CODE (bb_end_insn) == JUMP_INSN)
+ if (JUMP_P (bb_end_insn))
{
if (any_condjump_p (bb_end_insn))
{
note);
NOTE_BASIC_BLOCK (new_note) = bb;
}
- if (GET_CODE (BB_END (bb)) == JUMP_INSN
+ if (JUMP_P (BB_END (bb))
&& !any_condjump_p (BB_END (bb))
&& bb->succ->crossing_edge )
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
fprintf (dump_file, "duplicate of %i ",
bb->rbi->original->index);
else if (forwarder_block_p (bb)
- && GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
+ && !LABEL_P (BB_HEAD (bb)))
fprintf (dump_file, "compensation ");
else
fprintf (dump_file, "bb %i ", bb->index);
for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
cur_insn = NEXT_INSN (cur_insn))
- if (GET_CODE (cur_insn) == NOTE
+ if (NOTE_P (cur_insn)
&& NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
NOTE_BASIC_BLOCK (cur_insn) = bb;
}
#ifdef ENABLE_CHECKING
/* Verify that there really are no loop notes. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
abort ();
#endif
front of the jump. */
insn = PREV_INSN (BB_HEAD (first[loop->num]));
if (insn
- && GET_CODE (insn) == BARRIER)
+ && BARRIER_P (insn))
insn = PREV_INSN (insn);
if (insn
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& any_uncondjump_p (insn)
&& onlyjump_p (insn))
{
/* Position the note correctly wrt the barrier. */
insn = BB_END (last[loop->num]);
if (NEXT_INSN (insn)
- && GET_CODE (NEXT_INSN (insn)) == BARRIER)
+ && BARRIER_P (NEXT_INSN (insn)))
insn = NEXT_INSN (insn);
end = BB_END (last[loop->num]);
rtx note;
bool really_delete = true;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
/* Some labels can't be directly removed from the INSN chain, as they
might be references via variables, constant pool etc.
/* If deleting a jump, decrement the use count of the label. Deleting
the label itself should happen in the normal course of block merging. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& JUMP_LABEL (insn)
- && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
+ && LABEL_P (JUMP_LABEL (insn)))
LABEL_NUSES (JUMP_LABEL (insn))--;
/* Also if deleting an insn that references a label. */
else
{
while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
- && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
+ && LABEL_P (XEXP (note, 0)))
{
LABEL_NUSES (XEXP (note, 0))--;
remove_note (insn, note);
}
}
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
{
/* When deleting code in bulk (e.g. removing many unreachable
blocks) we can delete a label that's a target of the vector
before deleting the vector itself. */
- if (GET_CODE (label) != NOTE)
+ if (!NOTE_P (label))
LABEL_NUSES (label)--;
}
}
while (1)
{
next = NEXT_INSN (start);
- if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
+ if (NOTE_P (start) && !can_delete_note_p (start))
;
else
next = delete_insn (start);
rtx after;
- if (GET_CODE (head) == CODE_LABEL)
+ if (LABEL_P (head))
after = head;
else
{
if (!head && !end)
head = end = bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
- else if (GET_CODE (head) == CODE_LABEL && end)
+ else if (LABEL_P (head) && end)
{
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
if (head == end)
for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
break;
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
insn = BB_HEAD (b);
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
maybe_remove_eh_handler (insn);
/* Include any jump table following the basic block. */
/* Include any barrier that may follow the basic block. */
tmp = next_nonnote_insn (end);
- if (tmp && GET_CODE (tmp) == BARRIER)
+ if (tmp && BARRIER_P (tmp))
end = tmp;
/* Selectively delete the entire chain. */
{
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) != BARRIER)
+ if (!BARRIER_P (insn))
BLOCK_FOR_INSN (insn) = NULL;
}
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) != BARRIER)
+ if (!BARRIER_P (insn))
set_block_for_insn (insn, bb);
if (insn == BB_END (bb))
break;
int b_empty = 0;
/* If there was a CODE_LABEL beginning B, delete it. */
- if (GET_CODE (b_head) == CODE_LABEL)
+ if (LABEL_P (b_head))
{
/* Detect basic blocks with nothing but a label. This can happen
in particular at the end of a function. */
}
/* If there was a jump out of A, delete it. */
- if (GET_CODE (a_end) == JUMP_INSN)
+ if (JUMP_P (a_end))
{
rtx prev;
for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
- if (GET_CODE (prev) != NOTE
+ if (!NOTE_P (prev)
|| NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
|| prev == BB_HEAD (a))
break;
a_end = PREV_INSN (del_first);
}
- else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
+ else if (BARRIER_P (NEXT_INSN (a_end)))
del_first = NEXT_INSN (a_end);
/* Delete everything marked above as well as crap that might be
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
- && (GET_CODE (BB_END (a)) != JUMP_INSN
+ && (!JUMP_P (BB_END (a))
|| (reload_completed
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
if (block == EXIT_BLOCK_PTR)
return NULL_RTX;
- if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
+ if (!LABEL_P (BB_HEAD (block)))
{
BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
}
/* Remove barriers but keep jumptables. */
while (insn)
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
if (PREV_INSN (insn))
NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
if (NEXT_INSN (insn))
PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
insn = NEXT_INSN (insn);
}
delete_insn_chain (label, table);
barrier = next_nonnote_insn (BB_END (src));
- if (!barrier || GET_CODE (barrier) != BARRIER)
+ if (!barrier || !BARRIER_P (barrier))
emit_barrier_after (BB_END (src));
else
{
/* We don't want a block to end on a line-number note since that has
the potential of changing the code between -g and not -g. */
- while (GET_CODE (BB_END (e->src)) == NOTE
+ while (NOTE_P (BB_END (e->src))
&& NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
delete_insn (BB_END (e->src));
{
rtx last = insn;
- for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
+ for (insn = NEXT_INSN (insn); insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
insn = NEXT_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
/* We can only redirect non-fallthru edges of jump insn. */
if (e->flags & EDGE_FALLTHRU)
return NULL;
- else if (GET_CODE (insn) != JUMP_INSN)
+ else if (!JUMP_P (insn))
return NULL;
/* Recognize a tablejump and adjust all matching cases. */
for (bb_note = BB_HEAD (jump_block);
bb_note && bb_note != NEXT_INSN (BB_END (jump_block));
bb_note = NEXT_INSN (bb_note))
- if (GET_CODE (bb_note) == NOTE
+ if (NOTE_P (bb_note)
&& NOTE_LINE_NUMBER (bb_note) == NOTE_INSN_BASIC_BLOCK)
break;
new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
NOTE_BASIC_BLOCK (new_note) = jump_block;
jump_block->partition = COLD_PARTITION;
}
- if (GET_CODE (BB_END (jump_block)) == JUMP_INSN
+ if (JUMP_P (BB_END (jump_block))
&& !any_condjump_p (BB_END (jump_block))
&& jump_block->succ->crossing_edge )
REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST
If block B consisted only of this single jump, turn it into a deleted
note. */
q = BB_END (b);
- if (GET_CODE (q) == JUMP_INSN
+ if (JUMP_P (q)
&& onlyjump_p (q)
&& (any_uncondjump_p (q)
|| (b->succ == e && e->succ_next == NULL)))
/* We don't want a block to end on a line-number note since that has
the potential of changing the code between -g and not -g. */
- while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
+ while (NOTE_P (q) && NOTE_LINE_NUMBER (q) >= 0)
q = PREV_INSN (q);
}
for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
count++;
if (edge_in->dest != EXIT_BLOCK_PTR
&& PREV_INSN (BB_HEAD (edge_in->dest))
- && GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
+ && NOTE_P (PREV_INSN (BB_HEAD (edge_in->dest)))
&& (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
== NOTE_INSN_LOOP_BEG)
&& !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
{
before = NEXT_INSN (BB_END (edge_in->src));
if (before
- && GET_CODE (before) == NOTE
+ && NOTE_P (before)
&& NOTE_LINE_NUMBER (before) == NOTE_INSN_LOOP_END)
before = NEXT_INSN (before);
bb = create_basic_block (before, NULL, edge_in->src);
its return value. */
if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
&& e->src != ENTRY_BLOCK_PTR
- && GET_CODE (BB_END (e->src)) == CALL_INSN)
+ && CALL_P (BB_END (e->src)))
{
rtx next = next_nonnote_insn (BB_END (e->src));
/* Get the location correct wrt a code label, and "nice" wrt
a basic block note, and before everything else. */
tmp = BB_HEAD (bb);
- if (GET_CODE (tmp) == CODE_LABEL)
+ if (LABEL_P (tmp))
tmp = NEXT_INSN (tmp);
if (NOTE_INSN_BASIC_BLOCK_P (tmp))
tmp = NEXT_INSN (tmp);
if (tmp
- && GET_CODE (tmp) == NOTE
+ && NOTE_P (tmp)
&& NOTE_LINE_NUMBER (tmp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
tmp = NEXT_INSN (tmp);
if (tmp == BB_HEAD (bb))
We know this block has a single successor, so we can just emit
the queued insns before the jump. */
- if (GET_CODE (BB_END (bb)) == JUMP_INSN)
+ if (JUMP_P (BB_END (bb)))
for (before = BB_END (bb);
- GET_CODE (PREV_INSN (before)) == NOTE
+ NOTE_P (PREV_INSN (before))
&& NOTE_LINE_NUMBER (PREV_INSN (before)) ==
NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
;
bb_note = NULL_RTX;
for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
cur_insn = NEXT_INSN (cur_insn))
- if (GET_CODE (cur_insn) == NOTE
+ if (NOTE_P (cur_insn)
&& NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
{
bb_note = cur_insn;
new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
bb_note);
NOTE_BASIC_BLOCK (new_note) = bb;
- if (GET_CODE (BB_END (bb)) == JUMP_INSN
+ if (JUMP_P (BB_END (bb))
&& !any_condjump_p (BB_END (bb))
&& bb->succ->crossing_edge )
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
if (before)
delete_insn (before);
}
- else if (GET_CODE (last) == JUMP_INSN)
+ else if (JUMP_P (last))
abort ();
/* Mark the basic block for find_sub_basic_blocks. */
}
if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
- && GET_CODE (tmp_rtx) != NOTE
- && GET_CODE (tmp_rtx) != BARRIER)
+ && !NOTE_P (tmp_rtx)
+ && !BARRIER_P (tmp_rtx))
fprintf (outf, ";; Insn is not within a basic block\n");
else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
fprintf (outf, ";; Insn is in multiple basic blocks\n");
update_br_prob_note (basic_block bb)
{
rtx note;
- if (GET_CODE (BB_END (bb)) != JUMP_INSN)
+ if (!JUMP_P (BB_END (bb)))
return;
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
err = 1;
}
if (n_branch
- && (GET_CODE (BB_END (bb)) != JUMP_INSN
+ && (!JUMP_P (BB_END (bb))
|| (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
|| any_condjump_p (BB_END (bb))))))
{
error ("Wrong amount of branch edges after conditional jump %i", bb->index);
err = 1;
}
- if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
+ if (n_call && !CALL_P (BB_END (bb)))
{
error ("Call edges for non-call insn in bb %i", bb->index);
err = 1;
}
if (n_abnormal
- && (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
- && (GET_CODE (BB_END (bb)) != JUMP_INSN
+ && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
+ && (!JUMP_P (BB_END (bb))
|| any_condjump_p (BB_END (bb))
|| any_uncondjump_p (BB_END (bb))))
{
block. It ought to contain optional CODE_LABEL followed
by NOTE_BASIC_BLOCK. */
x = BB_HEAD (bb);
- if (GET_CODE (x) == CODE_LABEL)
+ if (LABEL_P (x))
{
if (BB_END (bb) == x)
{
rtx insn;
/* Ensure existence of barrier in BB with no fallthru edges. */
- for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
+ for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
insn = NEXT_INSN (insn))
if (!insn
- || (GET_CODE (insn) == NOTE
+ || (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
{
error ("missing barrier after block %i", bb->index);
else
for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == BARRIER
+ if (BARRIER_P (insn)
#ifndef CASE_DROPS_THROUGH
|| INSN_P (insn)
#else
case CODE_LABEL:
/* An addr_vec is placed outside any basic block. */
if (NEXT_INSN (x)
- && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
+ && JUMP_P (NEXT_INSN (x))
&& (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
x = NEXT_INSN (x);
}
if (INSN_P (x)
- && GET_CODE (x) == JUMP_INSN
+ && JUMP_P (x)
&& returnjump_p (x) && ! condjump_p (x)
- && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
+ && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
fatal_insn ("return not followed by barrier", x);
if (curr_bb && x == BB_END (curr_bb))
curr_bb = NULL;
bool purged = false;
/* If this instruction cannot trap, remove REG_EH_REGION notes. */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (note = find_reg_note (insn, REG_EH_REGION, NULL)))
{
rtx eqnote;
}
else if (e->flags & EDGE_ABNORMAL_CALL)
{
- if (GET_CODE (BB_END (bb)) == CALL_INSN
+ if (CALL_P (BB_END (bb))
&& (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
|| INTVAL (XEXP (note, 0)) >= 0))
continue;
purged = true;
}
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx note;
edge b,f;
return purged;
}
- else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
+ else if (CALL_P (insn) && SIBLING_CALL_P (insn))
{
/* First, there should not be any EH or ABCALL edges resulting
from non-local gotos and the like. If there were, we shouldn't
if (e->flags & EDGE_FALLTHRU)
{
/* Redirect any branch edges unified with the fallthru one. */
- if (GET_CODE (BB_END (src)) == JUMP_INSN
+ if (JUMP_P (BB_END (src))
&& label_is_jump_target_p (BB_HEAD (e->dest),
BB_END (src)))
{
insn = bb->rbi->footer;
while (insn)
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
if (PREV_INSN (insn))
NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
if (NEXT_INSN (insn))
PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
insn = NEXT_INSN (insn);
}
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
- && (GET_CODE (BB_END (a)) != JUMP_INSN
+ && (!JUMP_P (BB_END (a))
|| (reload_completed
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
#endif
/* If there was a CODE_LABEL beginning B, delete it. */
- if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
+ if (LABEL_P (BB_HEAD (b)))
delete_insn (BB_HEAD (b));
/* We should have fallthru edge in a, or we can do dummy redirection to get
it cleaned up. */
- if (GET_CODE (BB_END (a)) == JUMP_INSN)
+ if (JUMP_P (BB_END (a)))
try_redirect_by_replacing_jump (a->succ, b, true);
- if (GET_CODE (BB_END (a)) == JUMP_INSN)
+ if (JUMP_P (BB_END (a)))
abort ();
/* Possible line number notes should appear in between. */
{
rtx insn = BB_END (bb);
- while (GET_CODE (insn) != CALL_INSN
+ while (!CALL_P (insn)
&& insn != BB_HEAD (bb)
&& keep_with_call_p (insn))
insn = PREV_INSN (insn);
- return (GET_CODE (insn) == CALL_INSN);
+ return (CALL_P (insn));
}
/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
if (!INSN_P (insn))
return false;
- if ((GET_CODE (insn) == CALL_INSN
+ if ((CALL_P (insn)
&& !SIBLING_CALL_P (insn)
&& !find_reg_note (insn, REG_NORETURN, NULL)
&& !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
/* Don't split the block between a call and an insn that should
remain in the same block as the call. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
while (split_at_insn != BB_END (bb)
&& keep_with_call_p (NEXT_INSN (split_at_insn)))
split_at_insn = NEXT_INSN (split_at_insn);
INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
label_tick++;
}
{
next = 0;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
label_tick++;
else if (INSN_P (insn))
/* If the linked insn has been replaced by a note, then there
is no point in pursuing this chain any further. */
- if (GET_CODE (link) == NOTE)
+ if (NOTE_P (link))
continue;
for (nextlinks = LOG_LINKS (link);
We need this special code because data flow connections
via CC0 do not get entered in LOG_LINKS. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev)))
{
if ((next = try_combine (insn, prev,
}
/* Do the same for an insn that explicitly references CC0. */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& GET_CODE (PATTERN (insn)) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
explicitly references CC0. If so, try this insn, that insn,
and its predecessor if it sets CC0. */
for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
- if (GET_CODE (XEXP (links, 0)) == INSN
+ if (NONJUMP_INSN_P (XEXP (links, 0))
&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& (next = try_combine (insn, XEXP (links, 0),
prev, &new_direct_jump_p)) != 0)
}
}
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
record_dead_and_set_regs (insn);
retry:
/* Can't merge a function call. */
|| GET_CODE (src) == CALL
/* Don't eliminate a function call argument. */
- || (GET_CODE (i3) == CALL_INSN
+ || (CALL_P (i3)
&& (find_reg_fusage (i3, USE, dest)
|| (REG_P (dest)
&& REGNO (dest) < FIRST_PSEUDO_REGISTER
#ifdef AUTO_INC_DEC
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC
- && (GET_CODE (i3) == JUMP_INSN
+ && (JUMP_P (i3)
|| reg_used_between_p (XEXP (link, 0), insn, i3)
|| reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
return 0;
but that would be much slower, and this ought to be equivalent. */
p = prev_nonnote_insn (insn);
- if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
+ if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
&& ! all_adjacent)
return 0;
#endif
where I2 and I3 are adjacent to avoid making difficult register
usage tests. */
- if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
+ if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
&& REG_P (SET_SRC (PATTERN (i3)))
&& REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
&& find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
they are adjacent to each other or not. */
{
rtx p = prev_nonnote_insn (i3);
- if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
+ if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
&& sets_cc0_p (newi2pat))
{
undo_all ();
INSN_CODE (i3) = insn_code_number;
PATTERN (i3) = newpat;
- if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
+ if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
{
rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
mark_jump_label (PATTERN (i3), i3, 0);
if ((temp = next_nonnote_insn (i3)) == NULL_RTX
- || GET_CODE (temp) != BARRIER)
+ || !BARRIER_P (temp))
emit_barrier_after (i3);
}
*new_direct_jump_p = 1;
if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
- || GET_CODE (temp) != BARRIER)
+ || !BARRIER_P (temp))
emit_barrier_after (undobuf.other_insn);
}
if (seq
&& NEXT_INSN (seq) != NULL_RTX
&& NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
- && GET_CODE (seq) == INSN
+ && NONJUMP_INSN_P (seq)
&& GET_CODE (PATTERN (seq)) == SET
&& SET_DEST (PATTERN (seq)) == reg
&& ! reg_mentioned_p (reg,
SET_SRC (PATTERN (seq)))
- && GET_CODE (NEXT_INSN (seq)) == INSN
+ && NONJUMP_INSN_P (NEXT_INSN (seq))
&& GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
&& SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
&& memory_address_p (GET_MODE (x),
record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
/* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
beginning of function. */
- for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
+ for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
insn = prev_nonnote_insn (insn))
{
note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
break;
case REG_NON_LOCAL_GOTO:
- if (GET_CODE (i3) == JUMP_INSN)
+ if (JUMP_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == JUMP_INSN)
+ else if (i2 && JUMP_P (i2))
place = i2;
else
abort ();
case REG_EH_REGION:
/* These notes must remain with the call or trapping instruction. */
- if (GET_CODE (i3) == CALL_INSN)
+ if (CALL_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == CALL_INSN)
+ else if (i2 && CALL_P (i2))
place = i2;
else if (flag_non_call_exceptions)
{
case REG_SETJMP:
/* These notes must remain with the call. It should not be
possible for both I2 and I3 to be a call. */
- if (GET_CODE (i3) == CALL_INSN)
+ if (CALL_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == CALL_INSN)
+ else if (i2 && CALL_P (i2))
place = i2;
else
abort ();
/* Don't attach REG_LABEL note to a JUMP_INSN which has
JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
- if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
+ if (place && JUMP_P (place) && JUMP_LABEL (place))
{
if (JUMP_LABEL (place) != XEXP (note, 0))
abort ();
- if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
+ if (LABEL_P (JUMP_LABEL (place)))
LABEL_NUSES (JUMP_LABEL (place))--;
place = 0;
}
- if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
+ if (place2 && JUMP_P (place2) && JUMP_LABEL (place2))
{
if (JUMP_LABEL (place2) != XEXP (note, 0))
abort ();
- if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
+ if (LABEL_P (JUMP_LABEL (place2)))
LABEL_NUSES (JUMP_LABEL (place2))--;
place2 = 0;
}
/* If the insn previously containing this note still exists,
put it back where it was. Otherwise move it to the previous
insn. Adjust the corresponding REG_LIBCALL note. */
- if (GET_CODE (from_insn) != NOTE)
+ if (!NOTE_P (from_insn))
place = from_insn;
else
{
case REG_LIBCALL:
/* This is handled similarly to REG_RETVAL. */
- if (GET_CODE (from_insn) != NOTE)
+ if (!NOTE_P (from_insn))
place = from_insn;
else
{
use of A and put the death note there. */
if (from_insn
- && GET_CODE (from_insn) == CALL_INSN
+ && CALL_P (from_insn)
&& find_reg_fusage (from_insn, USE, XEXP (note, 0)))
place = from_insn;
else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
}
}
else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
- || (GET_CODE (tem) == CALL_INSN
+ || (CALL_P (tem)
&& find_reg_fusage (tem, USE, XEXP (note, 0))))
{
place = tem;
replace I3, I2, and I1 by I3 and I2. But in that case the
destination of I2 also remains unchanged. */
- if (GET_CODE (XEXP (link, 0)) == NOTE
+ if (NOTE_P (XEXP (link, 0))
|| (set = single_set (XEXP (link, 0))) == 0)
continue;
place = insn;
break;
}
- else if (GET_CODE (insn) == CALL_INSN
+ else if (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))
{
place = insn;
insn_cuid (rtx insn)
{
while (insn != 0 && INSN_UID (insn) > max_uid_cuid
- && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
+ && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
insn = NEXT_INSN (insn);
if (INSN_UID (insn) > max_uid_cuid)
rtx label = XEXP (base, 0);
rtx table_insn = NEXT_INSN (label);
- if (table_insn && GET_CODE (table_insn) == JUMP_INSN
+ if (table_insn && JUMP_P (table_insn)
&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
{
rtx table = PATTERN (table_insn);
return XVECEXP (table, 0,
offset / GET_MODE_SIZE (GET_MODE (table)));
}
- if (table_insn && GET_CODE (table_insn) == JUMP_INSN
+ if (table_insn && JUMP_P (table_insn)
&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
{
rtx table = PATTERN (table_insn);
Also determine whether there is a CLOBBER that invalidates
all memory references, or all references at varying addresses. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
{
{
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
- || GET_CODE (NEXT_INSN (insn)) != BARRIER)
+ || !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
/* We reemit the jump in as many cases as possible just in
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
- || GET_CODE (NEXT_INSN (insn)) != BARRIER)
+ || !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
}
else
/* Some registers are invalidated by subroutine calls. Memory is
invalidated by non-constant calls. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
}
/* A volatile ASM invalidates everything. */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn)))
flush_hash_table ();
{
prev = PREV_INSN (prev);
}
- while (prev && GET_CODE (prev) == NOTE
+ while (prev && NOTE_P (prev)
&& NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
/* Do not swap the registers around if the previous instruction
note. We cannot do that because REG_EQUIV may provide an
uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
- if (prev != 0 && GET_CODE (prev) == INSN
+ if (prev != 0 && NONJUMP_INSN_P (prev)
&& GET_CODE (PATTERN (prev)) == SET
&& SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
&& ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
the condition being tested. */
last_jump_equiv_class = 0;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& n_sets == 1 && GET_CODE (x) == SET
&& GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
record_jump_equiv (insn, 0);
/* If the previous insn set CC0 and this insn no longer references CC0,
delete the previous insn. Here we use the fact that nothing expects CC0
to be valid over an insn, which is true until the final pass. */
- if (prev_insn && GET_CODE (prev_insn) == INSN
+ if (prev_insn && NONJUMP_INSN_P (prev_insn)
&& (tem = single_set (prev_insn)) != 0
&& SET_DEST (tem) == cc0_rtx
&& ! reg_mentioned_p (cc0_rtx, x))
/* If the jump at the end of the loop doesn't go to the start, we don't
do anything. */
for (insn = PREV_INSN (loop_start);
- insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
+ insn && (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) >= 0);
insn = PREV_INSN (insn))
;
if (insn == 0
- || GET_CODE (insn) != NOTE
+ || !NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
return;
accesses by not processing any instructions created after cse started. */
for (insn = NEXT_INSN (loop_start);
- GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
+ !CALL_P (insn) && !LABEL_P (insn)
&& INSN_UID (insn) < max_insn_uid
- && ! (GET_CODE (insn) == NOTE
+ && ! (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
insn = NEXT_INSN (insn))
{
{
rtx insn;
- for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
+ for (insn = start; insn && !LABEL_P (insn);
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
continue;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
a label or CALL_INSN. */
for (p = prev_nonnote_insn (loop_start);
- p && GET_CODE (p) != CALL_INSN
- && GET_CODE (p) != CODE_LABEL;
+ p && !CALL_P (p)
+ && !LABEL_P (p);
p = prev_nonnote_insn (p))
if ((set = single_set (p)) != 0
&& REG_P (SET_DEST (set))
follow_jumps = skip_blocks = 0;
/* Scan to end of this basic block. */
- while (p && GET_CODE (p) != CODE_LABEL)
+ while (p && !LABEL_P (p))
{
/* Don't cse out the end of a loop. This makes a difference
only for the unusual loops that always execute at least once;
If we are running after loop.c has finished, we can ignore
the NOTE_INSN_LOOP_END. */
- if (! after_loop && GET_CODE (p) == NOTE
+ if (! after_loop && NOTE_P (p)
&& NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
break;
/* Don't cse over a call to setjmp; on some machines (eg VAX)
the regs restored by the longjmp come from
a later time than the setjmp. */
- if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
+ if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
&& find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
break;
especially if it is really an ASM_OPERANDS. */
if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
nsets += XVECLEN (PATTERN (p), 0);
- else if (GET_CODE (p) != NOTE)
+ else if (!NOTE_P (p))
nsets += 1;
/* Ignore insns made by CSE; they cannot affect the boundaries of
registers set in the block when following the jump. */
else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
- && GET_CODE (p) == JUMP_INSN
+ && JUMP_P (p)
&& GET_CODE (PATTERN (p)) == SET
&& GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
&& JUMP_LABEL (p) != 0
&& NEXT_INSN (JUMP_LABEL (p)) != 0)
{
for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
- if ((GET_CODE (q) != NOTE
+ if ((!NOTE_P (q)
|| NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
- || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
+ || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
&& find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
- && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
+ && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
break;
/* If we ran into a BARRIER, this code is an extension of the
basic block when the branch is taken. */
- if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
+ if (follow_jumps && q != 0 && BARRIER_P (q))
{
/* Don't allow ourself to keep walking around an
always-executed loop. */
PUT_MODE (NEXT_INSN (p), QImode);
}
/* Detect a branch around a block of code. */
- else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
+ else if (skip_blocks && q != 0 && !LABEL_P (q))
{
rtx tmp;
/* This is no_labels_between_p (p, q) with an added check for
reaching the end of a function (in case Q precedes P). */
for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
- if (GET_CODE (tmp) == CODE_LABEL)
+ if (LABEL_P (tmp))
break;
if (tmp == q)
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) != NOTE
+ if (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) < 0)
INSN_CUID (insn) = ++i;
else
new_basic_block ();
/* TO might be a label. If so, protect it from being deleted. */
- if (to != 0 && GET_CODE (to) == CODE_LABEL)
+ if (to != 0 && LABEL_P (to))
++LABEL_NUSES (to);
for (insn = from; insn != to; insn = NEXT_INSN (insn))
/* If we haven't already found an insn where we added a LABEL_REF,
check this one. */
- if (GET_CODE (insn) == INSN && ! recorded_label_ref
+ if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
&& for_each_rtx (&PATTERN (insn), check_for_label_ref,
(void *) insn))
recorded_label_ref = 1;
want to count the use in that jump. */
if (to != 0 && NEXT_INSN (insn) == to
- && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
+ && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
{
struct cse_basic_block_data val;
rtx prev;
/* If TO was preceded by a BARRIER we are done with this block
because it has no continuation. */
prev = prev_nonnote_insn (to);
- if (prev && GET_CODE (prev) == BARRIER)
+ if (prev && BARRIER_P (prev))
{
free (qty_table + max_reg);
return insn;
to = val.last;
/* Prevent TO from being deleted if it is a label. */
- if (to != 0 && GET_CODE (to) == CODE_LABEL)
+ if (to != 0 && LABEL_P (to))
++LABEL_NUSES (to);
/* Back up so we process the first insn in the extension. */
if ((cse_jumps_altered == 0
|| (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
&& around_loop && to != 0
- && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
- && GET_CODE (insn) == JUMP_INSN
+ && NOTE_P (to) && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
+ && JUMP_P (insn)
&& JUMP_LABEL (insn) != 0
&& LABEL_NUSES (JUMP_LABEL (insn)) == 1)
cse_around_loop (JUMP_LABEL (insn));
to optimize. */
last_insn = BB_END (bb);
- if (GET_CODE (last_insn) != JUMP_INSN)
+ if (!JUMP_P (last_insn))
continue;
if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
cselib_current_insn = insn;
/* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
- if (GET_CODE (insn) == CODE_LABEL
- || (GET_CODE (insn) == CALL_INSN
+ if (LABEL_P (insn)
+ || (CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn))))
{
/* If this is a call instruction, forget anything stored in a
call clobbered register, or, if this is not a const call, in
memory. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs[i])
/* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
after we have processed the insn. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
{
if (! INSN_P (insn))
{
- if (! first_note && GET_CODE (insn) == NOTE
+ if (! first_note && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
first_note = insn;
continue;
}
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
if (g->closing_branch)
abort (); /* Found two branches in DDG. */
}
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx note;
rtx x;
df_uses_record (df, &PATTERN (insn),
DF_REF_REG_USE, bb, insn, 0);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx note;
bitmap_set_bit (seen, regno);
}
- if (GET_CODE (insn) == CALL_INSN && (df->flags & DF_HARD_REGS))
+ if (CALL_P (insn) && (df->flags & DF_HARD_REGS))
{
bitmap_operation (bb_info->rd_kill, bb_info->rd_kill,
call_killed_defs, BITMAP_IOR);
/* A non-const call should not have slipped through the net. If
it does, we need to create a new basic block. Ouch. The
same applies to a label. */
- if ((GET_CODE (insn) == CALL_INSN
+ if ((CALL_P (insn)
&& ! CONST_OR_PURE_CALL_P (insn))
- || GET_CODE (insn) == CODE_LABEL)
+ || LABEL_P (insn))
abort ();
uid = INSN_UID (insn);
if (prologue_epilogue_contains (insn) || sibcall_epilogue_contains (insn))
return;
- if (!flag_asynchronous_unwind_tables && GET_CODE (insn) == CALL_INSN)
+ if (!flag_asynchronous_unwind_tables && CALL_P (insn))
{
/* Extract the size of the args from the CALL rtx itself. */
insn = PATTERN (insn);
else if (!flag_asynchronous_unwind_tables && cfa.reg != STACK_POINTER_REGNUM)
return;
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
/* When we see a BARRIER, we know to reset args_size to 0. Usually
the compiler will have already emitted a stack adjustment, but
return;
}
- if (GET_CODE (insn) != INSN || clobbers_queued_reg_save (insn))
+ if (!NONJUMP_INSN_P (insn) || clobbers_queued_reg_save (insn))
flush_queued_reg_saves ();
if (! RTX_FRAME_RELATED_P (insn))
eliminated because of various optimizations. We still emit them
here so that it is possible to put breakpoints on them. */
if (insn
- && (GET_CODE (insn) == CODE_LABEL
- || ((GET_CODE (insn) == NOTE
+ && (LABEL_P (insn)
+ || ((NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))))
{
/* When optimization is enabled (via -O) some parts of the compiler
last time. */
if (last_insn != NULL_RTX
&& last_insn == prev_insn
- && GET_CODE (prev_insn) == NOTE
+ && NOTE_P (prev_insn)
&& NOTE_LINE_NUMBER (prev_insn) == NOTE_INSN_VAR_LOCATION)
{
newloc->label = last_label;
while (insn)
{
insn = next_insn (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
while (insn)
{
insn = previous_insn (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
if (insn)
{
insn = NEXT_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
if (insn)
{
insn = PREV_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
}
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
rtx insn;
for (insn = get_last_insn ();
- insn && GET_CODE (insn) != CALL_INSN;
+ insn && !CALL_P (insn);
insn = PREV_INSN (insn))
;
int
active_insn_p (rtx insn)
{
- return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
- || (GET_CODE (insn) == INSN
+ return (CALL_P (insn) || JUMP_P (insn)
+ || (NONJUMP_INSN_P (insn)
&& (! reload_completed
|| (GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))));
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
+ if (insn == 0 || LABEL_P (insn))
break;
}
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
+ if (insn == 0 || LABEL_P (insn))
break;
}
{
rtx user = next_nonnote_insn (insn);
- if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
+ if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
user = XVECEXP (PATTERN (user), 0, 0);
REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
return XEXP (note, 0);
insn = next_nonnote_insn (insn);
- if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
/* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
We may need to handle this specially. */
- if (after && GET_CODE (after) == BARRIER)
+ if (after && BARRIER_P (after))
{
has_barrier = 1;
after = NEXT_INSN (after);
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
njumps++;
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
- if (GET_CODE (trial) == CALL_INSN)
+ if (CALL_P (trial))
{
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
while (*p)
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
REG_NOTES (insn)
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
- if (GET_CODE (trial) == INSN)
+ if (NONJUMP_INSN_P (trial))
{
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
mark_label_nuses (PATTERN (insn));
insn = PREV_INSN (insn);
if (next)
{
PREV_INSN (next) = insn;
- if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
+ if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
}
else if (last_insn == after)
abort ();
}
- if (GET_CODE (after) != BARRIER
- && GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (after)
+ && !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (after)))
{
set_block_for_insn (insn, bb);
either NOTE or LABEL. */
if (BB_END (bb) == after
/* Avoid clobbering of structure when creating new BB. */
- && GET_CODE (insn) != BARRIER
- && (GET_CODE (insn) != NOTE
+ && !BARRIER_P (insn)
+ && (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
BB_END (bb) = insn;
}
NEXT_INSN (after) = insn;
- if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
+ if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx sequence = PATTERN (after);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
if (prev)
{
NEXT_INSN (prev) = insn;
- if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
+ if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
abort ();
}
- if (GET_CODE (before) != BARRIER
- && GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (before)
+ && !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (before)))
{
set_block_for_insn (insn, bb);
either NOTE or LABEL. */
if (BB_HEAD (bb) == insn
/* Avoid clobbering of structure when creating new BB. */
- && GET_CODE (insn) != BARRIER
- && (GET_CODE (insn) != NOTE
+ && !BARRIER_P (insn)
+ && (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
abort ();
}
PREV_INSN (before) = insn;
- if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
+ if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
if (prev)
{
NEXT_INSN (prev) = next;
- if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
+ if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
if (next)
{
PREV_INSN (next) = prev;
- if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
+ if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
}
else if (last_insn == insn)
if (stack == 0)
abort ();
}
- if (GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
{
/* Never ever delete the basic block note without deleting the whole
basic block. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
abort ();
BB_HEAD (bb) = next;
}
void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
- if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
+ if (! call_insn || !CALL_P (call_insn))
abort ();
/* Put the register usage information on the CALL. If there is already
reorder_insns_nobb (from, to, after);
- if (GET_CODE (after) != BARRIER
+ if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
bb->flags |= BB_DIRTY;
- if (GET_CODE (from) != BARRIER
+ if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
return 0;
for (; insn; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) >= 0)
break;
next = NEXT_INSN (insn);
/* We're only interested in notes. */
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
continue;
switch (NOTE_LINE_NUMBER (insn))
break;
/* We're only interested in NOTEs. */
- if (GET_CODE (tmp) != NOTE)
+ if (!NOTE_P (tmp))
continue;
if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
rtx after_after;
basic_block bb;
- if (GET_CODE (after) != BARRIER
+ if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
bb->flags |= BB_DIRTY;
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
- if (GET_CODE (last) != BARRIER)
+ if (!BARRIER_P (last))
+ if (!BARRIER_P (last))
set_block_for_insn (last, bb);
- if (GET_CODE (last) != BARRIER)
+ if (!BARRIER_P (last))
set_block_for_insn (last, bb);
if (BB_END (bb) == after)
BB_END (bb) = last;
enum rtx_code
classify_insn (rtx x)
{
- if (GET_CODE (x) == CODE_LABEL)
+ if (LABEL_P (x))
return CODE_LABEL;
if (GET_CODE (x) == CALL)
return CALL_INSN;
for (insn = *pinsns; insn ; insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
int kind = NOTE_LINE_NUMBER (insn);
if (kind == NOTE_INSN_EH_REGION_BEG
/* Calls can always potentially throw exceptions, unless
they have a REG_EH_REGION note with a value of 0 or less,
which should be the only possible kind so far. */
- && (GET_CODE (insn) == CALL_INSN
+ && (CALL_P (insn)
/* If we wanted exceptions for non-call insns, then
any may_trap_p instruction could throw. */
|| (flag_non_call_exceptions
if (e->flags & EDGE_FALLTHRU)
force_nonfallthru (e);
last = emit_insn_before (seq, insn);
- if (GET_CODE (last) == BARRIER)
+ if (BARRIER_P (last))
last = PREV_INSN (last);
bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
update_bb_for_insn (bb);
end_sequence ();
barrier = emit_insn_before (seq, region->resume);
/* Avoid duplicate barrier. */
- if (GET_CODE (barrier) != BARRIER)
+ if (!BARRIER_P (barrier))
abort ();
delete_insn (barrier);
delete_insn (region->resume);
rtx note, before, p;
/* Reset value tracking at extended basic block boundaries. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
last_call_site = -2;
if (! INSN_P (insn))
/* Calls (and trapping insns) without notes are outside any
exception handling region in this function. Mark them as
no action. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
this_call_site = -1;
/* Don't separate a call from its argument loads. */
before = insn;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
before = find_first_parameter_load (insn, NULL_RTX);
start_sequence ();
can_throw_internal instructions. */
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
- if (GET_CODE (fn_begin) == NOTE
+ if (NOTE_P (fn_begin)
&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
|| NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
break;
{
rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
for (; ; fn_begin = NEXT_INSN (fn_begin))
- if ((GET_CODE (fn_begin) == NOTE
+ if ((NOTE_P (fn_begin)
&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
|| fn_begin == last)
break;
else
{
insn = cfun->eh->sjlj_exit_after;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
insn = NEXT_INSN (insn);
emit_insn_after (seq, insn);
}
rtx handlers = NULL;
int region_number;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == RESX)
{
region_number = XINT (PATTERN (insn), 0);
if (! INSN_P (insn))
return false;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == RESX
&& XINT (PATTERN (insn), 0) > 0)
return can_throw_internal_1 (XINT (PATTERN (insn), 0));
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (! INSN_P (insn))
return false;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
assume it might throw. Given that the front end and middle
ends mark known NOTHROW functions, this isn't so wildly
inaccurate. */
- return (GET_CODE (insn) == CALL_INSN
+ return (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))));
}
{
TREE_NOTHROW (current_function_decl) = 0;
- if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
{
TREE_NOTHROW (current_function_decl) = 0;
- if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
rtx this_landing_pad;
insn = iter;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
- if (! (GET_CODE (insn) == CALL_INSN
+ if (! (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn)))))
continue;
{
rtx note;
- if (GET_CODE (insn) != INSN)
+ if (!NONJUMP_INSN_P (insn))
continue;
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
may_trap_p instruction may throw. */
&& GET_CODE (PATTERN (insn)) != CLOBBER
&& GET_CODE (PATTERN (insn)) != USE
- && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
+ && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
REG_NOTES (insn));
seq = NEXT_INSN (PREV_INSN (branch));
seq_uid = INSN_UID (seq);
- if (GET_CODE (branch) != JUMP_INSN)
+ if (!JUMP_P (branch))
/* This can happen for example on the PA; the objective is to know the
offset to address something in front of the start of the function.
Thus, we can treat it like a backward branch.
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
edge e;
- if (GET_CODE (label) != CODE_LABEL
+ if (!LABEL_P (label)
|| probably_never_executed_bb_p (bb))
continue;
max_log = LABEL_ALIGN (label);
is separated by the former loop start insn from the
NOTE_INSN_LOOP_BEG. */
}
- else if (GET_CODE (insn) == CODE_LABEL)
+ else if (LABEL_P (insn))
{
rtx next;
/* ADDR_VECs only take room if read-only data goes into the text
section. */
if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
- if (next && GET_CODE (next) == JUMP_INSN)
+ if (next && JUMP_P (next))
{
rtx nextbody = PATTERN (next);
if (GET_CODE (nextbody) == ADDR_VEC
max_log = 0;
max_skip = 0;
}
- else if (GET_CODE (insn) == BARRIER)
+ else if (BARRIER_P (insn))
{
rtx label;
for (label = insn; label && ! INSN_P (label);
label = NEXT_INSN (label))
- if (GET_CODE (label) == CODE_LABEL)
+ if (LABEL_P (label))
{
log = LABEL_ALIGN_AFTER_BARRIER (insn);
if (max_log < log)
{
int uid = INSN_UID (seq);
int log;
- log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
+ log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
uid_align[uid] = align_tab[0];
if (log)
{
int min_align;
addr_diff_vec_flags flags;
- if (GET_CODE (insn) != JUMP_INSN
+ if (!JUMP_P (insn)
|| GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
continue;
pat = PATTERN (insn);
insn_lengths[uid] = 0;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log)
INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
- if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
- || GET_CODE (insn) == CODE_LABEL)
+ if (NOTE_P (insn) || BARRIER_P (insn)
+ || LABEL_P (insn))
continue;
if (INSN_DELETED_P (insn))
continue;
uid = INSN_UID (insn);
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log > insn_current_align)
INSN_ADDRESSES (uid) = insn_current_address;
#ifdef CASE_VECTOR_SHORTEN_MODE
- if (optimize && GET_CODE (insn) == JUMP_INSN
+ if (optimize && JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
{
rtx body = PATTERN (insn);
if (! (varying_length[uid]))
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
continue;
}
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
{
rtx last = 0;
for (insn = first; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (last != 0
#ifdef USE_MAPPED_LOCATION
#ifdef HAVE_cc0
/* If CC tracking across branches is enabled, record the insn which
jumps to each branch only reached from one place. */
- if (optimize && GET_CODE (insn) == JUMP_INSN)
+ if (optimize && JUMP_P (insn))
{
rtx lab = JUMP_LABEL (insn);
if (lab && LABEL_NUSES (lab) == 1)
{
/* This can be triggered by bugs elsewhere in the compiler if
new insns are created after init_insn_lengths is called. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
insn_current_address = -1;
else
abort ();
for (temp = insn; temp; temp = NEXT_INSN (temp))
{
- if (GET_CODE (temp) == NOTE
+ if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
return true;
- if (GET_CODE (temp) == NOTE
+ if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_BASIC_BLOCK)
{
bb_note_count++;
insn, and that branch is the only way to reach this label,
set the condition codes based on the branch and its
predecessor. */
- if (barrier && GET_CODE (barrier) == BARRIER
- && jump && GET_CODE (jump) == JUMP_INSN
+ if (barrier && BARRIER_P (barrier)
+ && jump && JUMP_P (jump)
&& (prev = prev_nonnote_insn (jump))
- && GET_CODE (prev) == INSN)
+ && NONJUMP_INSN_P (prev))
{
NOTICE_UPDATE_CC (PATTERN (prev), prev);
NOTICE_UPDATE_CC (PATTERN (jump), jump);
if (flag_reorder_blocks_and_partition)
{
rtx tmp_table, tmp_label;
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& tablejump_p (NEXT_INSN (insn), &tmp_label, &tmp_table))
{
/* Do nothing; do NOT change the current section. */
app_on = 0;
}
if (NEXT_INSN (insn) != 0
- && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
+ && JUMP_P (NEXT_INSN (insn)))
{
rtx nextbody = PATTERN (NEXT_INSN (insn));
called function. Hence we don't preserve any CC-setting
actions in these insns and the CC must be marked as being
clobbered by the function. */
- if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
+ if (CALL_P (XVECEXP (body, 0, 0)))
{
CC_STATUS_INIT;
}
will cause an improper number of delay insns to be written. */
if (final_sequence == 0
&& prescan >= 0
- && GET_CODE (insn) == INSN && GET_CODE (body) == SET
+ && NONJUMP_INSN_P (insn) && GET_CODE (body) == SET
&& REG_P (SET_SRC (body))
&& REG_P (SET_DEST (body))
&& REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
do straightforwardly if the cc's were set up normally. */
if (cc_status.flags != 0
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& GET_CODE (body) == SET
&& SET_DEST (body) == pc_rtx
&& GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
{
rtx cond_rtx, then_rtx, else_rtx;
- if (GET_CODE (insn) != JUMP_INSN
+ if (!JUMP_P (insn)
&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
{
cond_rtx = XEXP (SET_SRC (set), 0);
current_output_insn = debug_insn = insn;
#if defined (DWARF2_UNWIND_INFO)
- if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
+ if (CALL_P (insn) && dwarf2out_do_frame ())
dwarf2out_frame_debug (insn);
#endif
prev != last_ignored_compare;
prev = PREV_INSN (prev))
{
- if (GET_CODE (prev) == NOTE)
+ if (NOTE_P (prev))
delete_insn (prev); /* Use delete_note. */
}
the unwind info. We've already done this for delay slots
and call instructions. */
#if defined (DWARF2_UNWIND_INFO)
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
#if !defined (HAVE_prologue)
&& !ACCUMULATE_OUTGOING_ARGS
#endif
if (GET_CODE (x) == LABEL_REF)
x = XEXP (x, 0);
- if (GET_CODE (x) == CODE_LABEL
- || (GET_CODE (x) == NOTE
+ if (LABEL_P (x)
+ || (NOTE_P (x)
&& NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
else
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
- && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
+ && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
{
insn = XEXP (link, 0);
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
- && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
+ && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
if (insn == NULL_RTX)
return NULL_RTX;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
insn = NEXT_INSN (insn);
if (!NOTE_INSN_BASIC_BLOCK_P (insn))
abort ();
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
- && GET_CODE (next) == JUMP_INSN
+ && JUMP_P (next)
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
{
real good way to fix up the reference to the deleted label
when the label is deleted, so we just allow it here. */
- if (inote && GET_CODE (inote) == CODE_LABEL)
+ if (inote && LABEL_P (inote))
{
rtx label = XEXP (inote, 0);
rtx next;
jump following it, but not the label itself. */
if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
&& (next = next_nonnote_insn (label)) != NULL
- && GET_CODE (next) == JUMP_INSN
+ && JUMP_P (next)
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
{
in a register clobbered by calls. Find all regs now live and
record this for them. */
- if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
+ if (CALL_P (insn) && (flags & PROP_REG_INFO))
EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
{ REG_N_CALLS_CROSSED (i)++; });
would have killed the values if they hadn't been deleted. */
mark_set_regs (pbi, PATTERN (insn), insn);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
regset live_at_end;
bool sibcall_p;
prev = PREV_INSN (insn);
#endif
- if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
+ if (! insn_is_dead && CALL_P (insn))
{
int i;
rtx note, cond;
/* If this block ends in a conditional branch, for each register
live from one side of the branch and not the other, record the
register as conditionally dead. */
- if (GET_CODE (BB_END (bb)) == JUMP_INSN
+ if (JUMP_P (BB_END (bb))
&& any_condjump_p (BB_END (bb)))
{
regset_head diff_head;
{
rtx insn, set;
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn))
&& MEM_P (SET_DEST (set)))
{
/* If this is a call to `setjmp' et al, warn if any
non-volatile datum is live. */
if ((flags & PROP_REG_INFO)
- && GET_CODE (insn) == CALL_INSN
+ && CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
int i;
/* Find the call insn. */
- while (call != insn && GET_CODE (call) != CALL_INSN)
+ while (call != insn && !CALL_P (call))
call = NEXT_INSN (call);
/* If there is none, do nothing special,
if (y && (BLOCK_NUM (y) == blocknum)
&& (regno_first >= FIRST_PSEUDO_REGISTER
|| (asm_noperands (PATTERN (y)) < 0
- && ! ((GET_CODE (insn) == CALL_INSN
- || GET_CODE (y) == CALL_INSN)
+ && ! ((CALL_P (insn)
+ || CALL_P (y))
&& global_regs[regno_first]))))
LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
}
use of INCR_REG. If a use of INCR_REG was just placed in
the insn before INSN, make that the next use.
Otherwise, invalidate it. */
- if (GET_CODE (PREV_INSN (insn)) == INSN
+ if (NONJUMP_INSN_P (PREV_INSN (insn))
&& GET_CODE (PATTERN (PREV_INSN (insn))) == SET
&& SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
pbi->reg_next_use[regno] = PREV_INSN (insn);
/* If there are any calls between INSN and INCR, show
that REGNO now crosses them. */
for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
- if (GET_CODE (temp) == CALL_INSN)
+ if (CALL_P (temp))
REG_N_CALLS_CROSSED (regno)++;
/* Invalidate alias info for Q since we just changed its value. */
int regno;
int size = GET_MODE_SIZE (GET_MODE (x));
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return;
/* Here we detect use of an index register which might be good for
because if the incremented register is spilled and must be reloaded
there would be no way to store the incremented value back in memory. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return 0;
use = 0;
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
{
as opposed to parm setup. */
emit_note (NOTE_INSN_FUNCTION_BEG);
- if (GET_CODE (get_last_insn ()) != NOTE)
+ if (!NOTE_P (get_last_insn ()))
emit_note (NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
rtx insn, seq;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
start_sequence ();
probe_stack_range (STACK_CHECK_PROTECT,
{
int i, j;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int count = 0;
/* Verify that there are no active instructions in the last block. */
label = BB_END (last);
- while (label && GET_CODE (label) != CODE_LABEL)
+ while (label && !LABEL_P (label))
{
if (active_insn_p (label))
break;
label = PREV_INSN (label);
}
- if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
+ if (BB_HEAD (last) == label && LABEL_P (label))
{
rtx epilogue_line_note = NULL_RTX;
for (seq = get_last_insn ();
seq && ! active_insn_p (seq);
seq = PREV_INSN (seq))
- if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
+ if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
{
epilogue_line_note = seq;
break;
continue;
jump = BB_END (bb);
- if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
+ if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
continue;
/* If we have an unconditional jump, we can replace that
rtx i;
rtx newinsn;
- if (GET_CODE (insn) != CALL_INSN
+ if (!CALL_P (insn)
|| ! SIBLING_CALL_P (insn))
continue;
for (insn = prologue_end; insn; insn = prev)
{
prev = PREV_INSN (insn);
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
/* Note that we cannot reorder the first insn in the
chain, since rest_of_compilation relies on that
for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
insn != prologue_end && insn;
insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
break;
/* If we didn't find one, make a copy of the first line number
for (insn = next_active_insn (prologue_end);
insn;
insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
emit_note_copy_after (insn, prologue_end);
break;
for (insn = epilogue_end; insn; insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) > 0
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
reorg has run. */
for (insn = f; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
note = insn;
if (note == 0)
{
for (note = last; (note = NEXT_INSN (note));)
- if (GET_CODE (note) == NOTE
+ if (NOTE_P (note)
&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
break;
}
/* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
- if (GET_CODE (last) == CODE_LABEL)
+ if (LABEL_P (last))
last = NEXT_INSN (last);
reorder_insns (note, note, last);
}
reorg has run. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
note = insn;
if (note == 0)
{
for (note = insn; (note = PREV_INSN (note));)
- if (GET_CODE (note) == NOTE
+ if (NOTE_P (note)
&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
break;
}
}
/* Handle normal insns next. */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& try_replace_reg (from, to, insn))
return 1;
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
if (setcc)
break;
instructions in the presence of non-call exceptions. */
if (JUMP_P (insn)
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
{
#ifdef HAVE_cc0
/* It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
Check this. */
- if (GET_CODE (insn) == INSN && pre
+ if (NONJUMP_INSN_P (insn) && pre
&& !TEST_BIT (antloc[bb->index], expr->bitmap_index)
&& !TEST_BIT (transp[bb->index], expr->bitmap_index))
abort ();
insn = NEXT_INSN (insn))
{
/* Is it a load - of the form (set (reg) (mem))? */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& REG_P (SET_DEST (PATTERN (insn)))
&& MEM_P (SET_SRC (PATTERN (insn))))
|| (GET_CODE (next) == PARALLEL
&& GET_CODE (XVECEXP (next, 0, 0)) == CALL))
printf (" emit_call_insn (");
- else if (GET_CODE (next) == CODE_LABEL)
+ else if (LABEL_P (next))
printf (" emit_label (");
else if (GET_CODE (next) == MATCH_OPERAND
|| GET_CODE (next) == MATCH_DUP
|| (GET_CODE (next) == PARALLEL
&& GET_CODE (XVECEXP (next, 0, 0)) == CALL))
printf (" emit_call_insn (");
- else if (GET_CODE (next) == CODE_LABEL)
+ else if (LABEL_P (next))
printf (" emit_label (");
else if (GET_CODE (next) == MATCH_OPERAND
|| GET_CODE (next) == MATCH_OPERATOR
printf (" do { insn = NEXT_INSN (insn);\n");
printf (" if (insn == 0) goto L%d; }\n",
insn_code_number);
- printf (" while (GET_CODE (insn) == NOTE\n");
- printf ("\t || (GET_CODE (insn) == INSN\n");
+ printf (" while (NOTE_P (insn)\n");
+ printf ("\t || (NONJUMP_INSN_P (insn)\n");
printf ("\t && (GET_CODE (PATTERN (insn)) == USE\n");
printf ("\t\t || GET_CODE (PATTERN (insn)) == CLOBBER)));\n");
- printf (" if (GET_CODE (insn) == CODE_LABEL\n\
- || GET_CODE (insn) == BARRIER)\n goto L%d;\n",
+ printf (" if (LABEL_P (insn)\n\
+ || BARRIER_P (insn))\n goto L%d;\n",
insn_code_number);
}
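For reference, this is roughly the insn-scanning code those printf calls now emit, reassembled from the format strings above (a sketch only; `L<n>' stands for the label built from the generated insn code number, and indentation is approximate):

  do { insn = NEXT_INSN (insn);
       if (insn == 0) goto L<n>; }
  /* Skip over notes and over USE/CLOBBER insns, as before.  */
  while (NOTE_P (insn)
         || (NONJUMP_INSN_P (insn)
             && (GET_CODE (PATTERN (insn)) == USE
                 || GET_CODE (PATTERN (insn)) == CLOBBER)));
  /* Stop at basic block boundaries.  */
  if (LABEL_P (insn)
      || BARRIER_P (insn))
    goto L<n>;

The generated scanner skips exactly the same insns as before; only the spelling of the tests changes.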
/* Early out: no peepholes for insns followed by barriers. */
printf (" if (NEXT_INSN (ins1)\n");
- printf (" && GET_CODE (NEXT_INSN (ins1)) == BARRIER)\n");
+ printf (" && BARRIER_P (NEXT_INSN (ins1)))\n");
printf (" return 0;\n\n");
/* Read the machine description. */
});
}
- if (GET_CODE (first) != NOTE && GET_CODE (first) != BARRIER)
+ if (!NOTE_P (first) && !BARRIER_P (first))
{
c = new_insn_chain ();
c->prev = prev;
&& ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
|| GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
&& prev_real_insn (first) != 0
- && GET_CODE (prev_real_insn (first)) == JUMP_INSN))
+ && JUMP_P (prev_real_insn (first))))
abort ();
break;
}
fprintf (fp, "node: {\n title: \"%s.%d\"\n color: %s\n \
label: \"%s %d\n",
current_function_name (), XINT (tmp_rtx, 0),
- GET_CODE (tmp_rtx) == NOTE ? "lightgrey"
- : GET_CODE (tmp_rtx) == INSN ? "green"
- : GET_CODE (tmp_rtx) == JUMP_INSN ? "darkgreen"
- : GET_CODE (tmp_rtx) == CALL_INSN ? "darkgreen"
- : GET_CODE (tmp_rtx) == CODE_LABEL ? "\
+ NOTE_P (tmp_rtx) ? "lightgrey"
+ : NONJUMP_INSN_P (tmp_rtx) ? "green"
+ : JUMP_P (tmp_rtx) ? "darkgreen"
+ : CALL_P (tmp_rtx) ? "darkgreen"
+ : LABEL_P (tmp_rtx) ? "\
darkgrey\n shape: ellipse" : "white",
GET_RTX_NAME (GET_CODE (tmp_rtx)), XINT (tmp_rtx, 0));
break;
}
/* Print the RTL. */
- if (GET_CODE (tmp_rtx) == NOTE)
+ if (NOTE_P (tmp_rtx))
{
const char *name = "";
if (NOTE_LINE_NUMBER (tmp_rtx) < 0)
if (start[INSN_UID (tmp_rtx)] < 0 && end[INSN_UID (tmp_rtx)] < 0)
{
- if (GET_CODE (tmp_rtx) == BARRIER)
+ if (BARRIER_P (tmp_rtx))
continue;
- if (GET_CODE (tmp_rtx) == NOTE
+ if (NOTE_P (tmp_rtx)
&& (1 || in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB))
continue;
}
{
/* Don't print edges to barriers. */
if (next_insn == 0
- || GET_CODE (next_insn) != BARRIER)
+ || !BARRIER_P (next_insn))
draw_edge (fp, XINT (tmp_rtx, 0),
next_insn ? INSN_UID (next_insn) : 999999, 0, 0);
else
do
next_insn = NEXT_INSN (next_insn);
while (next_insn
- && (GET_CODE (next_insn) == NOTE
- || GET_CODE (next_insn) == BARRIER));
+ && (NOTE_P (next_insn)
+ || BARRIER_P (next_insn)));
draw_edge (fp, XINT (tmp_rtx, 0),
next_insn ? INSN_UID (next_insn) : 999999, 0, 3);
{
rtx prev = PREV_INSN (insn);
- while (insn != tail && GET_CODE (insn) == NOTE)
+ while (insn != tail && NOTE_P (insn))
{
rtx next = NEXT_INSN (insn);
/* Delete the note from its current position. */
{
rtx prev = PREV_INSN (insn);
- while (insn != tail && GET_CODE (insn) == NOTE)
+ while (insn != tail && NOTE_P (insn))
{
rtx next = NEXT_INSN (insn);
basic block, or notes at the ends of basic blocks. */
while (head != tail)
{
- if (GET_CODE (head) == NOTE)
+ if (NOTE_P (head))
head = NEXT_INSN (head);
- else if (GET_CODE (tail) == NOTE)
+ else if (NOTE_P (tail))
tail = PREV_INSN (tail);
- else if (GET_CODE (head) == CODE_LABEL)
+ else if (LABEL_P (head))
head = NEXT_INSN (head);
else
break;
{
while (head != NEXT_INSN (tail))
{
- if (GET_CODE (head) != NOTE && GET_CODE (head) != CODE_LABEL)
+ if (!NOTE_P (head) && !LABEL_P (head))
return 0;
head = NEXT_INSN (head);
}
/* Farm out notes, and maybe save them in NOTE_LIST.
This is needed to keep the debugger from
getting completely deranged. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
prev = insn;
insn = unlink_line_notes (insn, next_tail);
next_tail = NEXT_INSN (tail);
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
line = insn;
else
LINE_NOTE (insn) = line;
of this block. If it happens to be the same, then we don't want to
emit another line number note here. */
for (line = head; line; line = PREV_INSN (line))
- if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
+ if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
break;
/* Walk the insns keeping track of the current line-number and inserting
the line-number notes as needed. */
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
line = insn;
/* This used to emit line number notes before every non-deleted note.
However, this confuses a debugger, because line notes not separated
by real instructions all end up at the same address. I can find no
use for line number notes before other notes, so none are emitted. */
- else if (GET_CODE (insn) != NOTE
+ else if (!NOTE_P (insn)
&& INSN_UID (insn) < old_max_uid
&& (note = LINE_NOTE (insn)) != 0
&& note != line
are already present. The remainder tend to occur at basic
block boundaries. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
/* If there are no active insns following, INSN is redundant. */
if (active_insn == 0)
line = insn;
active_insn = 0;
}
- else if (!((GET_CODE (insn) == NOTE
+ else if (!((NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER))))
active_insn++;
/* Farm out notes, and maybe save them in NOTE_LIST.
This is needed to keep the debugger from
getting completely deranged. */
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
prev = insn;
rtx dep_link = 0;
int dep_cost;
- if (GET_CODE (prev_insn) != NOTE)
+ if (!NOTE_P (prev_insn))
{
dep_link = find_insn_list (insn, INSN_DEPEND (prev_insn));
if (dep_link)
sched_max_insns_priority = 0;
for (insn = tail; insn != prev_head; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
continue;
n_insn++;
schedule differently depending on whether or not there are
line-number notes, i.e., depending on whether or not we're
generating debugging information. */
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
++luid;
if (insn == BB_END (b))
FOR_EACH_BB (b)
{
for (line = BB_HEAD (b); line; line = PREV_INSN (line))
- if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
+ if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
{
line_note_head[b->index] = line;
break;
{
if (INSN_P (line))
break;
- if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
+ if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
line_note_head[b->index] = line;
}
}
insn = BB_END (EXIT_BLOCK_PTR->prev_bb);
if (NEXT_INSN (insn) == 0
- || (GET_CODE (insn) != NOTE
- && GET_CODE (insn) != CODE_LABEL
+ || (!NOTE_P (insn)
+ && !LABEL_P (insn)
/* Don't emit a NOTE if it would end up before a BARRIER. */
- && GET_CODE (NEXT_INSN (insn)) != BARRIER))
+ && !BARRIER_P (NEXT_INSN (insn))))
{
emit_note_after (NOTE_INSN_DELETED, BB_END (EXIT_BLOCK_PTR->prev_bb));
/* Make insn appear outside BB. */
while (1)
{
- if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
+ if (CALL_P (insn) || NONJUMP_INSN_P (insn))
count++;
if (insn == BB_END (bb))
{
rtx insn = BB_HEAD (bb);
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
- while (GET_CODE (insn) == NOTE)
+ while (NOTE_P (insn))
{
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return NULL_RTX;
return insn;
rtx insn = BB_END (bb);
rtx head = BB_HEAD (bb);
- while (GET_CODE (insn) == NOTE
- || GET_CODE (insn) == JUMP_INSN
+ while (NOTE_P (insn)
+ || JUMP_P (insn)
|| (skip_use_p
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == USE))
{
if (insn == head)
insn = PREV_INSN (insn);
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
return NULL_RTX;
return insn;
for (insn = start; ; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
goto insn_done;
- if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
+ if (!NONJUMP_INSN_P (insn) && !CALL_P (insn))
abort ();
/* Remove USE insns that get in the way. */
validate_change (insn, &PATTERN (insn), pattern, 1);
- if (GET_CODE (insn) == CALL_INSN && prob_val)
+ if (CALL_P (insn) && prob_val)
validate_change (insn, &REG_NOTES (insn),
alloc_EXPR_LIST (REG_BR_PROB, prob_val,
REG_NOTES (insn)), 1);
As an exercise for the reader, build a general mechanism that
allows proper placement of required clobbers. */
for (insn = seq; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
|| recog_memoized (insn) == -1)
return NULL_RTX;
COND_EARLIEST to JUMP. Make sure the relevant data is still
intact. */
if (! insn_b
- || GET_CODE (insn_b) != INSN
+ || !NONJUMP_INSN_P (insn_b)
|| (set_b = single_set (insn_b)) == NULL_RTX
|| ! rtx_equal_p (x, SET_DEST (set_b))
|| reg_overlap_mentioned_p (x, SET_SRC (set_b))
{
if (find_reg_note (last, REG_NORETURN, NULL))
;
- else if (GET_CODE (last) == INSN
+ else if (NONJUMP_INSN_P (last)
&& GET_CODE (PATTERN (last)) == TRAP_IF
&& TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
;
/* There should still be something at the end of the THEN or ELSE
blocks taking us to our final destination. */
- else if (GET_CODE (last) == JUMP_INSN)
+ else if (JUMP_P (last))
;
else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
- && GET_CODE (last) == CALL_INSN
+ && CALL_P (last)
&& SIBLING_CALL_P (last))
;
else if ((combo_bb->succ->flags & EDGE_EH)
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return -1;
if (INSN_P (insn)
- && GET_CODE (insn) != JUMP_INSN
+ && !JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
n_insns++;
rtx last_insn = BB_END (then_bb);
while (last_insn
- && GET_CODE (last_insn) == NOTE
+ && NOTE_P (last_insn)
&& last_insn != BB_HEAD (then_bb))
last_insn = PREV_INSN (last_insn);
if (last_insn
- && GET_CODE (last_insn) == JUMP_INSN
+ && JUMP_P (last_insn)
&& ! simplejump_p (last_insn))
return FALSE;
head = BB_HEAD (merge_bb);
end = BB_END (merge_bb);
- if (GET_CODE (head) == CODE_LABEL)
+ if (LABEL_P (head))
head = NEXT_INSN (head);
- if (GET_CODE (head) == NOTE)
+ if (NOTE_P (head))
{
if (head == end)
{
head = NEXT_INSN (head);
}
- if (GET_CODE (end) == JUMP_INSN)
+ if (JUMP_P (end))
{
if (head == end)
{
/* Check for no calls or trapping operations. */
for (insn = head; ; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return FALSE;
if (INSN_P (insn))
{
/* Enforce consistency between the addresses in the regular insn flow
and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
- if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
+ if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
{
subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
apply_change_group ();
count doesn't drop to zero. */
for (insn = forced_labels; insn; insn = XEXP (insn, 1))
- if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
+ if (LABEL_P (XEXP (insn, 0)))
LABEL_NUSES (XEXP (insn, 0))++;
timevar_pop (TV_REBUILD_JUMP);
}
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
{
prev = prev_nonnote_insn (insn);
- if (GET_CODE (prev) == BARRIER)
+ if (BARRIER_P (prev))
delete_barrier (insn);
else if (prev != PREV_INSN (insn))
reorder_insns (insn, insn, prev);
even if it became empty. */
for (insn = f; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
/* Any previous line note was for the prologue; gdb wants a new
rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
JUMP_LABEL (insn) = 0;
- else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx note, next;
if (INSN_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
- if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
+ if (! INSN_DELETED_P (insn) && JUMP_P (insn))
{
/* When we know the LABEL_REF contained in a REG used in
an indirect jump, we'll have a REG_LABEL note so that
for (insn = start; insn != past_end; insn = next)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
or make a new one if there is none. */
label = prev_nonnote_insn (insn);
- if (label == 0 || GET_CODE (label) != CODE_LABEL)
+ if (label == 0 || !LABEL_P (label))
{
rtx prev = PREV_INSN (insn);
or make a new one if there is none. */
label = next_nonnote_insn (insn);
- if (label == 0 || GET_CODE (label) != CODE_LABEL)
+ if (label == 0 || !LABEL_P (label))
{
label = gen_label_rtx ();
emit_label_after (label, insn);
return UNKNOWN;
for (prev = prev_nonnote_insn (insn);
- prev != 0 && GET_CODE (prev) != CODE_LABEL;
+ prev != 0 && !LABEL_P (prev);
prev = prev_nonnote_insn (prev))
{
rtx set = set_of (arg0, prev);
int
simplejump_p (rtx insn)
{
- return (GET_CODE (insn) == JUMP_INSN
+ return (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_DEST (PATTERN (insn))) == PC
&& GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
pc_set (rtx insn)
{
rtx pat;
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return NULL_RTX;
pat = PATTERN (insn);
int
returnjump_p (rtx insn)
{
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return 0;
return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
{
rtx set;
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return 0;
set = single_set (insn);
for (depth = 0;
(depth < 10
&& (insn = next_active_insn (value)) != 0
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
&& onlyjump_p (insn))
|| GET_CODE (PATTERN (insn)) == RETURN)
&& (next = NEXT_INSN (insn))
- && GET_CODE (next) == BARRIER);
+ && BARRIER_P (next));
depth++)
{
/* Don't chain through the insn that jumps into a loop
rtx tem;
if (!reload_completed)
for (tem = value; tem != insn; tem = NEXT_INSN (tem))
- if (GET_CODE (tem) == NOTE
+ if (NOTE_P (tem)
&& (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
/* ??? Optional. Disables some optimizations, but makes
gcov output more accurate with -O. */
/* Ignore remaining references to unreachable labels that
have been deleted. */
- if (GET_CODE (label) == NOTE
+ if (NOTE_P (label)
&& NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
break;
- if (GET_CODE (label) != CODE_LABEL)
+ if (!LABEL_P (label))
abort ();
/* Ignore references to labels of containing functions. */
if (insn)
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
JUMP_LABEL (insn) = label;
else
{
void
delete_barrier (rtx insn)
{
- if (GET_CODE (insn) != BARRIER)
+ if (!BARRIER_P (insn))
abort ();
delete_insn (insn);
rtx reg = XEXP (note, 0);
for (our_prev = prev_nonnote_insn (insn);
- our_prev && (GET_CODE (our_prev) == INSN
- || GET_CODE (our_prev) == CALL_INSN);
+ our_prev && (NONJUMP_INSN_P (our_prev)
+ || CALL_P (our_prev));
our_prev = prev_nonnote_insn (our_prev))
{
rtx pat = PATTERN (our_prev);
/* If we reach a CALL which is not calling a const function
or the callee pops the arguments, then give up. */
- if (GET_CODE (our_prev) == CALL_INSN
+ if (CALL_P (our_prev)
&& (! CONST_OR_PURE_CALL_P (our_prev)
|| GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
break;
break;
if (GET_CODE (pat) == USE
- && GET_CODE (XEXP (pat, 0)) == INSN)
+ && NONJUMP_INSN_P (XEXP (pat, 0)))
/* reorg creates USEs that look like this. We leave them
alone because reorg needs them for its own purposes. */
break;
if (reg_set_p (reg, pat))
{
- if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
+ if (side_effects_p (pat) && !CALL_P (our_prev))
break;
if (GET_CODE (pat) == PARALLEL)
will use them. So if the previous insn
exists to set the CC's, delete it
(unless it performs auto-increments, etc.). */
- if (prev && GET_CODE (prev) == INSN
+ if (prev && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev)))
{
if (sets_cc0_p (PATTERN (prev)) > 0
rtx
delete_related_insns (rtx insn)
{
- int was_code_label = (GET_CODE (insn) == CODE_LABEL);
+ int was_code_label = LABEL_P (insn);
rtx note;
rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* If instruction is followed by a barrier,
delete the barrier too. */
- if (next != 0 && GET_CODE (next) == BARRIER)
+ if (next != 0 && BARRIER_P (next))
delete_insn (next);
/* If deleting a jump, decrement the count of the label,
and delete the label if it is now unused. */
- if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
+ if (JUMP_P (insn) && JUMP_LABEL (insn))
{
rtx lab = JUMP_LABEL (insn), lab_next;
/* Likewise if we're deleting a dispatch table. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
{
}
/* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL
/* This could also be a NOTE_INSN_DELETED_LABEL note. */
- && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
+ && LABEL_P (XEXP (note, 0)))
if (LABEL_NUSES (XEXP (note, 0)) == 0)
delete_related_insns (XEXP (note, 0));
- while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
+ while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
prev = PREV_INSN (prev);
/* If INSN was a label and a dispatch table follows it,
if (was_code_label
&& NEXT_INSN (insn) != 0
- && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && JUMP_P (NEXT_INSN (insn))
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
next = delete_related_insns (NEXT_INSN (insn));
/* If INSN was a label, delete insns following it if now unreachable. */
- if (was_code_label && prev && GET_CODE (prev) == BARRIER)
+ if (was_code_label && prev && BARRIER_P (prev))
{
enum rtx_code code;
while (next)
rtx next = NEXT_INSN (insn);
rtx prev = PREV_INSN (insn);
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
{
INSN_DELETED_P (insn) = 1;
function, move the function end note so that -Wreturn-type works. */
if (olabel && nlabel
&& NEXT_INSN (olabel)
- && GET_CODE (NEXT_INSN (olabel)) == NOTE
+ && NOTE_P (NEXT_INSN (olabel))
&& NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
if (eg->flags & EDGE_ABNORMAL)
{
emited = true;
- if (GET_CODE (BB_END (src_bb)) == JUMP_INSN)
+ if (JUMP_P (BB_END (src_bb)))
emit_insn_before (mode_set, BB_END (src_bb));
/* It doesn't make sense to switch to normal mode
after a CALL_INSN, so we're going to abort if we
the call (it wouldn't make sense, anyway). In
the case of EH edges, EH entry points also start
in normal mode, so similar reasoning applies. */
- else if (GET_CODE (BB_END (src_bb)) == INSN)
+ else if (NONJUMP_INSN_P (BB_END (src_bb)))
emit_insn_after (mode_set, BB_END (src_bb));
else
abort ();
continue;
emited = true;
- if (GET_CODE (ptr->insn_ptr) == NOTE
+ if (NOTE_P (ptr->insn_ptr)
&& (NOTE_LINE_NUMBER (ptr->insn_ptr)
== NOTE_INSN_BASIC_BLOCK))
emit_insn_after (mode_set, ptr->insn_ptr);
if (find_reg_note (insn, REG_DEAD, reg))
return 1;
- if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
+ if (CALL_P (insn) && ! RTX_UNCHANGING_P (memref)
&& ! CONST_OR_PURE_CALL_P (insn))
return 0;
if (REG_N_REFS (regno) == 2
&& (rtx_equal_p (XEXP (note, 0), src)
|| ! equiv_init_varies_p (src))
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& equiv_init_movable_p (PATTERN (insn), regno))
reg_equiv[regno].replace = 1;
}
insn = BB_END (BASIC_BLOCK (b));
while (1)
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
if (++insn_count > max_uid)
abort ();
if (insn == BB_HEAD (BASIC_BLOCK (b)))
insn = BB_HEAD (BASIC_BLOCK (b));
while (1)
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
insn_number++;
if (INSN_P (insn))
REG_P (r0))
&& (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
&& XEXP (link, 0) != 0
- && GET_CODE (XEXP (link, 0)) == INSN
+ && NONJUMP_INSN_P (XEXP (link, 0))
&& (set = single_set (XEXP (link, 0))) != 0
&& SET_DEST (set) == r0 && SET_SRC (set) == r0
&& (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
block, so clear any register number that combined within it. */
if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
- && GET_CODE (XEXP (note, 0)) == INSN
+ && NONJUMP_INSN_P (XEXP (note, 0))
&& GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
no_conflict_combined_regno = -1;
}
{
/* A called function may clobber any special registers required for
low-overhead looping. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
if (dump_file)
fprintf (dump_file, "Doloop: Function call in loop.\n");
/* Some targets (eg, PPC) use the count register for branch on table
instructions. ??? This should be a target specific check. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_VEC))
{
{
while (NEXT_INSN (doloop_pat) != NULL_RTX)
doloop_pat = NEXT_INSN (doloop_pat);
- if (GET_CODE (doloop_pat) == JUMP_INSN)
+ if (JUMP_P (doloop_pat))
doloop_pat = PATTERN (doloop_pat);
else
doloop_pat = NULL_RTX;
{
FOR_BB_INSNS (body[i], insn)
{
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& !CONST_OR_PURE_CALL_P (insn))
{
has_call = true;
find_invariants_insn (insn, always_reached, always_executed, df);
if (always_reached
- && GET_CODE (insn) == CALL_INSN
+ && CALL_P (insn)
&& !CONST_OR_PURE_CALL_P (insn))
always_reached = false;
}
ret = true;
note_stores (PATTERN (insn), mark_altered, altered);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
int i;
continue;
/* Don't assign luids to line-number NOTEs, so that the distance in
luids between two insns is not affected by -g. */
- if (GET_CODE (insn) != NOTE
+ if (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) <= 0)
uid_luid[INSN_UID (insn)] = ++i;
else
max_loop_num = 0;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
max_loop_num++;
}
for (p = NEXT_INSN (loop_start);
p != loop_end
- && GET_CODE (p) != CODE_LABEL && ! INSN_P (p)
- && (GET_CODE (p) != NOTE
+ && !LABEL_P (p) && ! INSN_P (p)
+ && (!NOTE_P (p)
|| (NOTE_LINE_NUMBER (p) != NOTE_INSN_LOOP_BEG
&& NOTE_LINE_NUMBER (p) != NOTE_INSN_LOOP_END));
p = NEXT_INSN (p))
Start scan from there.
But record in LOOP->TOP the place where the end-test jumps
back to so we can scan that after the end of the loop. */
- if (GET_CODE (p) == JUMP_INSN
+ if (JUMP_P (p)
/* Loop entry must be unconditional jump (and not a RETURN) */
&& any_uncondjump_p (p)
&& JUMP_LABEL (p) != 0
test above. */
if (INSN_UID (loop->scan_start) >= max_uid_for_loop
- || GET_CODE (loop->scan_start) != CODE_LABEL)
+ || !LABEL_P (loop->scan_start))
{
if (loop_dump_stream)
fprintf (loop_dump_stream, "\nLoop from %d to %d is phony.\n\n",
{
if (in_libcall && INSN_P (p) && find_reg_note (p, REG_RETVAL, NULL_RTX))
in_libcall--;
- if (GET_CODE (p) == INSN)
+ if (NONJUMP_INSN_P (p))
{
temp = find_reg_note (p, REG_LIBCALL, NULL_RTX);
if (temp)
Also, if the value loaded into the register
depends on the same register, this cannot be done. */
else if (SET_SRC (set) == const0_rtx
- && GET_CODE (NEXT_INSN (p)) == INSN
+ && NONJUMP_INSN_P (NEXT_INSN (p))
&& (set1 = single_set (NEXT_INSN (p)))
&& GET_CODE (set1) == SET
&& (GET_CODE (SET_DEST (set1)) == STRICT_LOW_PART)
/* Past a call insn, we get to insns which might not be executed
because the call might exit. This matters for insns that trap.
Constant and pure call insns always return, so they don't count. */
- else if (GET_CODE (p) == CALL_INSN && ! CONST_OR_PURE_CALL_P (p))
+ else if (CALL_P (p) && ! CONST_OR_PURE_CALL_P (p))
call_passed = 1;
/* Past a label or a jump, we get to insns for which we
can't count on whether or how many times they will be
only move out sets of trivial variables
(those not used after the loop). */
/* Similar code appears twice in strength_reduce. */
- else if ((GET_CODE (p) == CODE_LABEL || GET_CODE (p) == JUMP_INSN)
+ else if ((LABEL_P (p) || JUMP_P (p))
/* If we enter the loop in the middle, and scan around to the
beginning, don't set maybe_never for that. This must be an
unconditional jump, otherwise the code at the top of the
loop might never be executed. Unconditional jumps are
followed by a barrier then the loop_end. */
- && ! (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == loop->top
+ && ! (JUMP_P (p) && JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop_end
&& any_uncondjump_p (p)))
maybe_never = 1;
- else if (GET_CODE (p) == NOTE)
+ else if (NOTE_P (p))
{
/* At the virtual top of a converted loop, insns are again known to
be executed: logically, the loop begins here even though the exit
loop_regs_scan (loop, 0);
for (update_start = loop_start;
PREV_INSN (update_start)
- && GET_CODE (PREV_INSN (update_start)) != CODE_LABEL;
+ && !LABEL_P (PREV_INSN (update_start));
update_start = PREV_INSN (update_start))
;
update_end = NEXT_INSN (loop_end);
for (update_start = loop_start;
PREV_INSN (update_start)
- && GET_CODE (PREV_INSN (update_start)) != CODE_LABEL;
+ && !LABEL_P (PREV_INSN (update_start));
update_start = PREV_INSN (update_start))
;
update_end = NEXT_INSN (loop_end);
if (flag_strength_reduce)
{
- if (update_end && GET_CODE (update_end) == CODE_LABEL)
+ if (update_end && LABEL_P (update_end))
/* Ensure our label doesn't go away. */
LABEL_NUSES (update_end)++;
reg_scan_update (update_start, update_end, loop_max_reg);
loop_max_reg = max_reg_num ();
- if (update_end && GET_CODE (update_end) == CODE_LABEL
+ if (update_end && LABEL_P (update_end)
&& --LABEL_NUSES (update_end) == 0)
delete_related_insns (update_end);
}
while (p != insn)
{
- if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
- || GET_CODE (p) == CALL_INSN)
+ if (INSN_P (p))
record_excess_regs (PATTERN (p), equiv, &output);
p = NEXT_INSN (p);
}
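The hunk above collapses the old three-way code test into INSN_P; given the definitions sketched earlier this is behavior-preserving, and the !INSN_P rewrites later in the patch are the same collapse through De Morgan. A minimal illustration (hypothetical helper, assuming rtl.h):

/* Hypothetical helper, assuming rtl.h: the three-way test accepts
   exactly the codes INSN_P accepts, so negating both sides yields
   the !INSN_P form used in later hunks.  */
static int
real_insn_p (rtx x)
{
  return (GET_CODE (x) == INSN
          || GET_CODE (x) == JUMP_INSN
          || GET_CODE (x) == CALL_INSN);  /* i.e. INSN_P (x) */
}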
for (insn = XEXP (find_reg_note (last, REG_RETVAL, NULL_RTX), 0);
insn != last; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
benefit += 10; /* Assume at least this many insns in a library
routine. */
- else if (GET_CODE (insn) == INSN
+ else if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
benefit++;
/* If first insn of libcall sequence, skip to end. */
/* Do this at start of loop, since INSN is guaranteed to
be an insn here. */
- if (GET_CODE (insn) != NOTE
+ if (!NOTE_P (insn)
&& (temp = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
insn = XEXP (temp, 0);
do
insn = NEXT_INSN (insn);
- while (GET_CODE (insn) == NOTE);
+ while (NOTE_P (insn));
}
return insn;
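The do/while above is the open-coded form of the note-skipping walk exported by emit-rtl.c; roughly (a sketch of next_nonnote_insn, not the exact source):

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }
  return insn;
}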
{
/* If this is the first insn of a library call sequence,
something is very wrong. */
- if (GET_CODE (p) != NOTE
+ if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_LIBCALL, NULL_RTX)))
abort ();
/* If this is the last insn of a libcall sequence, then
delete every insn in the sequence except the last.
The last insn is handled in the normal manner. */
- if (GET_CODE (p) != NOTE
+ if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_RETVAL, NULL_RTX)))
{
temp = XEXP (temp, 0);
pointers, but when we skip over a NOTE we must fix
it up. Otherwise that code walks into the non-deleted
insn stream. */
- while (p && GET_CODE (p) == NOTE)
+ while (p && NOTE_P (p))
p = NEXT_INSN (temp) = NEXT_INSN (p);
if (m->insert_temp)
/* If first insn of libcall sequence, skip to end. */
/* Do this at start of loop, since p is guaranteed to
be an insn here. */
- if (GET_CODE (p) != NOTE
+ if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_LIBCALL, NULL_RTX)))
p = XEXP (temp, 0);
/* If last insn of libcall sequence, move all
insns except the last before the loop. The last
insn is handled in the normal manner. */
- if (GET_CODE (p) != NOTE
+ if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_RETVAL, NULL_RTX)))
{
rtx fn_address = 0;
rtx n;
rtx next;
- if (GET_CODE (temp) == NOTE)
+ if (NOTE_P (temp))
continue;
body = PATTERN (temp);
not counting USE or NOTE insns. */
for (next = NEXT_INSN (temp); next != p;
next = NEXT_INSN (next))
- if (! (GET_CODE (next) == INSN
+ if (! (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == USE)
- && GET_CODE (next) != NOTE)
+ && !NOTE_P (next))
break;
/* If that is the call, this may be the insn
function address into the register that the
call insn will use. flow.c will delete any
redundant stores that we have created. */
- if (GET_CODE (next) == CALL_INSN
+ if (CALL_P (next)
&& GET_CODE (body) == SET
&& REG_P (SET_DEST (body))
&& (n = find_reg_note (temp, REG_EQUAL,
/* We have the call insn.
If it uses the register we suspect it might,
load it with the correct address directly. */
- if (GET_CODE (temp) == CALL_INSN
+ if (CALL_P (temp)
&& fn_address != 0
&& reg_referenced_p (fn_reg, body))
loop_insn_emit_after (loop, 0, fn_address_insn,
gen_move_insn
(fn_reg, fn_address));
- if (GET_CODE (temp) == CALL_INSN)
+ if (CALL_P (temp))
{
i1 = loop_call_insn_hoist (loop, body);
/* Because the USAGE information potentially
end_sequence ();
i1 = loop_insn_hoist (loop, sequence);
}
- else if (GET_CODE (p) == CALL_INSN)
+ else if (CALL_P (p))
{
i1 = loop_call_insn_hoist (loop, PATTERN (p));
/* Because the USAGE information potentially
pointers, but when we skip over a NOTE we must fix
it up. Otherwise that code walks into the non-deleted
insn stream. */
- while (p && GET_CODE (p) == NOTE)
+ while (p && NOTE_P (p))
p = NEXT_INSN (temp) = NEXT_INSN (p);
if (m->insert_temp)
/* Go through all the instructions in the loop, making
all the register substitutions scheduled in REG_MAP. */
for (p = new_start; p != loop_end; p = NEXT_INSN (p))
- if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
- || GET_CODE (p) == CALL_INSN)
+ if (INSN_P (p))
{
replace_regs (PATTERN (p), reg_map, nregs, 0);
replace_regs (REG_NOTES (p), reg_map, nregs, 0);
/* If loop opts run twice, this was set on 1st pass for 2nd. */
loop_info->preconditioned = NOTE_PRECONDITIONED (end);
- for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
+ for (insn = start; insn && !LABEL_P (insn);
insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
loop_info->pre_header_has_call = 1;
break;
if (volatile_refs_p (PATTERN (insn)))
loop_info->has_volatile = 1;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_VEC))
loop_info->has_tablejump = 1;
current_loop = NULL;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_LOOP_BEG:
break;
}
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
{
/* In this case, we must invalidate our current loop and any
{
struct loop *this_loop = uid_loop[INSN_UID (insn)];
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
if (note)
invalidate_loops_containing_label (XEXP (note, 0));
}
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
continue;
mark_loop_jump (PATTERN (insn), this_loop);
/* Go backwards until we reach the start of the loop, a label,
or a JUMP_INSN. */
for (p = PREV_INSN (insn);
- GET_CODE (p) != CODE_LABEL
- && ! (GET_CODE (p) == NOTE
+ !LABEL_P (p)
+ && ! (NOTE_P (p)
&& NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_BEG)
- && GET_CODE (p) != JUMP_INSN;
+ && !JUMP_P (p);
p = PREV_INSN (p))
;
/* Make sure that the target of P is within the current loop. */
- if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p)
+ if (JUMP_P (p) && JUMP_LABEL (p)
&& uid_loop[INSN_UID (JUMP_LABEL (p))] != this_loop)
outer_loop = this_loop;
and move the block of code to the spot we found. */
if (! outer_loop
- && GET_CODE (p) == JUMP_INSN
+ && JUMP_P (p)
&& JUMP_LABEL (p) != 0
/* Just ignore jumps to labels that were never emitted.
These always indicate compilation errors. */
/* Search for possible garbage past the conditional jumps
and look for the last barrier. */
for (tmp = last_insn_to_move;
- tmp && GET_CODE (tmp) != CODE_LABEL; tmp = NEXT_INSN (tmp))
- if (GET_CODE (tmp) == BARRIER)
+ tmp && !LABEL_P (tmp); tmp = NEXT_INSN (tmp))
+ if (BARRIER_P (tmp))
last_insn_to_move = tmp;
for (loc = target; loc; loc = PREV_INSN (loc))
- if (GET_CODE (loc) == BARRIER
+ if (BARRIER_P (loc)
/* Don't move things inside a tablejump. */
&& ((loc2 = next_nonnote_insn (loc)) == 0
- || GET_CODE (loc2) != CODE_LABEL
+ || !LABEL_P (loc2)
|| (loc2 = next_nonnote_insn (loc2)) == 0
- || GET_CODE (loc2) != JUMP_INSN
+ || !JUMP_P (loc2)
|| (GET_CODE (PATTERN (loc2)) != ADDR_VEC
&& GET_CODE (PATTERN (loc2)) != ADDR_DIFF_VEC))
&& uid_loop[INSN_UID (loc)] == target_loop)
if (loc == 0)
for (loc = target; loc; loc = NEXT_INSN (loc))
- if (GET_CODE (loc) == BARRIER
+ if (BARRIER_P (loc)
/* Don't move things inside a tablejump. */
&& ((loc2 = next_nonnote_insn (loc)) == 0
- || GET_CODE (loc2) != CODE_LABEL
+ || !LABEL_P (loc2)
|| (loc2 = next_nonnote_insn (loc2)) == 0
- || GET_CODE (loc2) != JUMP_INSN
+ || !JUMP_P (loc2)
|| (GET_CODE (PATTERN (loc2)) != ADDR_VEC
&& GET_CODE (PATTERN (loc2)) != ADDR_DIFF_VEC))
&& uid_loop[INSN_UID (loc)] == target_loop)
{
while (insn && INSN_LUID (insn) <= end)
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
return 1;
insn = NEXT_INSN (insn);
}
returns, exits the loop, is a jump to a location that is still
behind the label, or is a jump to the loop start. */
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
{
rtx insn = p;
break;
}
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != RETURN
&& (!any_condjump_p (insn)
|| (JUMP_LABEL (insn) != 0
on whether they will be executed during each iteration. */
/* This code appears twice in strength_reduce. There is also similar
code in scan_loop. */
- if (GET_CODE (p) == JUMP_INSN
+ if (JUMP_P (p)
/* If we enter the loop in the middle, and scan around to the
beginning, don't set not_every_iteration for that.
This can be any kind of jump, since we want to know if insns
not_every_iteration = 1;
}
- else if (GET_CODE (p) == NOTE)
+ else if (NOTE_P (p))
{
/* At the virtual top of a converted loop, insns are again known to
be executed each iteration: logically, the loop begins here
Note that LOOP_TOP is only set for rotated loops and we need
this check for all loops, so compare against the CODE_LABEL
which immediately follows LOOP_START. */
- if (GET_CODE (p) == JUMP_INSN
+ if (JUMP_P (p)
&& JUMP_LABEL (p) == NEXT_INSN (loop->start))
past_loop_latch = 1;
if (not_every_iteration
&& !past_loop_latch
- && GET_CODE (p) == CODE_LABEL
+ && LABEL_P (p)
&& no_labels_between_p (p, loop->end)
&& loop_insn_first_p (p, loop->cont))
not_every_iteration = 0;
halting at first label. Also record any test condition. */
call_seen = 0;
- for (p = loop->start; p && GET_CODE (p) != CODE_LABEL; p = PREV_INSN (p))
+ for (p = loop->start; p && !LABEL_P (p); p = PREV_INSN (p))
{
rtx test;
note_insn = p;
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
call_seen = 1;
if (INSN_P (p))
/* Record any test of a biv that branches around the loop if no store
between it and the start of loop. We only care about tests with
constants and registers and only certain of those. */
- if (GET_CODE (p) == JUMP_INSN
+ if (JUMP_P (p)
&& JUMP_LABEL (p) != 0
&& next_real_insn (JUMP_LABEL (p)) == next_real_insn (loop->end)
&& (test = get_condition_for_loop (loop, p)) != 0
register substitutions scheduled in REG_MAP. */
for (p = loop->start; p != loop->end; p = NEXT_INSN (p))
- if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
- || GET_CODE (p) == CALL_INSN)
+ if (INSN_P (p))
{
replace_regs (PATTERN (p), reg_map, reg_map_size, 0);
replace_regs (REG_NOTES (p), reg_map, reg_map_size, 0);
rtx mult_val;
rtx *location;
- if (GET_CODE (p) == INSN
+ if (NONJUMP_INSN_P (p)
&& (set = single_set (p))
&& REG_P (SET_DEST (set)))
{
rtx set;
/* Look for a general induction variable in a register. */
- if (GET_CODE (p) == INSN
+ if (NONJUMP_INSN_P (p)
&& (set = single_set (p))
&& REG_P (SET_DEST (set))
&& ! regs->array[REGNO (SET_DEST (set))].may_not_optimize)
}
/* Look for givs which are memory addresses. */
- if (GET_CODE (p) == INSN)
+ if (NONJUMP_INSN_P (p))
find_mem_givs (loop, PATTERN (p), p, not_every_iteration,
maybe_multiple);
/* Update the status of whether giv can derive other givs. This can
change when we pass a label or an insn that updates a biv. */
- if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
- || GET_CODE (p) == CODE_LABEL)
+ if (INSN_P (p) || LABEL_P (p))
update_giv_derive (loop, p);
return p;
}
if (p == v->insn)
break;
- if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
- || GET_CODE (p) == CALL_INSN)
+ if (INSN_P (p))
{
/* It is possible for the BIV increment to use the GIV if we
have a cycle. Thus we must be sure to check each insn for
if (p == last_giv_use)
break;
- if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p)
+ if (JUMP_P (p) && JUMP_LABEL (p)
&& LABEL_NAME (JUMP_LABEL (p))
&& ((loop_insn_first_p (JUMP_LABEL (p), v->insn)
&& loop_insn_first_p (loop->start, JUMP_LABEL (p)))
for (bl = ivs->list; bl; bl = bl->next)
for (biv = bl->biv; biv; biv = biv->next_iv)
- if (GET_CODE (p) == CODE_LABEL || GET_CODE (p) == JUMP_INSN
+ if (LABEL_P (p) || JUMP_P (p)
|| biv->insn == p)
{
/* Skip if location is the same as a previous one. */
/* If this giv is conditionally set and we have passed a label,
it cannot derive anything. */
- if (GET_CODE (p) == CODE_LABEL && ! giv->always_computable)
+ if (LABEL_P (p) && ! giv->always_computable)
giv->cant_derive = 1;
/* Skip givs that have mult_val == 0, since
else
giv->cant_derive = 1;
}
- else if ((GET_CODE (p) == CODE_LABEL && ! biv->always_computable)
- || (GET_CODE (p) == JUMP_INSN && biv->maybe_multiple))
+ else if ((LABEL_P (p) && ! biv->always_computable)
+ || (JUMP_P (p) && biv->maybe_multiple))
giv->cant_derive = 1;
}
}
{
insn = PREV_INSN (insn);
}
- while (insn && GET_CODE (insn) == NOTE
+ while (insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG);
if (!insn)
case ASHIFTRT:
/* Similar, since this can be a sign extension. */
for (insn = PREV_INSN (p);
- (insn && GET_CODE (insn) == NOTE
+ (insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG);
insn = PREV_INSN (insn))
;
rtx next = NEXT_INSN (tmp);
if (++n_insns > 3
- || GET_CODE (tmp) != INSN
+ || !NONJUMP_INSN_P (tmp)
|| (GET_CODE (PATTERN (tmp)) == SET
&& GET_CODE (SET_SRC (PATTERN (tmp))) == MULT)
|| (GET_CODE (PATTERN (tmp)) == PARALLEL
rtx jump1;
if ((jump1 = prev_nonnote_insn (first_compare)) != loop->cont)
- if (GET_CODE (jump1) == JUMP_INSN)
+ if (JUMP_P (jump1))
return 0;
}
the loop, then we can safely optimize. */
for (p = loop_start; p; p = PREV_INSN (p))
{
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
break;
- if (GET_CODE (p) != JUMP_INSN)
+ if (!JUMP_P (p))
continue;
before_comparison = get_condition_for_loop (loop, p);
emit_jump_insn_before (tem, loop_end);
for (tem = PREV_INSN (loop_end);
- tem && GET_CODE (tem) != JUMP_INSN;
+ tem && !JUMP_P (tem);
tem = PREV_INSN (tem))
;
P is a note. */
if (INSN_UID (p) < max_uid_for_loop
&& INSN_UID (q) < max_uid_for_loop
- && GET_CODE (p) != NOTE)
+ && !NOTE_P (p))
return INSN_LUID (p) <= INSN_LUID (q);
if (INSN_UID (p) >= max_uid_for_loop
- || GET_CODE (p) == NOTE)
+ || NOTE_P (p))
p = NEXT_INSN (p);
if (INSN_UID (q) >= max_uid_for_loop)
q = NEXT_INSN (q);
{
rtx n;
for (n = insn;
- n && GET_CODE (n) != CODE_LABEL && GET_CODE (n) != JUMP_INSN;
+ n && !LABEL_P (n) && !JUMP_P (n);
n = NEXT_INSN (n))
{
if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (n))
if (op0 == cc0_rtx)
{
if ((prev = prev_nonnote_insn (prev)) == 0
- || GET_CODE (prev) != INSN
+ || !NONJUMP_INSN_P (prev)
|| (set = single_set (prev)) == 0
|| SET_DEST (set) != cc0_rtx)
return 0;
we don't want to bother dealing with it. */
if ((prev = prev_nonnote_insn (prev)) == 0
- || GET_CODE (prev) != INSN
+ || !NONJUMP_INSN_P (prev)
|| FIND_REG_INC_NOTE (prev, NULL_RTX))
break;
rtx set;
/* If this is not a standard conditional jump, we can't parse it. */
- if (GET_CODE (jump) != JUMP_INSN
+ if (!JUMP_P (jump)
|| ! any_condjump_p (jump))
return 0;
set = pc_set (jump);
}
}
- if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN)
+ if (LABEL_P (insn) || JUMP_P (insn))
memset (last_set, 0, regs->num * sizeof (rtx));
/* Invalidate all registers used for function argument passing.
We check rtx_varies_p for the same reason as below, to allow
optimizing PIC calculations. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx link;
for (link = CALL_INSN_FUNCTION_USAGE (insn);
/* We cannot use next_label here because it skips over normal insns. */
end_label = next_nonnote_insn (loop->end);
- if (end_label && GET_CODE (end_label) != CODE_LABEL)
+ if (end_label && !LABEL_P (end_label))
end_label = NULL_RTX;
/* Check to see if it's possible that some instructions in the loop are
p != NULL_RTX;
p = next_insn_in_loop (loop, p))
{
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
maybe_never = 1;
- else if (GET_CODE (p) == JUMP_INSN
+ else if (JUMP_P (p)
/* If we enter the loop in the middle, and scan
around to the beginning, don't set maybe_never
for that. This must be an unconditional jump,
otherwise the code at the top of the loop might
never be executed. Unconditional jumps are
followed by a barrier then the loop end. */
- && ! (GET_CODE (p) == JUMP_INSN
+ && ! (JUMP_P (p)
&& JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop->end
&& any_uncondjump_p (p)))
/* Find start of the extended basic block that enters the loop. */
for (p = loop->start;
- PREV_INSN (p) && GET_CODE (p) != CODE_LABEL;
+ PREV_INSN (p) && !LABEL_P (p);
p = PREV_INSN (p))
;
prev_ebb_head = p;
/* If this is a call which uses / clobbers this memory
location, we must not change the interface here. */
- if (GET_CODE (p) == CALL_INSN
+ if (CALL_P (p)
&& reg_mentioned_p (loop_info->mems[i].mem,
CALL_INSN_FUNCTION_USAGE (p)))
{
loop_info->mems[i].reg, written);
}
- if (GET_CODE (p) == CODE_LABEL
- || GET_CODE (p) == JUMP_INSN)
+ if (LABEL_P (p)
+ || JUMP_P (p))
maybe_never = 1;
}
label with the new one. */
if (label != NULL_RTX && end_label != NULL_RTX)
for (p = loop->start; p != loop->end; p = NEXT_INSN (p))
- if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == end_label)
+ if (JUMP_P (p) && JUMP_LABEL (p) == end_label)
redirect_jump (p, label, false);
cselib_finish ();
/* Only substitute within one extended basic block from the initializing
insn. */
- if (GET_CODE (insn) == CODE_LABEL && init_insn)
+ if (LABEL_P (insn) && init_insn)
break;
if (! INSN_P (insn))
/* The notes do not have an assigned block, so look at the next insn. */
#define LOOP_BLOCK_NUM(INSN) \
-((INSN) ? (GET_CODE (INSN) == NOTE \
+((INSN) ? (NOTE_P (INSN) \
? LOOP_BLOCK_NUM_1 (next_nonnote_insn (INSN)) \
: LOOP_BLOCK_NUM_1 (INSN)) \
: -1)
/* Print diagnostics to compare our concept of a loop with
what the loop notes say. */
if (! PREV_INSN (BB_HEAD (loop->first))
- || GET_CODE (PREV_INSN (BB_HEAD (loop->first))) != NOTE
+ || !NOTE_P (PREV_INSN (BB_HEAD (loop->first)))
|| NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (loop->first)))
!= NOTE_INSN_LOOP_BEG)
fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
INSN_UID (PREV_INSN (BB_HEAD (loop->first))));
if (! NEXT_INSN (BB_END (loop->last))
- || GET_CODE (NEXT_INSN (BB_END (loop->last))) != NOTE
+ || !NOTE_P (NEXT_INSN (BB_END (loop->last)))
|| NOTE_LINE_NUMBER (NEXT_INSN (BB_END (loop->last)))
!= NOTE_INSN_LOOP_END)
fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
{
rtx pattern, cmp, inc, reg, condition;
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return NULL_RTX;
pattern = PATTERN (insn);
find_line_note (rtx insn)
{
for (; insn; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) >= 0)
break;
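find_line_note works because NOTE_LINE_NUMBER does double duty: on a NOTE it holds either a non-negative source line number or one of the negative NOTE_INSN_* codes, which is why hunks elsewhere compare it against zero. The two discriminating idioms, as hypothetical helpers assuming rtl.h:

static int
line_number_note_p (rtx x)
{
  return NOTE_P (x) && NOTE_LINE_NUMBER (x) >= 0;
}

static int
insn_note_p (rtx x, int kind)  /* KIND is a NOTE_INSN_* code.  */
{
  return NOTE_P (x) && NOTE_LINE_NUMBER (x) == kind;
}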
/* Don't handle BBs with calls or barriers, or !single_set insns. */
for (insn = head; insn != NEXT_INSN (tail); insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN
- || GET_CODE (insn) == BARRIER
- || (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN
+ if (CALL_P (insn)
+ || BARRIER_P (insn)
+ || (INSN_P (insn) && !JUMP_P (insn)
&& !single_set (insn) && GET_CODE (PATTERN (insn)) != USE))
break;
{
if (stats_file)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
fprintf (stats_file, "SMS loop-with-call\n");
- else if (GET_CODE (insn) == BARRIER)
+ else if (BARRIER_P (insn))
fprintf (stats_file, "SMS loop-with-barrier\n");
else
fprintf (stats_file, "SMS loop-with-not-single-set\n");
if (!INSN_P (insn))
continue;
- if (GET_CODE (insn) == JUMP_INSN) /* Closing branch handled later. */
+ if (JUMP_P (insn)) /* Closing branch handled later. */
continue;
/* 1. compute sched window for u (start, end, step). */
return emit_insn (insns);
else
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) != INSN
+ if (!NONJUMP_INSN_P (insn)
|| find_reg_note (insn, REG_LIBCALL, NULL_RTX))
return emit_insn (insns);
if (flag_non_call_exceptions && may_trap_p (equiv))
{
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
goto (unless there is already a REG_EH_REGION note, in which case
we update it). */
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* Some ports use a loop to copy large arguments onto the stack.
Don't move anything outside such a loop. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
}
/* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
right, so avoid the problem here. Likewise if we have a constant
and the insn pattern doesn't tell us the mode we need. */
- if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
+ if (LABEL_P (recog_data.operand[i])
|| (CONSTANT_P (recog_data.operand[i])
&& recog_data.operand_mode[i] == VOIDmode))
continue;
FOR_EACH_BB_REVERSE (bb)
{
insn = BB_HEAD (bb);
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
HARD_REG_SET live;
/* We cannot do our optimization across labels. Invalidating all the use
information we have would be costly, so we just note where the label
is and then later disable any optimization that would cross it. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
last_label_ruid = reload_combine_ruid;
- else if (GET_CODE (insn) == BARRIER)
+ else if (BARRIER_P (insn))
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
if (! fixed_regs[r])
reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
note_stores (PATTERN (insn), reload_combine_note_store, NULL);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx link;
}
}
- else if (GET_CODE (insn) == JUMP_INSN
+ else if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
{
rtx pat, note;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
move2add_last_label_luid = move2add_luid;
/* We're going to increment move2add_luid twice after a
/* If this is a CALL_INSN, all call used registers are stored with
unknown values. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
{
static bool
can_predict_insn_p (rtx insn)
{
- return (GET_CODE (insn) == JUMP_INSN
+ return (JUMP_P (insn)
&& any_condjump_p (insn)
&& BLOCK_FOR_INSN (insn)->succ->succ_next);
}
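The succ->succ_next test works because this era of GCC keeps a block's outgoing edges in a singly linked list, so a non-null second link means at least two successors. As a hypothetical helper, assuming basic-block.h of the period:

/* True iff BB has two or more successor edges (pre-VEC edge lists).  */
static int
multiple_succs_p (basic_block bb)
{
  return bb->succ != NULL && bb->succ->succ_next != NULL;
}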
messages. */
for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
/* Constant and pure calls are hardly used to signal
something exceptional. */
&& ! CONST_OR_PURE_CALL_P (insn))
/* Attach the combined probability to each conditional jump. */
FOR_EACH_BB (bb)
- if (GET_CODE (BB_END (bb)) == JUMP_INSN
+ if (JUMP_P (BB_END (bb))
&& any_condjump_p (BB_END (bb))
&& bb->succ->succ_next != NULL)
combine_predictions_for_insn (BB_END (bb), bb);
case JUMP_INSN:
/* Look for simple conditional branches. If we haven't got an
expected value yet, no point going further. */
- if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
+ if (!JUMP_P (insn) || ev == NULL_RTX
|| ! any_condjump_p (insn))
continue;
break;
for (insn = BB_END (bb); insn;
was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
{
if (was_bb_head)
break;
{
/* Noreturn calls cause program to exit, therefore they are
always predicted as not taken. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_reg_note (insn, REG_NORETURN, NULL))
contained_noreturn_call = 1;
continue;
/* When printing in VCG format we write INSNs, NOTE, LABEL, and BARRIER
in separate nodes and therefore have to handle them specially here. */
if (dump_for_graph
- && (is_insn || GET_CODE (in_rtx) == NOTE
- || GET_CODE (in_rtx) == CODE_LABEL || GET_CODE (in_rtx) == BARRIER))
+ && (is_insn || NOTE_P (in_rtx)
+ || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
{
i = 3;
indent = 0;
print_node_brief (outfile, "", decl, 0);
}
#endif
- else if (i == 4 && GET_CODE (in_rtx) == NOTE)
+ else if (i == 4 && NOTE_P (in_rtx))
{
switch (NOTE_LINE_NUMBER (in_rtx))
{
fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx));
#endif
}
- else if (i == 6 && GET_CODE (in_rtx) == NOTE)
+ else if (i == 6 && NOTE_P (in_rtx))
{
/* This field is only used for NOTE_INSN_DELETED_LABEL, and
other times often contains garbage from INSN->NOTE death. */
else
#endif
if (flag_dump_unnumbered
- && (is_insn || GET_CODE (in_rtx) == NOTE))
+ && (is_insn || NOTE_P (in_rtx)))
fputc ('#', outfile);
else
fprintf (outfile, " %d", value);
}
if (dump_for_graph
- && (is_insn || GET_CODE (in_rtx) == NOTE
- || GET_CODE (in_rtx) == CODE_LABEL || GET_CODE (in_rtx) == BARRIER))
+ && (is_insn || NOTE_P (in_rtx)
+ || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
sawclose = 0;
else
{
case BARRIER:
for (tmp_rtx = rtx_first; tmp_rtx != 0; tmp_rtx = NEXT_INSN (tmp_rtx))
if (! flag_dump_unnumbered
- || GET_CODE (tmp_rtx) != NOTE || NOTE_LINE_NUMBER (tmp_rtx) < 0)
+ || !NOTE_P (tmp_rtx) || NOTE_LINE_NUMBER (tmp_rtx) < 0)
{
fputs (print_rtx_head, outfile);
print_rtx (tmp_rtx);
outfile = outf;
sawclose = 0;
if (! flag_dump_unnumbered
- || GET_CODE (x) != NOTE || NOTE_LINE_NUMBER (x) < 0)
+ || !NOTE_P (x) || NOTE_LINE_NUMBER (x) < 0)
{
fputs (print_rtx_head, outfile);
print_rtx (x);
while (insn != BB_END (bb))
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
/* Must ignore the line number notes that
immediately follow the end of an inline function
/* We want to access the root webpart. */
wp = find_web_part (wp);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
wp->crosses_call = 1;
else if (copy_insn_p (insn, &s, NULL))
source_regno = REGNO (GET_CODE (s) == SUBREG ? SUBREG_REG (s) : s);
bitmap_set_bit (all_defs, DF_REF_ID (info.defs[n]));
if (TEST_BIT (insns_with_deaths, INSN_UID (insn)))
deaths++;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
contains_call = 1;
}
if (insn == BB_END (bb))
{
basic_block bb;
FOR_EACH_BB (bb)
- if (GET_CODE (BB_END (bb)) == JUMP_INSN)
+ if (JUMP_P (BB_END (bb)))
{
struct df_link *link;
for (link = DF_INSN_DEFS (df, BB_END (bb)); link; link = link->next)
case LABEL_REF:
{
rtx sub = XEXP (x, 0);
- if (GET_CODE (sub) == NOTE
+ if (NOTE_P (sub)
&& NOTE_LINE_NUMBER (sub) == NOTE_INSN_DELETED_LABEL)
fprintf (file, "(deleted uid=%d)", INSN_UID (sub));
- else if (GET_CODE (sub) == CODE_LABEL)
+ else if (LABEL_P (sub))
fprintf (file, "L%d", CODE_LABEL_NUMBER (sub));
else
fprintf (file, "(nonlabel uid=%d)", INSN_UID (sub));
insn = PREV_INSN (insn);
for (i = count; i > 0 && insn; insn = NEXT_INSN (insn), i--)
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
fprintf (stderr, "\n");
ra_print_rtx_top (stderr, insn, (i == count || i == 1));
}
last_bb = NULL;
for (; insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
bb = NULL;
else
bb = BLOCK_FOR_INSN (insn);
fprintf (file, ";; Begin of basic block %d\n", bb->index);
last_bb = bb;
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
fputc ('\n', file);
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
/* Ignore basic block and maybe other notes not referencing
deleted things. */
/* If we reach a basic block border, which has more than one
outgoing edge, we simply forget all already emitted stores. */
- if (GET_CODE (insn) == BARRIER
+ if (BARRIER_P (insn)
|| JUMP_P (insn) || can_throw_internal (insn))
{
last_slot = NULL_RTX;
XXX Note, that sometimes reload barfs when we emit insns between
a call and the insn which copies the return register into a
pseudo. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
ri.need_load = 1;
else if (INSN_P (insn))
for (n = 0; n < info.num_uses; n++)
web->one_load = 0;
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
}
if (next == 0)
return 0;
- return ((GET_CODE (next) == JUMP_INSN
- || GET_CODE (next) == INSN
- || GET_CODE (next) == CALL_INSN)
+ return (INSN_P (next)
&& ! inequality_comparisons_p (PATTERN (next)));
}
#endif
{
next = NEXT_INSN (insn);
if (next == 0
- || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
+ || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
return 0;
result = find_single_use_1 (dest, &PATTERN (next));
return 0;
for (next = next_nonnote_insn (insn);
- next != 0 && GET_CODE (next) != CODE_LABEL;
+ next != 0 && !LABEL_P (next);
next = next_nonnote_insn (next))
if (INSN_P (next) && dead_or_set_p (next, dest))
{
BB boundary we are interested in will be set to
previous one. */
- while (GET_CODE (last) == BARRIER)
+ while (BARRIER_P (last))
last = PREV_INSN (last);
SET_BIT (blocks, bb->index);
changed = true;
if (j >= MAX_INSNS_PER_PEEP2 + 1)
j -= MAX_INSNS_PER_PEEP2 + 1;
old_insn = peep2_insn_data[j].insn;
- if (GET_CODE (old_insn) != CALL_INSN)
+ if (!CALL_P (old_insn))
continue;
was_call = true;
new_insn = try;
while (new_insn != NULL_RTX)
{
- if (GET_CODE (new_insn) == CALL_INSN)
+ if (CALL_P (new_insn))
break;
new_insn = NEXT_INSN (new_insn);
}
if (j >= MAX_INSNS_PER_PEEP2 + 1)
j -= MAX_INSNS_PER_PEEP2 + 1;
old_insn = peep2_insn_data[j].insn;
- if (GET_CODE (old_insn) == CALL_INSN)
+ if (CALL_P (old_insn))
abort ();
}
break;
break;
for (x = try ; x != before_try ; x = PREV_INSN (x))
- if (GET_CODE (x) == CALL_INSN
+ if (CALL_P (x)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (x))
&& !find_reg_note (x, REG_EH_REGION, NULL)))
nfte = split_block (bb, x);
flags = (eh_edge->flags
& (EDGE_EH | EDGE_ABNORMAL));
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
flags |= EDGE_ABNORMAL_CALL;
nehe = make_edge (nfte->src, eh_edge->dest,
flags);
/* If we generated a jump instruction, it won't have
JUMP_LABEL set. Recompute after we're done. */
for (x = try; x != before_try; x = PREV_INSN (x))
- if (GET_CODE (x) == JUMP_INSN)
+ if (JUMP_P (x))
{
do_rebuild_jump_labels = true;
break;
in_set = single_set (in_insn);
if (! in_set)
{
- if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
+ if (JUMP_P (in_insn) || CALL_P (in_insn))
return false;
abort ();
}
if (INSN_P (insn) && reg_mentioned_p (ix86_flags_rtx, PATTERN (insn)))
return insn;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return NULL_RTX;
}
return NULL_RTX;
rtx label = XEXP (pat, 0);
rtx ref;
- if (GET_CODE (label) != CODE_LABEL)
+ if (!LABEL_P (label))
abort ();
/* If this is an undefined label, LABEL_REFS (label) contains
rtx limit = PREV_INSN (BB_HEAD (current_block));
while (tmp != limit)
{
- if (GET_CODE (tmp) == CODE_LABEL
- || GET_CODE (tmp) == CALL_INSN
+ if (LABEL_P (tmp)
+ || CALL_P (tmp)
|| NOTE_INSN_BASIC_BLOCK_P (tmp)
- || (GET_CODE (tmp) == INSN
+ || (NONJUMP_INSN_P (tmp)
&& stack_regs_mentioned (tmp)))
{
i1 = tmp;
insn = NEXT_INSN (insn);
if (INSN_P (insn) && reg_mentioned_p (dest, insn))
break;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return 0;
}
bool control_flow_insn_deleted = false;
int i;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
int top = regstack->top;
/* subst_stack_regs_pat may have deleted a no-op insn. If so, any
REG_UNUSED will already have been dealt with, so just return. */
- if (GET_CODE (insn) == NOTE || INSN_DELETED_P (insn))
+ if (NOTE_P (insn) || INSN_DELETED_P (insn))
return control_flow_insn_deleted;
/* If there is a REG_UNUSED note on a stack register on this insn,
tmpstack = regstack;
change_stack (BB_END (block), &tmpstack, target_stack,
- (GET_CODE (BB_END (block)) == JUMP_INSN
+ (JUMP_P (BB_END (block))
? EMIT_BEFORE : EMIT_AFTER));
}
else
/* Don't bother processing unless there is a stack reg
mentioned or if it's a CALL_INSN. */
if (stack_regs_mentioned (insn)
- || GET_CODE (insn) == CALL_INSN)
+ || CALL_P (insn))
{
if (file)
{
}
insn = BB_END (block);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
insn = PREV_INSN (insn);
/* If the function is declared to return a value, but it returns one
We need not check for code_label here;
while a basic block can start with a code_label,
INSN could not be at the beginning of that block. */
- if (previnsn == 0 || GET_CODE (previnsn) == JUMP_INSN)
+ if (previnsn == 0 || JUMP_P (previnsn))
{
basic_block b;
FOR_EACH_BB (b)
/* If the insn in which SRC dies is a CALL_INSN, don't count it
as a call that has been crossed. Otherwise, count it. */
- if (q != p && GET_CODE (q) == CALL_INSN)
+ if (q != p && CALL_P (q))
{
/* Similarly, total calls for SREGNO, total calls beyond
the death note for DREGNO. */
PATTERN (q) = replace_rtx (PATTERN (q), dest, src);
- if (GET_CODE (q) == CALL_INSN)
+ if (CALL_P (q))
{
REG_N_CALLS_CROSSED (dregno)--;
REG_N_CALLS_CROSSED (sregno)++;
if (reg_set_p (src, p)
|| find_reg_note (p, REG_DEAD, dest)
- || (GET_CODE (p) == CALL_INSN && REG_N_CALLS_CROSSED (sregno) == 0))
+ || (CALL_P (p) && REG_N_CALLS_CROSSED (sregno) == 0))
break;
}
}
#ifdef AUTO_INC_DEC
for (p = PREV_INSN (insn); p; p = PREV_INSN (p))
{
- if (GET_CODE (p) == CODE_LABEL
- || GET_CODE (p) == JUMP_INSN)
+ if (LABEL_P (p)
+ || JUMP_P (p))
break;
if (! INSN_P (p))
continue;
}
for (p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
{
- if (GET_CODE (p) == CODE_LABEL
- || GET_CODE (p) == JUMP_INSN)
+ if (LABEL_P (p)
+ || JUMP_P (p))
break;
if (! INSN_P (p))
continue;
/* reg_set_p is overly conservative for CALL_INSNS, thinks that all
hard regs are clobbered. Thus, we only use it for src for
non-call insns. */
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
{
if (! dst_death)
num_calls++;
/* If we have passed a call instruction, and the
pseudo-reg DST is not already live across a call,
then don't perform the optimization. */
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
{
num_calls++;
for (length = s_length = 0, p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
{
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
replace_in_call_usage (& CALL_INSN_FUNCTION_USAGE (p),
REGNO (dst), src, p);
/* If we have passed a call instruction, and the pseudo-reg SRC is not
already live across a call, then don't perform the optimization. */
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
{
if (REG_N_CALLS_CROSSED (REGNO (src)) == 0)
break;
q = 0;
break;
}
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
num_calls2++;
}
if (q && set2 && SET_DEST (set2) == src && CONSTANT_P (SET_SRC (set2))
if (tmp)
return tmp;
- if (GET_CODE (insn) != INSN
+ if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != PARALLEL)
return NULL_RTX;
data.insn = insn;
data.memlist = memlist;
- if (GET_CODE (insn) != CALL_INSN && last_sp_set
+ if (!CALL_P (insn) && last_sp_set
&& !for_each_rtx (&PATTERN (insn), record_stack_memrefs, &data))
{
memlist = data.memlist;
/* Otherwise, we were not able to process the instruction.
Do not continue collecting data across such a one. */
if (last_sp_set
- && (GET_CODE (insn) == CALL_INSN
+ && (CALL_P (insn)
|| reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
{
if (last_sp_set && last_sp_adjust == 0)
*recog_data.operand_loc[i] = old_operands[i];
/* Step 2B: Can't rename function call argument registers. */
- if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
+ if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
scan_rtx (insn, &CALL_INSN_FUNCTION_USAGE (insn),
NO_REGS, terminate_all_read, OP_IN, 0);
/* Step 4B: If this is a call, any chain live at this point
requires a caller-saved reg. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
struct du_chain *p;
for (p = open_chains; p; p = p->next_chain)
recog_op_alt[i][alt].earlyclobber);
}
}
- else if (GET_CODE (insn) != CALL_INSN)
+ else if (!CALL_P (insn))
for (i = 0; i < n_ops + recog_data.n_dups; i++)
{
int opn = i < n_ops ? i : recog_data.dup_num[i - n_ops];
did_replacement:
/* Clobber call-clobbered registers. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
kill_value_regno (i, vd);
/* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
neither are insns that SET cc0. Insns that use CC0 are not allowed
to have any input reloads. */
- if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN)
+ if (JUMP_P (insn) || CALL_P (insn))
no_output_reloads = 1;
#ifdef HAVE_cc0
/* If we're replacing an operand with a LABEL_REF, we need
to make sure that there's a REG_LABEL note attached to
this instruction. */
- if (GET_CODE (insn) != JUMP_INSN
+ if (!JUMP_P (insn)
&& GET_CODE (substitution) == LABEL_REF
&& !find_reg_note (insn, REG_LABEL, XEXP (substitution, 0)))
REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
? XEXP (x, 0)
: reg_equiv_mem[regno]);
int icode = (int) add_optab->handlers[(int) Pmode].insn_code;
- if (insn && GET_CODE (insn) == INSN && equiv
+ if (insn && NONJUMP_INSN_P (insn) && equiv
&& memory_operand (equiv, GET_MODE (equiv))
#ifdef HAVE_cc0
&& ! sets_cc0_p (PATTERN (insn))
REG_LABEL note to indicate to flow which label this
register refers to. */
if (GET_CODE (*r->where) == LABEL_REF
- && GET_CODE (insn) == JUMP_INSN)
+ && JUMP_P (insn))
REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
XEXP (*r->where, 0),
REG_NOTES (insn));
{
p = PREV_INSN (p);
num++;
- if (p == 0 || GET_CODE (p) == CODE_LABEL
+ if (p == 0 || LABEL_P (p)
|| num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
return 0;
- if (GET_CODE (p) == INSN
+ if (NONJUMP_INSN_P (p)
/* If we don't want spill regs ... */
&& (! (reload_reg_p != 0
&& reload_reg_p != (short *) (HOST_WIDE_INT) 1)
/* Don't trust the conversion past a function call
if either of the two is in a call-clobbered register, or memory. */
- if (GET_CODE (p) == CALL_INSN)
+ if (CALL_P (p))
{
int i;
}
}
- if (GET_CODE (p) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (p))
+ if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
{
rtx link;
main reload loop in the most common case where register elimination
cannot be done. */
for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)
+ if (INSN_P (insn))
note_stores (PATTERN (insn), mark_not_eliminable, NULL);
maybe_fix_stack_asms ();
if an insn has a variable address, gets a REG_EH_REGION
note added to it, and then gets converted into an load
from a constant address. */
- if (GET_CODE (equiv_insn) == NOTE
+ if (NOTE_P (equiv_insn)
|| can_throw_internal (equiv_insn))
;
else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
{
rtx *pnote;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
include REG_LABEL), we need to see what effects this has on the
known offsets at labels. */
- if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
+ if (LABEL_P (insn) || JUMP_P (insn)
|| (INSN_P (insn) && REG_NOTES (insn) != 0))
set_label_offsets (insn, insn, 0);
else if (x == insn
&& (tem = prev_nonnote_insn (insn)) != 0
- && GET_CODE (tem) == BARRIER)
+ && BARRIER_P (tem))
set_offsets_for_label (insn);
else
/* If neither of the above cases is true, compare each offset
/* If we pass a label, copy the offsets from the label information
into the current offsets of each elimination. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
set_offsets_for_label (insn);
else if (INSN_P (insn))
if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
{
eliminate_regs_in_insn (insn, 1);
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
update_eliminable_offsets ();
continue;
/* There may have been CLOBBER insns placed after INSN. So scan
between INSN and NEXT and use them to forget old reloads. */
for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
- if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
+ if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
note_stores (PATTERN (x), forget_old_reloads_1, NULL);
#ifdef AUTO_INC_DEC
#endif
}
/* A reload reg's contents are unknown after a label. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
CLEAR_HARD_REG_SET (reg_reloaded_valid);
/* Don't assume a reload reg is still good after a call insn
if it is a call-used reg, or if it contains a value that will
be partially clobbered by the call. */
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (CALL_P (insn))
{
AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
rl->when_needed, old, rl->out, j, 0))
{
rtx temp = PREV_INSN (insn);
- while (temp && GET_CODE (temp) == NOTE)
+ while (temp && NOTE_P (temp))
temp = PREV_INSN (temp);
if (temp
- && GET_CODE (temp) == INSN
+ && NONJUMP_INSN_P (temp)
&& GET_CODE (PATTERN (temp)) == SET
&& SET_DEST (PATTERN (temp)) == old
/* Make sure we can access insn_operand_constraint. */
return;
/* If this is a JUMP_INSN, we can't support output reloads yet. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
abort ();
emit_output_reload_insns (chain, rld + j, j);
for (i1 = NEXT_INSN (output_reload_insn);
i1 != insn; i1 = NEXT_INSN (i1))
{
- if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
+ if (LABEL_P (i1) || JUMP_P (i1))
return;
- if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
+ if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
&& reg_mentioned_p (reg, PATTERN (i1)))
{
/* If this is USE in front of INSN, we only have to check that
there are no more references than accounted for by inheritance. */
- while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
+ while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
{
n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
i1 = NEXT_INSN (i1);
since if they are the only uses, they are dead. */
if (set != 0 && SET_DEST (set) == reg)
continue;
- if (GET_CODE (i2) == CODE_LABEL
- || GET_CODE (i2) == JUMP_INSN)
+ if (LABEL_P (i2)
+ || JUMP_P (i2))
break;
- if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
+ if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
&& reg_mentioned_p (reg, PATTERN (i2)))
{
/* Some other ref remains; just delete the output reload we
delete_address_reloads (i2, insn);
delete_insn (i2);
}
- if (GET_CODE (i2) == CODE_LABEL
- || GET_CODE (i2) == JUMP_INSN)
+ if (LABEL_P (i2)
+ || JUMP_P (i2))
break;
}
it might have been inherited. */
for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
{
- if (GET_CODE (i2) == CODE_LABEL)
+ if (LABEL_P (i2))
break;
if (! INSN_P (i2))
continue;
}
return;
}
- if (GET_CODE (i2) == JUMP_INSN)
+ if (JUMP_P (i2))
break;
/* If DST is still live at CURRENT_INSN, check if it is used for
any reload. Note that even if CURRENT_INSN sets DST, we still
== (EDGE_ABNORMAL | EDGE_EH))
break;
}
- if (e && GET_CODE (BB_END (bb)) != CALL_INSN
+ if (e && !CALL_P (BB_END (bb))
&& !can_throw_internal (BB_END (bb)))
{
rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
break;
/* Get past the new insns generated. Allow notes, as the insns may
be already deleted. */
- while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
+ while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
&& !can_throw_internal (insn)
&& insn != BB_HEAD (bb))
insn = PREV_INSN (insn);
- if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
+ if (!CALL_P (insn) && !can_throw_internal (insn))
abort ();
BB_END (bb) = insn;
inserted = true;
label and we don't have to do anything else. */
insn = get_last_insn ();
- while (GET_CODE (insn) == NOTE
- || (GET_CODE (insn) == INSN
+ while (NOTE_P (insn)
+ || (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)))
insn = PREV_INSN (insn);
/* When a target threads its epilogue we might already have a
suitable return insn. If so put a label before it for the
end_of_function_label. */
- if (GET_CODE (insn) == BARRIER
- && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
+ if (BARRIER_P (insn)
+ && JUMP_P (PREV_INSN (insn))
&& GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
{
rtx temp = PREV_INSN (PREV_INSN (insn));
emit_label_after (end_of_function_label, temp);
}
- else if (GET_CODE (insn) == CODE_LABEL)
+ else if (LABEL_P (insn))
end_of_function_label = insn;
else
{
/* If INSN is followed by a BARRIER, delete the BARRIER since it will only
confuse further processing. Update LAST in case it was the last insn.
We will put the BARRIER back in later. */
- if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
+ if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
{
delete_related_insns (NEXT_INSN (insn));
last = get_last_insn ();
case REG_LABEL:
/* Keep the label reference count up to date. */
- if (GET_CODE (XEXP (note, 0)) == CODE_LABEL)
+ if (LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0)) ++;
break;
last insn in that SEQUENCE to point to us. Similarly for the first
insn in the following insn if it is a SEQUENCE. */
- if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
+ if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
&& GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
= seq_insn;
- if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
+ if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
&& GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
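These reorg.c hunks lean on how filled delay slots are represented: the branch or call and its slot insns are wrapped in a SEQUENCE whose container rtx is a plain INSN, so NONJUMP_INSN_P plus a PATTERN check identifies the group; element 0 is the owning insn and elements 1..n are the slots. A sketch (hypothetical helper, assuming rtl.h):

/* Strip a filled delay-slot group down to the branch or call that
   owns the slots; return INSN itself if it is not such a group.  */
static rtx
sequence_owner (rtx insn)
{
  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return XVECEXP (PATTERN (insn), 0, 0);
  return insn;
}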
seq_insn = PREV_INSN (NEXT_INSN (trial));
seq = PATTERN (seq_insn);
- if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == BARRIER)
+ if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
had_barrier = 1;
/* Create a delay list consisting of all the insns other than the one
annul flag. */
if (delay_list)
trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
- else if (GET_CODE (trial) == JUMP_INSN
- || GET_CODE (trial) == CALL_INSN
- || GET_CODE (trial) == INSN)
+ else if (INSN_P (trial))
INSN_ANNULLED_BRANCH_P (trial) = 0;
INSN_FROM_TARGET_P (insn) = 0;
a delay slot. It will be the last insn in the delay slot, if
it is. */
rtx trial = previous_insn (insn);
- if (GET_CODE (trial) == NOTE)
+ if (NOTE_P (trial))
trial = prev_nonnote_insn (trial);
if (sets_cc0_p (PATTERN (trial)) != 1
|| FIND_REG_INC_NOTE (trial, NULL_RTX))
flags = get_jump_flags (insn, JUMP_LABEL (insn));
if (trial == 0
- || GET_CODE (trial) != INSN
+ || !NONJUMP_INSN_P (trial)
|| GET_CODE (PATTERN (trial)) == SEQUENCE
|| recog_memoized (trial) < 0
|| (! eligible_for_annul_false (insn, 0, trial, flags)
if ((next_trial == next_active_insn (JUMP_LABEL (insn))
&& ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
|| (next_trial != 0
- && GET_CODE (next_trial) == JUMP_INSN
+ && JUMP_P (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
&& (simplejump_p (next_trial)
|| GET_CODE (PATTERN (next_trial)) == RETURN)))
branch, thread our jump to the target of that branch. Don't
change this into a RETURN here, because it may not accept what
we have in the delay slot. We'll fix this up later. */
- if (next_trial && GET_CODE (next_trial) == JUMP_INSN
+ if (next_trial && JUMP_P (next_trial)
&& (simplejump_p (next_trial)
|| GET_CODE (PATTERN (next_trial)) == RETURN))
{
If LABEL is zero, then there is no way to determine the branch
direction. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& INSN_UID (insn) <= max_uid
&& label != 0
determine the branch prediction.
Non conditional branches are predicted as very likely taken. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
int prediction;
for (; insn; insn = next)
{
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
next = NEXT_INSN (insn);
before the next real insn, we assume the branch is to the top of
the loop. */
for (insn = PREV_INSN (target_label);
- insn && GET_CODE (insn) == NOTE;
+ insn && NOTE_P (insn);
insn = PREV_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
return 2;
before the next real insn, we assume the branch is to the loop branch
test. */
for (insn = NEXT_INSN (target_label);
- insn && GET_CODE (insn) == NOTE;
+ insn && NOTE_P (insn);
insn = PREV_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
return 1;
next_trial = next_nonnote_insn (trial);
/* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
- if (GET_CODE (trial) == INSN
+ if (NONJUMP_INSN_P (trial)
&& (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
continue;
/* See if we stopped on a filled insn. If we did, try to see if its
delay slots match. */
if (slot_number != num_slots
- && trial && GET_CODE (trial) == INSN
+ && trial && NONJUMP_INSN_P (trial)
&& GET_CODE (PATTERN (trial)) == SEQUENCE
&& ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
{
trial && insns_to_search > 0;
trial = PREV_INSN (trial), --insns_to_search)
{
- if (GET_CODE (trial) == CODE_LABEL)
+ if (LABEL_P (trial))
return 0;
if (! INSN_P (trial))
{
/* Stop for a CALL and its delay slots because it is difficult to
track its resource needs correctly. */
- if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
+ if (CALL_P (XVECEXP (pat, 0, 0)))
return 0;
/* Stop for an INSN or JUMP_INSN with delayed effects and its delay
mark_referenced_resources (insn, &needed, 1);
/* If TARGET is a SEQUENCE, get the main insn. */
- if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
+ if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
target_main = XVECEXP (PATTERN (target), 0, 0);
if (resource_conflicts_p (&needed, &set)
delay_list = XEXP (delay_list, 1);
}
- if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
+ if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
return 0;
for (trial = PREV_INSN (target),
insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
- trial && GET_CODE (trial) != CODE_LABEL && insns_to_search > 0;
+ trial && !LABEL_P (trial) && insns_to_search > 0;
trial = PREV_INSN (trial), --insns_to_search)
{
- if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
- && GET_CODE (trial) != JUMP_INSN)
+ if (!INSN_P (trial))
continue;
pat = PATTERN (trial);
{
/* If this is a CALL_INSN and its delay slots, it is hard to track
the resource needs properly, so give up. */
- if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
+ if (CALL_P (XVECEXP (pat, 0, 0)))
return 0;
/* If this is an INSN or JUMP_INSN with delayed effects, it
active_insn = next_active_insn (PREV_INSN (thread));
for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CODE_LABEL
+ if (LABEL_P (insn)
&& (insn != label || LABEL_NUSES (insn) != 1))
return 0;
/* Ensure that we reach a BARRIER before any insn or label. */
for (insn = prev_nonnote_insn (thread);
- insn == 0 || GET_CODE (insn) != BARRIER;
+ insn == 0 || !BARRIER_P (insn);
insn = prev_nonnote_insn (insn))
if (insn == 0
- || GET_CODE (insn) == CODE_LABEL
- || (GET_CODE (insn) == INSN
+ || LABEL_P (insn)
+ || (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))
return 0;
insn = unfilled_slots_base[i];
if (insn == 0
|| INSN_DELETED_P (insn)
- || (GET_CODE (insn) == INSN
+ || (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
- || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
+ || (JUMP_P (insn) && non_jumps_p)
+ || (!JUMP_P (insn) && ! non_jumps_p))
continue;
/* It may have been that this insn used to need delay slots, but
slots_filled = 0;
delay_list = 0;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
flags = get_jump_flags (insn, JUMP_LABEL (insn));
else
flags = get_jump_flags (insn, NULL_RTX);
if ((trial = next_active_insn (insn))
- && GET_CODE (trial) == JUMP_INSN
+ && JUMP_P (trial)
&& simplejump_p (trial)
&& eligible_for_delay (insn, slots_filled, trial, flags)
&& no_labels_between_p (insn, trial)
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
if (slots_filled != slots_to_fill
&& delay_list == 0
- && GET_CODE (insn) == JUMP_INSN
+ && JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
delay_list = optimize_skip (insn);
Presumably, we should also check to see if we could get
back to this function via `setjmp'. */
&& ! can_throw_internal (insn)
- && (GET_CODE (insn) != JUMP_INSN
+ && (!JUMP_P (insn)
|| ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& ! simplejump_p (insn)
&& JUMP_LABEL (insn) != 0)))
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
mark_referenced_resources (insn, &needed, 1);
{
mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
mark_referenced_resources (insn, &needed, 1);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
target = JUMP_LABEL (insn);
}
{
next_trial = next_nonnote_insn (trial);
- if (GET_CODE (trial) == CODE_LABEL
- || GET_CODE (trial) == BARRIER)
+ if (LABEL_P (trial)
+ || BARRIER_P (trial))
break;
/* We must have an INSN, JUMP_INSN, or CALL_INSN. */
trial_delay = trial;
/* Stop our search when seeing an unconditional jump. */
- if (GET_CODE (trial_delay) == JUMP_INSN)
+ if (JUMP_P (trial_delay))
break;
/* See if we have a resource problem before we try to
set.cc = 1;
/* If this is a call or jump, we might not get here. */
- if (GET_CODE (trial_delay) == CALL_INSN
- || GET_CODE (trial_delay) == JUMP_INSN)
+ if (CALL_P (trial_delay)
+ || JUMP_P (trial_delay))
maybe_never = 1;
}
Don't do this if the insn at the branch target is a branch. */
if (slots_to_fill != slots_filled
&& trial
- && GET_CODE (trial) == JUMP_INSN
+ && JUMP_P (trial)
&& simplejump_p (trial)
&& (target == 0 || JUMP_LABEL (trial) == target)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
- && ! (GET_CODE (next_trial) == INSN
+ && ! (NONJUMP_INSN_P (next_trial)
&& GET_CODE (PATTERN (next_trial)) == SEQUENCE)
- && GET_CODE (next_trial) != JUMP_INSN
+ && !JUMP_P (next_trial)
&& ! insn_references_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &needed, 1)
/* If this is an unconditional jump, then try to get insns from the
target of the jump. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& simplejump_p (insn)
&& slots_filled != slots_to_fill)
delay_list
for (trial = get_last_insn (); ! stop_search_p (trial, 1);
trial = PREV_INSN (trial))
{
- if (GET_CODE (trial) == NOTE)
+ if (NOTE_P (trial))
continue;
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
rtx pat, old_trial;
/* If we have passed a label, we no longer own this thread. */
- if (GET_CODE (trial) == CODE_LABEL)
+ if (LABEL_P (trial))
{
own_thread = 0;
continue;
label lest it be deleted by delete_related_insns. */
note = find_reg_note (trial, REG_LABEL, 0);
/* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
- if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
+ if (note && LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0))++;
delete_related_insns (trial);
- if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
+ if (note && LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0))--;
}
else
a PRE_INC. We also can't do this if there's overlap of source and
destination. Overlap may happen for larger-than-register-size modes. */
- if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
+ if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
&& REG_P (SET_SRC (pat))
&& REG_P (SET_DEST (pat))
&& !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
{
rtx next = next_nonnote_insn (trial);
- if (next && GET_CODE (next) == INSN
+ if (next && NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) != USE
&& ! reg_set_p (SET_DEST (pat), next)
&& ! reg_set_p (SET_SRC (pat), next)
/* If we stopped on a branch insn that has delay slots, see if we can
steal some of the insns in those slots. */
- if (trial && GET_CODE (trial) == INSN
+ if (trial && NONJUMP_INSN_P (trial)
&& GET_CODE (PATTERN (trial)) == SEQUENCE
- && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
+ && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
{
/* If this is the `true' thread, we will want to follow the jump,
so we can only do this if we have taken everything up to here. */
arithmetic insn after the jump insn and put the arithmetic insn in the
delay slot. If we can't do this, return. */
if (delay_list == 0 && likely && new_thread
- && GET_CODE (new_thread) == INSN
+ && NONJUMP_INSN_P (new_thread)
&& GET_CODE (PATTERN (new_thread)) != ASM_INPUT
&& asm_noperands (PATTERN (new_thread)) < 0)
{
trial = new_thread;
pat = PATTERN (trial);
- if (GET_CODE (trial) != INSN
+ if (!NONJUMP_INSN_P (trial)
|| GET_CODE (pat) != SET
|| ! eligible_for_delay (insn, 0, trial, flags)
|| can_throw_internal (trial))
if (! thread_if_true)
abort ();
- if (new_thread && GET_CODE (new_thread) == JUMP_INSN
+ if (new_thread && JUMP_P (new_thread)
&& (simplejump_p (new_thread)
|| GET_CODE (PATTERN (new_thread)) == RETURN)
&& redirect_with_delay_list_safe_p (insn,
if (new_thread == 0)
label = find_end_label ();
- else if (GET_CODE (new_thread) == CODE_LABEL)
+ else if (LABEL_P (new_thread))
label = new_thread;
else
label = get_label_before (new_thread);
insn = unfilled_slots_base[i];
if (insn == 0
|| INSN_DELETED_P (insn)
- || GET_CODE (insn) != JUMP_INSN
+ || !JUMP_P (insn)
|| ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
continue;
/* If this is a jump insn, see if it now jumps to a jump, jumps to
the next insn, or jumps to a label that is not the last of a
group of consecutive labels. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& (target_label = JUMP_LABEL (insn)) != 0)
{
/* See if this jump branches around an unconditional jump.
If so, invert this jump and point it to the target of the
second jump. */
- if (next && GET_CODE (next) == JUMP_INSN
+ if (next && JUMP_P (next)
&& (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
&& target_label
&& next_active_insn (target_label) == next_active_insn (next)
Don't do this if we expect the conditional branch to be true, because
we would then be making the more common case longer. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
&& (other = prev_active_insn (insn)) != 0
&& (condjump_p (other) || condjump_in_parallel_p (other))
}
/* Now look only at cases where we have filled a delay slot. */
- if (GET_CODE (insn) != INSN
+ if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != SEQUENCE)
continue;
if (optimize_size
&& GET_CODE (PATTERN (delay_insn)) == RETURN
&& next
- && GET_CODE (next) == JUMP_INSN
+ && JUMP_P (next)
&& GET_CODE (PATTERN (next)) == RETURN)
{
rtx after;
}
/* Now look only at the cases where we have a filled JUMP_INSN. */
- if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
+ if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
|| ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
continue;
delay list and that insn is redundant, thread the jump. */
if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
&& XVECLEN (PATTERN (trial), 0) == 2
- && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
+ && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
&& (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
|| GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
&& redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
/* See if this is an unconditional jump around a single insn which is
identical to the one in its delay slot. In this case, we can just
delete the branch and the insn in its delay slot. */
- if (next && GET_CODE (next) == INSN
+ if (next && NONJUMP_INSN_P (next)
&& prev_label (next_active_insn (next)) == target_label
&& simplejump_p (insn)
&& XVECLEN (pat, 0) == 2
annulled jumps, though. Again, don't convert a jump to a RETURN
here. */
if (! INSN_ANNULLED_BRANCH_P (delay_insn)
- && next && GET_CODE (next) == JUMP_INSN
+ && next && JUMP_P (next)
&& (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
&& next_active_insn (target_label) == next_active_insn (next)
&& no_labels_between_p (insn, next))
made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
into a RETURN to jump to it. */
for (insn = first; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
+ if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
{
real_return_label = get_label_before (insn);
break;
/* Only look at filled JUMP_INSNs that go to the end of function
label. */
- if (GET_CODE (insn) != INSN
+ if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != SEQUENCE
- || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
+ || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
|| JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
continue;
{
if (INSN_UID (insn) > max_uid)
max_uid = INSN_UID (insn);
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
epilogue_insn = insn;
}
INSN_FROM_TARGET_P (insn) = 0;
/* Skip vector tables. We can't get attributes for them. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
continue;
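[The ADDR_VEC / ADDR_DIFF_VEC pair recurs throughout reorg.c: a jump
table is a JUMP_INSN whose pattern is an ADDR_VEC (absolute entries)
or an ADDR_DIFF_VEC (label-difference entries), and such insns carry
no attributes to query.  As a sketch, assuming a hypothetical helper:

    /* Return nonzero if INSN is a dispatch-table jump.  */
    static int
    jump_table_p (rtx insn)
    {
      return (JUMP_P (insn)
              && (GET_CODE (PATTERN (insn)) == ADDR_VEC
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC));
    }
]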
obstack_ptr_grow (&unfilled_slots_obstack, insn);
/* Ensure all jumps go to the last of a set of consecutive labels. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn) != 0
&& ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
&& INSN_P (XEXP (PATTERN (insn), 0)))
next = delete_related_insns (insn);
}
for (insn = first; insn; insn = NEXT_INSN (insn))
{
if (! INSN_DELETED_P (insn)
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
{
{
int pred_flags;
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pat = PATTERN (insn);
if (GET_CODE (pat) == SEQUENCE)
insn = XVECEXP (pat, 0, 0);
}
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
continue;
pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
/* Scan backwards to the previous BARRIER. Then see if we can find a
label that starts a basic block. Return the basic block number. */
for (insn = prev_nonnote_insn (insn);
- insn && GET_CODE (insn) != BARRIER && search_limit != 0;
+ insn && !BARRIER_P (insn) && search_limit != 0;
insn = prev_nonnote_insn (insn), --search_limit)
;
/* See if any of the upcoming CODE_LABELs start a basic block. If we reach
anything other than a CODE_LABEL or note, we can't find this code. */
for (insn = next_nonnote_insn (insn);
- insn && GET_CODE (insn) == CODE_LABEL;
+ insn && LABEL_P (insn);
insn = next_nonnote_insn (insn))
{
FOR_EACH_BB (bb)
{
/* If INSN is an annulled branch, skip any insns from the target
of the branch. */
- if ((GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN
- || GET_CODE (insn) == INSN)
+ if (INSN_P (insn)
&& INSN_ANNULLED_BRANCH_P (insn)
&& NEXT_INSN (PREV_INSN (insn)) != insn)
{
}
insn = NEXT_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
{
this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ if (JUMP_P (this_jump_insn))
break;
}
}
break;
}
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ if (JUMP_P (this_jump_insn))
{
if (jump_count++ < 10)
{
start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
stop_insn = target;
- if (GET_CODE (start_insn) == INSN
+ if (NONJUMP_INSN_P (start_insn)
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
- if (GET_CODE (stop_insn) == INSN
+ if (NONJUMP_INSN_P (stop_insn)
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
stop_insn = next_insn (PREV_INSN (stop_insn));
&& INSN_P (XEXP (PATTERN (insn), 0)))
real_insn = XEXP (PATTERN (insn), 0);
- if (GET_CODE (real_insn) == CALL_INSN)
+ if (CALL_P (real_insn))
{
/* CALL clobbers all call-used regs that aren't fixed except
sp, ap, and fp. Do this before setting the result of the
parameters. But they might be early. A CALL_INSN will usually
clobber registers used for parameters. It isn't worth bothering
with the unlikely case when it won't. */
- if ((GET_CODE (real_insn) == INSN
+ if ((NONJUMP_INSN_P (real_insn)
&& GET_CODE (PATTERN (real_insn)) != USE
&& GET_CODE (PATTERN (real_insn)) != CLOBBER)
- || GET_CODE (real_insn) == JUMP_INSN
- || GET_CODE (real_insn) == CALL_INSN)
+ || JUMP_P (real_insn)
+ || CALL_P (real_insn))
{
for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_DEAD
}
}
- else if (GET_CODE (real_insn) == CODE_LABEL)
+ else if (LABEL_P (real_insn))
{
/* A label clobbers the pending dead registers since neither
reload nor jump will propagate a value across a label. */
/* The beginning of the epilogue corresponds to the end of the
RTL chain when there are no epilogue insns. Certain resources
are implicitly required at that point. */
- else if (GET_CODE (real_insn) == NOTE
+ else if (NOTE_P (real_insn)
&& NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
}
/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
*/
-#define RTX_PREV(X) ((GET_CODE (X) == INSN \
- || GET_CODE (X) == CALL_INSN \
- || GET_CODE (X) == JUMP_INSN \
- || GET_CODE (X) == NOTE \
- || GET_CODE (X) == BARRIER \
- || GET_CODE (X) == CODE_LABEL) \
+#define RTX_PREV(X) ((INSN_P (X) \
+ || NOTE_P (X) \
+ || BARRIER_P (X) \
+ || LABEL_P (X)) \
&& PREV_INSN (X) != NULL \
&& NEXT_INSN (PREV_INSN (X)) == X \
? PREV_INSN (X) : NULL)
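[The rewritten RTX_PREV condition accepts exactly the same six rtx
codes as the old six-way GET_CODE chain, since INSN_P covers INSN,
JUMP_INSN and CALL_INSN.  Spelled out as a sketch, with an
illustrative helper name:

    /* Nonzero for any rtx that can live on the insn chain.  */
    static int
    insn_chain_rtx_p (rtx x)
    {
      return INSN_P (x) || NOTE_P (x) || BARRIER_P (x) || LABEL_P (x);
    }
]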
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
if (beg == end)
return 0;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
return 1;
}
{
rtx p;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
return 0;
return 1;
}
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& (find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_referenced_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))))
return 1;
return 0;
check if a side-effect of the insn clobbers REG. */
if (INSN_P (insn)
&& (FIND_REG_INC_NOTE (insn, reg)
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
/* We'd like to test call_used_regs here, but rtlanal.c can't
reference that variable due to its use in genattrtab. So
we'll just be more conservative.
{
rtx p;
- for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
+ for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
p = PREV_INSN (p))
if (INSN_P (p))
{
if (find_regno_note (insn, REG_DEAD, test_regno))
return 1;
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_regno_fusage (insn, CLOBBER, test_regno))
return 1;
{
/* If it's not a CALL_INSN, it can't possibly have a
CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
- if (GET_CODE (insn) != CALL_INSN)
+ if (!CALL_P (insn))
return 0;
if (! datum)
to pseudo registers, so don't bother checking. */
if (regno >= FIRST_PSEUDO_REGISTER
- || GET_CODE (insn) != CALL_INSN )
+ || !CALL_P (insn))
return 0;
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
{
rtx link;
- if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
+ if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
return 0;
/* Look for the note that differentiates const and pure functions. */
/* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
field. This is not handled by for_each_rtx because it doesn't
handle unprinted ('0') fields. */
- if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
+ if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
JUMP_LABEL (l) = new_label;
if ((GET_CODE (l) == LABEL_REF
return y == NULL_RTX;
/* Return true if a label_ref *BODY refers to label Y. */
- if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
+ if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
return XEXP (*body, 0) == y;
/* If *BODY is a reference to pool constant traverse the constant. */
{
rtx label, table;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (label = JUMP_LABEL (insn)) != NULL_RTX
&& (table = next_active_insn (label)) != NULL_RTX
- && GET_CODE (table) == JUMP_INSN
+ && JUMP_P (table)
&& (GET_CODE (PATTERN (table)) == ADDR_VEC
|| GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
{
computed_jump_p (rtx insn)
{
int i;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
while (r)
{
- if (GET_CODE (r) == NOTE)
+ if (NOTE_P (r))
{
switch (NOTE_LINE_NUMBER (r))
{
/* It is possible that some loads got CSEed from one call to
another. Stop in that case. */
- if (GET_CODE (before) == CALL_INSN)
+ if (CALL_P (before))
break;
 /* Our caller needs to either ensure that we will find all sets
 (in case code has not been optimized yet), or take care of
 possible labels by setting boundary to the preceding
 CODE_LABEL. */
- if (GET_CODE (before) == CODE_LABEL)
+ if (LABEL_P (before))
{
if (before != boundary)
abort ();
return false;
/* We can move CALL_INSN, but we need to check that all caller clobbered
regs are dead. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
/* In future we will handle hoisting of libcall sequences, but
give up for now. */
return 0;
if (GET_CODE (pat) == COND_EXEC)
return COND_EXEC_TEST (pat);
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return 0;
if (GET_CODE (pat) != SET || SET_SRC (pat) != pc_rtx)
return 0;
/* We can get a dependency on deleted insns due to optimizations in
the register allocation and reloading or due to splitting. Any
such dependency is useless and can be ignored. */
- if (GET_CODE (elem) == NOTE)
+ if (NOTE_P (elem))
return 0;
/* flow.c doesn't handle conditional lifetimes entirely correctly;
/* ??? add_dependence is the wrong place to be eliding dependencies,
as that forgets that the condition expressions themselves may
be dependent. */
- if (GET_CODE (insn) != CALL_INSN && GET_CODE (elem) != CALL_INSN)
+ if (!CALL_P (insn) && !CALL_P (elem))
{
cond1 = get_condition (insn);
cond2 = get_condition (elem);
No need for interblock dependences with calls, since
calls are not moved between blocks. Note: the edge where
elem is a CALL is still required. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& (INSN_BB (elem) != INSN_BB (insn)))
return 0;
#endif
}
for (u = deps->last_pending_memory_flush; u; u = XEXP (u, 1))
- if (GET_CODE (XEXP (u, 0)) != JUMP_INSN
+ if (!JUMP_P (XEXP (u, 0))
|| deps_may_trap_p (x))
add_dependence (insn, XEXP (u, 0), REG_DEP_ANTI);
sched_analyze_2 (deps, x, insn);
/* Mark registers CLOBBERED or used by called function. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
{
reg_pending_barrier = MOVE_BARRIER;
}
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx next;
next = next_nonnote_insn (insn);
- if (next && GET_CODE (next) == BARRIER)
+ if (next && BARRIER_P (next))
reg_pending_barrier = TRUE_BARRIER;
else
{
/* Before reload, if the previous block ended in a call, show that
we are inside a post-call group, so as to keep the lifetimes of
hard registers correct. */
- if (! reload_completed && GET_CODE (head) != CODE_LABEL)
+ if (! reload_completed && !LABEL_P (head))
{
insn = prev_nonnote_insn (head);
- if (insn && GET_CODE (insn) == CALL_INSN)
+ if (insn && CALL_P (insn))
deps->in_post_call_group_p = post_call_initial;
}
for (insn = head;; insn = NEXT_INSN (insn))
{
rtx link, end_seq, r0, set;
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
+ if (NONJUMP_INSN_P (insn) || JUMP_P (insn))
{
/* Clear out the stale LOG_LINKS from flow. */
free_INSN_LIST_list (&LOG_LINKS (insn));
/* Make each JUMP_INSN a scheduling barrier for memory
references. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
/* Keep the list a reasonable size. */
if (deps->pending_flush_length++ > MAX_PENDING_LIST_LENGTH)
sched_analyze_insn (deps, PATTERN (insn), insn, loop_notes);
loop_notes = 0;
}
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (CALL_P (insn))
{
int i;
/* See comments on reemit_notes as to why we do this.
??? Actually, the reemit_notes just say what is done, not why. */
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
the outermost libcall sequence. */
&& deps->libcall_block_tail_insn == 0
/* The sequence must start with a clobber of a register. */
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == CLOBBER
&& (r0 = XEXP (PATTERN (insn), 0), REG_P (r0))
&& REG_P (XEXP (PATTERN (insn), 0))
However, if we have enabled checking we might as well go
ahead and verify that add_dependence worked properly. */
- if (GET_CODE (from) == NOTE
+ if (NOTE_P (from)
|| INSN_DELETED_P (from)
|| (forward_dependency_cache != NULL
&& bitmap_bit_p (&forward_dependency_cache[INSN_LUID (from)],
for (; insn != aftertail; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
abort ();
/* Create new basic blocks just before first insn. */
if (inside_basic_block_p (insn))
rtx note;
/* Re-emit the basic block note for newly found BB header. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
note = emit_note_after (NOTE_INSN_BASIC_BLOCK, insn);
head = insn;
delete_insn_chain (head, insn);
 /* Some of the notes we keep may sit in between and split the
 barrier from the jump. */
- if (GET_CODE (next) == BARRIER)
+ if (BARRIER_P (next))
{
emit_barrier_after (prev_nonnote_insn (head));
delete_insn (next);
BB_END (curr_bb) = insn;
add_missing_bbs (BB_HEAD (curr_bb), bb, curr_bb->prev_bb);
}
- note = GET_CODE (head) == CODE_LABEL ? NEXT_INSN (head) : head;
+ note = LABEL_P (head) ? NEXT_INSN (head) : head;
NOTE_BASIC_BLOCK (note) = curr_bb;
update_bb_for_insn (curr_bb);
bb = curr_bb->next_bb;
basic_block last_block = NULL, bb;
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
bb = BLOCK_FOR_INSN (insn);
bb->aux = last_block;
edge e;
tail = BB_END (bb);
if (bb->next_bb == EXIT_BLOCK_PTR
- || GET_CODE (BB_HEAD (bb->next_bb)) == CODE_LABEL)
+ || LABEL_P (BB_HEAD (bb->next_bb)))
break;
for (e = bb->succ; e; e = e->succ_next)
if ((e->flags & EDGE_FALLTHRU) != 0)
a note or two. */
while (head != tail)
{
- if (GET_CODE (head) == NOTE)
+ if (NOTE_P (head))
head = NEXT_INSN (head);
- else if (GET_CODE (tail) == NOTE)
+ else if (NOTE_P (tail))
tail = PREV_INSN (tail);
- else if (GET_CODE (head) == CODE_LABEL)
+ else if (LABEL_P (head))
head = NEXT_INSN (head);
else
break;
rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
if (note
- && ! (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && ! (JUMP_P (NEXT_INSN (insn))
&& find_reg_note (NEXT_INSN (insn), REG_LABEL,
XEXP (note, 0))))
return 1;
&& IS_REACHABLE (INSN_BB (next), load_insn_bb)
&& load_insn_bb != INSN_BB (next)
&& GET_MODE (link) == VOIDmode
- && (GET_CODE (next) == JUMP_INSN
+ && (JUMP_P (next)
|| find_conditional_protection (next, load_insn_bb)))
return 1;
}
/* Must be a DEF-USE dependence upon non-branch. */
if (GET_MODE (link) != VOIDmode
- || GET_CODE (insn1) == JUMP_INSN)
+ || JUMP_P (insn1))
continue;
 /* A path must exist: region-entry -> ... -> bb_trg -> ... -> load_insn. */
static int
can_schedule_ready_p (rtx insn)
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
last_was_jump = 1;
/* An interblock motion? */
insn = tail;
last = 0;
- while (GET_CODE (insn) == CALL_INSN
- || GET_CODE (insn) == JUMP_INSN
- || (GET_CODE (insn) == INSN
+ while (CALL_P (insn)
+ || JUMP_P (insn)
+ || (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
|| can_throw_internal (insn)
#endif
|| (!reload_completed
&& sets_likely_spilled (PATTERN (insn)))))
- || GET_CODE (insn) == NOTE)
+ || NOTE_P (insn))
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
{
if (last != 0 && !find_insn_list (insn, LOG_LINKS (last)))
{
{
int n;
fprintf (sched_dump, ";; %6d ", INSN_UID (insn));
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
n = NOTE_LINE_NUMBER (insn);
if (n < 0)
last_insn = get_last_insn ();
if (!optimize
- && (GET_CODE (last_insn) == CODE_LABEL
- || (GET_CODE (last_insn) == NOTE
+ && (LABEL_P (last_insn)
+ || (NOTE_P (last_insn)
&& prev_real_insn (last_insn) == 0)))
emit_insn (gen_nop ());
}
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
- if (GET_CODE (get_last_insn ()) != NOTE)
+ if (!NOTE_P (get_last_insn ()))
emit_note (NOTE_INSN_DELETED);
thiscase->data.case_stmt.start = get_last_insn ();
static void
emit_jump_if_reachable (rtx label)
{
- if (GET_CODE (get_last_insn ()) != BARRIER)
+ if (!BARRIER_P (get_last_insn ()))
emit_jump (label);
}
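[A hedged usage sketch for emit_jump_if_reachable; the label name is
hypothetical.  After an unconditional jump the insn stream ends in a
BARRIER, so the helper silently drops a jump that could never run:

    /* Close a switch arm; skip the jump if the arm already ended
       in an unconditional transfer of control.  */
    emit_jump_if_reachable (done_label);
]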
\f
/* If the last instruction is not a BARRIER or a JUMP_INSN, then
don't do anything. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
/* Delete the jump insn. This will delete the barrier also. */
last_loop_insn = PREV_INSN (last_loop_insn);
}
- if (ujump && GET_CODE (last_loop_insn) == JUMP_INSN)
+ if (ujump && JUMP_P (last_loop_insn))
{
#ifdef HAVE_cc0
rtx prev = PREV_INSN (last_loop_insn);
Just return without unrolling the loop in such cases. */
insn = loop_start;
- while (GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != JUMP_INSN)
+ while (!LABEL_P (insn) && !JUMP_P (insn))
insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return;
}
insert_before = NEXT_INSN (last_loop_insn);
/* Set copy_end to the insn before the jump at the end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
copy_end = PREV_INSN (last_loop_insn);
#ifdef HAVE_cc0
/* Set insert_before to the jump insn at the end of the loop.
Set copy_end to before the jump insn at the end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
}
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
insert_before = last_loop_insn;
#ifdef HAVE_cc0
/* Normal case: Must copy the compare and branch instructions at the
end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
/* Loop ends with an unconditional jump and a barrier.
Handle this like above, don't copy jump and barrier.
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
}
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
/* Set insert_before to immediately after the JUMP_INSN, so that
NOTEs at the end of the loop will be correctly handled by
exit_label = gen_label_rtx ();
insn = loop_start;
- while (GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != JUMP_INSN)
+ while (!LABEL_P (insn) && !JUMP_P (insn))
insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
/* The loop starts with a jump down to the exit condition test.
Start copying the loop after the barrier following this
/* This should always be the first label in the loop. */
start_label = NEXT_INSN (copy_start);
/* There may be a line number note and/or a loop continue note here. */
- while (GET_CODE (start_label) == NOTE)
+ while (NOTE_P (start_label))
start_label = NEXT_INSN (start_label);
- if (GET_CODE (start_label) != CODE_LABEL)
+ if (!LABEL_P (start_label))
{
/* This can happen as a result of jump threading. If the first insns in
the loop test the same condition as the loop's backward jump, or the
}
if (unroll_type == UNROLL_NAIVE
- && GET_CODE (last_loop_insn) == BARRIER
- && GET_CODE (PREV_INSN (last_loop_insn)) == JUMP_INSN
+ && BARRIER_P (last_loop_insn)
+ && JUMP_P (PREV_INSN (last_loop_insn))
&& start_label != JUMP_LABEL (PREV_INSN (last_loop_insn)))
{
/* In this case, we must copy the jump and barrier, because they will
}
if (unroll_type == UNROLL_NAIVE
- && GET_CODE (last_loop_insn) == JUMP_INSN
+ && JUMP_P (last_loop_insn)
&& start_label != JUMP_LABEL (last_loop_insn))
{
/* ??? The loop ends with a conditional branch that does not branch back
{
rtx note;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
local_label[CODE_LABEL_NUMBER (insn)] = 1;
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
{
if (JUMP_LABEL (insn))
set_label_in_map (map,
/* If a register is used in the jump insn, we must not duplicate it
since it will also be used outside the loop. */
- if (GET_CODE (copy_end) == JUMP_INSN)
+ if (JUMP_P (copy_end))
copy_end_luid--;
/* If we have a target that uses cc0, then we also must not duplicate
the insn that sets cc0 before the jump insn, if one is present. */
#ifdef HAVE_cc0
- if (GET_CODE (copy_end) == JUMP_INSN
+ if (JUMP_P (copy_end)
&& sets_cc0_p (PREV_INSN (copy_end)))
copy_end_luid--;
#endif
 and then reset it inside the loop when we get to the last
copy. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
copy_end = PREV_INSN (last_loop_insn);
#ifdef HAVE_cc0
if (i == unroll_number - 1)
{
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
else
copy_end = last_loop_insn;
}
emit_label_after (labels[0], PREV_INSN (loop_start));
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
if (unroll_type == UNROLL_MODULO)
{
insn = NEXT_INSN (copy_end);
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
+ if (NONJUMP_INSN_P (insn) || JUMP_P (insn))
PATTERN (insn) = remap_split_bivs (loop, PATTERN (insn));
}
associated LABEL_DECL to point to one of the new label instances. */
/* ??? Likewise, we can't delete a NOTE_INSN_DELETED_LABEL note. */
if (insn != start_label
- && ! (GET_CODE (insn) == CODE_LABEL && LABEL_NAME (insn))
- && ! (GET_CODE (insn) == NOTE
+ && ! (LABEL_P (insn) && LABEL_NAME (insn))
+ && ! (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))
insn = delete_related_insns (insn);
else
}
}
- if (label && GET_CODE (label) == CODE_LABEL)
+ if (label && LABEL_P (label))
JUMP_LABEL (copy) = label;
else
{
do
{
insn = NEXT_INSN (insn);
- if ((GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)
+ if (INSN_P (insn)
&& map->insn_map[INSN_UID (insn)])
 final_reg_note_copy (&REG_NOTES (map->insn_map[INSN_UID (insn)]), map);
}
instructions before the last insn in the loop, COPY_NOTES_FROM
can be a NOTE_INSN_LOOP_CONT note if there is no VTOP note,
as in a do .. while loop. */
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& ((NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_VTOP
/* Stop before we get to the backward branch at the end of the loop. */
loop_end = prev_nonnote_insn (loop_end);
- if (GET_CODE (loop_end) == BARRIER)
+ if (BARRIER_P (loop_end))
loop_end = PREV_INSN (loop_end);
/* Check in case insn has been deleted, search forward for first non
for (p = NEXT_INSN (insn); p != loop_end; p = NEXT_INSN (p))
{
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
{
target_insn = JUMP_LABEL (p);
if (set && rtx_equal_p (SET_DEST (set), reg))
break;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
if (GET_CODE (PATTERN (insn)) == RETURN)
break;
ret = reg;
for (insn = PREV_INSN (loop_start); insn; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
else if (INSN_P (insn) && reg_set_p (reg, insn))
/* ??? We should probably try harder to find the jump insn
at the end of the loop. The following code assumes that
the last loop insn is a jump to the top of the loop. */
- if (GET_CODE (last_loop_insn) != JUMP_INSN)
+ if (!JUMP_P (last_loop_insn))
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
do
{
- if (GET_CODE (temp) == JUMP_INSN)
+ if (JUMP_P (temp))
{
/* There are some kinds of jumps we can't deal with easily. */
if (JUMP_LABEL (temp) == 0)
while (INSN_UID (p) != first_uid)
{
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
passed_jump = 1;
/* Could not find FIRST_UID. */
if (p == copy_end)
{
/* If we see a CODE_LABEL between FIRST_UID and LAST_UID, then we
 cannot be sure that FIRST_UID dominates LAST_UID. */
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
/* Could not find LAST_UID, but we reached the end of the loop, so
it must be safe. */
/* Examine insn after loop continuation note. Return if not a label. */
label = next_nonnote_insn (loop_cont);
- if (label == 0 || GET_CODE (label) != CODE_LABEL)
+ if (label == 0 || !LABEL_P (label))
return NULL_RTX;
/* Return the loop start if the branch label matches the code label. */
end = NEXT_INSN (BB_END (bb));
for (insn = BB_HEAD (bb); insn != end; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
break;
}
note_uses (&PATTERN (insn), count_uses_1, insn);
note_stores (PATTERN (insn), count_stores, insn);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
VTI (bb)->n_mos++;
}
}
}
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
case LABEL_REF:
tmp = XEXP (x, 0);
if (INSN_DELETED_P (tmp)
- || (GET_CODE (tmp) == NOTE
+ || (NOTE_P (tmp)
&& NOTE_LINE_NUMBER (tmp) == NOTE_INSN_DELETED))
{
abort ();